TokenBucket.cc

Go to the documentation of this file.
00001 /*
00002  *    Copyright 2006 Intel Corporation
00003  * 
00004  *    Licensed under the Apache License, Version 2.0 (the "License");
00005  *    you may not use this file except in compliance with the License.
00006  *    You may obtain a copy of the License at
00007  * 
00008  *        http://www.apache.org/licenses/LICENSE-2.0
00009  * 
00010  *    Unless required by applicable law or agreed to in writing, software
00011  *    distributed under the License is distributed on an "AS IS" BASIS,
00012  *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
00013  *    See the License for the specific language governing permissions and
00014  *    limitations under the License.
00015  */
00016 
00017 
#include "TokenBucket.h"

#include <sys/types.h>

00020 namespace oasys {
00021 
00022 //----------------------------------------------------------------------
00023 TokenBucket::TokenBucket(const char* logpath,
00024                          u_int32_t   depth,   /* in bits */
00025                          u_int32_t   rate     /* in seconds */)
00026     : Logger("TokenBucket", logpath),
00027       depth_(depth),
00028       rate_(rate),
00029       tokens_(depth) // initialize full
00030 {
00031     log_debug("initialized token bucket with depth %u and rate %u",
00032               depth_, rate_);
00033     last_update_.get_time();
00034 }
00035 
00036 //----------------------------------------------------------------------
00037 void
00038 TokenBucket::update()
00039 {
00040     Time now;
00041     now.get_time();
00042 
00043     if (tokens_ == depth_) {
00044         log_debug("update: bucket already full, nothing to update");
00045         last_update_ = now;
00046         return;
00047     }
00048 
00049     u_int32_t elapsed = (now - last_update_).in_milliseconds();
00050     u_int32_t new_tokens = (rate_ * elapsed) / 1000;
00051 
00052     if (new_tokens != 0) {
00053         if ((tokens_ + new_tokens) > depth_) {
00054             new_tokens = depth_ - tokens_;
00055         }
00056 
00057         log_debug("update: filling %u/%u spent tokens after %u milliseconds",
00058                   new_tokens, depth_ - tokens_, elapsed);
00059         tokens_ += new_tokens;
00060         last_update_ = now;
00061         
00062     } else {
00063         // there's a chance that, for a slow rate, that the elapsed
00064         // time isn't enough to fill even a single token. in this
00065         // case, we leave last_update_ to where it was before,
00066         // otherwise we might starve the bucket.
00067         log_debug("update: %u milliseconds elapsed not enough to fill any tokens",
00068                   elapsed);
00069     }
00070 }
00071 
00072 //----------------------------------------------------------------------
00073 bool
00074 TokenBucket::drain(u_int32_t length)
00075 {
00076     update();
00077 
00078     if (length <= tokens_) {
00079         log_debug("drain: draining %u/%u tokens from bucket",
00080                   length, tokens_);
00081         tokens_ -= length;
00082         return true;
00083     } else {
00084         log_debug("drain: not enough tokens (%u) to drain %u from bucket",
00085                   tokens_, length);
00086         return false;
00087     }
00088 }
00089 
00090 //----------------------------------------------------------------------
00091 u_int32_t
00092 TokenBucket::time_to_fill()
00093 {
00094     update();
00095     
00096     u_int32_t t = ((depth_ - tokens_) * 1000) / rate_;
00097 
00098     log_debug("time_to_fill: %u tokens will be full in %u msecs",
00099               (depth_ - tokens_), t);
00100     return t;
00101 }
00102 
00103 //----------------------------------------------------------------------
00104 void
00105 TokenBucket::empty()
00106 {
00107     tokens_      = 0;
00108     last_update_.get_time();
00109 
00110     log_debug("empty: clearing bucket");
00111 }
00112 
00113 } // namespace oasys

Generated on Sat Sep 8 08:36:18 2007 for DTN Reference Implementation by  doxygen 1.5.3