fix: change rate limit to slow drippy instead of bursty (#343)
* fix: change rate limit to slow drippy instead of bursty

* chore: `black .`

* fix: make rate limit a slow drip instead of bursty

* Update _session.py

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
BobTheBuidler and github-actions[bot] authored Dec 20, 2024
1 parent 4d79561 commit fa5fc4e
Showing 2 changed files with 5 additions and 11 deletions.
4 changes: 1 addition & 3 deletions dank_mids/_requests.py
@@ -635,9 +635,7 @@ def should_retry(self, e: Exception) -> bool:
_log_debug("Dank too loud. Bisecting batch and retrying.")
elif isinstance(e, BadResponse) and (_needs_full_request_spec(e) or _is_call_revert(e)):
pass
elif "429" not in str_e and all(
err not in str_e for err in constants.TOO_MUCH_DATA_ERRS
):
elif "429" not in str_e and all(err not in str_e for err in constants.TOO_MUCH_DATA_ERRS):
_log_warning("unexpected %s: %s", e.__class__.__name__, e)
return len(self) > 1

12 changes: 4 additions & 8 deletions dank_mids/helpers/_session.py
@@ -104,9 +104,8 @@ def __new__(cls, value, phrase, description=""):
HTTPStatusExtended.CLOUDFLARE_TIMEOUT, # type: ignore [attr-defined]
}


# default is 50 requests/second
limiters = defaultdict(lambda: AsyncLimiter(ENVS.REQUESTS_PER_SECOND, 1))
limiters = defaultdict(lambda: AsyncLimiter(5, 5 / ENVS.REQUESTS_PER_SECOND))

_rate_limit_waiters = {}
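The hunk above is the heart of the change: the old limiter let a full second's worth of requests (ENVS.REQUESTS_PER_SECOND, 50 by default per the comment above) fire as one burst before throttling kicked in, while the new one keeps the same average rate but caps each burst at 5 requests replenished every 5 / REQUESTS_PER_SECOND seconds. A minimal sketch of the difference, assuming aiolimiter's AsyncLimiter and the documented 50 requests/second default (the helper names and timing printout are illustrative, not from the commit):

```python
# Sketch only: compares the old "bursty" limiter shape with the new "drippy" one.
# REQUESTS_PER_SECOND = 50 stands in for ENVS.REQUESTS_PER_SECOND here.
import asyncio
import time

from aiolimiter import AsyncLimiter

REQUESTS_PER_SECOND = 50

bursty = AsyncLimiter(REQUESTS_PER_SECOND, 1)      # old: bucket of 50, drained over 1s
drippy = AsyncLimiter(5, 5 / REQUESTS_PER_SECOND)  # new: bucket of 5, drained over 0.1s


async def grant_times(limiter: AsyncLimiter, n: int = 100) -> list:
    """Record how long after start each of n acquisitions is granted."""
    start = time.monotonic()
    stamps = []
    for _ in range(n):
        async with limiter:  # acquires 1 unit of capacity
            stamps.append(time.monotonic() - start)
    return stamps


async def main() -> None:
    # Both limiters average ~50 requests/second, but the bursty one lets the
    # first 50 through at once while the drippy one spaces them ~20ms apart.
    for name, limiter in (("bursty", bursty), ("drippy", drippy)):
        stamps = await grant_times(limiter)
        print(f"{name}: 50th grant at {stamps[49]:.2f}s, 100th at {stamps[-1]:.2f}s")


asyncio.run(main())
```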

@@ -228,6 +227,7 @@ async def handle_too_many_requests(self, endpoint: str, error: ClientResponseErr
if (now := time()) > getattr(limiter, "_last_updated_at", 0) + 10:
current_rate = limiter._rate_per_sec
new_rate = current_rate * 0.97
limiter.time_period /= 0.97
limiter._rate_per_sec = new_rate
limiter._last_updated_at = now
_logger_info(
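The added line in this hunk keeps the limiter's two views of its rate in sync: as I read aiolimiter, the drip speed is derived from max_rate / time_period, so when the 429 handler scales the cached _rate_per_sec by 0.97 it also stretches time_period by the same factor. A short worked sketch of the arithmetic, assuming the default drippy shape of max_rate=5 and time_period=0.1s (these numbers are illustrative):

```python
# Sketch of the 3% back-off applied on a 429 (at most once per 10 seconds).
max_rate = 5
time_period = 5 / 50                    # 0.1s, i.e. ~50 requests/second

rate_per_sec = max_rate / time_period   # 50.0, what the limiter drips per second
time_period /= 0.97                     # ~0.10309s, the new line in the hunk
new_rate = max_rate / time_period       # ~48.5 == 50 * 0.97, matches new_rate above
print(round(rate_per_sec, 2), round(new_rate, 2))  # 50.0 48.5
```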
@@ -251,12 +251,8 @@ async def handle_too_many_requests(self, endpoint: str, error: ClientResponseErr
self._log_rate_limited(retry_after)
if retry_after > 30:
_logger_warning("severe rate limiting from your provider")
acquire_capacity_for_x_requests = retry_after / secs_between_requests
while acquire_capacity_for_x_requests:
# the limiter does this check that we need to work around
get_now = min(acquire_capacity_for_x_requests, limiter.max_rate)
await limiter.acquire(get_now)
acquire_capacity_for_x_requests -= get_now
# the limiter handles the timing
await limiter.acquire(5)

def _log_rate_limited(self, try_after: float) -> None:
if not self._limited:
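This hunk drops the chunking loop: the old code tried to acquire retry_after / secs_between_requests units of capacity, but AsyncLimiter refuses a single acquisition larger than max_rate (the "check that we need to work around" in the deleted comment), so it had to split the request into max_rate-sized pieces. With the drippy shape, one acquire(5) already claims the whole bucket and blocks until it has drained, so the limiter's own timing spaces out the retries. A small sketch of my reading of this, again assuming aiolimiter and the 0.1s period standing in for 5 / ENVS.REQUESTS_PER_SECOND:

```python
# Sketch: why a single acquire(5) replaces the chunking loop.
import asyncio

from aiolimiter import AsyncLimiter


async def main() -> None:
    limiter = AsyncLimiter(5, 0.1)   # the new drippy shape, ~50 req/s

    try:
        await limiter.acquire(10)    # more than max_rate
    except ValueError as exc:
        # This is the check the deleted loop had to work around by chunking.
        print("rejected:", exc)

    # Claiming the full bucket in one call waits until all capacity is free,
    # so requests queued behind it are naturally spaced out by the limiter.
    await limiter.acquire(5)
    print("full bucket acquired")


asyncio.run(main())
```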
