Merge pull request #551 from matrix-org/daniel/flake8
Fix flake8 warnings for new flake8
commit de11b5b9b5
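
Most of the hunks below are mechanical fixes for checks that newer flake8 releases enforce: whitespace around arithmetic and slice operators, E731 (a lambda assigned to a name instead of a def), and long expressions reflowed under the 90-character limit. A minimal before/after sketch of the two most common fixes (illustrative only, not taken from the diff):

    # before: flagged by the newer flake8
    expiry_ms = 120*1000                  # missing whitespace around '*'
    compare_digest = lambda a, b: a == b  # E731: lambda assigned to a name

    # after: the style this commit adopts
    expiry_ms = 120 * 1000

    def compare_digest(a, b):
        return a == b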
@@ -16,3 +16,4 @@ ignore =
 
 [flake8]
 max-line-length = 90
+ignore = W503
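
W503 warns about a line break placed before a binary operator; the newer flake8 started emitting it, and this commit opts out globally rather than reflowing every wrapped expression (though the directory-handler hunk further down nonetheless moves a `+` to the end of the line). A minimal illustration of the two placements (not from this commit):

    # flagged as W503 when the check is enabled
    total = (first_value
             + second_value)

    # the break-after-operator style, which W503 never flags
    total = (first_value +
             second_value)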
@@ -574,7 +574,7 @@ class Auth(object):
             raise AuthError(
                 403,
                 "Application service has not registered this user"
             )
         defer.returnValue(user_id)
 
     @defer.inlineCallbacks
@@ -12,3 +12,22 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+import sys
+sys.dont_write_bytecode = True
+
+from synapse.python_dependencies import (
+    check_requirements, MissingRequirementError
+)  # NOQA
+
+try:
+    check_requirements()
+except MissingRequirementError as e:
+    message = "\n".join([
+        "Missing Requirement: %s" % (e.message,),
+        "To install run:",
+        "    pip install --upgrade --force \"%s\"" % (e.dependency,),
+        "",
+    ])
+    sys.stderr.writelines(message)
+    sys.exit(1)
@@ -14,27 +14,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import sys
-from synapse.rest import ClientRestResource
+import synapse
+import contextlib
+import logging
+import os
+import re
+import resource
+import subprocess
+import sys
+import time
 
-sys.dont_write_bytecode = True
 from synapse.python_dependencies import (
-    check_requirements, DEPENDENCY_LINKS, MissingRequirementError
+    check_requirements, DEPENDENCY_LINKS
 )
 
-if __name__ == '__main__':
-    try:
-        check_requirements()
-    except MissingRequirementError as e:
-        message = "\n".join([
-            "Missing Requirement: %s" % (e.message,),
-            "To install run:",
-            "    pip install --upgrade --force \"%s\"" % (e.dependency,),
-            "",
-        ])
-        sys.stderr.writelines(message)
-        sys.exit(1)
+from synapse.rest import ClientRestResource
 
 from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
 from synapse.storage import are_all_users_on_domain
 from synapse.storage.prepare_database import UpgradeDatabaseException
@@ -73,17 +68,6 @@ from synapse import events
 
 from daemonize import Daemonize
 
-import synapse
-
-import contextlib
-import logging
-import os
-import re
-import resource
-import subprocess
-import time
-
-
 logger = logging.getLogger("synapse.app.homeserver")
 
 
@@ -29,7 +29,7 @@ class ApplicationServiceApi(SimpleHttpClient):
     pushing.
     """
 
     def __init__(self, hs):
        super(ApplicationServiceApi, self).__init__(hs)
        self.clock = hs.get_clock()
 
@@ -57,7 +57,7 @@ class FederationClient(FederationBase):
             cache_name="get_pdu_cache",
             clock=self._clock,
             max_len=1000,
-            expiry_ms=120*1000,
+            expiry_ms=120 * 1000,
             reset_expiry_on_get=False,
         )
 
@@ -147,7 +147,7 @@ class BaseHandler(object):
             )
         if not allowed:
             raise LimitExceededError(
-                retry_after_ms=int(1000*(time_allowed - time_now)),
+                retry_after_ms=int(1000 * (time_allowed - time_now)),
             )
 
     @defer.inlineCallbacks
@@ -175,8 +175,8 @@ class DirectoryHandler(BaseHandler):
         # If this server is in the list of servers, return it first.
         if self.server_name in servers:
             servers = (
-                [self.server_name]
-                + [s for s in servers if s != self.server_name]
+                [self.server_name] +
+                [s for s in servers if s != self.server_name]
             )
         else:
             servers = list(servers)
@@ -130,7 +130,7 @@ class EventStreamHandler(BaseHandler):
 
             # Add some randomness to this value to try and mitigate against
             # thundering herds on restart.
-            timeout = random.randint(int(timeout*0.9), int(timeout*1.1))
+            timeout = random.randint(int(timeout * 0.9), int(timeout * 1.1))
 
             events, tokens = yield self.notifier.get_events_for(
                 auth_user, pagin_config, timeout,
@@ -34,7 +34,7 @@ metrics = synapse.metrics.get_metrics_for(__name__)
 
 
 # Don't bother bumping "last active" time if it differs by less than 60 seconds
-LAST_ACTIVE_GRANULARITY = 60*1000
+LAST_ACTIVE_GRANULARITY = 60 * 1000
 
 # Keep no more than this number of offline serial revisions
 MAX_OFFLINE_SERIALS = 1000
@@ -213,7 +213,7 @@ class RegistrationHandler(BaseHandler):
                 400,
                 "User ID must only contain characters which do not"
                 " require URL encoding."
             )
         user = UserID(localpart, self.hs.hostname)
         user_id = user.to_string()
 
@@ -927,7 +927,7 @@ class RoomContextHandler(BaseHandler):
         Returns:
             dict, or None if the event isn't found
         """
-        before_limit = math.floor(limit/2.)
+        before_limit = math.floor(limit / 2.)
         after_limit = limit - before_limit
 
         now_token = yield self.hs.get_event_sources().get_current_token()
@@ -152,7 +152,7 @@ class MatrixFederationHttpClient(object):
 
                 return self.clock.time_bound_deferred(
                     request_deferred,
-                    time_out=timeout/1000. if timeout else 60,
+                    time_out=timeout / 1000. if timeout else 60,
                 )
 
             response = yield preserve_context_over_fn(
@@ -308,7 +308,7 @@ class Notifier(object):
             def timed_out():
                 if listener:
                     listener.deferred.cancel()
-            timer = self.clock.call_later(timeout/1000., timed_out)
+            timer = self.clock.call_later(timeout / 1000., timed_out)
 
             prev_token = from_token
             while not result:
@@ -304,7 +304,7 @@ def _flatten_dict(d, prefix=[], result={}):
         if isinstance(value, basestring):
             result[".".join(prefix + [key])] = value.lower()
         elif hasattr(value, "items"):
-            _flatten_dict(value, prefix=(prefix+[key]), result=result)
+            _flatten_dict(value, prefix=(prefix + [key]), result=result)
 
     return result
 
@@ -89,7 +89,7 @@ class LoginRestServlet(ClientV1RestServlet):
                               LoginRestServlet.SAML2_TYPE):
             relay_state = ""
             if "relay_state" in login_submission:
-                relay_state = "&RelayState="+urllib.quote(
+                relay_state = "&RelayState=" + urllib.quote(
                     login_submission["relay_state"])
             result = {
                 "uri": "%s%s" % (self.idp_redirect_url, relay_state)
@@ -52,7 +52,7 @@ class PusherRestServlet(ClientV1RestServlet):
             if i not in content:
                 missing.append(i)
         if len(missing):
-            raise SynapseError(400, "Missing parameters: "+','.join(missing),
+            raise SynapseError(400, "Missing parameters: " + ','.join(missing),
                                errcode=Codes.MISSING_PARAM)
 
         logger.debug("set pushkey %s to kind %s", content['pushkey'], content['kind'])
@@ -83,7 +83,7 @@ class PusherRestServlet(ClientV1RestServlet):
                 data=content['data']
             )
         except PusherConfigException as pce:
-            raise SynapseError(400, "Config Error: "+pce.message,
+            raise SynapseError(400, "Config Error: " + pce.message,
                                errcode=Codes.MISSING_PARAM)
 
         defer.returnValue((200, {}))
@@ -38,7 +38,8 @@ logger = logging.getLogger(__name__)
 if hasattr(hmac, "compare_digest"):
     compare_digest = hmac.compare_digest
 else:
-    compare_digest = lambda a, b: a == b
+    def compare_digest(a, b):
+        return a == b
 
 
 class RegisterRestServlet(ClientV1RestServlet):
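
The lambda-to-def rewrite above (repeated in the v2 register servlet below) fixes flake8's E731 without changing behaviour. One caveat worth noting: hmac.compare_digest (available from Python 2.7.7) compares in constant time, while the == fallback does not, so on older interpreters the comparison stays vulnerable to timing attacks. Either branch yields the same call shape:

    import hmac

    if hasattr(hmac, "compare_digest"):
        compare_digest = hmac.compare_digest
    else:
        def compare_digest(a, b):
            # fallback for old Pythons; NOT constant-time
            return a == b

    assert compare_digest(b"secret", b"secret")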
@@ -34,7 +34,8 @@ from synapse.util.async import run_on_reactor
 if hasattr(hmac, "compare_digest"):
     compare_digest = hmac.compare_digest
 else:
-    compare_digest = lambda a, b: a == b
+    def compare_digest(a, b):
+        return a == b
 
 
 logger = logging.getLogger(__name__)
@@ -26,9 +26,7 @@ class VersionsRestServlet(RestServlet):
 
     def on_GET(self, request):
         return (200, {
-            "versions": [
-                "r0.0.1",
-            ]
+            "versions": ["r0.0.1"]
         })
 
 
@@ -23,7 +23,7 @@ from twisted.web.client import BrowserLikePolicyForHTTPS
 from twisted.enterprise import adbapi
 
 from synapse.federation import initialize_http_replication
 from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
 from synapse.handlers import Handlers
@@ -63,7 +63,7 @@ class StateHandler(object):
             cache_name="state_cache",
             clock=self.clock,
             max_len=SIZE_OF_CACHE,
-            expiry_ms=EVICTION_TIMEOUT_SECONDS*1000,
+            expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
             reset_expiry_on_get=True,
         )
 
@@ -59,7 +59,7 @@ logger = logging.getLogger(__name__)
 # Number of msec of granularity to store the user IP 'last seen' time. Smaller
 # times give more inserts into the database even for readonly API hits
 # 120 seconds == 2 minutes
-LAST_SEEN_GRANULARITY = 120*1000
+LAST_SEEN_GRANULARITY = 120 * 1000
 
 
 class DataStore(RoomMemberStore, RoomStore,
@@ -185,7 +185,7 @@ class SQLBaseStore(object):
         time_then = self._previous_loop_ts
         self._previous_loop_ts = time_now
 
-        ratio = (curr - prev)/(time_now - time_then)
+        ratio = (curr - prev) / (time_now - time_then)
 
         top_three_counters = self._txn_perf_counters.interval(
             time_now - time_then, limit=3
@@ -643,7 +643,10 @@ class SQLBaseStore(object):
         if not iterable:
             defer.returnValue(results)
 
-        chunks = [iterable[i:i+batch_size] for i in xrange(0, len(iterable), batch_size)]
+        chunks = [
+            iterable[i:i + batch_size]
+            for i in xrange(0, len(iterable), batch_size)
+        ]
         for chunk in chunks:
             rows = yield self.runInteraction(
                 desc,
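
The reflowed comprehension above is the standard fixed-size chunking idiom, split across lines only to satisfy the 90-character limit. As a standalone sketch (chunked is a hypothetical helper; the store inlines the comprehension, and xrange reflects the Python 2 codebase):

    def chunked(seq, batch_size):
        # consecutive slices of at most batch_size items each
        return [seq[i:i + batch_size] for i in xrange(0, len(seq), batch_size)]

    assert chunked([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]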
@@ -54,7 +54,7 @@ class Sqlite3Engine(object):
 
 def _parse_match_info(buf):
     bufsize = len(buf)
-    return [struct.unpack('@I', buf[i:i+4])[0] for i in range(0, bufsize, 4)]
+    return [struct.unpack('@I', buf[i:i + 4])[0] for i in range(0, bufsize, 4)]
 
 
 def _rank(raw_match_info):
@@ -58,7 +58,7 @@ class EventFederationStore(SQLBaseStore):
             new_front = set()
             front_list = list(front)
             chunks = [
-                front_list[x:x+100]
+                front_list[x:x + 100]
                 for x in xrange(0, len(front), 100)
             ]
             for chunk in chunks:
@@ -84,7 +84,7 @@ class EventsStore(SQLBaseStore):
             event.internal_metadata.stream_ordering = stream
 
         chunks = [
-            events_and_contexts[x:x+100]
+            events_and_contexts[x:x + 100]
             for x in xrange(0, len(events_and_contexts), 100)
         ]
 
@@ -740,7 +740,7 @@ class EventsStore(SQLBaseStore):
         rows = []
         N = 200
         for i in range(1 + len(events) / N):
-            evs = events[i*N:(i + 1)*N]
+            evs = events[i * N:(i + 1) * N]
             if not evs:
                 break
 
@@ -755,7 +755,7 @@ class EventsStore(SQLBaseStore):
                 " LEFT JOIN rejections as rej USING (event_id)"
                 " LEFT JOIN redactions as r ON e.event_id = r.redacts"
                 " WHERE e.event_id IN (%s)"
-            ) % (",".join(["?"]*len(evs)),)
+            ) % (",".join(["?"] * len(evs)),)
 
             txn.execute(sql, evs)
             rows.extend(self.cursor_to_dict(txn))
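
The ",".join(["?"] * len(evs)) pattern above builds one SQL placeholder per event ID, so the IN clause stays parameterized instead of interpolating values into the query string. A small sketch of the idiom (the table and values are hypothetical; txn is assumed to be a DB-API cursor):

    evs = ["$a:hs", "$b:hs", "$c:hs"]
    sql = "SELECT * FROM events WHERE event_id IN (%s)" % (
        ",".join(["?"] * len(evs)),
    )
    # sql == "SELECT * FROM events WHERE event_id IN (?,?,?)"
    txn.execute(sql, evs)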
@@ -168,7 +168,7 @@ class StreamStore(SQLBaseStore):
 
         results = {}
         room_ids = list(room_ids)
-        for rm_ids in (room_ids[i:i+20] for i in xrange(0, len(room_ids), 20)):
+        for rm_ids in (room_ids[i:i + 20] for i in xrange(0, len(room_ids), 20)):
             res = yield defer.gatherResults([
                 self.get_room_events_stream_for_room(
                     room_id, from_key, to_key, limit
@@ -46,7 +46,7 @@ class Clock(object):
 
     def looping_call(self, f, msec):
         l = task.LoopingCall(f)
-        l.start(msec/1000.0, now=False)
+        l.start(msec / 1000.0, now=False)
         return l
 
     def stop_looping_call(self, loop):
@@ -149,7 +149,7 @@ class CacheDescriptor(object):
         self.lru = lru
         self.tree = tree
 
-        self.arg_names = inspect.getargspec(orig).args[1:num_args+1]
+        self.arg_names = inspect.getargspec(orig).args[1:num_args + 1]
 
         if len(self.arg_names) < self.num_args:
             raise Exception(
@@ -250,7 +250,7 @@ class CacheListDescriptor(object):
         self.num_args = num_args
         self.list_name = list_name
 
-        self.arg_names = inspect.getargspec(orig).args[1:num_args+1]
+        self.arg_names = inspect.getargspec(orig).args[1:num_args + 1]
         self.list_pos = self.arg_names.index(self.list_name)
 
         self.cache = cache
@@ -55,7 +55,7 @@ class ExpiringCache(object):
         def f():
             self._prune_cache()
 
-        self._clock.looping_call(f, self._expiry_ms/2)
+        self._clock.looping_call(f, self._expiry_ms / 2)
 
     def __setitem__(self, key, value):
         now = self._clock.time_msec()
@@ -58,7 +58,7 @@ class TreeCache(object):
 
             if n:
                 break
-            node_and_keys[i+1][0].pop(k)
+            node_and_keys[i + 1][0].pop(k)
 
         popped, cnt = _strip_and_count_entires(popped)
         self.size -= cnt
@@ -111,7 +111,7 @@ def time_function(f):
         _log_debug_as_f(
             f,
             "[FUNC END] {%s-%d} %f",
-            (func_name, id, end-start,),
+            (func_name, id, end - start,),
         )
 
         return r
@@ -163,7 +163,7 @@ class _PerHostRatelimiter(object):
             "Ratelimit [%s]: sleeping req",
             id(request_id),
         )
-        ret_defer = sleep(self.sleep_msec/1000.0)
+        ret_defer = sleep(self.sleep_msec / 1000.0)
 
         self.sleeping_requests.add(request_id)
 