replace 'except:' with 'except Exception:'
what could possibly go wrong
parent ecf198aab8
commit eaaabc6c4f
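Background for the change: a bare `except:` catches BaseException, so it also swallows KeyboardInterrupt, SystemExit and GeneratorExit, whereas `except Exception:` lets those propagate while still catching ordinary errors. A minimal sketch of the difference (the helper names below are illustrative, not taken from the Synapse codebase):

    # Bare "except:" traps BaseException subclasses such as KeyboardInterrupt,
    # so Ctrl-C or a clean shutdown can be silently swallowed.
    def run_with_bare_except(work):
        try:
            work()
        except:                  # also catches KeyboardInterrupt / SystemExit
            return "swallowed"

    # "except Exception:" still catches ordinary errors but lets
    # KeyboardInterrupt / SystemExit propagate to the caller.
    def run_with_exception(work):
        try:
            work()
        except Exception:
            return "swallowed"

    def interrupt():
        raise KeyboardInterrupt()

    print(run_with_bare_except(interrupt))    # prints "swallowed"
    try:
        run_with_exception(interrupt)         # KeyboardInterrupt propagates
    except KeyboardInterrupt:
        print("interrupt propagated")
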
@@ -19,7 +19,7 @@ import sys

 try:
     import affinity
-except:
+except Exception:
     affinity = None

 from daemonize import Daemonize
@@ -123,7 +123,7 @@ class _ServiceQueuer(object):
 with Measure(self.clock, "servicequeuer.send"):
     try:
         yield self.txn_ctrl.send(service, events)
-    except:
+    except Exception:
         logger.exception("AS request failed")
     finally:
         self.requests_in_flight.discard(service.id)
@@ -303,7 +303,7 @@ def read_gc_thresholds(thresholds):
     return (
         int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
     )
-except:
+except Exception:
     raise ConfigError(
         "Value of `gc_threshold` must be a list of three integers if set"
     )
@@ -34,7 +34,7 @@ class ServerContextFactory(ssl.ContextFactory):
 try:
     _ecCurve = _OpenSSLECCurve(_defaultCurveName)
     _ecCurve.addECKeyToContext(context)
-except:
+except Exception:
     logger.exception("Failed to enable elliptic curve for TLS")
 context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
 context.use_certificate_chain_file(config.tls_certificate_file)
@@ -43,7 +43,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
 message_hash_base64 = event.hashes[name]
 try:
     message_hash_bytes = decode_base64(message_hash_base64)
-except:
+except Exception:
     raise SynapseError(
         400,
         "Invalid base64: %s" % (message_hash_base64,),
@@ -759,7 +759,7 @@ def _handle_key_deferred(verify_request):
 ))
 try:
     verify_signed_json(json_object, server_name, verify_key)
-except:
+except Exception:
     raise SynapseError(
         401,
         "Invalid signature for server %s with key %s:%s" % (
@@ -443,12 +443,12 @@ def _check_power_levels(event, auth_events):
 for k, v in user_list.items():
     try:
         UserID.from_string(k)
-    except:
+    except Exception:
         raise SynapseError(400, "Not a valid user_id: %s" % (k,))

     try:
         int(v)
-    except:
+    except Exception:
         raise SynapseError(400, "Not a valid power level: %s" % (v,))

 key = (event.type, event.state_key, )
@@ -22,7 +22,7 @@ class SpamChecker(object):
 config = None
 try:
     module, config = hs.config.spam_checker
-except:
+except Exception:
     pass

 if module is not None:
@@ -112,7 +112,7 @@ class Authenticator(object):
     key = strip_quotes(param_dict["key"])
     sig = strip_quotes(param_dict["sig"])
     return (origin, key, sig)
-except:
+except Exception:
     raise AuthenticationError(
         400, "Malformed Authorization header", Codes.UNAUTHORIZED
     )
@@ -177,7 +177,7 @@ class BaseFederationServlet(object):
     if self.REQUIRE_AUTH:
         logger.exception("authenticate_request failed")
         raise
-except:
+except Exception:
     logger.exception("authenticate_request failed")
     raise

@@ -270,7 +270,7 @@ class FederationSendServlet(BaseFederationServlet):
     code, response = yield self.handler.on_incoming_transaction(
         transaction_data
     )
-except:
+except Exception:
     logger.exception("on_incoming_transaction failed")
     raise

@@ -227,7 +227,7 @@ class FederationHandler(BaseHandler):
     state, auth_chain = yield self.replication_layer.get_state_for_room(
         origin, pdu.room_id, pdu.event_id,
     )
-except:
+except Exception:
     logger.exception("Failed to get state for event: %s", pdu.event_id)

 yield self._process_received_pdu(
@@ -461,7 +461,7 @@ class FederationHandler(BaseHandler):
 def check_match(id):
     try:
         return server_name == get_domain_from_id(id)
-    except:
+    except Exception:
         return False

 # Parses mapping `event_id -> (type, state_key) -> state event_id`
@@ -499,7 +499,7 @@ class FederationHandler(BaseHandler):
     continue
 try:
     domain = get_domain_from_id(ev.state_key)
-except:
+except Exception:
     continue

 if domain != server_name:
@@ -738,7 +738,7 @@ class FederationHandler(BaseHandler):
         joined_domains[dom] = min(d, old_d)
     else:
         joined_domains[dom] = d
-except:
+except Exception:
     pass

 return sorted(joined_domains.items(), key=lambda d: d[1])
@@ -940,7 +940,7 @@ class FederationHandler(BaseHandler):
         room_creator_user_id="",
         is_public=False
     )
-except:
+except Exception:
     # FIXME
     pass

@@ -1775,7 +1775,7 @@ class FederationHandler(BaseHandler):
         [e_id for e_id, _ in event.auth_events]
     )
     seen_events = set(have_events.keys())
-except:
+except Exception:
     # FIXME:
     logger.exception("Failed to get auth chain")

@@ -1899,7 +1899,7 @@ class FederationHandler(BaseHandler):
     except AuthError:
         pass

-except:
+except Exception:
     # FIXME:
     logger.exception("Failed to query auth chain")

@@ -1966,7 +1966,7 @@ class FederationHandler(BaseHandler):
 def get_next(it, opt=None):
     try:
         return it.next()
-    except:
+    except Exception:
         return opt

 current_local = get_next(local_iter)
@@ -214,7 +214,7 @@ class InitialSyncHandler(BaseHandler):
     })

     d["account_data"] = account_data_events
-except:
+except Exception:
     logger.exception("Failed to get snapshot")

 yield concurrently_execute(handle_room, room_list, 10)
@@ -563,7 +563,7 @@ class MessageHandler(BaseHandler):
 try:
     dump = ujson.dumps(unfreeze(event.content))
     ujson.loads(dump)
-except:
+except Exception:
     logger.exception("Failed to encode content: %r", event.content)
     raise

@@ -364,7 +364,7 @@ class PresenceHandler(object):
     )

     preserve_fn(self._update_states)(changes)
-except:
+except Exception:
     logger.exception("Exception in _handle_timeouts loop")

 @defer.inlineCallbacks
@@ -118,7 +118,7 @@ class ProfileHandler(BaseHandler):
         logger.exception("Failed to get displayname")

     raise
-except:
+except Exception:
     logger.exception("Failed to get displayname")
 else:
     defer.returnValue(result["displayname"])
@@ -165,7 +165,7 @@ class ProfileHandler(BaseHandler):
     if e.code != 404:
         logger.exception("Failed to get avatar_url")
     raise
-except:
+except Exception:
     logger.exception("Failed to get avatar_url")

 defer.returnValue(result["avatar_url"])
@@ -266,7 +266,7 @@ class ProfileHandler(BaseHandler):
         },
         ignore_backoff=True,
     )
-except:
+except Exception:
     logger.exception("Failed to get avatar_url")

 yield self.store.update_remote_profile_cache(
@@ -289,7 +289,7 @@ class RegistrationHandler(BaseHandler):
 try:
     identity_handler = self.hs.get_handlers().identity_handler
     threepid = yield identity_handler.threepid_from_creds(c)
-except:
+except Exception:
     logger.exception("Couldn't validate 3pid")
     raise RegistrationError(400, "Couldn't validate 3pid")

@@ -108,7 +108,7 @@ class RoomCreationHandler(BaseHandler):
 for i in invite_list:
     try:
         UserID.from_string(i)
-    except:
+    except Exception:
         raise SynapseError(400, "Invalid user_id: %s" % (i,))

 invite_3pid_list = config.get("invite_3pid", [])
@@ -61,7 +61,7 @@ class SearchHandler(BaseHandler):
     assert batch_group is not None
     assert batch_group_key is not None
     assert batch_token is not None
-except:
+except Exception:
     raise SynapseError(400, "Invalid batch")

 try:
@@ -550,7 +550,7 @@ class MatrixFederationHttpClient(object):
     length = yield _readBodyToFile(
         response, output_stream, max_size
     )
-except:
+except Exception:
     logger.exception("Failed to download body")
     raise

@@ -130,7 +130,7 @@ def wrap_request_handler(request_handler, include_metrics=False):
         pretty_print=_request_user_agent_is_curl(request),
         version_string=self.version_string,
     )
-except:
+except Exception:
     logger.exception(
         "Failed handle request %s.%s on %r: %r",
         request_handler.__module__,
@@ -48,7 +48,7 @@ def parse_integer_from_args(args, name, default=None, required=False):
 if name in args:
     try:
         return int(args[name][0])
-    except:
+    except Exception:
         message = "Query parameter %r must be an integer" % (name,)
         raise SynapseError(400, message)
 else:
@@ -88,7 +88,7 @@ def parse_boolean_from_args(args, name, default=None, required=False):
         "true": True,
         "false": False,
     }[args[name][0]]
-except:
+except Exception:
     message = (
         "Boolean query parameter %r must be one of"
         " ['true', 'false']"
@@ -162,7 +162,7 @@ def parse_json_value_from_request(request):
 """
 try:
     content_bytes = request.content.read()
-except:
+except Exception:
     raise SynapseError(400, "Error reading JSON content.")

 try:
@@ -67,7 +67,7 @@ class SynapseRequest(Request):
     ru_utime, ru_stime = context.get_resource_usage()
     db_txn_count = context.db_txn_count
     db_txn_duration = context.db_txn_duration
-except:
+except Exception:
     ru_utime, ru_stime = (0, 0)
     db_txn_count, db_txn_duration = (0, 0)

@@ -289,7 +289,7 @@ class Notifier(object):
 for user_stream in user_streams:
     try:
         user_stream.notify(stream_key, new_token, time_now_ms)
-    except:
+    except Exception:
         logger.exception("Failed to notify listener")

 self.notify_replication()
@@ -121,7 +121,7 @@ class EmailPusher(object):
 starting_max_ordering = self.max_stream_ordering
 try:
     yield self._unsafe_process()
-except:
+except Exception:
     logger.exception("Exception processing notifs")
 if self.max_stream_ordering == starting_max_ordering:
     break
@@ -131,7 +131,7 @@ class HttpPusher(object):
 starting_max_ordering = self.max_stream_ordering
 try:
     yield self._unsafe_process()
-except:
+except Exception:
     logger.exception("Exception processing notifs")
 if self.max_stream_ordering == starting_max_ordering:
     break
@@ -314,7 +314,7 @@ class HttpPusher(object):
     defer.returnValue([])
 try:
     resp = yield self.http_client.post_json_get_json(self.url, notification_dict)
-except:
+except Exception:
     logger.warn("Failed to push %s ", self.url)
     defer.returnValue(False)
 rejected = []
@@ -345,7 +345,7 @@ class HttpPusher(object):
 }
 try:
     resp = yield self.http_client.post_json_get_json(self.url, d)
-except:
+except Exception:
     logger.exception("Failed to push %s ", self.url)
     defer.returnValue(False)
 rejected = []
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)
 try:
     from synapse.push.emailpusher import EmailPusher
     from synapse.push.mailer import Mailer, load_jinja2_templates
-except:
+except Exception:
     pass

@@ -137,7 +137,7 @@ class PusherPool:
     )

     yield preserve_context_over_deferred(defer.gatherResults(deferreds))
-except:
+except Exception:
     logger.exception("Exception in pusher on_new_notifications")

 @defer.inlineCallbacks
@@ -162,7 +162,7 @@ class PusherPool:
     )

     yield preserve_context_over_deferred(defer.gatherResults(deferreds))
-except:
+except Exception:
     logger.exception("Exception in pusher on_new_receipts")

 @defer.inlineCallbacks
@@ -188,7 +188,7 @@ class PusherPool:
 for pusherdict in pushers:
     try:
         p = self.pusher_factory.create_pusher(pusherdict)
-    except:
+    except Exception:
         logger.exception("Couldn't start a pusher: caught Exception")
         continue
     if p:
@@ -162,7 +162,7 @@ class ReplicationStreamer(object):
 )
 try:
     updates, current_token = yield stream.get_updates()
-except:
+except Exception:
     logger.info("Failed to handle stream %s", stream.NAME)
     raise

@@ -93,7 +93,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
     )
 except SynapseError as e:
     raise e
-except:
+except Exception:
     logger.exception("Failed to create association")
     raise
 except AuthError:
@@ -78,7 +78,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):
         raise KeyError()
 except SynapseError as e:
     raise e
-except:
+except Exception:
     raise SynapseError(400, "Unable to parse state")

 yield self.presence_handler.set_state(user, state)
@@ -52,7 +52,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):

 try:
     new_name = content["displayname"]
-except:
+except Exception:
     defer.returnValue((400, "Unable to parse name"))

 yield self.profile_handler.set_displayname(
@@ -94,7 +94,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):
 content = parse_json_object_from_request(request)
 try:
     new_name = content["avatar_url"]
-except:
+except Exception:
     defer.returnValue((400, "Unable to parse name"))

 yield self.profile_handler.set_avatar_url(
@@ -238,7 +238,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):

 try:
     content = parse_json_object_from_request(request)
-except:
+except Exception:
     # Turns out we used to ignore the body entirely, and some clients
     # cheekily send invalid bodies.
     content = {}
@@ -247,7 +247,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
     room_id = room_identifier
     try:
         remote_room_hosts = request.args["server_name"]
-    except:
+    except Exception:
         remote_room_hosts = None
 elif RoomAlias.is_valid(room_identifier):
     handler = self.handlers.room_member_handler
@@ -587,7 +587,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):

 try:
     content = parse_json_object_from_request(request)
-except:
+except Exception:
     # Turns out we used to ignore the body entirely, and some clients
     # cheekily send invalid bodies.
     content = {}
@@ -50,7 +50,7 @@ class GetFilterRestServlet(RestServlet):

 try:
     filter_id = int(filter_id)
-except:
+except Exception:
     raise SynapseError(400, "Invalid filter_id")

 try:
@@ -125,7 +125,7 @@ class SyncRestServlet(RestServlet):
     filter_object = json.loads(filter_id)
     set_timeline_upper_limit(filter_object,
         self.hs.config.filter_timeline_limit)
-except:
+except Exception:
     raise SynapseError(400, "Invalid filter JSON")
 self.filtering.check_valid_filter(filter_object)
 filter = FilterCollection(filter_object)
@@ -65,7 +65,7 @@ class UserDirectorySearchRestServlet(RestServlet):

 try:
     search_term = body["search_term"]
-except:
+except Exception:
     raise SynapseError(400, "`search_term` is required field")

 results = yield self.user_directory_handler.search_users(
@@ -213,7 +213,7 @@ class RemoteKey(Resource):
     )
 except KeyLookupError as e:
     logger.info("Failed to fetch key: %s", e)
-except:
+except Exception:
     logger.exception("Failed to get key for %r", server_name)
 yield self.query_keys(
     request, query, query_remote_on_cache_miss=False
@@ -45,7 +45,7 @@ def parse_media_id(request):
         except UnicodeDecodeError:
             pass
     return server_name, media_id, file_name
-except:
+except Exception:
     raise SynapseError(
         404,
         "Invalid media id token %r" % (request.postpath,),
@@ -310,7 +310,7 @@ class MediaRepository(object):
         media_length=length,
         filesystem_id=file_id,
     )
-except:
+except Exception:
     os.remove(fname)
     raise

@@ -367,7 +367,7 @@ class PreviewUrlResource(Resource):
     dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
     for dir in dirs:
         os.rmdir(dir)
-except:
+except Exception:
     pass

 yield self.store.delete_url_cache(removed_media)
@@ -397,7 +397,7 @@ class PreviewUrlResource(Resource):
     dirs = self.filepaths.url_cache_filepath_dirs_to_delete(media_id)
     for dir in dirs:
         os.rmdir(dir)
-except:
+except Exception:
     pass

 thumbnail_dir = self.filepaths.url_cache_thumbnail_directory(media_id)
@@ -415,7 +415,7 @@ class PreviewUrlResource(Resource):
     dirs = self.filepaths.url_cache_thumbnail_dirs_to_delete(media_id)
     for dir in dirs:
         os.rmdir(dir)
-except:
+except Exception:
     pass

 yield self.store.delete_url_cache_media(removed_media)
@@ -560,7 +560,7 @@ def _resolve_with_state(unconflicted_state_ids, conflicted_state_ds, auth_event_
     resolved_state = _resolve_state_events(
         conflicted_state, auth_events
     )
-except:
+except Exception:
     logger.exception("Failed to resolve state")
     raise

@@ -103,7 +103,7 @@ class LoggingTransaction(object):
         "[SQL values] {%s} %r",
         self.name, args[0]
     )
-except:
+except Exception:
     # Don't let logging failures stop SQL from working
     pass

@@ -98,7 +98,7 @@ class BackgroundUpdateStore(SQLBaseStore):
     result = yield self.do_next_background_update(
         self.BACKGROUND_UPDATE_DURATION_MS
     )
-except:
+except Exception:
     logger.exception("Error doing update")
 else:
     if result is None:
@@ -1481,7 +1481,7 @@ class EventsStore(SQLBaseStore):
         for i in ids
         if i in res
     ])
-except:
+except Exception:
     logger.exception("Failed to callback")
 with PreserveLoggingContext():
     reactor.callFromThread(fire, event_list, row_dict)
@@ -66,7 +66,7 @@ def prepare_database(db_conn, database_engine, config):

     cur.close()
     db_conn.commit()
-except:
+except Exception:
     db_conn.rollback()
     raise

@@ -636,7 +636,7 @@ class RoomMemberStore(SQLBaseStore):
 room_id = row["room_id"]
 try:
     content = json.loads(row["content"])
-except:
+except Exception:
     continue

 display_name = content.get("displayname", None)
@@ -22,7 +22,7 @@ def run_create(cur, database_engine, *args, **kwargs):
 # NULL indicates user was not registered by an appservice.
 try:
     cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")
-except:
+except Exception:
     # Maybe we already added the column? Hope so...
     pass

@@ -81,7 +81,7 @@ class SearchStore(BackgroundUpdateStore):
 etype = row["type"]
 try:
     content = json.loads(row["content"])
-except:
+except Exception:
     continue

 if etype == "m.room.message":
@@ -407,7 +407,7 @@ class SearchStore(BackgroundUpdateStore):
     origin_server_ts, stream = pagination_token.split(",")
     origin_server_ts = int(origin_server_ts)
     stream = int(stream)
-except:
+except Exception:
     raise SynapseError(400, "Invalid pagination token")

 clauses.append(
@@ -80,13 +80,13 @@ class PaginationConfig(object):
         from_tok = None # For backwards compat.
     elif from_tok:
         from_tok = StreamToken.from_string(from_tok)
-except:
+except Exception:
     raise SynapseError(400, "'from' paramater is invalid")

 try:
     if to_tok:
         to_tok = StreamToken.from_string(to_tok)
-except:
+except Exception:
     raise SynapseError(400, "'to' paramater is invalid")

 limit = get_param("limit", None)
@@ -98,7 +98,7 @@ class PaginationConfig(object):

 try:
     return PaginationConfig(from_tok, to_tok, direction, limit)
-except:
+except Exception:
     logger.exception("Failed to create pagination config")
     raise SynapseError(400, "Invalid request.")

@@ -127,7 +127,7 @@ class DomainSpecificString(
 try:
     cls.from_string(s)
     return True
-except:
+except Exception:
     return False

 __str__ = to_string
@@ -213,7 +213,7 @@ class StreamToken(
         # i.e. old token from before receipt_key
         keys.append("0")
     return cls(*keys)
-except:
+except Exception:
     raise SynapseError(400, "Invalid Token")

 def to_string(self):
@@ -299,7 +299,7 @@ class RoomStreamToken(namedtuple("_StreamToken", "topological stream")):
     if string[0] == 't':
         parts = string[1:].split('-', 1)
         return cls(topological=int(parts[0]), stream=int(parts[1]))
-except:
+except Exception:
     pass
 raise SynapseError(400, "Invalid token %r" % (string,))

@@ -308,7 +308,7 @@ class RoomStreamToken(namedtuple("_StreamToken", "topological stream")):
 try:
     if string[0] == 's':
         return cls(topological=None, stream=int(string[1:]))
-except:
+except Exception:
     pass
 raise SynapseError(400, "Invalid token %r" % (string,))

@@ -82,7 +82,7 @@ class Clock(object):
 def cancel_call_later(self, timer, ignore_errs=False):
     try:
         timer.cancel()
-    except:
+    except Exception:
         if not ignore_errs:
             raise

@@ -97,12 +97,12 @@ class Clock(object):

 try:
     ret_deferred.errback(e)
-except:
+except Exception:
     pass

 try:
     given_deferred.cancel()
-except:
+except Exception:
     pass

 timer = None
@@ -110,7 +110,7 @@ class Clock(object):
 def cancel(res):
     try:
         self.cancel_call_later(timer)
-    except:
+    except Exception:
         pass
     return res

@@ -119,7 +119,7 @@ class Clock(object):
 def success(res):
     try:
         ret_deferred.callback(res)
-    except:
+    except Exception:
         pass

     return res
@@ -127,7 +127,7 @@ class Clock(object):
 def err(res):
     try:
         ret_deferred.errback(res)
-    except:
+    except Exception:
         pass

 given_deferred.addCallbacks(callback=success, errback=err)
@@ -73,7 +73,7 @@ class ObservableDeferred(object):
 try:
     # TODO: Handle errors here.
     self._observers.pop().callback(r)
-except:
+except Exception:
     pass
 return r

@@ -83,7 +83,7 @@ class ObservableDeferred(object):
 try:
     # TODO: Handle errors here.
     self._observers.pop().errback(f)
-except:
+except Exception:
     pass

 if consumeErrors:
@@ -205,7 +205,7 @@ class Linearizer(object):
 try:
     with PreserveLoggingContext():
         yield current_defer
-except:
+except Exception:
     logger.exception("Unexpected exception in Linearizer")

 logger.info("Acquired linearizer lock %r for key %r", self.name,
@@ -42,7 +42,7 @@ try:

     def get_thread_resource_usage():
         return resource.getrusage(RUSAGE_THREAD)
-except:
+except Exception:
     # If the system doesn't support resource.getrusage(RUSAGE_THREAD) then we
     # won't track resource usage by returning None.
     def get_thread_resource_usage():
@@ -189,7 +189,7 @@ class RetryDestinationLimiter(object):
     yield self.store.set_destination_retry_timings(
         self.destination, retry_last_ts, self.retry_interval
     )
-except:
+except Exception:
     logger.exception(
         "Failed to store set_destination_retry_timings",
     )
@@ -65,7 +65,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase):
 for f in self.as_yaml_files:
     try:
         os.remove(f)
    -except:
+    except Exception:
         pass

 def _add_appservice(self, as_token, id, url, hs_token, sender):
@@ -184,7 +184,7 @@ class MockHttpResource(HttpServer):
     mock_request.args = urlparse.parse_qs(path.split('?')[1])
     mock_request.path = path.split('?')[0]
     path = mock_request.path
-except:
+except Exception:
     pass

 for (method, pattern, func) in self.callbacks:
@@ -364,13 +364,13 @@ class MemoryDataStore(object):
         return {
             "name": self.tokens_to_users[token],
         }
-    except:
+    except Exception:
         raise StoreError(400, "User does not exist.")

 def get_room(self, room_id):
     try:
         return self.rooms[room_id]
-    except:
+    except Exception:
         return None

 def store_room(self, room_id, room_creator_user_id, is_public):
@@ -499,7 +499,7 @@ class DeferredMockCallable(object):
 for _, _, d in self.expectations:
     try:
         d.errback(failure)
-    except:
+    except Exception:
         pass

 raise failure
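A possible follow-up guard against bare except clauses creeping back in: a small AST-based checker, sketched below (assumed tooling, not part of this commit), that prints the location of any remaining bare "except:" in the files it is given.

    import ast
    import sys

    def find_bare_excepts(path):
        """Return line numbers of bare except handlers in the given file."""
        with open(path) as f:
            tree = ast.parse(f.read(), filename=path)
        return [
            node.lineno
            for node in ast.walk(tree)
            if isinstance(node, ast.ExceptHandler) and node.type is None
        ]

    if __name__ == "__main__":
        for path in sys.argv[1:]:
            for lineno in find_bare_excepts(path):
                print("%s:%d: bare except" % (path, lineno))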