Fix new flake8 errors (#7489)
This is a cherry-pick of 1a1da60ad2 (#7470) to the release-v1.13.0 branch.
parent fa4af2c3af
commit edd3b0747c
@@ -0,0 +1 @@
+Fix linting errors in new version of Flake8.
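For context, the renames in the hunks below (l becoming line, it, or listener) address flake8's E741 ("ambiguous variable name") check, which newer flake8/pycodestyle releases appear to apply more widely, including to loop and comprehension variables and to single-letter parameters such as l. A minimal illustration of the pattern and the fix, not taken from the Synapse tree:

words = ["alpha", "beta", "gamma"]

# Flagged as E741 ("ambiguous variable name 'l'") by the newer flake8.
lengths = [len(l) for l in words]  # noqa: E741

# The style used throughout this commit: pick a descriptive name instead.
lengths = [len(word) for word in words]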
@@ -22,6 +22,7 @@ import sys
 import traceback

 from daemonize import Daemonize
+from typing_extensions import NoReturn

 from twisted.internet import defer, error, reactor
 from twisted.protocols.tls import TLSMemoryBIOFactory
@@ -139,9 +140,9 @@ def start_reactor(
            run()


-def quit_with_error(error_string):
+def quit_with_error(error_string: str) -> NoReturn:
     message_lines = error_string.split("\n")
-    line_length = max(len(l) for l in message_lines if len(l) < 80) + 2
+    line_length = max(len(line) for line in message_lines if len(line) < 80) + 2
     sys.stderr.write("*" * line_length + "\n")
     for line in message_lines:
         sys.stderr.write(" %s\n" % (line.rstrip(),))
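The new annotation on quit_with_error uses NoReturn to tell type checkers that the function never returns normally (it writes the error and exits), so code after a call to it is treated as unreachable. A small standalone illustration with made-up helper names:

import sys
from typing_extensions import NoReturn  # typing.NoReturn also works on Python 3.6.2+


def fail(message: str) -> NoReturn:
    # Always exits; type checkers such as mypy know this never returns.
    sys.stderr.write(message + "\n")
    sys.exit(1)


def read_port(value: str) -> int:
    if not value.isdigit():
        fail("not a port: %r" % (value,))
    # mypy treats the branch above as terminal, so this line is only
    # reached when value really is a digit string.
    return int(value)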
@@ -522,7 +522,7 @@ class ServerConfig(Config):
         )

     def has_tls_listener(self) -> bool:
-        return any(l["tls"] for l in self.listeners)
+        return any(listener["tls"] for listener in self.listeners)

     def generate_config_section(
         self, server_name, data_dir_path, open_private_ports, listeners, **kwargs
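The listener rename is behaviour-preserving: self.listeners is a list of listener dictionaries, and the method still reports whether any of them has TLS enabled. A rough usage sketch with a made-up listeners list (the real config entries carry more keys):

# Hypothetical stand-in for ServerConfig.listeners after config parsing.
listeners = [
    {"port": 8008, "tls": False},
    {"port": 8448, "tls": True},
]

# Same expression as the patched method body.
has_tls = any(listener["tls"] for listener in listeners)
assert has_tls is True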
@@ -15,7 +15,7 @@

 import logging
 from collections import namedtuple
-from typing import Callable, List
+from typing import Callable, Iterable, List, TypeVar

 from prometheus_client import Counter

@@ -42,12 +42,14 @@ users_woken_by_stream_counter = Counter(
     "synapse_notifier_users_woken_by_stream", "", ["stream"]
 )

+T = TypeVar("T")
+

 # TODO(paul): Should be shared somewhere
-def count(func, l):
-    """Return the number of items in l for which func returns true."""
+def count(func: Callable[[T], bool], it: Iterable[T]) -> int:
+    """Return the number of items in it for which func returns true."""
     n = 0
-    for x in l:
+    for x in it:
         if func(x):
             n += 1
     return n
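With the TypeVar, count stays generic over the element type, while the new signature documents that func is a predicate and the second argument may be any iterable, not only a list. A self-contained usage sketch:

from typing import Callable, Iterable, TypeVar

T = TypeVar("T")


def count(func: Callable[[T], bool], it: Iterable[T]) -> int:
    """Return the number of items in it for which func returns true."""
    n = 0
    for x in it:
        if func(x):
            n += 1
    return n


# Works with any iterable, e.g. a range or generator, and is checked generically.
assert count(lambda x: x % 2 == 0, range(10)) == 5
assert count(str.isupper, ["Ok", "FINE", "no"]) == 1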
@@ -19,6 +19,7 @@ import logging
 import time
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
+from typing import Iterable, List, TypeVar

 from six.moves import urllib

@@ -41,6 +42,8 @@ from synapse.visibility import filter_events_for_client

 logger = logging.getLogger(__name__)

+T = TypeVar("T")
+

 MESSAGE_FROM_PERSON_IN_ROOM = (
     "You have a message on %(app)s from %(person)s in the %(room)s room..."
@@ -638,10 +641,10 @@ def safe_text(raw_text):
     )


-def deduped_ordered_list(l):
+def deduped_ordered_list(it: Iterable[T]) -> List[T]:
     seen = set()
     ret = []
-    for item in l:
+    for item in it:
         if item not in seen:
             seen.add(item)
             ret.append(item)
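As with count, the annotation makes this helper generic: it takes any iterable and returns a list with duplicates dropped, preserving first-seen order. A self-contained sketch (re-stating the helper, including its trailing return, so the snippet runs on its own):

from typing import Iterable, List, TypeVar

T = TypeVar("T")


def deduped_ordered_list(it: Iterable[T]) -> List[T]:
    seen = set()
    ret = []
    for item in it:
        if item not in seen:
            seen.add(item)
            ret.append(item)
    return ret


assert deduped_ordered_list([3, 1, 3, 2, 1]) == [3, 1, 2]
assert deduped_ordered_list("mississippi") == ["m", "i", "s", "p"]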
@@ -212,9 +212,9 @@ class LoggingTransaction:
     def executemany(self, sql: str, *args: Any):
         self._do_execute(self.txn.executemany, sql, *args)

-    def _make_sql_one_line(self, sql):
+    def _make_sql_one_line(self, sql: str) -> str:
         "Strip newlines out of SQL so that the loggers in the DB are on one line"
-        return " ".join(l.strip() for l in sql.splitlines() if l.strip())
+        return " ".join(line.strip() for line in sql.splitlines() if line.strip())

     def _do_execute(self, func, sql, *args):
         sql = self._make_sql_one_line(sql)
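The renamed generator expression is unchanged in behaviour: it collapses multi-line SQL onto a single line so the statement fits on one log line. A standalone sketch of the same expression, using a made-up query:

def make_sql_one_line(sql: str) -> str:
    # Same expression as the patched method, outside the class for brevity.
    return " ".join(line.strip() for line in sql.splitlines() if line.strip())


sql = """
    SELECT user_id, device_id
    FROM devices
    WHERE user_id = ?
"""
assert make_sql_one_line(sql) == "SELECT user_id, device_id FROM devices WHERE user_id = ?"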
@@ -122,7 +122,7 @@ class ConfigLoadingTestCase(unittest.TestCase):

         with open(self.file, "r") as f:
             contents = f.readlines()
-        contents = [l for l in contents if needle not in l]
+        contents = [line for line in contents if needle not in line]
         with open(self.file, "w") as f:
             f.write("".join(contents))

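The same rename appears in this test helper, which drops every line containing a needle from a config file before writing it back. A self-contained sketch of that filtering step, with a hypothetical file and needle:

import os
import tempfile

# Hypothetical config contents and needle; the real test edits its own config file.
needle = "secret"
fd, path = tempfile.mkstemp(suffix=".yaml")
with os.fdopen(fd, "w") as f:
    f.write("server_name: example.com\nsecret_key: abc\npid_file: /tmp/hs.pid\n")

with open(path, "r") as f:
    contents = f.readlines()
contents = [line for line in contents if needle not in line]
with open(path, "w") as f:
    f.write("".join(contents))

with open(path, "r") as f:
    assert needle not in f.read()
os.remove(path)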