From b800834351f3f15c9de4996b18149a7e8cae0c34 Mon Sep 17 00:00:00 2001 From: Vincent Breitmoser Date: Sat, 9 Jun 2018 22:50:29 +0200 Subject: [PATCH 001/205] add note that the affinity package is required for the cpu_affinity setting --- synapse/config/server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/config/server.py b/synapse/config/server.py index 968ecd9ea0..1b8aac9b75 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -201,6 +201,8 @@ class ServerConfig(Config): # different cores. See # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/. # + # This setting requires the affinity package to be installed! + # # cpu_affinity: 0xFFFFFFFF # Whether to serve a web client from the HTTP/HTTPS root resource. From 3d605853c8e649ab4b3f91fb0a32cc77ef05d71f Mon Sep 17 00:00:00 2001 From: Jeroen Date: Sun, 24 Jun 2018 22:38:43 +0200 Subject: [PATCH 002/205] send SNI for federation requests --- synapse/app/client_reader.py | 2 ++ synapse/app/event_creator.py | 2 ++ synapse/app/federation_reader.py | 2 ++ synapse/app/federation_sender.py | 2 ++ synapse/app/frontend_proxy.py | 2 ++ synapse/app/homeserver.py | 2 ++ synapse/app/media_repository.py | 2 ++ synapse/app/user_dir.py | 2 ++ synapse/config/tls.py | 9 +++++++ synapse/crypto/context_factory.py | 34 +++++++++++++++++++++++++- synapse/crypto/keyclient.py | 4 +-- synapse/crypto/keyring.py | 4 +-- synapse/http/endpoint.py | 11 ++++----- synapse/http/matrixfederationclient.py | 4 +-- tests/utils.py | 2 ++ 15 files changed, 71 insertions(+), 13 deletions(-) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 654ddb8414..1c885e321c 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -153,11 +153,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = ClientReaderServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 441467093a..ccbdd8b2df 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -171,11 +171,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = EventCreatorServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index b2415cc671..7fd20322f4 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -142,11 +142,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = FederationReaderServer( 
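
A note on the cpu_affinity comment in PATCH 001 above: the third-party `affinity` package it refers to exposes a single relevant call. A minimal sketch of how the configured mask might be applied at process start, assuming the PyPI `affinity` package and the `config` object from synapse/config/server.py; this is illustrative only and not part of the patch:

    # Sketch (assumption: the PyPI "affinity" package is installed).
    # Pins the current process (pid 0 means "this process") to the CPUs
    # set in the configured bitmask, e.g. 0xFFFFFFFF for the first 32 cores.
    import affinity

    if config.cpu_affinity is not None:
        affinity.set_process_affinity_mask(0, config.cpu_affinity)
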
config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 13d2b70053..67dffaccbb 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -185,11 +185,13 @@ def start(config_options): config.send_federation = True tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ps = FederationSenderServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index d2bae4ad03..d5638087a0 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -209,11 +209,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = FrontendProxyServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index ae5fc751d5..d48d68bc7e 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -321,6 +321,7 @@ def setup(config_options): events.USE_FROZEN_DICTS = config.use_frozen_dicts tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) database_engine = create_engine(config.database_config) config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection @@ -329,6 +330,7 @@ def setup(config_options): config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 19a682cce3..cc3f31a438 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -156,11 +156,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = MediaRepositoryServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index f5726e3df6..51e7917ce2 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -213,11 +213,13 @@ def 
start(config_options): config.update_user_directory = True tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ps = UserDirectoryServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/config/tls.py b/synapse/config/tls.py index b66154bc7c..4e7d1bd93e 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,6 +47,8 @@ class TlsConfig(Config): self.tls_fingerprints = config["tls_fingerprints"] + self.tls_ignore_certificate_validation = config.get("tls_ignore_certificate_validation", False) + # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( @@ -73,6 +75,8 @@ class TlsConfig(Config): tls_private_key_path = base_key_name + ".tls.key" tls_dh_params_path = base_key_name + ".tls.dh" + tls_ignore_certificate_validation = False + return """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse @@ -117,6 +121,11 @@ class TlsConfig(Config): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] + + # Ignore certificate validation for TLS client connections to other + # homeservers using federation. Don't enable this in a production + # environment, unless you know what you are doing! + tls_ignore_certificate_validation: %(tls_ignore_certificate_validation)s """ % locals() def read_tls_certificate(self, cert_path): diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 0397f73ab4..297a5fb045 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -14,7 +14,8 @@ from twisted.internet import ssl from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName +from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, OpenSSLCertificateOptions, \ + optionsForClientTLS import logging @@ -48,3 +49,34 @@ class ServerContextFactory(ssl.ContextFactory): def getContext(self): return self._context + + +class ClientTLSOptionsNoCertVerification(ClientTLSOptions): + """Redefinition of ClientTLSOptions to completely ignore certificate + validation. Should be kept in sync with the original class in Twisted. 
+ This version of ClientTLSOptions is only intended for development use.""" + + def __init__(self, *args, **kwargs): + super(ClientTLSOptionsNoCertVerification, self).__init__(*args, **kwargs) + + def do_nothing(*_args, **_kwargs): + pass + + self._ctx.set_info_callback(do_nothing) + + +class ClientTLSOptionsFactory(object): + """Factory for Twisted ClientTLSOptions that are used to make connections + to remote servers for federation.""" + + def __init__(self, config): + self._ignore_certificate_validation = config.tls_ignore_certificate_validation + + def get_options(self, host): + if self._ignore_certificate_validation: + return ClientTLSOptionsNoCertVerification( + unicode(host), + OpenSSLCertificateOptions(verify=False).getContext() + ) + else: + return optionsForClientTLS(unicode(host)) diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py index f1fd488b90..8e48f8a9cf 100644 --- a/synapse/crypto/keyclient.py +++ b/synapse/crypto/keyclient.py @@ -28,14 +28,14 @@ KEY_API_V1 = b"/_matrix/key/v1/" @defer.inlineCallbacks -def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1): +def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1): """Fetch the keys for a remote server.""" factory = SynapseKeyClientFactory() factory.path = path factory.host = server_name endpoint = matrix_federation_endpoint( - reactor, server_name, ssl_context_factory, timeout=30 + reactor, server_name, tls_client_options_factory, timeout=30 ) for i in range(5): diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 9b17ef0a08..9a1bb211fb 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -510,7 +510,7 @@ class Keyring(object): continue (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_server_context_factory, + server_name, self.tls_client_options_factory, path=(b"/_matrix/key/v2/server/%s" % ( urllib.quote(requested_key_id), )).encode("ascii"), @@ -653,7 +653,7 @@ class Keyring(object): # Try to fetch the key from the remote server. (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_server_context_factory + server_name, self.hs.tls_client_options_factory ) # Check the response. diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 87a482650d..e783f95719 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -26,7 +26,6 @@ import time logger = logging.getLogger(__name__) - SERVER_CACHE = {} # our record of an individual server which can be tried to reach a destination. @@ -38,15 +37,15 @@ _Server = collections.namedtuple( ) -def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, +def matrix_federation_endpoint(reactor, destination, tls_client_options_factory=None, timeout=None): """Construct an endpoint for the given matrix destination. Args: reactor: Twisted reactor. destination (bytes): The name of the server to connect to. - ssl_context_factory (twisted.internet.ssl.ContextFactory): Factory - which generates SSL contexts to use for TLS. + tls_client_options_factory (synapse.crypto.context_factory.ClientTLSOptionsFactory): + Factory which generates TLS options for client connections. 
timeout (int): connection timeout in seconds """ @@ -59,13 +58,13 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, if timeout is not None: endpoint_kw_args.update(timeout=timeout) - if ssl_context_factory is None: + if tls_client_options_factory is None: transport_endpoint = HostnameEndpoint default_port = 8008 else: def transport_endpoint(reactor, host, port, timeout): return wrapClientTLS( - ssl_context_factory, + tls_client_options_factory.get_options(unicode(host)), HostnameEndpoint(reactor, host, port, timeout=timeout)) default_port = 8448 diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4e0399e762..66796a202f 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -62,14 +62,14 @@ MAX_SHORT_RETRIES = 3 class MatrixFederationEndpointFactory(object): def __init__(self, hs): - self.tls_server_context_factory = hs.tls_server_context_factory + self.tls_client_options_factory = hs.tls_client_options_factory def endpointForURI(self, uri): destination = uri.netloc return matrix_federation_endpoint( reactor, destination, timeout=10, - ssl_context_factory=self.tls_server_context_factory + tls_client_options_factory = self.tls_client_options_factory ) diff --git a/tests/utils.py b/tests/utils.py index 189fd2711c..0b501e0786 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -114,6 +114,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None database_engine=db_engine, room_list_handler=object(), tls_server_context_factory=Mock(), + tls_client_options_factory=Mock(), reactor=reactor, **kargs ) @@ -134,6 +135,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None database_engine=db_engine, room_list_handler=object(), tls_server_context_factory=Mock(), + tls_client_options_factory=Mock(), **kargs ) From 07b4f88de9ee534629db29aa4fc398607e7d866e Mon Sep 17 00:00:00 2001 From: Jeroen Date: Mon, 25 Jun 2018 12:31:16 +0200 Subject: [PATCH 003/205] formatting changes for pep8 --- synapse/config/tls.py | 4 +++- synapse/crypto/context_factory.py | 4 ++-- synapse/http/endpoint.py | 3 ++- synapse/http/matrixfederationclient.py | 2 +- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 4e7d1bd93e..b09dc986ab 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,7 +47,9 @@ class TlsConfig(Config): self.tls_fingerprints = config["tls_fingerprints"] - self.tls_ignore_certificate_validation = config.get("tls_ignore_certificate_validation", False) + self.tls_ignore_certificate_validation = config.get( + "tls_ignore_certificate_validation", False + ) # Check that our own certificate is included in the list of fingerprints # and include it if it is not. 
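
To make the client-side TLS flow of PATCH 002 concrete: a hedged sketch of how the new ClientTLSOptionsFactory ends up wrapping a federation connection, mirroring the transport_endpoint wrapper in synapse/http/endpoint.py above; the hostname and port are illustrative:

    from twisted.internet import reactor
    from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS

    # tls_client_options_factory is a ClientTLSOptionsFactory(config).
    # get_options() returns a connection creator whose info callback calls
    # set_tlsext_host_name() at handshake start, which is what sends SNI.
    endpoint = wrapClientTLS(
        tls_client_options_factory.get_options(u"remote.example.com"),
        HostnameEndpoint(reactor, "remote.example.com", 8448, timeout=30),
    )
    # endpoint.connect(factory) then performs the TLS handshake with SNI set.
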
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 297a5fb045..415899e62b 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -14,8 +14,8 @@ from twisted.internet import ssl from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, OpenSSLCertificateOptions, \ - optionsForClientTLS +from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, \ + OpenSSLCertificateOptions, optionsForClientTLS import logging diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index e783f95719..abf4862084 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -44,7 +44,8 @@ def matrix_federation_endpoint(reactor, destination, tls_client_options_factory= Args: reactor: Twisted reactor. destination (bytes): The name of the server to connect to. - tls_client_options_factory (synapse.crypto.context_factory.ClientTLSOptionsFactory): + tls_client_options_factory + (synapse.crypto.context_factory.ClientTLSOptionsFactory): Factory which generates TLS options for client connections. timeout (int): connection timeout in seconds """ diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 66796a202f..b48d05fcd2 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -69,7 +69,7 @@ class MatrixFederationEndpointFactory(object): return matrix_federation_endpoint( reactor, destination, timeout=10, - tls_client_options_factory = self.tls_client_options_factory + tls_client_options_factory=self.tls_client_options_factory ) From b7f34ee348c9f064d98922dfdbe24a3cf18ca00a Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 20:41:05 +0200 Subject: [PATCH 004/205] allow self-signed certificates --- synapse/config/tls.py | 11 ------ synapse/crypto/context_factory.py | 60 ++++++++++++++++++------------- synapse/http/endpoint.py | 2 +- 3 files changed, 37 insertions(+), 36 deletions(-) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index b09dc986ab..b66154bc7c 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,10 +47,6 @@ class TlsConfig(Config): self.tls_fingerprints = config["tls_fingerprints"] - self.tls_ignore_certificate_validation = config.get( - "tls_ignore_certificate_validation", False - ) - # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( @@ -77,8 +73,6 @@ class TlsConfig(Config): tls_private_key_path = base_key_name + ".tls.key" tls_dh_params_path = base_key_name + ".tls.dh" - tls_ignore_certificate_validation = False - return """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse @@ -123,11 +117,6 @@ class TlsConfig(Config): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - - # Ignore certificate validation for TLS client connections to other - # homeservers using federation. Don't enable this in a production - # environment, unless you know what you are doing! 
- tls_ignore_certificate_validation: %(tls_ignore_certificate_validation)s """ % locals() def read_tls_certificate(self, cert_path): diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 415899e62b..e93afbf975 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -11,18 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from twisted.internet import ssl -from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, \ - OpenSSLCertificateOptions, optionsForClientTLS - import logging +import idna +from OpenSSL import SSL, crypto +from twisted.internet.ssl import ContextFactory, CertificateOptions +from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors +from twisted.internet.interfaces import IOpenSSLClientConnectionCreator +from zope.interface import implementer + logger = logging.getLogger(__name__) -class ServerContextFactory(ssl.ContextFactory): +class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming connections and to make connections to remote servers.""" @@ -51,18 +52,31 @@ class ServerContextFactory(ssl.ContextFactory): return self._context -class ClientTLSOptionsNoCertVerification(ClientTLSOptions): - """Redefinition of ClientTLSOptions to completely ignore certificate - validation. Should be kept in sync with the original class in Twisted. - This version of ClientTLSOptions is only intended for development use.""" +@implementer(IOpenSSLClientConnectionCreator) +class ClientTLSOptions(object): + """ + Client creator for TLS without certificate identity verification. This is a + copy of twisted.internet._sslverify.ClientTLSOptions with the identity + verification left out. For documentation, see the twisted documentation. 
+ """ - def __init__(self, *args, **kwargs): - super(ClientTLSOptionsNoCertVerification, self).__init__(*args, **kwargs) + def __init__(self, hostname, ctx): + self._ctx = ctx + self._hostname = hostname + self._hostnameBytes = idna.encode(hostname) + ctx.set_info_callback( + _tolerateErrors(self._identityVerifyingInfoCallback) + ) - def do_nothing(*_args, **_kwargs): - pass + def clientConnectionForTLS(self, tlsProtocol): + context = self._ctx + connection = SSL.Connection(context, None) + connection.set_app_data(tlsProtocol) + return connection - self._ctx.set_info_callback(do_nothing) + def _identityVerifyingInfoCallback(self, connection, where, ret): + if where & SSL.SSL_CB_HANDSHAKE_START: + connection.set_tlsext_host_name(self._hostnameBytes) class ClientTLSOptionsFactory(object): @@ -70,13 +84,11 @@ class ClientTLSOptionsFactory(object): to remote servers for federation.""" def __init__(self, config): - self._ignore_certificate_validation = config.tls_ignore_certificate_validation + # We don't use config options yet + pass def get_options(self, host): - if self._ignore_certificate_validation: - return ClientTLSOptionsNoCertVerification( - unicode(host), - OpenSSLCertificateOptions(verify=False).getContext() - ) - else: - return optionsForClientTLS(unicode(host)) + return ClientTLSOptions( + unicode(host), + CertificateOptions(verify=False).getContext() + ) diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index abf4862084..39432da452 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -65,7 +65,7 @@ def matrix_federation_endpoint(reactor, destination, tls_client_options_factory= else: def transport_endpoint(reactor, host, port, timeout): return wrapClientTLS( - tls_client_options_factory.get_options(unicode(host)), + tls_client_options_factory.get_options(host), HostnameEndpoint(reactor, host, port, timeout=timeout)) default_port = 8448 From 26651b0d6a55788ff7ee62eda2051f7a85aa45e7 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 21:10:45 +0200 Subject: [PATCH 005/205] towncrier changelog --- changelog.d/1491.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/1491.feature diff --git a/changelog.d/1491.feature b/changelog.d/1491.feature new file mode 100644 index 0000000000..77b6d6ca09 --- /dev/null +++ b/changelog.d/1491.feature @@ -0,0 +1 @@ +Add support for the SNI extension to federation TLS connections \ No newline at end of file From 95341a8f6f9c3b7ebcb3a428de65b18740234f3e Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 21:15:14 +0200 Subject: [PATCH 006/205] take idna implementation from twisted --- synapse/crypto/context_factory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index e93afbf975..57694e18b0 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -13,8 +13,8 @@ # limitations under the License. 
import logging -import idna from OpenSSL import SSL, crypto +from twisted.internet._idna import _idnaBytes from twisted.internet.ssl import ContextFactory, CertificateOptions from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors from twisted.internet.interfaces import IOpenSSLClientConnectionCreator @@ -63,7 +63,7 @@ class ClientTLSOptions(object): def __init__(self, hostname, ctx): self._ctx = ctx self._hostname = hostname - self._hostnameBytes = idna.encode(hostname) + self._hostnameBytes = _idnaBytes(hostname) ctx.set_info_callback( _tolerateErrors(self._identityVerifyingInfoCallback) ) From fe089b13cbfb1eeb99de30378f01fdc71ca097c1 Mon Sep 17 00:00:00 2001 From: Benedikt Heine Date: Tue, 17 Jul 2018 09:02:45 +0200 Subject: [PATCH 007/205] [Docker] Build docker image via compose It's much easier to build the image via docker-compose instead of an error-prone low-level docker call. Signed-off-by: Benedikt Heine --- contrib/docker/README.md | 8 +------- contrib/docker/docker-compose.yml | 1 + 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/contrib/docker/README.md b/contrib/docker/README.md index 61592109cb..562cdaac2b 100644 --- a/contrib/docker/README.md +++ b/contrib/docker/README.md @@ -9,13 +9,7 @@ use that server. ## Build -Build the docker image with the `docker build` command from the root of the synapse repository. - -``` -docker build -t docker.io/matrixdotorg/synapse . -``` - -The `-t` option sets the image tag. Official images are tagged `matrixdotorg/synapse:` where `` is the same as the release tag in the synapse git repository. +Build the docker image with the `docker-compose build` command. You may have a local Python wheel cache available, in which case copy the relevant packages in the ``cache/`` directory at the root of the project. diff --git a/contrib/docker/docker-compose.yml b/contrib/docker/docker-compose.yml index 0b531949e0..3a8dfbae34 100644 --- a/contrib/docker/docker-compose.yml +++ b/contrib/docker/docker-compose.yml @@ -6,6 +6,7 @@ version: '3' services: synapse: + build: ../.. image: docker.io/matrixdotorg/synapse:latest # Since snyapse does not retry to connect to the database, restart upon # failure From f1dd89fe8662be52bd4012b36b402f9380dac4e5 Mon Sep 17 00:00:00 2001 From: Benedikt Heine Date: Tue, 17 Jul 2018 09:07:03 +0200 Subject: [PATCH 008/205] [Docker] Use sorted multiline package lists This matches docker best practices. Signed-off-by: Benedikt Heine --- Dockerfile | 22 +++++++++++++++++++--- changelog.d/3543.misc | 1 + 2 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 changelog.d/3543.misc diff --git a/Dockerfile b/Dockerfile index 565341fee3..0242be5f68 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,32 @@ FROM docker.io/python:2-alpine3.7 -RUN apk add --no-cache --virtual .nacl_deps su-exec build-base libffi-dev zlib-dev libressl-dev libjpeg-turbo-dev linux-headers postgresql-dev libxslt-dev +RUN apk add --no-cache --virtual .nacl_deps \ + build-base \ + libffi-dev \ + libjpeg-turbo-dev \ + libressl-dev \ + libxslt-dev \ + linux-headers \ + postgresql-dev \ + su-exec \ + zlib-dev COPY . /synapse # A wheel cache may be provided in ./cache for faster build RUN cd /synapse \ - && pip install --upgrade pip setuptools psycopg2 lxml \ + && pip install --upgrade \ + lxml \ + pip \ + psycopg2 \ + setuptools \ && mkdir -p /synapse/cache \ && pip install -f /synapse/cache --upgrade --process-dependency-links . 
\
 && mv /synapse/contrib/docker/start.py /synapse/contrib/docker/conf / \
 && rm -rf \
    setup.cfg \
    setup.py \
    synapse

VOLUME ["/data"]

diff --git a/changelog.d/3543.misc b/changelog.d/3543.misc
new file mode 100644
index 0000000000..d231d17749
--- /dev/null
+++ b/changelog.d/3543.misc
@@ -0,0 +1 @@
+Improve Dockerfile and docker-compose instructions

From 7417951117fb096d433b620264f2c034500e41d3 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com>
Date: Tue, 24 Jul 2018 13:46:39 +0100
Subject: [PATCH 009/205] Update ISSUE_TEMPLATE.md

request backticks for logs
---
 .github/ISSUE_TEMPLATE.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
index d2050a3e44..21acb3202a 100644
--- a/.github/ISSUE_TEMPLATE.md
+++ b/.github/ISSUE_TEMPLATE.md
@@ -27,8 +27,9 @@ Describe here the problem that you are experiencing, or the feature you are requ
 
 Describe how what happens differs from what you expected.
 
-If you can identify any relevant log snippets from _homeserver.log_, please include
-those here (please be careful to remove any personal or private data):
+
 
 ### Version information

From 78a691d005b925b6211a3d090add34c6efb1c0f4 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 25 Jul 2018 16:00:38 +0100
Subject: [PATCH 010/205] Split out DB writes in federation handler

This will allow us to easily add an internal replication API to proxy
these requests to master, so that we can move federation APIs to
workers.
---
 synapse/handlers/federation.py | 165 ++++++++++++++++-----------------
 synapse/storage/events.py      |  14 ++-
 2 files changed, 94 insertions(+), 85 deletions(-)

diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py
index 145c1a21d4..06700d5038 100644
--- a/synapse/handlers/federation.py
+++ b/synapse/handlers/federation.py
@@ -444,7 +444,7 @@ class FederationHandler(BaseHandler):
             yield self._handle_new_events(origin, event_infos)
 
         try:
-            context, event_stream_id, max_stream_id = yield self._handle_new_event(
+            context = yield self._handle_new_event(
                 origin,
                 event,
                 state=state,
@@ -469,17 +469,6 @@ class FederationHandler(BaseHandler):
             except StoreError:
                 logger.exception("Failed to store room.")
 
-        extra_users = []
-        if event.type == EventTypes.Member:
-            target_user_id = event.state_key
-            target_user = UserID.from_string(target_user_id)
-            extra_users.append(target_user)
-
-        self.notifier.on_new_room_event(
-            event, event_stream_id, max_stream_id,
-            extra_users=extra_users
-        )
-
         if event.type == EventTypes.Member:
             if event.membership == Membership.JOIN:
                 # Only fire user_joined_room if the user has acutally
@@ -501,7 +490,7 @@ class FederationHandler(BaseHandler):
 
                 if newly_joined:
                     user = UserID.from_string(event.state_key)
-                    yield user_joined_room(self.distributor, user, event.room_id)
+                    yield self.user_joined_room(user, event.room_id)
 
     @log_function
     @defer.inlineCallbacks
@@ -942,7 +931,7 @@ class FederationHandler(BaseHandler):
 
         self.room_queues[room_id] = []
 
-        yield self.store.clean_room_for_join(room_id)
+        yield self._clean_room_for_join(room_id)
 
         handled_events = set()
 
@@ -981,15 +970,10 @@ class FederationHandler(BaseHandler):
                 # FIXME
                 pass
 
-            event_stream_id, max_stream_id = yield self._persist_auth_tree(
+            yield self._persist_auth_tree(
                 origin, auth_chain, state, event
             )
 
-            self.notifier.on_new_room_event(
-                event, event_stream_id, max_stream_id,
-                extra_users=[joinee]
-            )
-
             logger.debug("Finished joining %s to
%s", joinee, room_id) finally: room_queue = self.room_queues[room_id] @@ -1084,7 +1068,7 @@ class FederationHandler(BaseHandler): # would introduce the danger of backwards-compatibility problems. event.internal_metadata.send_on_behalf_of = origin - context, event_stream_id, max_stream_id = yield self._handle_new_event( + context = yield self._handle_new_event( origin, event ) @@ -1094,20 +1078,10 @@ class FederationHandler(BaseHandler): event.signatures, ) - extra_users = [] - if event.type == EventTypes.Member: - target_user_id = event.state_key - target_user = UserID.from_string(target_user_id) - extra_users.append(target_user) - - self.notifier.on_new_room_event( - event, event_stream_id, max_stream_id, extra_users=extra_users - ) - if event.type == EventTypes.Member: if event.content["membership"] == Membership.JOIN: user = UserID.from_string(event.state_key) - yield user_joined_room(self.distributor, user, event.room_id) + yield self.user_joined_room(user, event.room_id) prev_state_ids = yield context.get_prev_state_ids(self.store) @@ -1176,17 +1150,7 @@ class FederationHandler(BaseHandler): ) context = yield self.state_handler.compute_event_context(event) - - event_stream_id, max_stream_id = yield self.store.persist_event( - event, - context=context, - ) - - target_user = UserID.from_string(event.state_key) - self.notifier.on_new_room_event( - event, event_stream_id, max_stream_id, - extra_users=[target_user], - ) + yield self._persist_events([(event, context)]) defer.returnValue(event) @@ -1217,17 +1181,7 @@ class FederationHandler(BaseHandler): ) context = yield self.state_handler.compute_event_context(event) - - event_stream_id, max_stream_id = yield self.store.persist_event( - event, - context=context, - ) - - target_user = UserID.from_string(event.state_key) - self.notifier.on_new_room_event( - event, event_stream_id, max_stream_id, - extra_users=[target_user], - ) + yield self._persist_events([(event, context)]) defer.returnValue(event) @@ -1318,7 +1272,7 @@ class FederationHandler(BaseHandler): event.internal_metadata.outlier = False - context, event_stream_id, max_stream_id = yield self._handle_new_event( + yield self._handle_new_event( origin, event ) @@ -1328,16 +1282,6 @@ class FederationHandler(BaseHandler): event.signatures, ) - extra_users = [] - if event.type == EventTypes.Member: - target_user_id = event.state_key - target_user = UserID.from_string(target_user_id) - extra_users.append(target_user) - - self.notifier.on_new_room_event( - event, event_stream_id, max_stream_id, extra_users=extra_users - ) - defer.returnValue(None) @defer.inlineCallbacks @@ -1472,9 +1416,8 @@ class FederationHandler(BaseHandler): event, context ) - event_stream_id, max_stream_id = yield self.store.persist_event( - event, - context=context, + yield self._persist_events( + [(event, context)], backfilled=backfilled, ) except: # noqa: E722, as we reraise the exception this is fine. @@ -1487,15 +1430,7 @@ class FederationHandler(BaseHandler): six.reraise(tp, value, tb) - if not backfilled: - # this intentionally does not yield: we don't care about the result - # and don't need to wait for it. 
- logcontext.run_in_background( - self.pusher_pool.on_new_notifications, - event_stream_id, max_stream_id, - ) - - defer.returnValue((context, event_stream_id, max_stream_id)) + defer.returnValue(context) @defer.inlineCallbacks def _handle_new_events(self, origin, event_infos, backfilled=False): @@ -1517,7 +1452,7 @@ class FederationHandler(BaseHandler): ], consumeErrors=True, )) - yield self.store.persist_events( + yield self._persist_events( [ (ev_info["event"], context) for ev_info, context in zip(event_infos, contexts) @@ -1605,7 +1540,7 @@ class FederationHandler(BaseHandler): raise events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR - yield self.store.persist_events( + yield self._persist_events( [ (e, events_to_context[e.event_id]) for e in itertools.chain(auth_events, state) @@ -1616,12 +1551,10 @@ class FederationHandler(BaseHandler): event, old_state=state ) - event_stream_id, max_stream_id = yield self.store.persist_event( - event, new_event_context, + yield self._persist_events( + [(event, new_event_context)], ) - defer.returnValue((event_stream_id, max_stream_id)) - @defer.inlineCallbacks def _prep_event(self, origin, event, state=None, auth_events=None): """ @@ -2347,3 +2280,69 @@ class FederationHandler(BaseHandler): ) if "valid" not in response or not response["valid"]: raise AuthError(403, "Third party certificate was invalid") + + @defer.inlineCallbacks + def _persist_events(self, event_and_contexts, backfilled=False): + """Persists events and tells the notifier/pushers about them, if + necessary. + + Args: + event_and_contexts(list[tuple[FrozenEvent, EventContext]]) + backfilled (bool): Whether these events are a result of + backfilling or not + + Returns: + Deferred + """ + max_stream_id = yield self.store.persist_events( + event_and_contexts, + backfilled=backfilled, + ) + + if not backfilled: # Never notify for backfilled events + for event, _ in event_and_contexts: + self._notify_persisted_event(event, max_stream_id) + + def _notify_persisted_event(self, event, max_stream_id): + """Checks to see if notifier/pushers should be notified about the + event or not. 
+
+        Args:
+            event (FrozenEvent)
+            max_stream_id (int): The max_stream_id returned by persist_events
+        """
+
+        extra_users = []
+        if event.type == EventTypes.Member:
+            target_user_id = event.state_key
+
+            # We notify for memberships if its an invite for one of our
+            # users
+            if event.internal_metadata.is_outlier():
+                if event.membership != Membership.INVITE:
+                    if not self.is_mine_id(target_user_id):
+                        return
+
+            target_user = UserID.from_string(target_user_id)
+            extra_users.append(target_user)
+        elif event.internal_metadata.is_outlier():
+            return
+
+        event_stream_id = event.internal_metadata.stream_ordering
+        self.notifier.on_new_room_event(
+            event, event_stream_id, max_stream_id,
+            extra_users=extra_users
+        )
+
+        logcontext.run_in_background(
+            self.pusher_pool.on_new_notifications,
+            event_stream_id, max_stream_id,
+        )
+
+    def _clean_room_for_join(self, room_id):
+        return self.store.clean_room_for_join(room_id)
+
+    def user_joined_room(self, user, room_id):
+        """Called when a new user has joined the room
+        """
+        return user_joined_room(self.distributor, user, room_id)
diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index 200f5ec95f..e3910ed282 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -231,12 +231,18 @@ class EventsStore(EventsWorkerStore):
 
         self._state_resolution_handler = hs.get_state_resolution_handler()
 
+    @defer.inlineCallbacks
     def persist_events(self, events_and_contexts, backfilled=False):
         """
         Write events to the database
         Args:
             events_and_contexts: list of tuples of (event, context)
-            backfilled: ?
+            backfilled (bool): Whether the results are retrieved from federation
+                via backfill or not. Used to determine if they're "new" events
+                which might update the current state etc.
+
+        Returns:
+            Deferred[int]: the stream ordering of the latest persisted event
         """
         partitioned = {}
         for event, ctx in events_and_contexts:
@@ -253,10 +259,14 @@ class EventsStore(EventsWorkerStore):
         for room_id in partitioned:
             self._maybe_start_persisting(room_id)
 
-        return make_deferred_yieldable(
+        yield make_deferred_yieldable(
             defer.gatherResults(deferreds, consumeErrors=True)
         )
 
+        max_persisted_id = yield self._stream_id_gen.get_current_token()
+
+        defer.returnValue(max_persisted_id)
+
     @defer.inlineCallbacks
     @log_function
     def persist_event(self, event, context, backfilled=False):

From 21e878ebb60afd27269fb5b4d6df3d0d8c570a7f Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Thu, 26 Jul 2018 12:48:51 +0100
Subject: [PATCH 011/205] Make EventStore inherit from EventFederationStore

(since it uses methods therein)

Turns out that we had a bunch of things which were incorrectly
importing EventWorkerStore from events.py rather than events_worker.py,
which broke once I removed the import into events.py.
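
The import fix in PATCH 011 is easier to see with the hierarchy flattened out; a hedged sketch with bodies elided, using the class and method names from the diffs below:

    from synapse.storage.events_worker import EventsWorkerStore

    class EventFederationStore(EventsWorkerStore):
        # defines _update_backward_extremities / _handle_mult_prev_events,
        # both used while persisting events
        pass

    class EventsStore(EventFederationStore):
        # can now call the methods above on itself; previously it derived
        # from EventsWorkerStore and only found them through the DataStore
        # mix-in order
        pass

Everything else should import EventsWorkerStore from events_worker.py, the module that defines it, rather than via events.py.
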
--- synapse/storage/__init__.py | 2 +- synapse/storage/appservice.py | 2 +- synapse/storage/event_federation.py | 2 +- synapse/storage/events.py | 6 ++++-- synapse/storage/roommember.py | 2 +- synapse/storage/stream.py | 2 +- 6 files changed, 9 insertions(+), 7 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index ba88a54979..3bd63cd195 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -66,6 +66,7 @@ class DataStore(RoomMemberStore, RoomStore, PresenceStore, TransactionStore, DirectoryStore, KeyStore, StateStore, SignatureStore, ApplicationServiceStore, + EventsStore, EventFederationStore, MediaRepositoryStore, RejectionsStore, @@ -73,7 +74,6 @@ class DataStore(RoomMemberStore, RoomStore, PusherStore, PushRuleStore, ApplicationServiceTransactionStore, - EventsStore, ReceiptsStore, EndToEndKeyStore, SearchStore, diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 9f12b360bc..31248d5e06 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -22,7 +22,7 @@ from twisted.internet import defer from synapse.appservice import AppServiceTransaction from synapse.config.appservice import load_appservices -from synapse.storage.events import EventsWorkerStore +from synapse.storage.events_worker import EventsWorkerStore from ._base import SQLBaseStore diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 65f2d19e20..14500ee59c 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -25,7 +25,7 @@ from twisted.internet import defer from synapse.api.errors import StoreError from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage._base import SQLBaseStore -from synapse.storage.events import EventsWorkerStore +from synapse.storage.events_worker import EventsWorkerStore from synapse.storage.signatures import SignatureWorkerStore from synapse.util.caches.descriptors import cached diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 200f5ec95f..dbbfe04880 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -34,7 +34,7 @@ from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.storage.events_worker import EventsWorkerStore +from synapse.storage.event_federation import EventFederationStore from synapse.types import RoomStreamToken, get_domain_from_id from synapse.util.async import ObservableDeferred from synapse.util.caches.descriptors import cached, cachedInlineCallbacks @@ -193,7 +193,9 @@ def _retry_on_integrity_error(func): return f -class EventsStore(EventsWorkerStore): +# inherits from EventFederationStore so that we can call _update_backward_extremities +# and _handle_mult_prev_events (though arguably those could both be moved in here) +class EventsStore(EventFederationStore): EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts" EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url" diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 027bf8c85e..10dce21cea 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -24,7 +24,7 @@ from canonicaljson import json from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from 
synapse.storage.events import EventsWorkerStore +from synapse.storage.events_worker import EventsWorkerStore from synapse.types import get_domain_from_id from synapse.util.async import Linearizer from synapse.util.caches import intern_string diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 66856342f0..9d85dbb1d6 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -43,7 +43,7 @@ from twisted.internet import defer from synapse.storage._base import SQLBaseStore from synapse.storage.engines import PostgresEngine -from synapse.storage.events import EventsWorkerStore +from synapse.storage.events_worker import EventsWorkerStore from synapse.types import RoomStreamToken from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.logcontext import make_deferred_yieldable, run_in_background From a15ed522675c06b4a451c7c9a8a1d2865e86fae5 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 26 Jul 2018 12:53:18 +0100 Subject: [PATCH 012/205] changelog --- changelog.d/3612.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3612.misc diff --git a/changelog.d/3612.misc b/changelog.d/3612.misc new file mode 100644 index 0000000000..f90d2f2ff5 --- /dev/null +++ b/changelog.d/3612.misc @@ -0,0 +1 @@ +Make EventStore inherit from EventFederationStore From 8e3f75b39abb846687ac905d8e31d7f41372e5d7 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Fri, 27 Jul 2018 12:17:31 +0200 Subject: [PATCH 013/205] fix accidental removal of hs --- synapse/crypto/keyring.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 762e4b8014..30e2742102 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -512,7 +512,7 @@ class Keyring(object): continue (response, tls_certificate) = yield fetch_server_key( - server_name, self.tls_client_options_factory, + server_name, self.hs.tls_client_options_factory, path=(b"/_matrix/key/v2/server/%s" % ( urllib.quote(requested_key_id), )).encode("ascii"), From f102c05856623fdea45309d58c759d1d784d19fd Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Sun, 10 Jun 2018 22:38:50 +0100 Subject: [PATCH 014/205] Rewrite cache list decorator Because it was complicated and annoyed me. I suspect this will be more efficient too. 
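
For readers new to the decorator being rewritten here: @cachedList batches lookups through the per-item cache of an existing @cached method. A hedged usage sketch; the store, table, and method names are invented for illustration:

    from twisted.internet import defer

    from synapse.util.caches.descriptors import cached, cachedList

    class ExampleStore(SQLBaseStore):
        @cached()
        def get_displayname(self, user_id):
            pass  # single-item DB lookup elided

        # Resolves to {user_id: displayname}. Entries already present in
        # the get_displayname cache are reused; only the missing user_ids
        # reach the database.
        @cachedList(cached_method_name="get_displayname",
                    list_name="user_ids", inlineCallbacks=True)
        def get_displaynames(self, user_ids):
            rows = yield self._simple_select_many_batch(
                table="profiles", column="user_id", iterable=user_ids,
                retcols=("user_id", "displayname"), desc="get_displaynames",
            )
            defer.returnValue({r["user_id"]: r["displayname"] for r in rows})

The rewrite below keeps this contract but simplifies how the per-key deferreds are created, resolved and invalidated.
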
--- changelog.d/3384.misc | 1 + synapse/util/caches/descriptors.py | 133 +++++++++++++------------- tests/util/caches/test_descriptors.py | 61 ++++++++++++ 3 files changed, 127 insertions(+), 68 deletions(-) create mode 100644 changelog.d/3384.misc diff --git a/changelog.d/3384.misc b/changelog.d/3384.misc new file mode 100644 index 0000000000..5d56c876d9 --- /dev/null +++ b/changelog.d/3384.misc @@ -0,0 +1 @@ +Rewrite cache list decorator diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index f8a07df6b8..1a8c99d387 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -473,105 +473,101 @@ class CacheListDescriptor(_CacheDescriptorBase): @functools.wraps(self.orig) def wrapped(*args, **kwargs): - # If we're passed a cache_context then we'll want to call its invalidate() - # whenever we are invalidated + # If we're passed a cache_context then we'll want to call its + # invalidate() whenever we are invalidated invalidate_callback = kwargs.pop("on_invalidate", None) arg_dict = inspect.getcallargs(self.orig, obj, *args, **kwargs) keyargs = [arg_dict[arg_nm] for arg_nm in self.arg_names] list_args = arg_dict[self.list_name] - # cached is a dict arg -> deferred, where deferred results in a - # 2-tuple (`arg`, `result`) results = {} - cached_defers = {} - missing = [] + + def update_results_dict(res, arg): + results[arg] = res + + # list of deferreds to wait for + cached_defers = [] + + missing = set() # If the cache takes a single arg then that is used as the key, # otherwise a tuple is used. if num_args == 1: - def cache_get(arg): - return cache.get(arg, callback=invalidate_callback) + def arg_to_cache_key(arg): + return arg else: - key = list(keyargs) + keylist = list(keyargs) - def cache_get(arg): - key[self.list_pos] = arg - return cache.get(tuple(key), callback=invalidate_callback) + def arg_to_cache_key(arg): + keylist[self.list_pos] = arg + return tuple(keylist) for arg in list_args: try: - res = cache_get(arg) - + res = cache.get(arg_to_cache_key(arg), + callback=invalidate_callback) if not isinstance(res, ObservableDeferred): results[arg] = res elif not res.has_succeeded(): res = res.observe() - res.addCallback(lambda r, arg: (arg, r), arg) - cached_defers[arg] = res + res.addCallback(update_results_dict, arg) + cached_defers.append(res) else: results[arg] = res.get_result() except KeyError: - missing.append(arg) + missing.add(arg) if missing: - args_to_call = dict(arg_dict) - args_to_call[self.list_name] = missing + # we need an observable deferred for each entry in the list, + # which we put in the cache. Each deferred resolves with the + # relevant result for that key. + deferreds_map = {} + for arg in missing: + deferred = defer.Deferred() + deferreds_map[arg] = deferred + key = arg_to_cache_key(arg) + observable = ObservableDeferred(deferred) + cache.set(key, observable) - ret_d = defer.maybeDeferred( + def complete_all(res): + # the wrapped function has completed. It returns a + # a dict. We can now resolve the observable deferreds in + # the cache and update our own result map. + for e in missing: + val = res.get(e, None) + deferreds_map[e].callback(val) + results[e] = val + + def errback(f): + # the wrapped function has failed. Invalidate any cache + # entries we're supposed to be populating, and fail + # their deferreds. + for e in missing: + key = arg_to_cache_key(e) + cache.invalidate(key) + deferreds_map[e].errback(f) + + # return the failure, to propagate to our caller. 
+ return f + + args_to_call = dict(arg_dict) + args_to_call[self.list_name] = list(missing) + + cached_defers.append(defer.maybeDeferred( logcontext.preserve_fn(self.function_to_call), **args_to_call - ) - - ret_d = ObservableDeferred(ret_d) - - # We need to create deferreds for each arg in the list so that - # we can insert the new deferred into the cache. - for arg in missing: - observer = ret_d.observe() - observer.addCallback(lambda r, arg: r.get(arg, None), arg) - - observer = ObservableDeferred(observer) - - if num_args == 1: - cache.set( - arg, observer, - callback=invalidate_callback - ) - - def invalidate(f, key): - cache.invalidate(key) - return f - observer.addErrback(invalidate, arg) - else: - key = list(keyargs) - key[self.list_pos] = arg - cache.set( - tuple(key), observer, - callback=invalidate_callback - ) - - def invalidate(f, key): - cache.invalidate(key) - return f - observer.addErrback(invalidate, tuple(key)) - - res = observer.observe() - res.addCallback(lambda r, arg: (arg, r), arg) - - cached_defers[arg] = res + ).addCallbacks(complete_all, errback)) if cached_defers: - def update_results_dict(res): - results.update(res) - return results - - return logcontext.make_deferred_yieldable(defer.gatherResults( - list(cached_defers.values()), + d = defer.gatherResults( + cached_defers, consumeErrors=True, - ).addCallback(update_results_dict).addErrback( + ).addCallbacks( + lambda _: results, unwrapFirstError - )) + ) + return logcontext.make_deferred_yieldable(d) else: return results @@ -625,7 +621,8 @@ def cachedList(cached_method_name, list_name, num_args=None, inlineCallbacks=Fal cache. Args: - cache (Cache): The underlying cache to use. + cached_method_name (str): The name of the single-item lookup method. + This is only used to find the cache to use. list_name (str): The name of the argument that is the list to use to do batch lookups in the cache. 
num_args (int): Number of arguments to use as the key in the cache diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 8176a7dabd..1ac967b63e 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -273,3 +273,64 @@ class DescriptorTestCase(unittest.TestCase): r = yield obj.fn(2, 3) self.assertEqual(r, 'chips') obj.mock.assert_not_called() + + +@unittest.DEBUG +class CachedListDescriptorTestCase(unittest.TestCase): + @defer.inlineCallbacks + def test_cache(self): + class Cls(object): + def __init__(self): + self.mock = mock.Mock() + + @descriptors.cached() + def fn(self, arg1, arg2): + pass + + @descriptors.cachedList("fn", "args1", inlineCallbacks=True) + def list_fn(self, args1, arg2): + assert ( + logcontext.LoggingContext.current_context().request == "c1" + ) + # we want this to behave like an asynchronous function + yield run_on_reactor() + assert ( + logcontext.LoggingContext.current_context().request == "c1" + ) + defer.returnValue(self.mock(args1, arg2)) + + with logcontext.LoggingContext() as c1: + c1.request = "c1" + obj = Cls() + obj.mock.return_value = {10: 'fish', 20: 'chips'} + d1 = obj.list_fn([10, 20], 2) + self.assertEqual( + logcontext.LoggingContext.current_context(), + logcontext.LoggingContext.sentinel, + ) + r = yield d1 + self.assertEqual( + logcontext.LoggingContext.current_context(), + c1 + ) + obj.mock.assert_called_once_with([10, 20], 2) + self.assertEqual(r, {10: 'fish', 20: 'chips'}) + obj.mock.reset_mock() + + # a call with different params should call the mock again + obj.mock.return_value = {30: 'peas'} + r = yield obj.list_fn([20, 30], 2) + obj.mock.assert_called_once_with([30], 2) + self.assertEqual(r, {20: 'chips', 30: 'peas'}) + obj.mock.reset_mock() + + # all the values should now be cached + r = yield obj.fn(10, 2) + self.assertEqual(r, 'fish') + r = yield obj.fn(20, 2) + self.assertEqual(r, 'chips') + r = yield obj.fn(30, 2) + self.assertEqual(r, 'peas') + r = yield obj.list_fn([10, 20, 30], 2) + obj.mock.assert_not_called() + self.assertEqual(r, {10: 'fish', 20: 'chips', 30: 'peas'}) From ad7cd95a7842bc2eafb5bd69467978e60b4ac0d8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 27 Jul 2018 15:02:57 +0100 Subject: [PATCH 015/205] Newsfile --- changelog.d/3621.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3621.misc diff --git a/changelog.d/3621.misc b/changelog.d/3621.misc new file mode 100644 index 0000000000..83e5fc8aa1 --- /dev/null +++ b/changelog.d/3621.misc @@ -0,0 +1 @@ +Refactor FederationHandler to move DB writes into separate functions From e9b2d047f68b74e231609ce40978f4452ac9e22f Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Fri, 27 Jul 2018 15:12:50 +0100 Subject: [PATCH 016/205] make /context lazyload & filter aware (#3567) make /context lazyload & filter aware. 
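
The practical upshot of PATCH 016 below: /context accepts the same filter parameter as /messages, so clients can opt in to lazy-loaded membership. A hedged example of the request this enables; the room and event IDs are placeholders:

    import json
    import urllib

    filter_json = json.dumps({"lazy_load_members": True})
    path = "/_matrix/client/r0/rooms/%s/context/%s?limit=10&filter=%s" % (
        room_id, event_id, urllib.quote(filter_json),
    )

With such a filter, the state section of the response is reduced to the m.room.member events for the senders of the returned timeline events.
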
--- changelog.d/3567.feature | 1 + synapse/handlers/room.py | 24 +++++++++++++++++++++--- synapse/handlers/search.py | 2 +- synapse/rest/client/v1/room.py | 9 +++++++++ synapse/storage/stream.py | 14 +++++++++++--- 5 files changed, 43 insertions(+), 7 deletions(-) create mode 100644 changelog.d/3567.feature diff --git a/changelog.d/3567.feature b/changelog.d/3567.feature new file mode 100644 index 0000000000..c74c1f57a9 --- /dev/null +++ b/changelog.d/3567.feature @@ -0,0 +1 @@ +make the /context API filter & lazy-load aware as per MSC1227 diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 003b848c00..7b7804d9b2 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -15,6 +15,7 @@ # limitations under the License. """Contains functions for performing events on rooms.""" +import itertools import logging import math import string @@ -401,7 +402,7 @@ class RoomContextHandler(object): self.store = hs.get_datastore() @defer.inlineCallbacks - def get_event_context(self, user, room_id, event_id, limit): + def get_event_context(self, user, room_id, event_id, limit, event_filter): """Retrieves events, pagination tokens and state around a given event in a room. @@ -411,6 +412,8 @@ class RoomContextHandler(object): event_id (str) limit (int): The maximum number of events to return in total (excluding state). + event_filter (Filter|None): the filter to apply to the events returned + (excluding the target event_id) Returns: dict, or None if the event isn't found @@ -443,7 +446,7 @@ class RoomContextHandler(object): ) results = yield self.store.get_events_around( - room_id, event_id, before_limit, after_limit + room_id, event_id, before_limit, after_limit, event_filter ) results["events_before"] = yield filter_evts(results["events_before"]) @@ -455,8 +458,23 @@ class RoomContextHandler(object): else: last_event_id = event_id + types = None + filtered_types = None + if event_filter and event_filter.lazy_load_members(): + members = set(ev.sender for ev in itertools.chain( + results["events_before"], + (results["event"],), + results["events_after"], + )) + filtered_types = [EventTypes.Member] + types = [(EventTypes.Member, member) for member in members] + + # XXX: why do we return the state as of the last event rather than the + # first? Shouldn't we be consistent with /sync? 
+ # https://github.com/matrix-org/matrix-doc/issues/687 + state = yield self.store.get_state_for_events( - [last_event_id], None + [last_event_id], types, filtered_types=filtered_types, ) results["state"] = list(state[last_event_id].values()) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index 69ae9731d5..c464adbd0b 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -287,7 +287,7 @@ class SearchHandler(BaseHandler): contexts = {} for event in allowed_events: res = yield self.store.get_events_around( - event.room_id, event.event_id, before_limit, after_limit + event.room_id, event.event_id, before_limit, after_limit, ) logger.info( diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index b7bd878c90..13c331550b 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -531,11 +531,20 @@ class RoomEventContextServlet(ClientV1RestServlet): limit = parse_integer(request, "limit", default=10) + # picking the API shape for symmetry with /messages + filter_bytes = parse_string(request, "filter") + if filter_bytes: + filter_json = urlparse.unquote(filter_bytes).decode("UTF-8") + event_filter = Filter(json.loads(filter_json)) + else: + event_filter = None + results = yield self.room_context_handler.get_event_context( requester.user, room_id, event_id, limit, + event_filter, ) if not results: diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index 66856342f0..25d0097b58 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -527,7 +527,9 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): ) @defer.inlineCallbacks - def get_events_around(self, room_id, event_id, before_limit, after_limit): + def get_events_around( + self, room_id, event_id, before_limit, after_limit, event_filter=None, + ): """Retrieve events and pagination tokens around a given event in a room. @@ -536,6 +538,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): event_id (str) before_limit (int) after_limit (int) + event_filter (Filter|None) Returns: dict @@ -543,7 +546,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): results = yield self.runInteraction( "get_events_around", self._get_events_around_txn, - room_id, event_id, before_limit, after_limit + room_id, event_id, before_limit, after_limit, event_filter, ) events_before = yield self._get_events( @@ -563,7 +566,9 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): "end": results["after"]["token"], }) - def _get_events_around_txn(self, txn, room_id, event_id, before_limit, after_limit): + def _get_events_around_txn( + self, txn, room_id, event_id, before_limit, after_limit, event_filter, + ): """Retrieves event_ids and pagination tokens around a given event in a room. 
@@ -572,6 +577,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): event_id (str) before_limit (int) after_limit (int) + event_filter (Filter|None) Returns: dict @@ -601,11 +607,13 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore): rows, start_token = self._paginate_room_events_txn( txn, room_id, before_token, direction='b', limit=before_limit, + event_filter=event_filter, ) events_before = [r.event_id for r in rows] rows, end_token = self._paginate_room_events_txn( txn, room_id, after_token, direction='f', limit=after_limit, + event_filter=event_filter, ) events_after = [r.event_id for r in rows] From a8cbce0ced662f92ce7576dd44e5fefe98ffae62 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 27 Jul 2018 16:17:17 +0100 Subject: [PATCH 017/205] fix invalidation --- synapse/util/caches/descriptors.py | 2 +- tests/util/caches/test_descriptors.py | 42 ++++++++++++++++++++++++++- 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 1a8c99d387..861c24809c 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -528,7 +528,7 @@ class CacheListDescriptor(_CacheDescriptorBase): deferreds_map[arg] = deferred key = arg_to_cache_key(arg) observable = ObservableDeferred(deferred) - cache.set(key, observable) + cache.set(key, observable, callback=invalidate_callback) def complete_all(res): # the wrapped function has completed. It returns a diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index 1ac967b63e..ca8a7c907f 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -275,7 +275,6 @@ class DescriptorTestCase(unittest.TestCase): obj.mock.assert_not_called() -@unittest.DEBUG class CachedListDescriptorTestCase(unittest.TestCase): @defer.inlineCallbacks def test_cache(self): @@ -334,3 +333,44 @@ class CachedListDescriptorTestCase(unittest.TestCase): r = yield obj.list_fn([10, 20, 30], 2) obj.mock.assert_not_called() self.assertEqual(r, {10: 'fish', 20: 'chips', 30: 'peas'}) + + @defer.inlineCallbacks + def test_invalidate(self): + """Make sure that invalidation callbacks are called.""" + class Cls(object): + def __init__(self): + self.mock = mock.Mock() + + @descriptors.cached() + def fn(self, arg1, arg2): + pass + + @descriptors.cachedList("fn", "args1", inlineCallbacks=True) + def list_fn(self, args1, arg2): + # we want this to behave like an asynchronous function + yield run_on_reactor() + defer.returnValue(self.mock(args1, arg2)) + + obj = Cls() + invalidate0 = mock.Mock() + invalidate1 = mock.Mock() + + # cache miss + obj.mock.return_value = {10: 'fish', 20: 'chips'} + r1 = yield obj.list_fn([10, 20], 2, on_invalidate=invalidate0) + obj.mock.assert_called_once_with([10, 20], 2) + self.assertEqual(r1, {10: 'fish', 20: 'chips'}) + obj.mock.reset_mock() + + # cache hit + r2 = yield obj.list_fn([10, 20], 2, on_invalidate=invalidate1) + obj.mock.assert_not_called() + self.assertEqual(r2, {10: 'fish', 20: 'chips'}) + + invalidate0.assert_not_called() + invalidate1.assert_not_called() + + # now if we invalidate the keys, both invalidations should get called + obj.fn.invalidate((10, 2)) + invalidate0.assert_called_once() + invalidate1.assert_called_once() From 2903e65affdcbd77ec794b0f998afb9e24f50215 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Sun, 29 Jul 2018 19:47:08 +0200 Subject: [PATCH 018/205] fix isort --- synapse/crypto/context_factory.py | 5 +++-- 1 file 
changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 57694e18b0..569377e653 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -13,12 +13,13 @@ # limitations under the License. import logging +from zope.interface import implementer + from OpenSSL import SSL, crypto from twisted.internet._idna import _idnaBytes -from twisted.internet.ssl import ContextFactory, CertificateOptions from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors from twisted.internet.interfaces import IOpenSSLClientConnectionCreator -from zope.interface import implementer +from twisted.internet.ssl import CertificateOptions, ContextFactory logger = logging.getLogger(__name__) From 251e6c1210087069a6133140519de80a4ddf218a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 30 Jul 2018 15:55:57 +0100 Subject: [PATCH 019/205] limit register and sign in on number of monthly users --- synapse/api/errors.py | 1 + synapse/config/server.py | 5 ++++ synapse/handlers/auth.py | 13 +++++++++ synapse/handlers/register.py | 18 ++++++++++-- synapse/storage/__init__.py | 34 +++++++++++++++++++++++ tests/handlers/test_auth.py | 49 ++++++++++++++++++++++++++++++++- tests/handlers/test_register.py | 49 +++++++++++++++++++++++++++++++++ 7 files changed, 166 insertions(+), 3 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 6074df292f..14f5540280 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -55,6 +55,7 @@ class Codes(object): SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED" CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" + MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" class CodeMessageException(RuntimeError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 18102656b0..8b335bff3f 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -67,6 +67,11 @@ class ServerConfig(Config): "block_non_admin_invites", False, ) + # Options to control access by tracking MAU + self.limit_usage_by_mau = config.get("limit_usage_by_mau", False) + self.max_mau_value = config.get( + "max_mau_value", 0, + ) # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 402e44cdef..f3734f11bd 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -519,6 +519,7 @@ class AuthHandler(BaseHandler): """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) + self._check_mau_limits() # the device *should* have been registered before we got here; however, # it's possible we raced against a DELETE operation. The thing we @@ -729,6 +730,7 @@ class AuthHandler(BaseHandler): defer.returnValue(access_token) def validate_short_term_login_token_and_get_user_id(self, login_token): + self._check_mau_limits() auth_api = self.hs.get_auth() try: macaroon = pymacaroons.Macaroon.deserialize(login_token) @@ -892,6 +894,17 @@ class AuthHandler(BaseHandler): else: return defer.succeed(False) + def _check_mau_limits(self): + """ + Ensure that if mau blocking is enabled that invalid users cannot + log in. 
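+
+ [Editor's note, not part of the original patch: as introduced in this
+ commit, store.count_monthly_users() is runInteraction-based and so
+ returns a Deferred, which means the ">=" comparison below silently
+ compares a Deferred against an int. The unit tests pass because they
+ mock the method to return a plain integer; the "factor out metrics"
+ commit later in this series rewrites count_monthly_users() to be
+ synchronous.]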
+ """ + if self.hs.config.limit_usage_by_mau is True: + current_mau = self.store.count_monthly_users() + if current_mau >= self.hs.config.max_mau_value: + raise AuthError( + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) @attr.s class MacaroonGenerator(object): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 7caff0cbc8..f46b8355c0 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -45,7 +45,7 @@ class RegistrationHandler(BaseHandler): hs (synapse.server.HomeServer): """ super(RegistrationHandler, self).__init__(hs) - + self.hs = hs self.auth = hs.get_auth() self._auth_handler = hs.get_auth_handler() self.profile_handler = hs.get_profile_handler() @@ -144,6 +144,7 @@ class RegistrationHandler(BaseHandler): Raises: RegistrationError if there was a problem registering. """ + self._check_mau_limits() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -288,6 +289,7 @@ class RegistrationHandler(BaseHandler): 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", ) + self._check_mau_limits() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -437,7 +439,7 @@ class RegistrationHandler(BaseHandler): """ if localpart is None: raise SynapseError(400, "Request must include user id") - + self._check_mau_limits() need_register = True try: @@ -531,3 +533,15 @@ class RegistrationHandler(BaseHandler): remote_room_hosts=remote_room_hosts, action="join", ) + + def _check_mau_limits(self): + """ + Do not accept registrations if monthly active user limits exceeded + and limiting is enabled + """ + if self.hs.config.limit_usage_by_mau is True: + current_mau = self.store.count_monthly_users() + if current_mau >= self.hs.config.max_mau_value: + raise RegistrationError( + 403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED + ) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index ba88a54979..6a75bf0e52 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -19,6 +19,7 @@ import logging import time from dateutil import tz +from prometheus_client import Gauge from synapse.api.constants import PresenceState from synapse.storage.devices import DeviceStore @@ -60,6 +61,13 @@ from .util.id_generators import ChainedIdGenerator, IdGenerator, StreamIdGenerat logger = logging.getLogger(__name__) +# Gauges to expose monthly active user control metrics +current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") +max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") +limit_usage_by_mau_gauge = Gauge( + "synapse_admin_limit_usage_by_mau", "MAU Limiting enabled" +) + class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, @@ -266,6 +274,32 @@ class DataStore(RoomMemberStore, RoomStore, return self.runInteraction("count_users", _count_users) + def count_monthly_users(self): + """ + Counts the number of users who used this homeserver in the last 30 days + This method should be refactored with count_daily_users - the only + reason not to is waiting on definition of mau + returns: + int: count of current monthly active users + """ + def _count_monthly_users(txn): + thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) + sql = """ + SELECT COUNT(*) FROM user_ips + WHERE last_seen > ? 
+ """ + txn.execute(sql, (thirty_days_ago,)) + count, = txn.fetchone() + + self._current_mau = count + current_mau_gauge.set(self._current_mau) + max_mau_value_gauge.set(self.hs.config.max_mau_value) + limit_usage_by_mau_gauge.set(self.hs.config.limit_usage_by_mau) + logger.info("calling mau stats") + return count + return self.runInteraction("count_monthly_users", _count_monthly_users) + + def count_r30_users(self): """ Counts the number of 30 day retained users, defined as:- diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 2e5e8e4dec..57f78a6bec 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -12,15 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +from mock import Mock import pymacaroons from twisted.internet import defer import synapse +from synapse.api.errors import AuthError import synapse.api.errors from synapse.handlers.auth import AuthHandler + from tests import unittest from tests.utils import setup_test_homeserver @@ -37,6 +39,10 @@ class AuthTestCase(unittest.TestCase): self.hs.handlers = AuthHandlers(self.hs) self.auth_handler = self.hs.handlers.auth_handler self.macaroon_generator = self.hs.get_macaroon_generator() + # MAU tests + self.hs.config.max_mau_value = 50 + self.small_number_of_users = 1 + self.large_number_of_users = 100 def test_token_is_a_macaroon(self): token = self.macaroon_generator.generate_access_token("some_user") @@ -113,3 +119,44 @@ class AuthTestCase(unittest.TestCase): self.auth_handler.validate_short_term_login_token_and_get_user_id( macaroon.serialize() ) + + @defer.inlineCallbacks + def test_mau_limits_disabled(self): + self.hs.config.limit_usage_by_mau = False + # Ensure does not throw exception + yield self.auth_handler.get_access_token_for_user_id('user_a') + + self.auth_handler.validate_short_term_login_token_and_get_user_id( + self._get_macaroon().serialize() + ) + + @defer.inlineCallbacks + def test_mau_limits_exceeded(self): + self.hs.config.limit_usage_by_mau = True + self.hs.get_datastore().count_monthly_users = Mock( + return_value=self.large_number_of_users + ) + with self.assertRaises(AuthError): + yield self.auth_handler.get_access_token_for_user_id('user_a') + with self.assertRaises(AuthError): + self.auth_handler.validate_short_term_login_token_and_get_user_id( + self._get_macaroon().serialize() + ) + + @defer.inlineCallbacks + def test_mau_limits_not_exceeded(self): + self.hs.config.limit_usage_by_mau = True + self.hs.get_datastore().count_monthly_users = Mock( + return_value=self.small_number_of_users + ) + # Ensure does not raise exception + yield self.auth_handler.get_access_token_for_user_id('user_a') + self.auth_handler.validate_short_term_login_token_and_get_user_id( + self._get_macaroon().serialize() + ) + + def _get_macaroon(self): + token = self.macaroon_generator.generate_short_term_login_token( + "user_a", 5000 + ) + return pymacaroons.Macaroon.deserialize(token) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 025fa1be81..a5a8e7c954 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,6 +17,7 @@ from mock import Mock from twisted.internet import defer +from synapse.api.errors import RegistrationError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -77,3 +78,51 @@ class 
RegistrationTestCase(unittest.TestCase): requester, local_part, display_name) self.assertEquals(result_user_id, user_id) self.assertEquals(result_token, 'secret') + + @defer.inlineCallbacks + def test_cannot_register_when_mau_limits_exceeded(self): + local_part = "someone" + display_name = "someone" + requester = create_requester("@as:test") + store = self.hs.get_datastore() + self.hs.config.limit_usage_by_mau = False + self.hs.config.max_mau_value = 50 + lots_of_users = 100 + small_number_users = 1 + + store.count_monthly_users = Mock(return_value=lots_of_users) + + # Ensure does not throw exception + yield self.handler.get_or_create_user(requester, 'a', display_name) + + self.hs.config.limit_usage_by_mau = True + + with self.assertRaises(RegistrationError): + yield self.handler.get_or_create_user(requester, 'b', display_name) + + store.count_monthly_users = Mock(return_value=small_number_users) + + self._macaroon_mock_generator("another_secret") + + # Ensure does not throw exception + yield self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil") + + self._macaroon_mock_generator("another another secret") + store.count_monthly_users = Mock(return_value=lots_of_users) + with self.assertRaises(RegistrationError): + yield self.handler.register(localpart=local_part) + + self._macaroon_mock_generator("another another secret") + store.count_monthly_users = Mock(return_value=lots_of_users) + with self.assertRaises(RegistrationError): + yield self.handler.register_saml2(local_part) + + def _macaroon_mock_generator(self, secret): + """ + Reset macaroon generator in the case where the test creates multiple users + """ + macaroon_generator = Mock( + generate_access_token=Mock(return_value=secret)) + self.hs.get_macaroon_generator = Mock(return_value=macaroon_generator) + self.hs.handlers = RegistrationHandlers(self.hs) + self.handler = self.hs.get_handlers().registration_handler From 9b13817e067d370f3ddb860baef884bf460449b5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 30 Jul 2018 22:07:07 +0100 Subject: [PATCH 020/205] factor out metrics from __init__ to app/homeserver --- synapse/app/homeserver.py | 20 +++++++++++++++++++- synapse/storage/__init__.py | 37 +++++++++++++------------------------ 2 files changed, 32 insertions(+), 25 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 57b815d777..a1512ccea4 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -18,6 +18,7 @@ import logging import os import sys +from prometheus_client import Gauge from six import iteritems from twisted.application import service @@ -299,7 +300,12 @@ class SynapseHomeServer(HomeServer): except IncorrectDatabaseSetup as e: quit_with_error(e.message) - +# Gauges to expose monthly active user control metrics +current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") +max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") +limit_usage_by_mau_gauge = Gauge( + "synapse_admin_limit_usage_by_mau", "MAU Limiting enabled" +) def setup(config_options): """ Args: @@ -512,6 +518,18 @@ def run(hs): # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + def generate_monthly_active_users(): + count = 0 + if hs.config.limit_usage_by_mau: + count = hs.get_datastore().count_monthly_users() + logger.info("NJ count is %d" % (count,)) + current_mau_gauge.set(float(count)) + max_mau_value_gauge.set(float(hs.config.max_mau_value)) + limit_usage_by_mau_gauge.set(float(hs.config.limit_usage_by_mau)) + + 
generate_monthly_active_users() + clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) + if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") clock.looping_call(start_phone_stats_home, 3 * 60 * 60 * 1000) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 6a75bf0e52..7b8215bf08 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -19,7 +19,6 @@ import logging import time from dateutil import tz -from prometheus_client import Gauge from synapse.api.constants import PresenceState from synapse.storage.devices import DeviceStore @@ -61,14 +60,6 @@ from .util.id_generators import ChainedIdGenerator, IdGenerator, StreamIdGenerat logger = logging.getLogger(__name__) -# Gauges to expose monthly active user control metrics -current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") -max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") -limit_usage_by_mau_gauge = Gauge( - "synapse_admin_limit_usage_by_mau", "MAU Limiting enabled" -) - - class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, PresenceStore, TransactionStore, @@ -102,6 +93,7 @@ class DataStore(RoomMemberStore, RoomStore, self._clock = hs.get_clock() self.database_engine = hs.database_engine + self.db_conn = db_conn self._stream_id_gen = StreamIdGenerator( db_conn, "events", "stream_ordering", extra_tables=[("local_invites", "stream_id")] @@ -282,22 +274,19 @@ class DataStore(RoomMemberStore, RoomStore, returns: int: count of current monthly active users """ - def _count_monthly_users(txn): - thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) - sql = """ - SELECT COUNT(*) FROM user_ips - WHERE last_seen > ? - """ - txn.execute(sql, (thirty_days_ago,)) - count, = txn.fetchone() - self._current_mau = count - current_mau_gauge.set(self._current_mau) - max_mau_value_gauge.set(self.hs.config.max_mau_value) - limit_usage_by_mau_gauge.set(self.hs.config.limit_usage_by_mau) - logger.info("calling mau stats") - return count - return self.runInteraction("count_monthly_users", _count_monthly_users) + thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) + sql = """ + SELECT COALESCE(count(*), 0) FROM ( + SELECT user_id FROM user_ips + WHERE last_seen > ? + GROUP BY user_id + ) u + """ + txn = self.db_conn.cursor() + txn.execute(sql, (thirty_days_ago,)) + count, = txn.fetchone() + return count def count_r30_users(self): From cefac79c105515fb2e5b05fbbc0fdb9b5b977f30 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 30 Jul 2018 22:08:09 +0100 Subject: [PATCH 021/205] monthly_active_tests --- tests/storage/test__init__.py | 48 +++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 tests/storage/test__init__.py diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py new file mode 100644 index 0000000000..b763d395cc --- /dev/null +++ b/tests/storage/test__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import sys + +from twisted.internet import defer + +import tests.unittest +import tests.utils + + +class InitTestCase(tests.unittest.TestCase): + def __init__(self, *args, **kwargs): + super(InitTestCase, self).__init__(*args, **kwargs) + self.store = None # type: synapse.storage.DataStore + + @defer.inlineCallbacks + def setUp(self): + hs = yield tests.utils.setup_test_homeserver() + hs.config.max_mau_value = 50 + hs.config.limit_usage_by_mau = True + self.store = hs.get_datastore() + + @defer.inlineCallbacks + def test_count_monthly_users(self): + count = yield self.store.count_monthly_users() + self.assertEqual(0, count) + yield self.store.insert_client_ip( + "@user:server1", "access_token", "ip", "user_agent", "device_id" + ) + + yield self.store.insert_client_ip( + "@user:server2", "access_token", "ip", "user_agent", "device_id" + ) + count = self.store.count_monthly_users() + + self.assertEqual(2, count) From fef7e58ac63d58f4a13f0914f51fc5956981af6a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 30 Jul 2018 22:29:44 +0100 Subject: [PATCH 022/205] actually close conn --- synapse/storage/__init__.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 7b8215bf08..044e988e92 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -283,10 +283,14 @@ class DataStore(RoomMemberStore, RoomStore, GROUP BY user_id ) u """ - txn = self.db_conn.cursor() - txn.execute(sql, (thirty_days_ago,)) - count, = txn.fetchone() - return count + try: + txn = self.db_conn.cursor() + txn.execute(sql, (thirty_days_ago,)) + count, = txn.fetchone() + return count + finally: + txn.close() + def count_r30_users(self): From 21276ff84601745093c2416bbf3db1f0ae56155d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 30 Jul 2018 22:42:12 +0100 Subject: [PATCH 023/205] remove errant logging --- synapse/app/homeserver.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index a1512ccea4..96c45b7209 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -522,7 +522,6 @@ def run(hs): count = 0 if hs.config.limit_usage_by_mau: count = hs.get_datastore().count_monthly_users() - logger.info("NJ count is %d" % (count,)) current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) limit_usage_by_mau_gauge.set(float(hs.config.limit_usage_by_mau)) From 254e8267e20ccde19f89eccdfb67b7aaecb32eec Mon Sep 17 00:00:00 2001 From: Krombel Date: Mon, 30 Jul 2018 19:38:38 +0200 Subject: [PATCH 024/205] Only import secrets when available secrets got introduced in python 3.6 so this class is not available in 3.5 and before. 
This now checks for the current running version and only tries using secrets if the version is 3.6 or above Signed-Off-By: Matthias Kesler --- changelog.d/3626.bugfix | 1 + synapse/secrets.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) create mode 100644 changelog.d/3626.bugfix diff --git a/changelog.d/3626.bugfix b/changelog.d/3626.bugfix new file mode 100644 index 0000000000..9a4b878986 --- /dev/null +++ b/changelog.d/3626.bugfix @@ -0,0 +1 @@ +Only import secrets when available (fix for py < 3.6) diff --git a/synapse/secrets.py b/synapse/secrets.py index f397daaa5e..f05e9ea535 100644 --- a/synapse/secrets.py +++ b/synapse/secrets.py @@ -20,17 +20,16 @@ See https://docs.python.org/3/library/secrets.html#module-secrets for the API used in Python 3.6, and the API emulated in Python 2.7. """ -import six +import sys -if six.PY3: +# secrets is available since python 3.6 +if sys.version_info[0:2] >= (3, 6): import secrets def Secrets(): return secrets - else: - import os import binascii From e908b8683248328dd92479fc81be350336b9c8f4 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 30 Jul 2018 16:24:02 -0600 Subject: [PATCH 025/205] Remove pdu_failures from transactions The field is never read from, and all the opportunities given to populate it are not utilized. It should be very safe to remove this. --- changelog.d/3628.misc | 1 + synapse/federation/federation_server.py | 4 -- synapse/federation/send_queue.py | 63 +------------------------ synapse/federation/transaction_queue.py | 32 ++----------- synapse/federation/transport/server.py | 3 +- synapse/federation/units.py | 1 - tests/handlers/test_typing.py | 1 - 7 files changed, 8 insertions(+), 97 deletions(-) create mode 100644 changelog.d/3628.misc diff --git a/changelog.d/3628.misc b/changelog.d/3628.misc new file mode 100644 index 0000000000..1aebefbe18 --- /dev/null +++ b/changelog.d/3628.misc @@ -0,0 +1 @@ +Remove unused field "pdu_failures" from transactions. 
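[Editor's note: stepping back to the synapse/secrets.py change above, the pattern deserves spelling out, since checking six.PY3 alone is not enough — the secrets module only exists from Python 3.6. Below is a minimal, self-contained sketch of the same version-gating idea; the helper names are illustrative rather than Synapse's actual API, which wraps the module behind a Secrets() factory as shown in the diff.]

    import sys

    if sys.version_info[0:2] >= (3, 6):
        # The stdlib secrets module is only available from Python 3.6.
        from secrets import token_bytes, token_hex
    else:
        import binascii
        import os

        def token_bytes(nbytes=32):
            # os.urandom is a reasonable CSPRNG fallback on older Pythons.
            return os.urandom(nbytes)

        def token_hex(nbytes=32):
            return binascii.hexlify(token_bytes(nbytes)).decode("ascii")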
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index e501251b6e..657935d1ac 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -207,10 +207,6 @@ class FederationServer(FederationBase): edu.content ) - pdu_failures = getattr(transaction, "pdu_failures", []) - for fail in pdu_failures: - logger.info("Got failure %r", fail) - response = { "pdus": pdu_results, } diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 5157c3860d..0bb468385d 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -62,8 +62,6 @@ class FederationRemoteSendQueue(object): self.edus = SortedDict() # stream position -> Edu - self.failures = SortedDict() # stream position -> (destination, Failure) - self.device_messages = SortedDict() # stream position -> destination self.pos = 1 @@ -79,7 +77,7 @@ class FederationRemoteSendQueue(object): for queue_name in [ "presence_map", "presence_changed", "keyed_edu", "keyed_edu_changed", - "edus", "failures", "device_messages", "pos_time", + "edus", "device_messages", "pos_time", ]: register(queue_name, getattr(self, queue_name)) @@ -149,12 +147,6 @@ class FederationRemoteSendQueue(object): for key in keys[:i]: del self.edus[key] - # Delete things out of failure map - keys = self.failures.keys() - i = self.failures.bisect_left(position_to_delete) - for key in keys[:i]: - del self.failures[key] - # Delete things out of device map keys = self.device_messages.keys() i = self.device_messages.bisect_left(position_to_delete) @@ -204,13 +196,6 @@ class FederationRemoteSendQueue(object): self.notifier.on_new_replication_data() - def send_failure(self, failure, destination): - """As per TransactionQueue""" - pos = self._next_pos() - - self.failures[pos] = (destination, str(failure)) - self.notifier.on_new_replication_data() - def send_device_messages(self, destination): """As per TransactionQueue""" pos = self._next_pos() @@ -285,17 +270,6 @@ class FederationRemoteSendQueue(object): for (pos, edu) in edus: rows.append((pos, EduRow(edu))) - # Fetch changed failures - i = self.failures.bisect_right(from_token) - j = self.failures.bisect_right(to_token) + 1 - failures = self.failures.items()[i:j] - - for (pos, (destination, failure)) in failures: - rows.append((pos, FailureRow( - destination=destination, - failure=failure, - ))) - # Fetch changed device messages i = self.device_messages.bisect_right(from_token) j = self.device_messages.bisect_right(to_token) + 1 @@ -417,34 +391,6 @@ class EduRow(BaseFederationRow, namedtuple("EduRow", ( buff.edus.setdefault(self.edu.destination, []).append(self.edu) -class FailureRow(BaseFederationRow, namedtuple("FailureRow", ( - "destination", # str - "failure", -))): - """Streams failures to a remote server. Failures are issued when there was - something wrong with a transaction the remote sent us, e.g. it included - an event that was invalid. 
- """ - - TypeId = "f" - - @staticmethod - def from_data(data): - return FailureRow( - destination=data["destination"], - failure=data["failure"], - ) - - def to_data(self): - return { - "destination": self.destination, - "failure": self.failure, - } - - def add_to_buffer(self, buff): - buff.failures.setdefault(self.destination, []).append(self.failure) - - class DeviceRow(BaseFederationRow, namedtuple("DeviceRow", ( "destination", # str ))): @@ -471,7 +417,6 @@ TypeToRow = { PresenceRow, KeyedEduRow, EduRow, - FailureRow, DeviceRow, ) } @@ -481,7 +426,6 @@ ParsedFederationStreamData = namedtuple("ParsedFederationStreamData", ( "presence", # list(UserPresenceState) "keyed_edus", # dict of destination -> { key -> Edu } "edus", # dict of destination -> [Edu] - "failures", # dict of destination -> [failures] "device_destinations", # set of destinations )) @@ -503,7 +447,6 @@ def process_rows_for_federation(transaction_queue, rows): presence=[], keyed_edus={}, edus={}, - failures={}, device_destinations=set(), ) @@ -532,9 +475,5 @@ def process_rows_for_federation(transaction_queue, rows): edu.destination, edu.edu_type, edu.content, key=None, ) - for destination, failure_list in iteritems(buff.failures): - for failure in failure_list: - transaction_queue.send_failure(destination, failure) - for destination in buff.device_destinations: transaction_queue.send_device_messages(destination) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 6996d6b695..78f9d40a3a 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -116,9 +116,6 @@ class TransactionQueue(object): ), ) - # destination -> list of tuple(failure, deferred) - self.pending_failures_by_dest = {} - # destination -> stream_id of last successfully sent to-device message. # NB: may be a long or an int. 
self.last_device_stream_id_by_dest = {} @@ -382,19 +379,6 @@ class TransactionQueue(object): self._attempt_new_transaction(destination) - def send_failure(self, failure, destination): - if destination == self.server_name or destination == "localhost": - return - - if not self.can_send_to(destination): - return - - self.pending_failures_by_dest.setdefault( - destination, [] - ).append(failure) - - self._attempt_new_transaction(destination) - def send_device_messages(self, destination): if destination == self.server_name or destination == "localhost": return @@ -469,7 +453,6 @@ class TransactionQueue(object): pending_pdus = self.pending_pdus_by_dest.pop(destination, []) pending_edus = self.pending_edus_by_dest.pop(destination, []) pending_presence = self.pending_presence_by_dest.pop(destination, {}) - pending_failures = self.pending_failures_by_dest.pop(destination, []) pending_edus.extend( self.pending_edus_keyed_by_dest.pop(destination, {}).values() @@ -497,7 +480,7 @@ class TransactionQueue(object): logger.debug("TX [%s] len(pending_pdus_by_dest[dest]) = %d", destination, len(pending_pdus)) - if not pending_pdus and not pending_edus and not pending_failures: + if not pending_pdus and not pending_edus: logger.debug("TX [%s] Nothing to send", destination) self.last_device_stream_id_by_dest[destination] = ( device_stream_id @@ -507,7 +490,7 @@ class TransactionQueue(object): # END CRITICAL SECTION success = yield self._send_new_transaction( - destination, pending_pdus, pending_edus, pending_failures, + destination, pending_pdus, pending_edus, ) if success: sent_transactions_counter.inc() @@ -584,14 +567,12 @@ class TransactionQueue(object): @measure_func("_send_new_transaction") @defer.inlineCallbacks - def _send_new_transaction(self, destination, pending_pdus, pending_edus, - pending_failures): + def _send_new_transaction(self, destination, pending_pdus, pending_edus): # Sort based on the order field pending_pdus.sort(key=lambda t: t[1]) pdus = [x[0] for x in pending_pdus] edus = pending_edus - failures = [x.get_dict() for x in pending_failures] success = True @@ -601,11 +582,10 @@ class TransactionQueue(object): logger.debug( "TX [%s] {%s} Attempting new transaction" - " (pdus: %d, edus: %d, failures: %d)", + " (pdus: %d, edus: %d)", destination, txn_id, len(pdus), len(edus), - len(failures) ) logger.debug("TX [%s] Persisting transaction...", destination) @@ -617,7 +597,6 @@ class TransactionQueue(object): destination=destination, pdus=pdus, edus=edus, - pdu_failures=failures, ) self._next_txn_id += 1 @@ -627,12 +606,11 @@ class TransactionQueue(object): logger.debug("TX [%s] Persisted transaction", destination) logger.info( "TX [%s] {%s} Sending transaction [%s]," - " (PDUs: %d, EDUs: %d, failures: %d)", + " (PDUs: %d, EDUs: %d)", destination, txn_id, transaction.transaction_id, len(pdus), len(edus), - len(failures), ) # Actually send the transaction diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 8574898f0c..3b5ea9515a 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -283,11 +283,10 @@ class FederationSendServlet(BaseFederationServlet): ) logger.info( - "Received txn %s from %s. (PDUs: %d, EDUs: %d, failures: %d)", + "Received txn %s from %s. (PDUs: %d, EDUs: %d)", transaction_id, origin, len(transaction_data.get("pdus", [])), len(transaction_data.get("edus", [])), - len(transaction_data.get("failures", [])), ) # We should ideally be getting this from the security layer. 
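[Editor's note: for the on-the-wire picture, here is a hedged sketch of a federation transaction body once this patch lands — only "pdus" and "edus" are carried, and the "pdu_failures" list, which the receiving side only ever logged, is gone. Every value below is a made-up placeholder; the authoritative key list is in the synapse/federation/units.py diff just below.]

    # Illustrative /send transaction body; all values are placeholders.
    transaction_body = {
        "origin": "origin.example.org",
        "origin_server_ts": 1532970000000,
        "pdus": [],  # persistent events to deliver
        "edus": [],  # ephemeral events: typing notifications, receipts, ...
        # "pdu_failures" is gone: it was never usefully read on receipt.
    }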
diff --git a/synapse/federation/units.py b/synapse/federation/units.py index bb1b3b13f7..c5ab14314e 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -73,7 +73,6 @@ class Transaction(JsonEncodedObject): "previous_ids", "pdus", "edus", - "pdu_failures", ] internal_keys = [ diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index b08856f763..2c263af1a3 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -44,7 +44,6 @@ def _expect_edu(destination, edu_type, content, origin="test"): "content": content, } ], - "pdu_failures": [], } From 6aab397adabeae5c45b75be28691ea1286b90869 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Jul 2018 09:45:35 +0100 Subject: [PATCH 026/205] synapse grafana dashboard --- contrib/grafana/synapse.json | 4961 ++++++++++++++++++++++++++++++++++ 1 file changed, 4961 insertions(+) create mode 100644 contrib/grafana/synapse.json diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json new file mode 100644 index 0000000000..94a1de58f4 --- /dev/null +++ b/contrib/grafana/synapse.json @@ -0,0 +1,4961 @@ +{ + "__inputs": [ + { + "name": "DS_PROMETHEUS", + "label": "Prometheus", + "description": "", + "type": "datasource", + "pluginId": "prometheus", + "pluginName": "Prometheus" + } + ], + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "5.2.0" + }, + { + "type": "panel", + "id": "graph", + "name": "Graph", + "version": "5.0.0" + }, + { + "type": "panel", + "id": "heatmap", + "name": "Heatmap", + "version": "5.0.0" + }, + { + "type": "datasource", + "id": "prometheus", + "name": "Prometheus", + "version": "5.0.0" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "$datasource", + "enable": false, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "limit": 100, + "name": "Annotations & Alerts", + "showIn": 0, + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "iteration": 1533026624326, + "links": [ + { + "asDropdown": true, + "icon": "external link", + "keepTime": true, + "tags": [ + "matrix" + ], + "title": "Dashboards", + "type": "dashboards" + } + ], + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 73, + "panels": [], + "title": "Overview", + "type": "row" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 1 + }, + "id": 75, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "process_cpu_seconds:rate2m{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} ", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "CPU usage", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + 
"decimals": null, + "format": "percentunit", + "label": null, + "logBase": 1, + "max": "1", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "cards": { + "cardPadding": 0, + "cardRound": null + }, + "color": { + "cardColor": "#b4ff00", + "colorScale": "sqrt", + "colorScheme": "interpolateSpectral", + "exponent": 0.5, + "mode": "spectrum" + }, + "dataFormat": "tsbuckets", + "datasource": "${DS_PROMETHEUS}", + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 1 + }, + "heatmap": {}, + "highlightCards": true, + "id": 85, + "legend": { + "show": false + }, + "links": [], + "targets": [ + { + "expr": "sum(rate(synapse_http_server_response_time_seconds_bucket{servlet='RoomSendEventRestServlet',instance=\"$instance\"}[$bucket_size])) by (le)", + "format": "heatmap", + "intervalFactor": 1, + "legendFormat": "{{le}}", + "refId": "A" + } + ], + "title": "Event Send Time", + "tooltip": { + "show": true, + "showHistogram": false + }, + "type": "heatmap", + "xAxis": { + "show": true + }, + "xBucketNumber": null, + "xBucketSize": null, + "yAxis": { + "decimals": null, + "format": "s", + "logBase": 2, + "max": null, + "min": null, + "show": true, + "splitFactor": null + }, + "yBucketBound": "auto", + "yBucketNumber": null, + "yBucketSize": null + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 10 + }, + "id": 33, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(rate(synapse_storage_events_persisted_events{instance=\"$instance\"}[$bucket_size])) without (job,index)", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "", + "refId": "A", + "step": 20, + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Events Persisted", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 17 + }, + "id": 54, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 0, + "grid": {}, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 34, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + 
"seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": true, + "targets": [ + { + "expr": "process_resident_memory_bytes{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}} {{index}}", + "refId": "A", + "step": 20, + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Memory", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bytes", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 37, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "/max$/", + "color": "#890F02", + "fill": 0, + "legend": false + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "process_open_fds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "hide": false, + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}}", + "refId": "A", + "step": 20 + }, + { + "expr": "process_max_fds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "hide": true, + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} max", + "refId": "B", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Open FDs", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "none", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 48, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_storage_schedule_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])/rate(synapse_storage_schedule_time_count[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + 
"timeFrom": null, + "timeShift": null, + "title": "Avg time waiting for db conn", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": "s", + "label": "", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 49, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "/^up/", + "legend": false, + "yaxis": 2 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "scrape_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Prometheus scrape time", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "s", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "decimals": 0, + "format": "none", + "label": "", + "logBase": 1, + "max": "0", + "min": "-1", + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 32 + }, + "id": 50, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(python_twisted_reactor_tick_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])/rate(python_twisted_reactor_tick_time_count[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Avg reactor tick time", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "s", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": false 
+ } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 32 + }, + "id": 5, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "hideEmpty": false, + "hideZero": false, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "/user/" + }, + { + "alias": "/system/" + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(process_cpu_system_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} system ", + "metric": "", + "refId": "B", + "step": 20 + }, + { + "expr": "rate(process_cpu_user_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} user", + "refId": "A", + "step": 20 + } + ], + "thresholds": [ + { + "colorMode": "custom", + "line": true, + "lineColor": "rgba(216, 200, 27, 0.27)", + "op": "gt", + "value": 0.5 + }, + { + "colorMode": "custom", + "line": true, + "lineColor": "rgba(234, 112, 112, 0.22)", + "op": "gt", + "value": 0.8 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "CPU", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": "percentunit", + "label": "", + "logBase": 1, + "max": "1.2", + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 0, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 39 + }, + "id": 53, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "min_over_time(up{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Up", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + 
"max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Process info", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 56, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 1, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 49 + }, + "id": 40, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{type}}", + "refId": "D" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Events/s Local vs Remote", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": "", + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 1, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 49 + }, + "id": 46, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "instant": false, + "intervalFactor": 2, + "legendFormat": "{{type}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Events/s by Type", + "tooltip": { + "shared": false, + "sort": 2, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": { + "irc-freenode (local)": "#EAB839" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 1, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 56 + }, + "id": 44, + "legend": { + "alignAsTable": true, + "avg": false, + "current": 
false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{origin_entity}} ({{origin_type}})", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Events/s by Origin", + "tooltip": { + "shared": false, + "sort": 2, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 1, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 56 + }, + "id": 45, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(rate(synapse_storage_events_persisted_events_sep{job=~\"$job\",index=~\"$index\", type=\"m.room.member\",instance=\"$instance\"}[$bucket_size])) by (origin_type, origin_entity)", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{origin_entity}} ({{origin_type}})", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Memberships/s by Origin", + "tooltip": { + "shared": true, + "sort": 2, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Event persist rates", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 19 + }, + "id": 57, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": null, + "editable": true, + "error": false, + "fill": 2, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 48 + }, + "id": 4, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": false, + "hideZero": true, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + 
"values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_http_server_requests_received{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}} {{tag}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [ + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(216, 200, 27, 0.27)", + "op": "gt", + "value": 100 + }, + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(234, 112, 112, 0.22)", + "op": "gt", + "value": 250 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Request Count by arrival time", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 48 + }, + "id": 32, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_http_server_requests_received{instance=\"$instance\",job=~\"$job\",index=~\"$index\",method!=\"OPTIONS\"}[$bucket_size]) and topk(10,synapse_http_server_requests_received{instance=\"$instance\",job=~\"$job\",method!=\"OPTIONS\"})", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{method}} {{servlet}} {{job}}-{{index}}", + "refId": "A", + "step": 20, + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Top 10 Request Counts", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": null, + "editable": true, + "error": false, + "fill": 2, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 56 + }, + "id": 23, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": false, + "hideZero": true, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": 
false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_http_server_response_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_http_server_response_ru_stime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}} {{tag}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [ + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(216, 200, 27, 0.27)", + "op": "gt", + "value": 100, + "yaxis": "left" + }, + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(234, 112, 112, 0.22)", + "op": "gt", + "value": 250, + "yaxis": "left" + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Total CPU Usage by Endpoint", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": null, + "editable": true, + "error": false, + "fill": 2, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 56 + }, + "id": 52, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": false, + "hideZero": true, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "(rate(synapse_http_server_response_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_http_server_response_ru_stime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])) / rate(synapse_http_server_response_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}} {{tag}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [ + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(216, 200, 27, 0.27)", + "op": "gt", + "value": 100 + }, + { + "colorMode": "custom", + "fill": true, + "fillColor": "rgba(234, 112, 112, 0.22)", + "op": "gt", + "value": 250 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Average CPU Usage by Endpoint", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "s", + "logBase": 1, + "max": null, + 
"min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 64 + }, + "id": 7, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_http_server_response_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}} {{tag}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "DB Usage by endpoint", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": null, + "editable": true, + "error": false, + "fill": 2, + "grid": {}, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 64 + }, + "id": 47, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "hideEmpty": false, + "hideZero": true, + "max": true, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_http_server_response_time_seconds_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\",tag!=\"incremental_sync\"}[$bucket_size])/rate(synapse_http_server_response_time_seconds_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\",tag!=\"incremental_sync\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}} {{tag}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Non-sync avg response time", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "s", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": 
false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 72 + }, + "id": 103, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "topk(10,synapse_http_server_in_flight_requests_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"})", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{method}} {{servlet}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Requests in flight", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Requests", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 20 + }, + "id": 97, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 23 + }, + "id": 99, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_background_process_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_background_process_ru_stime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{name}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "CPU usage by background jobs", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 23 + }, + "id": 101, + 
"legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_background_process_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{name}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "DB usage by background jobs", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "title": "Background jobs", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 21 + }, + "id": 81, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 79, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_federation_client_sent_transactions{instance=\"$instance\", job=~\"$job\", index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "txn rate", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Outgoing federation transaction rate", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 83, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": 
"rate(synapse_federation_server_received_pdus{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "pdus", + "refId": "A" + }, + { + "expr": "rate(synapse_federation_server_received_edus{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "edus", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Incoming PDU/EDU rate", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "title": "Federation", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 60, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 23 + }, + "id": 51, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_push_httppusher_http_pushes_processed{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "processed {{job}}", + "refId": "A", + "step": 20 + }, + { + "expr": "rate(synapse_push_httppusher_http_pushes_failed{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "failed {{job}}", + "refId": "B", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "HTTP Push rate", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Pushes", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 23 + }, + "id": 58, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 0, + "grid": {}, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 25 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + 
"linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "topk(10, rate(synapse_storage_transaction_time_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{desc}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Top DB transactions by txn rate", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 25 + }, + "id": 11, + "legend": { + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": true, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": true, + "targets": [ + { + "expr": "topk(5, rate(synapse_storage_transaction_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))", + "format": "time_series", + "instant": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{desc}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Top DB transactions by total txn time", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Database", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 24 + }, + "id": 59, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 0, + "y": 17 + }, + "id": 12, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { 
+ "expr": "rate(synapse_util_metrics_block_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\",block_name!=\"wrapped_request_handler\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Total CPU Usage by Block", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 12, + "y": 17 + }, + "id": 26, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "(rate(synapse_util_metrics_block_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds[$bucket_size])) / rate(synapse_util_metrics_block_count[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average CPU Time per Block", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "ms", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 0, + "y": 30 + }, + "id": 13, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\",block_name!=\"wrapped_request_handler\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 
2, + "legendFormat": "{{job}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Total DB Usage by Block", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percentunit", + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 12, + "y": 30 + }, + "id": 27, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average Database Time per Block", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "ms", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 0, + "y": 43 + }, + "id": 28, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average Transactions per Block", + "tooltip": { + "shared": false, + "sort": 0, + 
"value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "none", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 13, + "w": 12, + "x": 12, + "y": 43 + }, + "id": 25, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": false, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{block_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average Wallclock Time per Block", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "ms", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Per-block metrics", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 25 + }, + "id": 61, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 2, + "editable": true, + "error": false, + "fill": 0, + "grid": {}, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 55 + }, + "id": 1, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_caches_cache:hits{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])/rate(synapse_util_caches_cache:total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{name}} {{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Cache Hit Ratio", + "tooltip": { + "msResolution": true, + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, 
+ "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": "percentunit", + "label": "", + "logBase": 1, + "max": "1", + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": false + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 10, + "w": 12, + "x": 12, + "y": 55 + }, + "id": 8, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideZero": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "synapse_util_caches_cache:size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{name}} {{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Cache Size", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 10, + "w": 12, + "x": 0, + "y": 65 + }, + "id": 38, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "hideZero": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_caches_cache:total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{name}} {{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Cache request rate", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "rps", + "logBase": 1, + "max": null, + "min": 0, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "fill": 1, + "gridPos": { + "h": 10, + "w": 12, 
+ "x": 12, + "y": 65 + }, + "id": 39, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "topk(10, rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]) - rate(synapse_util_caches_cache:hits{job=\"$job\",instance=\"$instance\"}[$bucket_size]))", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{name}} {{job}}-{{index}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Top 10 cache misses", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "rps", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 75 + }, + "id": 65, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_util_caches_cache:evicted_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{name}} {{job}}-{{index}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Cache eviction rate", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "transparent": false, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": "hertz", + "label": "entries / second", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "repeat": null, + "title": "Caches", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 26 + }, + "id": 62, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 90 + }, + "id": 91, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + 
"percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "expr": "rate(python_gc_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[10m])", + "format": "time_series", + "instant": false, + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} gen {{gen}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Total GC time by bucket (10m smoothing)", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "decimals": null, + "format": "percentunit", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "$datasource", + "decimals": 3, + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 90 + }, + "id": 21, + "legend": { + "alignAsTable": true, + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null as zero", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(python_gc_time_sum{instance=\"$instance\",job=~\"$job\"}[$bucket_size])/rate(python_gc_time_count[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}} {{index}} gen {{gen}} ", + "refId": "A", + "step": 20, + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average GC Time Per Collection", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "s", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 99 + }, + "id": 89, + "legend": { + "avg": false, + "current": false, + "hideEmpty": true, + "hideZero": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "python_gc_counts{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} gen {{gen}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + 
"title": "Currently allocated objects", + "tooltip": { + "shared": false, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 99 + }, + "id": 93, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(python_gc_unreachable_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])/rate(python_gc_time_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} gen {{gen}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Object counts per collection", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 108 + }, + "id": 95, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(python_gc_time_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} gen {{gen}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "GC frequency", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "cards": { + "cardPadding": 0, + 
"cardRound": null + }, + "color": { + "cardColor": "#b4ff00", + "colorScale": "sqrt", + "colorScheme": "interpolateSpectral", + "exponent": 0.5, + "max": null, + "min": 0, + "mode": "spectrum" + }, + "dataFormat": "tsbuckets", + "datasource": "${DS_PROMETHEUS}", + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 108 + }, + "heatmap": {}, + "highlightCards": true, + "id": 87, + "legend": { + "show": true + }, + "links": [], + "targets": [ + { + "expr": "sum(rate(python_gc_time_bucket[$bucket_size])) by (le)", + "format": "heatmap", + "intervalFactor": 1, + "legendFormat": "{{le}}", + "refId": "A" + } + ], + "title": "GC durations", + "tooltip": { + "show": true, + "showHistogram": false + }, + "type": "heatmap", + "xAxis": { + "show": true + }, + "xBucketNumber": null, + "xBucketSize": null, + "yAxis": { + "decimals": null, + "format": "s", + "logBase": 1, + "max": null, + "min": null, + "show": true, + "splitFactor": null + }, + "yBucketBound": "auto", + "yBucketNumber": null, + "yBucketSize": null + } + ], + "repeat": null, + "title": "GC", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 27 + }, + "id": 63, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 19 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_replication_tcp_resource_user_sync{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "user started/stopped syncing", + "refId": "A", + "step": 20 + }, + { + "expr": "rate(synapse_replication_tcp_resource_federation_ack{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "federation ack", + "refId": "B", + "step": 20 + }, + { + "expr": "rate(synapse_replication_tcp_resource_remove_pusher{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "remove pusher", + "refId": "C", + "step": 20 + }, + { + "expr": "rate(synapse_replication_tcp_resource_invalidate_cache{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "invalidate cache", + "refId": "D", + "step": 20 + }, + { + "expr": "rate(synapse_replication_tcp_resource_user_ip_cache{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "user ip cache", + "refId": "E", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Rate of events on replication master", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { 
+ "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 19 + }, + "id": 41, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "rate(synapse_replication_tcp_resource_stream_updates{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{stream_name}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Outgoing stream updates", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 26 + }, + "id": 42, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum (rate(synapse_replication_tcp_protocol_inbound_commands{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])) without (name, conn_id)", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{command}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Rate of incoming commands", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 26 + }, + "id": 43, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + 
"stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum (rate(synapse_replication_tcp_protocol_outbound_commands{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])) without (name, conn_id)", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "{{job}}-{{index}} {{command}}", + "refId": "A", + "step": 20 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Rate of outgoing commands", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "hertz", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": null, + "title": "Replication", + "type": "row" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 28 + }, + "id": 69, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 11 + }, + "id": 67, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": " synapse_event_persisted_position{instance=\"$instance\"} - ignoring(index, job, name) group_right(instance) synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}}", + "refId": "A" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Event processing lag", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": "events", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_PROMETHEUS}", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 11 + }, + "id": 71, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "time()*1000-synapse_event_processing_last_ts{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "format": "time_series", + "hide": false, + "intervalFactor": 1, + "legendFormat": "{{job}}-{{index}} {{name}}", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Age of last processed event", + 
"tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "ms", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "title": "Event processing loop positions", + "type": "row" + } + ], + "refresh": "1m", + "schemaVersion": 16, + "style": "dark", + "tags": [ + "matrix" + ], + "templating": { + "list": [ + { + "current": { + "text": "Prometheus", + "value": "Prometheus" + }, + "hide": 0, + "label": null, + "name": "datasource", + "options": [], + "query": "prometheus", + "refresh": 1, + "regex": "", + "type": "datasource" + }, + { + "allFormat": "glob", + "auto": true, + "auto_count": 100, + "auto_min": "30s", + "current": { + "text": "auto", + "value": "$__auto_interval_bucket_size" + }, + "datasource": null, + "hide": 0, + "includeAll": false, + "label": "Bucket Size", + "multi": false, + "multiFormat": "glob", + "name": "bucket_size", + "options": [ + { + "selected": true, + "text": "auto", + "value": "$__auto_interval_bucket_size" + }, + { + "selected": false, + "text": "30s", + "value": "30s" + }, + { + "selected": false, + "text": "1m", + "value": "1m" + }, + { + "selected": false, + "text": "2m", + "value": "2m" + }, + { + "selected": false, + "text": "5m", + "value": "5m" + } + ], + "query": "30s,1m,2m,5m", + "refresh": 2, + "type": "interval" + }, + { + "allValue": null, + "current": {}, + "datasource": "$datasource", + "hide": 0, + "includeAll": false, + "label": null, + "multi": false, + "name": "instance", + "options": [], + "query": "label_values(process_cpu_user_seconds_total{job=~\"synapse.*\"}, instance)", + "refresh": 2, + "regex": "", + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allFormat": "regex wildcard", + "allValue": "", + "current": {}, + "datasource": "$datasource", + "hide": 0, + "hideLabel": false, + "includeAll": true, + "label": "Job", + "multi": true, + "multiFormat": "regex values", + "name": "job", + "options": [], + "query": "label_values(process_cpu_user_seconds_total{job=~\"synapse.*\"}, job)", + "refresh": 2, + "refresh_on_load": false, + "regex": "", + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allFormat": "regex wildcard", + "allValue": ".*", + "current": {}, + "datasource": "$datasource", + "hide": 0, + "hideLabel": false, + "includeAll": true, + "label": "", + "multi": true, + "multiFormat": "regex values", + "name": "index", + "options": [], + "query": "label_values(process_cpu_user_seconds_total{job=~\"synapse.*\"}, index)", + "refresh": 2, + "refresh_on_load": false, + "regex": "", + "sort": 3, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "now": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Synapse", + "uid": "000000012", + "version": 125 +} \ No newline at end of file From 9c14c2b5610956f4daa4c472929ccaa2b6b6ae22 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Paul=20T=C3=B6tterman?= Date: Tue, 31 Jul 2018 12:48:37 +0300 Subject: [PATCH 027/205] Add some documentation for using the dashboard --- contrib/grafana/README.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 contrib/grafana/README.md diff --git a/contrib/grafana/README.md b/contrib/grafana/README.md new file mode 100644 index 0000000000..6a6cc0bed4 --- /dev/null +++ b/contrib/grafana/README.md @@ -0,0 +1,6 @@ +# Using the Synapse Grafana dashboard + +0. Set up Prometheus and Grafana (out of scope for this readme). Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/ +1. Have your Prometheus scrape your Synapse: https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst +2. Import the dashboard into Grafana: download `synapse.json`, import it into Grafana, and select the correct Prometheus datasource: http://docs.grafana.org/reference/export_import/ +3. Set up additional recording rules. From 7d05406a07994c1b59f4d9224415d336fcf41686 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 31 Jul 2018 12:03:23 +0100 Subject: [PATCH 028/205] fix user_ips counting --- tests/storage/test__init__.py | 44 +++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 10 deletions(-) diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py index b763d395cc..c9ae349871 100644 --- a/tests/storage/test__init__.py +++ b/tests/storage/test__init__.py @@ -28,21 +28,45 @@ class InitTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): hs = yield tests.utils.setup_test_homeserver() + hs.config.max_mau_value = 50 hs.config.limit_usage_by_mau = True self.store = hs.get_datastore() + self.clock = hs.get_clock() - @defer.inlineCallbacks def test_count_monthly_users(self): - count = yield self.store.count_monthly_users() - self.assertEqual(0, count) - yield self.store.insert_client_ip( - "@user:server1", "access_token", "ip", "user_agent", "device_id" - ) - - yield self.store.insert_client_ip( - "@user:server2", "access_token", "ip", "user_agent", "device_id" - ) count = self.store.count_monthly_users() + self.assertEqual(0, count) + self._insert_user_ips("@user:server1") + self._insert_user_ips("@user:server2") + + count = self.store.count_monthly_users() self.assertEqual(2, count) + + def _insert_user_ips(self, user): + """ + Helper function to populate user_ips without using batch insertion infra + Args: + user (str): specify username, e.g.
@user:server.com + """ + try: + txn = self.store.db_conn.cursor() + self.store.database_engine.lock_table(txn, "user_ips") + self.store._simple_upsert_txn( + txn, + table="user_ips", + keyvalues={ + "user_id": user, + "access_token": "access_token", + "ip": "ip", + "user_agent": "user_agent", + "device_id": "device_id", + }, + values={ + "last_seen": self.clock.time_msec(), + }, + lock=False, + ) + finally: + txn.close() From 0bc9b9e397b413ca104a3ddb6db181e7a0fb0917 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Jul 2018 13:11:04 +0100 Subject: [PATCH 029/205] reinstate explicit include of EventsWorkerStore --- synapse/storage/events.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index dbbfe04880..a32a306495 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -34,7 +34,9 @@ from synapse.api.errors import SynapseError from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.event_federation import EventFederationStore +from synapse.storage.events_worker import EventsWorkerStore from synapse.types import RoomStreamToken, get_domain_from_id from synapse.util.async import ObservableDeferred from synapse.util.caches.descriptors import cached, cachedInlineCallbacks @@ -195,7 +197,7 @@ def _retry_on_integrity_error(func): # inherits from EventFederationStore so that we can call _update_backward_extremities # and _handle_mult_prev_events (though arguably those could both be moved in here) -class EventsStore(EventFederationStore): +class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore): EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts" EVENT_FIELDS_SENDER_URL_UPDATE_NAME = "event_fields_sender_url" From df2235e7fab44a5155134a336a4c27424398c1be Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 31 Jul 2018 13:16:20 +0100 Subject: [PATCH 030/205] coding style --- synapse/app/homeserver.py | 6 +++++- synapse/config/server.py | 2 +- synapse/handlers/auth.py | 3 ++- synapse/storage/__init__.py | 3 +-- .../storage/schema/delta/50/make_event_content_nullable.py | 2 +- tests/handlers/test_auth.py | 4 ++-- tests/storage/test__init__.py | 1 - 7 files changed, 12 insertions(+), 9 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 96c45b7209..82979e7d1b 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -18,9 +18,10 @@ import logging import os import sys -from prometheus_client import Gauge from six import iteritems +from prometheus_client import Gauge + from twisted.application import service from twisted.internet import defer, reactor from twisted.web.resource import EncodingResourceWrapper, NoResource @@ -300,12 +301,15 @@ class SynapseHomeServer(HomeServer): except IncorrectDatabaseSetup as e: quit_with_error(e.message) + # Gauges to expose monthly active user control metrics current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") limit_usage_by_mau_gauge = Gauge( "synapse_admin_limit_usage_by_mau", "MAU Limiting enabled" ) + + def setup(config_options): """ Args: diff --git a/synapse/config/server.py b/synapse/config/server.py index 8b335bff3f..9af42a93ad 100644 --- 
a/synapse/config/server.py +++ b/synapse/config/server.py @@ -70,7 +70,7 @@ class ServerConfig(Config): # Options to control access by tracking MAU self.limit_usage_by_mau = config.get("limit_usage_by_mau", False) self.max_mau_value = config.get( - "max_mau_value", 0, + "max_mau_value", 0, ) # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index f3734f11bd..28f1c1afbb 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -903,9 +903,10 @@ class AuthHandler(BaseHandler): current_mau = self.store.count_monthly_users() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED ) + @attr.s class MacaroonGenerator(object): diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 044e988e92..4747118ed7 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -60,6 +60,7 @@ from .util.id_generators import ChainedIdGenerator, IdGenerator, StreamIdGenerat logger = logging.getLogger(__name__) + class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, PresenceStore, TransactionStore, @@ -291,8 +292,6 @@ class DataStore(RoomMemberStore, RoomStore, finally: txn.close() - - def count_r30_users(self): """ Counts the number of 30 day retained users, defined as:- diff --git a/synapse/storage/schema/delta/50/make_event_content_nullable.py b/synapse/storage/schema/delta/50/make_event_content_nullable.py index 7d27342e39..6dd467b6c5 100644 --- a/synapse/storage/schema/delta/50/make_event_content_nullable.py +++ b/synapse/storage/schema/delta/50/make_event_content_nullable.py @@ -88,5 +88,5 @@ def run_upgrade(cur, database_engine, *args, **kwargs): "UPDATE sqlite_master SET sql=? WHERE tbl_name='events' AND type='table'", (sql, ), ) - cur.execute("PRAGMA schema_version=%i" % (oldver+1,)) + cur.execute("PRAGMA schema_version=%i" % (oldver + 1,)) cur.execute("PRAGMA writable_schema=OFF") diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 57f78a6bec..e01f14a10a 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -13,16 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. from mock import Mock + import pymacaroons from twisted.internet import defer import synapse -from synapse.api.errors import AuthError import synapse.api.errors +from synapse.api.errors import AuthError from synapse.handlers.auth import AuthHandler - from tests import unittest from tests.utils import setup_test_homeserver diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py index c9ae349871..fe6eeeaf10 100644 --- a/tests/storage/test__init__.py +++ b/tests/storage/test__init__.py @@ -12,7 +12,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import sys from twisted.internet import defer From 5bb39b1e0c086bbc65bb93f5b2b0c4c3fb82accf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 31 Jul 2018 13:22:14 +0100 Subject: [PATCH 031/205] mau limits --- changelog.d/3630.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3630.feature diff --git a/changelog.d/3630.feature b/changelog.d/3630.feature new file mode 100644 index 0000000000..20398da9e0 --- /dev/null +++ b/changelog.d/3630.feature @@ -0,0 +1 @@ +Blocks registration and sign in if the max MAU value is exceeded From d81602b75afc2c39314da1f9a21be436134e5361 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 13:52:49 +0100 Subject: [PATCH 032/205] Add helper base class for generating new replication endpoints This will hopefully reduce the boilerplate required to implement new internal HTTP requests. --- synapse/replication/http/_base.py | 208 ++++++++++++++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 synapse/replication/http/_base.py diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py new file mode 100644 index 0000000000..24f00d95fc --- /dev/null +++ b/synapse/replication/http/_base.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import abc +import logging +import re + +from six.moves import urllib + +from twisted.internet import defer + +from synapse.api.errors import ( + CodeMessageException, + MatrixCodeMessageException, + SynapseError, +) +from synapse.util.caches.response_cache import ResponseCache +from synapse.util.stringutils import random_string + +logger = logging.getLogger(__name__) + + +class ReplicationEndpoint(object): + """Helper base class for defining new replication HTTP endpoints. + + This creates an endpoint under `/_synapse/replication/:NAME/:PATH_ARGS..` + (with a `/:txn_id` suffix for cached requests), where NAME is a name, + PATH_ARGS are a tuple of parameters to be encoded in the URL. + + For example, if `NAME` is "send_event" and `PATH_ARGS` is `("event_id",)`, + with `CACHE` set to true, then this generates the endpoint: + + /_synapse/replication/send_event/:event_id/:txn_id + + For POST requests the payload is serialized to JSON and sent as the body, + while for GET requests the payload is added as query parameters. See + `_serialize_payload` for details. + + Incoming requests are handled by overriding `_handle_request`. Servers + must call `register` to register the path with the HTTP server. + + Requests can be sent by calling the client returned by `make_client`. + + Attributes: + NAME (str): A name for the endpoint, added to the path as well as used + in logging and metrics. + PATH_ARGS (tuple[str]): A list of parameters to be added to the path. + Adding parameters to the path (rather than payload) can make it + easier to follow along in the log files. + POST (bool): True to use POST requests with a JSON body, or False to use + GET requests with query params.
+ CACHE (bool): Whether the server should cache the result of the request. + If true then transparently adds a txn_id to all requests, and + `_handle_request` must return a Deferred. + RETRY_ON_TIMEOUT (bool): Whether or not to retry the request when a 504 + is received. + """ + + __metaclass__ = abc.ABCMeta + + NAME = abc.abstractproperty() + PATH_ARGS = abc.abstractproperty() + + POST = True + CACHE = True + RETRY_ON_TIMEOUT = True + + def __init__(self, hs): + if self.CACHE: + self.response_cache = ResponseCache( + hs, "repl." + self.NAME, + timeout_ms=30 * 60 * 1000, + ) + + @abc.abstractmethod + def _serialize_payload(**kwargs): + """Static method that is called when creating a request. + + Concrete implementations should have explicit parameters (rather than + kwargs) so that an appropriate exception is raised if the client is + called with unexpected parameters. All PATH_ARGS must appear in the + argument list. + + Returns: + Deferred[dict]|dict: If POST request then dictionary must be JSON + serialisable, otherwise must be appropriate for adding as query + args. + """ + return {} + + @abc.abstractmethod + def _handle_request(self, request, **kwargs): + """Handle incoming request. + + This is called with the request object and PATH_ARGS. + + Returns: + Deferred[dict]: A JSON serialisable dict to be used as response + body of request. + """ + pass + + @classmethod + def make_client(cls, hs): + """Create a client that makes requests. + + Returns a callable that accepts the same parameters as `_serialize_payload`. + """ + clock = hs.get_clock() + host = hs.config.worker_replication_host + port = hs.config.worker_replication_http_port + + client = hs.get_simple_http_client() + + @defer.inlineCallbacks + def send_request(**kwargs): + data = yield cls._serialize_payload(**kwargs) + + url_args = [urllib.parse.quote(kwargs[name]) for name in cls.PATH_ARGS] + + if cls.CACHE: + txn_id = random_string(10) + url_args.append(txn_id) + + if cls.POST: + request_func = client.post_json_get_json + else: + request_func = client.get_json + + uri = "http://%s:%s/_synapse/replication/%s/%s" % ( + host, port, cls.NAME, "/".join(url_args) + ) + + try: + # We keep retrying the same request for timeouts. This is so that we + # have a good idea that the request has either succeeded or failed on + # the master, and so whether we should clean up or not. + while True: + try: + result = yield request_func(uri, data) + break + except CodeMessageException as e: + if e.code != 504 or not cls.RETRY_ON_TIMEOUT: + raise + + logger.warn("%s request timed out", cls.NAME) + + # If we timed out we probably don't need to worry about backing + # off too much, but let's just wait a little anyway. + yield clock.sleep(1) + except MatrixCodeMessageException as e: + # We convert to SynapseError as we know that it was a SynapseError + # on the master process that we should send to the client. (And + # importantly, not stack traces everywhere) + raise SynapseError(e.code, e.msg, e.errcode) + + defer.returnValue(result) + + return send_request + + def register(self, http_server): + """Called by the server to register this as a handler to the + appropriate path.
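+
+        For illustration, a minimal concrete endpoint built on this class
+        might look like the following sketch (`ExampleEchoServlet` is a
+        hypothetical name used purely as an example, not a real Synapse
+        servlet):
+
+            class ExampleEchoServlet(ReplicationEndpoint):
+                NAME = "echo"
+                PATH_ARGS = ("word",)
+
+                @staticmethod
+                def _serialize_payload(word):
+                    # `word` travels in the path, so the body stays empty.
+                    return {}
+
+                def _handle_request(self, request, word):
+                    # CACHE defaults to True, so return a Deferred.
+                    return defer.succeed((200, {"word": word}))
+
+        and the worker side would then send requests via:
+
+            send_echo = ExampleEchoServlet.make_client(hs)
+            result = yield send_echo(word="hello")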
+ """ + + url_args = list(self.PATH_ARGS) + method = "GET" + handler = self._handle_request + if self.POST: + method = "POST" + + if self.CACHE: + handler = self._cached_handler + url_args.append("txn_id") + + args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args) + pattern = re.compile("^/_synapse/replication/%s/%s$" % ( + self.NAME, + args + )) + + http_server.register_paths(method, [pattern], handler) + + def _cached_handler(self, request, txn_id, **kwargs): + """Wraps `_handle_request` the responses should be cached. + """ + # We just use the txn_id here, but we probably also want to use the + # other PATH_ARGS as well. + + assert self.CACHE + + return self.response_cache.wrap( + txn_id, + self._handle_request, + request, **kwargs + ) From 729b672823132f413800a10f5fa8cac1f9b99008 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 13:53:54 +0100 Subject: [PATCH 033/205] Use new helper base class for ReplicationSendEventRestServlet --- synapse/handlers/message.py | 11 +-- synapse/replication/http/send_event.py | 115 ++++++++----------------- 2 files changed, 40 insertions(+), 86 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 39d7724778..bcb093ba3e 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -30,7 +30,7 @@ from synapse.api.urls import ConsentURIBuilder from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator -from synapse.replication.http.send_event import send_event_to_master +from synapse.replication.http.send_event import ReplicationSendEventRestServlet from synapse.types import RoomAlias, UserID from synapse.util.async import Linearizer from synapse.util.frozenutils import frozendict_json_encoder @@ -171,7 +171,7 @@ class EventCreationHandler(object): self.notifier = hs.get_notifier() self.config = hs.config - self.http_client = hs.get_simple_http_client() + self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs) # This is only used to get at ratelimit function, and maybe_kick_guest_users self.base_handler = BaseHandler(hs) @@ -559,12 +559,9 @@ class EventCreationHandler(object): try: # If we're a worker we need to hit out to the master. if self.config.worker_app: - yield send_event_to_master( - clock=self.hs.get_clock(), + yield self.send_event_to_master( + event_id=event.event_id, store=self.store, - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, requester=requester, event=event, context=context, diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 5227bc333d..50810d94cb 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -14,90 +14,26 @@ # limitations under the License. 
import logging -import re from twisted.internet import defer -from synapse.api.errors import ( - CodeMessageException, - MatrixCodeMessageException, - SynapseError, -) from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext -from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint from synapse.types import Requester, UserID -from synapse.util.caches.response_cache import ResponseCache from synapse.util.metrics import Measure logger = logging.getLogger(__name__) -@defer.inlineCallbacks -def send_event_to_master(clock, store, client, host, port, requester, event, context, - ratelimit, extra_users): - """Send event to be handled on the master - - Args: - clock (synapse.util.Clock) - store (DataStore) - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - event (FrozenEvent) - context (EventContext) - ratelimit (bool) - extra_users (list(UserID)): Any extra users to notify about event - """ - uri = "http://%s:%s/_synapse/replication/send_event/%s" % ( - host, port, event.event_id, - ) - - serialized_context = yield context.serialize(event, store) - - payload = { - "event": event.get_pdu_json(), - "internal_metadata": event.internal_metadata.get_dict(), - "rejected_reason": event.rejected_reason, - "context": serialized_context, - "requester": requester.serialize(), - "ratelimit": ratelimit, - "extra_users": [u.to_string() for u in extra_users], - } - - try: - # We keep retrying the same request for timeouts. This is so that we - # have a good idea that the request has either succeeded or failed on - # the master, and so whether we should clean up or not. - while True: - try: - result = yield client.put_json(uri, payload) - break - except CodeMessageException as e: - if e.code != 504: - raise - - logger.warn("send_event request timed out") - - # If we timed out we probably don't need to worry about backing - # off too much, but lets just wait a little anyway. - yield clock.sleep(1) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -class ReplicationSendEventRestServlet(RestServlet): +class ReplicationSendEventRestServlet(ReplicationEndpoint): """Handles events newly created on workers, including persisting and notifying. The API looks like: - POST /_synapse/replication/send_event/:event_id + POST /_synapse/replication/send_event/:event_id/:txn_id { "event": { .. serialized event .. 
}, @@ -109,27 +45,48 @@ class ReplicationSendEventRestServlet(RestServlet): "extra_users": [], } """ - PATTERNS = [re.compile("^/_synapse/replication/send_event/(?P[^/]+)$")] + NAME = "send_event" + PATH_ARGS = ("event_id",) + POST = True def __init__(self, hs): - super(ReplicationSendEventRestServlet, self).__init__() + super(ReplicationSendEventRestServlet, self).__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() self.store = hs.get_datastore() self.clock = hs.get_clock() - # The responses are tiny, so we may as well cache them for a while - self.response_cache = ResponseCache(hs, "send_event", timeout_ms=30 * 60 * 1000) + @staticmethod + @defer.inlineCallbacks + def _serialize_payload(event_id, store, event, context, requester, + ratelimit, extra_users): + """ + Args: + event_id (str) + store (DataStore) + requester (Requester) + event (FrozenEvent) + context (EventContext) + ratelimit (bool) + extra_users (list(UserID)): Any extra users to notify about event + """ - def on_PUT(self, request, event_id): - return self.response_cache.wrap( - event_id, - self._handle_request, - request - ) + serialized_context = yield context.serialize(event, store) + + payload = { + "event": event.get_pdu_json(), + "internal_metadata": event.internal_metadata.get_dict(), + "rejected_reason": event.rejected_reason, + "context": serialized_context, + "requester": requester.serialize(), + "ratelimit": ratelimit, + "extra_users": [u.to_string() for u in extra_users], + } + + defer.returnValue(payload) @defer.inlineCallbacks - def _handle_request(self, request): + def _handle_request(self, request, event_id): with Measure(self.clock, "repl_send_event_parse"): content = parse_json_object_from_request(request) From 443da003bc46da8d6e46403cfa31ee6a4e4da230 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 14:31:51 +0100 Subject: [PATCH 034/205] Use new helper base class for membership requests --- synapse/handlers/room_member_worker.py | 41 ++-- synapse/replication/http/membership.py | 262 +++++++++---------------- 2 files changed, 108 insertions(+), 195 deletions(-) diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 22d8b4b0d3..acc6eb8099 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -20,16 +20,24 @@ from twisted.internet import defer from synapse.api.errors import SynapseError from synapse.handlers.room_member import RoomMemberHandler from synapse.replication.http.membership import ( - get_or_register_3pid_guest, - notify_user_membership_change, - remote_join, - remote_reject_invite, + ReplicationRegister3PIDGuestRestServlet as Repl3PID, + ReplicationRemoteJoinRestServlet as ReplRemoteJoin, + ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, + ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft, ) logger = logging.getLogger(__name__) class RoomMemberWorkerHandler(RoomMemberHandler): + def __init__(self, hs): + super(RoomMemberWorkerHandler, self).__init__(hs) + + self._get_register_3pid_client = Repl3PID.make_client(hs) + self._remote_join_client = ReplRemoteJoin.make_client(hs) + self._remote_reject_client = ReplRejectInvite.make_client(hs) + self._notify_change_client = ReplJoinedLeft.make_client(hs) + @defer.inlineCallbacks def _remote_join(self, requester, remote_room_hosts, room_id, user, content): """Implements RoomMemberHandler._remote_join @@ -37,10 +45,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): if len(remote_room_hosts) == 0: 
raise SynapseError(404, "No known servers") - ret = yield remote_join( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + ret = yield self._remote_join_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -55,10 +60,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target): """Implements RoomMemberHandler._remote_reject_invite """ - return remote_reject_invite( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._remote_reject_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -68,10 +70,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): def _user_joined_room(self, target, room_id): """Implements RoomMemberHandler._user_joined_room """ - return notify_user_membership_change( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="joined", @@ -80,10 +79,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): def _user_left_room(self, target, room_id): """Implements RoomMemberHandler._user_left_room """ - return notify_user_membership_change( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="left", @@ -92,10 +88,7 @@ class RoomMemberWorkerHandler(RoomMemberHandler): def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id): """Implements RoomMemberHandler.get_or_register_3pid_guest """ - return get_or_register_3pid_guest( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._get_register_3pid_client( requester=requester, medium=medium, address=address, diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 6bfc8a5b89..8ad83e8421 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -14,182 +14,53 @@ # limitations under the License. 
import logging -import re from twisted.internet import defer -from synapse.api.errors import MatrixCodeMessageException, SynapseError -from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint from synapse.types import Requester, UserID from synapse.util.distributor import user_joined_room, user_left_room logger = logging.getLogger(__name__) -@defer.inlineCallbacks -def remote_join(client, host, port, requester, remote_room_hosts, - room_id, user_id, content): - """Ask the master to do a remote join for the given user to the given room - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - remote_room_hosts (list[str]): Servers to try and join via - room_id (str) - user_id (str) - content (dict): The event content to use for the join event - - Returns: - Deferred - """ - uri = "http://%s:%s/_synapse/replication/remote_join" % (host, port) - - payload = { - "requester": requester.serialize(), - "remote_room_hosts": remote_room_hosts, - "room_id": room_id, - "user_id": user_id, - "content": content, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def remote_reject_invite(client, host, port, requester, remote_room_hosts, - room_id, user_id): - """Ask master to reject the invite for the user and room. - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - remote_room_hosts (list[str]): Servers to try and reject via - room_id (str) - user_id (str) - - Returns: - Deferred - """ - uri = "http://%s:%s/_synapse/replication/remote_reject_invite" % (host, port) - - payload = { - "requester": requester.serialize(), - "remote_room_hosts": remote_room_hosts, - "room_id": room_id, - "user_id": user_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def get_or_register_3pid_guest(client, host, port, requester, - medium, address, inviter_user_id): - """Ask the master to get/create a guest account for given 3PID. - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - medium (str) - address (str) - inviter_user_id (str): The user ID who is trying to invite the - 3PID - - Returns: - Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the - 3PID guest account. 
+class ReplicationRemoteJoinRestServlet(ReplicationEndpoint): + """Does a remote join for the given user to the given room """ - uri = "http://%s:%s/_synapse/replication/get_or_register_3pid_guest" % (host, port) - - payload = { - "requester": requester.serialize(), - "medium": medium, - "address": address, - "inviter_user_id": inviter_user_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def notify_user_membership_change(client, host, port, user_id, room_id, change): - """Notify master that a user has joined or left the room - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication. - user_id (str) - room_id (str) - change (str): Either "join" or "left" - - Returns: - Deferred - """ - assert change in ("joined", "left") - - uri = "http://%s:%s/_synapse/replication/user_%s_room" % (host, port, change) - - payload = { - "user_id": user_id, - "room_id": room_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -class ReplicationRemoteJoinRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/remote_join$")] + NAME = "remote_join" + PATH_ARGS = ("room_id", "user_id",) def __init__(self, hs): - super(ReplicationRemoteJoinRestServlet, self).__init__() + super(ReplicationRemoteJoinRestServlet, self).__init__(hs) self.federation_handler = hs.get_handlers().federation_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, room_id, user_id, remote_room_hosts, + content): + """ + Args: + requester(Requester) + room_id (str) + user_id (str) + remote_room_hosts (list[str]): Servers to try and join via + content(dict): The event content to use for the join event + """ + return { + "requester": requester.serialize(), + "remote_room_hosts": remote_room_hosts, + "content": content, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request, room_id, user_id): content = parse_json_object_from_request(request) remote_room_hosts = content["remote_room_hosts"] - room_id = content["room_id"] - user_id = content["user_id"] event_content = content["content"] requester = Requester.deserialize(self.store, content["requester"]) @@ -212,23 +83,39 @@ class ReplicationRemoteJoinRestServlet(RestServlet): defer.returnValue((200, {})) -class ReplicationRemoteRejectInviteRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/remote_reject_invite$")] +class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): + """Rejects the invite for the user and room. 
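+    Runs on the master process; workers invoke it via the client returned by
+    `make_client` (the `_remote_reject_client` attribute set up in
+    RoomMemberWorkerHandler).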
+ """ + + NAME = "remote_reject_invite" + PATH_ARGS = ("room_id", "user_id",) def __init__(self, hs): - super(ReplicationRemoteRejectInviteRestServlet, self).__init__() + super(ReplicationRemoteRejectInviteRestServlet, self).__init__(hs) self.federation_handler = hs.get_handlers().federation_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, room_id, user_id, remote_room_hosts): + """ + Args: + requester(Requester) + room_id (str) + user_id (str) + remote_room_hosts (list[str]): Servers to try and reject via + """ + return { + "requester": requester.serialize(), + "remote_room_hosts": remote_room_hosts, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request, room_id, user_id): content = parse_json_object_from_request(request) remote_room_hosts = content["remote_room_hosts"] - room_id = content["room_id"] - user_id = content["user_id"] requester = Requester.deserialize(self.store, content["requester"]) @@ -264,18 +151,39 @@ class ReplicationRemoteRejectInviteRestServlet(RestServlet): defer.returnValue((200, ret)) -class ReplicationRegister3PIDGuestRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/get_or_register_3pid_guest$")] +class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): + """Gets/creates a guest account for given 3PID. + """ + + NAME = "get_or_register_3pid_guest" + PATH_ARGS = () def __init__(self, hs): - super(ReplicationRegister3PIDGuestRestServlet, self).__init__() + super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs) self.registeration_handler = hs.get_handlers().registration_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, medium, address, inviter_user_id): + """ + Args: + requester(Requester) + medium (str) + address (str) + inviter_user_id (str): The user ID who is trying to invite the + 3PID + """ + return { + "requester": requester.serialize(), + "medium": medium, + "address": address, + "inviter_user_id": inviter_user_id, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request): content = parse_json_object_from_request(request) medium = content["medium"] @@ -296,23 +204,35 @@ class ReplicationRegister3PIDGuestRestServlet(RestServlet): defer.returnValue((200, ret)) -class ReplicationUserJoinedLeftRoomRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/user_(?Pjoined|left)_room$")] +class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): + """Notifies that a user has joined or left the room + """ + + NAME = "membership_change" + PATH_ARGS = ("room_id", "user_id", "change") + CACHE = False # No point caching as should return instantly. 
def __init__(self, hs): - super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__() + super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__(hs) self.registeration_handler = hs.get_handlers().registration_handler self.store = hs.get_datastore() self.clock = hs.get_clock() self.distributor = hs.get_distributor() - def on_POST(self, request, change): - content = parse_json_object_from_request(request) + @staticmethod + def _serialize_payload(room_id, user_id, change): + """ + Args: + room_id (str) + user_id (str) + change (str): Either "joined" or "left" + """ + assert change in ("joined", "left",) - user_id = content["user_id"] - room_id = content["room_id"] + return {} + def _handle_request(self, request, room_id, user_id, change): logger.info("user membership change: %s in %s", user_id, room_id) user = UserID.from_string(user_id) From 16bd63f32f97a25252bb438413b79f6bdf1e5c1e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 14:48:43 +0100 Subject: [PATCH 035/205] Newsfile --- changelog.d/3632.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3632.misc diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc new file mode 100644 index 0000000000..e69de29bb2 From bdbdceeafa7755348486e1d0262a662f2529b884 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Jul 2018 15:44:05 +0100 Subject: [PATCH 036/205] rename replication_layer to federation_client I have HAD ENOUGH of trying to remember wtf a replication layer is in terms of classes. --- synapse/handlers/federation.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 49068c06d9..91d8def08b 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -76,7 +76,7 @@ class FederationHandler(BaseHandler): self.hs = hs self.store = hs.get_datastore() - self.replication_layer = hs.get_federation_client() + self.federation_client = hs.get_federation_client() self.state_handler = hs.get_state_handler() self.server_name = hs.hostname self.keyring = hs.get_keyring() @@ -255,7 +255,7 @@ class FederationHandler(BaseHandler): # know about for p in prevs - seen: state, got_auth_chain = ( - yield self.replication_layer.get_state_for_room( + yield self.federation_client.get_state_for_room( origin, pdu.room_id, p ) ) @@ -338,7 +338,7 @@ class FederationHandler(BaseHandler): # # see https://github.com/matrix-org/synapse/pull/1744 - missing_events = yield self.replication_layer.get_missing_events( + missing_events = yield self.federation_client.get_missing_events( origin, pdu.room_id, earliest_events_ids=list(latest), @@ -522,7 +522,7 @@ class FederationHandler(BaseHandler): if dest == self.server_name: raise SynapseError(400, "Can't backfill from self.") - events = yield self.replication_layer.backfill( + events = yield self.federation_client.backfill( dest, room_id, limit=limit, @@ -570,7 +570,7 @@ class FederationHandler(BaseHandler): state_events = {} events_to_state = {} for e_id in edges: - state, auth = yield self.replication_layer.get_state_for_room( + state, auth = yield self.federation_client.get_state_for_room( destination=dest, room_id=room_id, event_id=e_id @@ -612,7 +612,7 @@ class FederationHandler(BaseHandler): results = yield logcontext.make_deferred_yieldable(defer.gatherResults( [ logcontext.run_in_background( - self.replication_layer.get_pdu, + self.federation_client.get_pdu, [dest], event_id, outlier=True, @@ -893,7 
+893,7 @@ class FederationHandler(BaseHandler): Invites must be signed by the invitee's server before distribution. """ - pdu = yield self.replication_layer.send_invite( + pdu = yield self.federation_client.send_invite( destination=target_host, room_id=event.room_id, event_id=event.event_id, @@ -955,7 +955,7 @@ class FederationHandler(BaseHandler): target_hosts.insert(0, origin) except ValueError: pass - ret = yield self.replication_layer.send_join(target_hosts, event) + ret = yield self.federation_client.send_join(target_hosts, event) origin = ret["origin"] state = ret["state"] @@ -1211,7 +1211,7 @@ class FederationHandler(BaseHandler): except ValueError: pass - yield self.replication_layer.send_leave( + yield self.federation_client.send_leave( target_hosts, event ) @@ -1234,7 +1234,7 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, content={},): - origin, pdu = yield self.replication_layer.make_membership_event( + origin, pdu = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, @@ -1567,7 +1567,7 @@ class FederationHandler(BaseHandler): missing_auth_events.add(e_id) for e_id in missing_auth_events: - m_ev = yield self.replication_layer.get_pdu( + m_ev = yield self.federation_client.get_pdu( [origin], e_id, outlier=True, @@ -1777,7 +1777,7 @@ class FederationHandler(BaseHandler): logger.info("Missing auth: %s", missing_auth) # If we don't have all the auth events, we need to get them. try: - remote_auth_chain = yield self.replication_layer.get_event_auth( + remote_auth_chain = yield self.federation_client.get_event_auth( origin, event.room_id, event.event_id ) @@ -1893,7 +1893,7 @@ class FederationHandler(BaseHandler): try: # 2. Get remote difference. 
- result = yield self.replication_layer.query_auth( + result = yield self.federation_client.query_auth( origin, event.room_id, event.event_id, @@ -2192,7 +2192,7 @@ class FederationHandler(BaseHandler): yield member_handler.send_membership_event(None, event, context) else: destinations = set(x.split(":", 1)[-1] for x in (sender_user_id, room_id)) - yield self.replication_layer.forward_third_party_invite( + yield self.federation_client.forward_third_party_invite( destinations, room_id, event_dict, From 6ef983ce5cc0a1cd7323ac82c8eed41d72ff3a99 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 31 Jul 2018 16:36:24 +0100 Subject: [PATCH 037/205] api into monthly_active_users table --- synapse/app/homeserver.py | 4 + synapse/storage/monthly_active_users.py | 89 +++++++++++++++++++ synapse/storage/prepare_database.py | 2 +- .../delta/50/make_event_content_nullable.py | 2 +- .../schema/delta/51/monthly_active_users.sql | 23 +++++ tests/storage/test_monthly_active_users.py | 42 +++++++++ 6 files changed, 160 insertions(+), 2 deletions(-) create mode 100644 synapse/storage/monthly_active_users.py create mode 100644 synapse/storage/schema/delta/51/monthly_active_users.sql create mode 100644 tests/storage/test_monthly_active_users.py diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 57b815d777..79772fa61a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -62,6 +62,7 @@ from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.server import HomeServer from synapse.storage import are_all_users_on_domain from synapse.storage.engines import IncorrectDatabaseSetup, create_engine +from synapse.storage.monthly_active_users import MonthlyActiveUsersStore from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.httpresourcetree import create_resource_tree @@ -511,6 +512,9 @@ def run(hs): # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + clock.looping_call( + MonthlyActiveUsersStore(hs).reap_monthly_active_users, 1000 * 60 * 60 + ) if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py new file mode 100644 index 0000000000..373e828c0a --- /dev/null +++ b/synapse/storage/monthly_active_users.py @@ -0,0 +1,89 @@ +from twisted.internet import defer + +from ._base import SQLBaseStore + + +class MonthlyActiveUsersStore(SQLBaseStore): + def __init__(self, hs): + super(MonthlyActiveUsersStore, self).__init__(None, hs) + self._clock = hs.get_clock() + + def reap_monthly_active_users(self): + """ + Cleans out the monthly active user table to ensure that no stale + entries exist. + Returns: + Deferred, with no return value + """ + def _reap_users(txn): + thirty_days_ago = ( + int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) + ) + sql = "DELETE FROM monthly_active_users WHERE timestamp < ?"
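+            # Rows whose `timestamp` is older than the 30 day cutoff computed
+            # above (in ms since the epoch) are deleted outright.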
+ txn.execute(sql, (thirty_days_ago,)) + + return self.runInteraction("reap_monthly_active_users", _reap_users) + + def get_monthly_active_count(self): + """ + Generates the current count of monthly active users. + Returns: + Deferred resolving to int + """ + def _count_users(txn): + sql = """ + SELECT COALESCE(count(*), 0) FROM ( + SELECT user_id FROM monthly_active_users + ) u + """ + txn.execute(sql) + count, = txn.fetchone() + return count + return self.runInteraction("count_users", _count_users) + + def upsert_monthly_active_user(self, user_id): + """ + Updates or inserts a monthly active user entry + Arguments: + user_id (str): user to add/update + """ + return self._simple_upsert( + desc="upsert_monthly_active_user", + table="monthly_active_users", + keyvalues={ + "user_id": user_id, + }, + values={ + "timestamp": int(self._clock.time_msec()), + }, + lock=False, + ) + + def clean_out_monthly_active_users(self): + pass + + @defer.inlineCallbacks + def is_user_monthly_active(self, user_id): + """ + Checks if a given user is part of the monthly active user group + Arguments: + user_id (str): user to check + Returns: + bool: True if the user is part of the group, False otherwise + """ + user_present = yield self._simple_select_onecol( + table="monthly_active_users", + keyvalues={ + "user_id": user_id, + }, + retcol="user_id", + desc="is_user_monthly_active", + ) + result = False + if user_present: + result = True + + defer.returnValue( + result + ) diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index b290f834b3..b364719312 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) # Remember to update this number every time a change is made to database # schema files, so the users will be informed on server restarts. -SCHEMA_VERSION = 50 +SCHEMA_VERSION = 51 dir_path = os.path.abspath(os.path.dirname(__file__)) diff --git a/synapse/storage/schema/delta/50/make_event_content_nullable.py b/synapse/storage/schema/delta/50/make_event_content_nullable.py index 7d27342e39..6dd467b6c5 100644 --- a/synapse/storage/schema/delta/50/make_event_content_nullable.py +++ b/synapse/storage/schema/delta/50/make_event_content_nullable.py @@ -88,5 +88,5 @@ def run_upgrade(cur, database_engine, *args, **kwargs): "UPDATE sqlite_master SET sql=? WHERE tbl_name='events' AND type='table'", (sql, ), ) - cur.execute("PRAGMA schema_version=%i" % (oldver+1,)) + cur.execute("PRAGMA schema_version=%i" % (oldver + 1,)) cur.execute("PRAGMA writable_schema=OFF") diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql new file mode 100644 index 0000000000..b3c0e1a676 --- /dev/null +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -0,0 +1,23 @@ +/* Copyright 2018 New Vector Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +-- a table of users who have requested that their details be erased +CREATE TABLE monthly_active_users ( + user_id TEXT NOT NULL, + timestamp BIGINT NOT NULL +); + +CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); +CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py new file mode 100644 index 0000000000..9b1ffc6369 --- /dev/null +++ b/tests/storage/test_monthly_active_users.py @@ -0,0 +1,42 @@ +from twisted.internet import defer + +from synapse.storage.monthly_active_users import MonthlyActiveUsersStore + +import tests.unittest +import tests.utils +from tests.utils import setup_test_homeserver + + +class MonthlyActiveUsersTestCase(tests.unittest.TestCase): + def __init__(self, *args, **kwargs): + super(MonthlyActiveUsersTestCase, self).__init__(*args, **kwargs) + self.mau = None + + @defer.inlineCallbacks + def setUp(self): + hs = yield setup_test_homeserver() + self.mau = MonthlyActiveUsersStore(hs) + + @defer.inlineCallbacks + def test_can_insert_and_count_mau(self): + count = yield self.mau.get_monthly_active_count() + self.assertEqual(0, count) + + yield self.mau.upsert_monthly_active_user("@user:server") + count = yield self.mau.get_monthly_active_count() + + self.assertEqual(1, count) + + @defer.inlineCallbacks + def test_is_user_monthly_active(self): + user_id1 = "@user1:server" + user_id2 = "@user2:server" + user_id3 = "@user3:server" + result = yield self.mau.is_user_monthly_active(user_id1) + self.assertFalse(result) + yield self.mau.upsert_monthly_active_user(user_id1) + yield self.mau.upsert_monthly_active_user(user_id2) + result = yield self.mau.is_user_monthly_active(user_id1) + self.assertTrue(result) + result = yield self.mau.is_user_monthly_active(user_id3) + self.assertFalse(result) From 1841672c855c53a95cc586fe33662bcbbb156aed Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 31 Jul 2018 18:26:54 +0100 Subject: [PATCH 038/205] changelog --- changelog.d/3634.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3634.misc diff --git a/changelog.d/3634.misc b/changelog.d/3634.misc new file mode 100644 index 0000000000..2cd6af91ff --- /dev/null +++ b/changelog.d/3634.misc @@ -0,0 +1 @@ +rename replication_layer to federation_client From 70af98e36155ee8d4a0ad79d9d33f891458fd3f6 Mon Sep 17 00:00:00 2001 From: Serban Constantin Date: Fri, 27 Jul 2018 16:19:38 +0300 Subject: [PATCH 039/205] return NotFoundError if room not found Per the Client-Server API[0] we should return `M_NOT_FOUND` if the room isn't found instead of generic SynapseError. This ensures that /directory/list API returns 404 for room not found instead of 400. 
[0]: https://matrix.org/docs/spec/client_server/unstable.html#get-matrix-client-r0-directory-list-room-roomid Signed-off-by: Serban Constantin --- AUTHORS.rst | 5 ++++- changelog.d/2952.bugfix | 1 + synapse/rest/client/v1/directory.py | 4 ++-- 3 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/2952.bugfix diff --git a/AUTHORS.rst b/AUTHORS.rst index e13ac5ad34..9a83d90153 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -62,4 +62,7 @@ Christoph Witzany * Add LDAP support for authentication Pierre Jaury -* Docker packaging \ No newline at end of file +* Docker packaging + +Serban Constantin + * Small bug fix \ No newline at end of file diff --git a/changelog.d/2952.bugfix b/changelog.d/2952.bugfix new file mode 100644 index 0000000000..e5bc8a8762 --- /dev/null +++ b/changelog.d/2952.bugfix @@ -0,0 +1 @@ +/directory/list API returns 404 for room not found instead of 400 \ No newline at end of file diff --git a/synapse/rest/client/v1/directory.py b/synapse/rest/client/v1/directory.py index 69dcd618cb..97733f3026 100644 --- a/synapse/rest/client/v1/directory.py +++ b/synapse/rest/client/v1/directory.py @@ -18,7 +18,7 @@ import logging from twisted.internet import defer -from synapse.api.errors import AuthError, Codes, SynapseError +from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError from synapse.http.servlet import parse_json_object_from_request from synapse.types import RoomAlias @@ -159,7 +159,7 @@ class ClientDirectoryListServer(ClientV1RestServlet): def on_GET(self, request, room_id): room = yield self.store.get_room(room_id) if room is None: - raise SynapseError(400, "Unknown room") + raise NotFoundError("Unknown room") defer.returnValue((200, { "visibility": "public" if room["is_public"] else "private" From c507fa15ce068b496d320f08fcb6d6aedf4ace32 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 10:20:42 +0100 Subject: [PATCH 040/205] only need to loop if mau limiting is enabled --- synapse/app/homeserver.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 82979e7d1b..2da60682ec 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -531,7 +531,8 @@ def run(hs): limit_usage_by_mau_gauge.set(float(hs.config.limit_usage_by_mau)) generate_monthly_active_users() - clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) + if hs.config.limit_usage_by_mau: + clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") From 7931393495c76eef0af9b91c7904c88943197054 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 10:21:56 +0100 Subject: [PATCH 041/205] make count_monthly_users async synapse/handlers/auth.py --- synapse/handlers/register.py | 9 ++++---- synapse/storage/__init__.py | 26 +++++++++++----------- tests/handlers/test_auth.py | 39 +++++++++++++++++++-------------- tests/handlers/test_register.py | 10 +++++---- 4 files changed, 46 insertions(+), 38 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index f46b8355c0..cc935a5e84 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -144,7 +144,7 @@ class RegistrationHandler(BaseHandler): Raises: RegistrationError if there was a problem registering. 
""" - self._check_mau_limits() + yield self._check_mau_limits() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -289,7 +289,7 @@ class RegistrationHandler(BaseHandler): 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", ) - self._check_mau_limits() + yield self._check_mau_limits() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -439,7 +439,7 @@ class RegistrationHandler(BaseHandler): """ if localpart is None: raise SynapseError(400, "Request must include user id") - self._check_mau_limits() + yield self._check_mau_limits() need_register = True try: @@ -534,13 +534,14 @@ class RegistrationHandler(BaseHandler): action="join", ) + @defer.inlineCallbacks def _check_mau_limits(self): """ Do not accept registrations if monthly active user limits exceeded and limiting is enabled """ if self.hs.config.limit_usage_by_mau is True: - current_mau = self.store.count_monthly_users() + current_mau = yield self.store.count_monthly_users() if current_mau >= self.hs.config.max_mau_value: raise RegistrationError( 403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 4747118ed7..f9682832ca 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -273,24 +273,24 @@ class DataStore(RoomMemberStore, RoomStore, This method should be refactored with count_daily_users - the only reason not to is waiting on definition of mau returns: - int: count of current monthly active users + defered: resolves to int """ + def _count_monthly_users(txn): + thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) + sql = """ + SELECT COALESCE(count(*), 0) FROM ( + SELECT user_id FROM user_ips + WHERE last_seen > ? + GROUP BY user_id + ) u + """ - thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM user_ips - WHERE last_seen > ? 
- GROUP BY user_id - ) u - """ - try: - txn = self.db_conn.cursor() txn.execute(sql, (thirty_days_ago,)) count, = txn.fetchone() + print "Count is %d" % (count,) return count - finally: - txn.close() + + return self.runInteraction("count_monthly_users", _count_monthly_users) def count_r30_users(self): """ diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index e01f14a10a..440a453082 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -77,38 +77,37 @@ class AuthTestCase(unittest.TestCase): v.satisfy_general(verify_nonce) v.verify(macaroon, self.hs.config.macaroon_secret_key) + @defer.inlineCallbacks def test_short_term_login_token_gives_user_id(self): self.hs.clock.now = 1000 token = self.macaroon_generator.generate_short_term_login_token( "a_user", 5000 ) - - self.assertEqual( - "a_user", - self.auth_handler.validate_short_term_login_token_and_get_user_id( - token - ) + user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id( + token ) + self.assertEqual("a_user", user_id) # when we advance the clock, the token should be rejected self.hs.clock.now = 6000 with self.assertRaises(synapse.api.errors.AuthError): - self.auth_handler.validate_short_term_login_token_and_get_user_id( + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( token ) + @defer.inlineCallbacks def test_short_term_login_token_cannot_replace_user_id(self): token = self.macaroon_generator.generate_short_term_login_token( "a_user", 5000 ) macaroon = pymacaroons.Macaroon.deserialize(token) + user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id( + macaroon.serialize() + ) self.assertEqual( - "a_user", - self.auth_handler.validate_short_term_login_token_and_get_user_id( - macaroon.serialize() - ) + "a_user", user_id ) # add another "user_id" caveat, which might allow us to override the @@ -116,7 +115,7 @@ class AuthTestCase(unittest.TestCase): macaroon.add_first_party_caveat("user_id = b_user") with self.assertRaises(synapse.api.errors.AuthError): - self.auth_handler.validate_short_term_login_token_and_get_user_id( + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( macaroon.serialize() ) @@ -126,7 +125,7 @@ class AuthTestCase(unittest.TestCase): # Ensure does not throw exception yield self.auth_handler.get_access_token_for_user_id('user_a') - self.auth_handler.validate_short_term_login_token_and_get_user_id( + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) @@ -134,24 +133,30 @@ class AuthTestCase(unittest.TestCase): def test_mau_limits_exceeded(self): self.hs.config.limit_usage_by_mau = True self.hs.get_datastore().count_monthly_users = Mock( - return_value=self.large_number_of_users + return_value=defer.succeed(self.large_number_of_users) ) + with self.assertRaises(AuthError): yield self.auth_handler.get_access_token_for_user_id('user_a') + + self.hs.get_datastore().count_monthly_users = Mock( + return_value=defer.succeed(self.large_number_of_users) + ) with self.assertRaises(AuthError): - self.auth_handler.validate_short_term_login_token_and_get_user_id( + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) @defer.inlineCallbacks def test_mau_limits_not_exceeded(self): self.hs.config.limit_usage_by_mau = True + self.hs.get_datastore().count_monthly_users = Mock( - return_value=self.small_number_of_users + return_value=defer.succeed(self.small_number_of_users) ) # 
Ensure does not raise exception yield self.auth_handler.get_access_token_for_user_id('user_a') - self.auth_handler.validate_short_term_login_token_and_get_user_id( + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index a5a8e7c954..0937d71cf6 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -90,7 +90,7 @@ class RegistrationTestCase(unittest.TestCase): lots_of_users = 100 small_number_users = 1 - store.count_monthly_users = Mock(return_value=lots_of_users) + store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) # Ensure does not throw exception yield self.handler.get_or_create_user(requester, 'a', display_name) @@ -100,7 +100,7 @@ class RegistrationTestCase(unittest.TestCase): with self.assertRaises(RegistrationError): yield self.handler.get_or_create_user(requester, 'b', display_name) - store.count_monthly_users = Mock(return_value=small_number_users) + store.count_monthly_users = Mock(return_value=defer.succeed(small_number_users)) self._macaroon_mock_generator("another_secret") @@ -108,12 +108,14 @@ class RegistrationTestCase(unittest.TestCase): yield self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil") self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=lots_of_users) + store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + with self.assertRaises(RegistrationError): yield self.handler.register(localpart=local_part) self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=lots_of_users) + store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + with self.assertRaises(RegistrationError): yield self.handler.register_saml2(local_part) From 6023cdd2275d08f25a255a95c1f1aeb2eef34eaf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 10:27:17 +0100 Subject: [PATCH 042/205] remove errant print --- synapse/storage/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index f9682832ca..3ce1b34aea 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -287,7 +287,6 @@ class DataStore(RoomMemberStore, RoomStore, txn.execute(sql, (thirty_days_ago,)) count, = txn.fetchone() - print "Count is %d" % (count,) return count return self.runInteraction("count_monthly_users", _count_monthly_users) From 6e63d6868c29ed169082c941af04518b7011eda1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 1 Aug 2018 10:31:22 +0100 Subject: [PATCH 043/205] Update 2952.bugfix --- changelog.d/2952.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/2952.bugfix b/changelog.d/2952.bugfix index e5bc8a8762..07a3e48304 100644 --- a/changelog.d/2952.bugfix +++ b/changelog.d/2952.bugfix @@ -1 +1 @@ -/directory/list API returns 404 for room not found instead of 400 \ No newline at end of file +Make /directory/list API return 404 for room not found instead of 400 From 2c54f1c225365cc1da363a270f7f8d201193bc4a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 11:46:59 +0100 Subject: [PATCH 044/205] remove need to plot limit_usage_by_mau --- synapse/app/homeserver.py | 4 ---- synapse/config/server.py | 9 ++++++--- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git 
a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 2da60682ec..46325d8521 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -305,9 +305,6 @@ class SynapseHomeServer(HomeServer): # Gauges to expose monthly active user control metrics current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") -limit_usage_by_mau_gauge = Gauge( - "synapse_admin_limit_usage_by_mau", "MAU Limiting enabled" -) def setup(config_options): @@ -528,7 +525,6 @@ def run(hs): count = hs.get_datastore().count_monthly_users() current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) - limit_usage_by_mau_gauge.set(float(hs.config.limit_usage_by_mau)) generate_monthly_active_users() if hs.config.limit_usage_by_mau: diff --git a/synapse/config/server.py b/synapse/config/server.py index 9af42a93ad..a8014e9c50 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -69,9 +69,12 @@ class ServerConfig(Config): # Options to control access by tracking MAU self.limit_usage_by_mau = config.get("limit_usage_by_mau", False) - self.max_mau_value = config.get( - "max_mau_value", 0, - ) + if self.limit_usage_by_mau: + self.max_mau_value = config.get( + "max_mau_value", 0, + ) + else: + self.max_mau_value = 0 # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( From 0aba3d361a88c3d1c5b691e36d162ec6b28132c0 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 11:47:58 +0100 Subject: [PATCH 045/205] count_monthly_users() async --- synapse/handlers/auth.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 28f1c1afbb..efe05d4de0 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -519,7 +519,7 @@ class AuthHandler(BaseHandler): """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) - self._check_mau_limits() + yield self._check_mau_limits() # the device *should* have been registered before we got here; however, # it's possible we raced against a DELETE operation. The thing we @@ -729,16 +729,18 @@ class AuthHandler(BaseHandler): device_id) defer.returnValue(access_token) + @defer.inlineCallbacks def validate_short_term_login_token_and_get_user_id(self, login_token): - self._check_mau_limits() + yield self._check_mau_limits() auth_api = self.hs.get_auth() + user_id = None try: macaroon = pymacaroons.Macaroon.deserialize(login_token) user_id = auth_api.get_user_id_from_macaroon(macaroon) auth_api.validate_macaroon(macaroon, "login", True, user_id) - return user_id except Exception: raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN) + defer.returnValue(user_id) @defer.inlineCallbacks def delete_access_token(self, access_token): @@ -894,13 +896,14 @@ class AuthHandler(BaseHandler): else: return defer.succeed(False) + @defer.inlineCallbacks def _check_mau_limits(self): """ Ensure that if mau blocking is enabled that invalid users cannot log in. 
""" if self.hs.config.limit_usage_by_mau is True: - current_mau = self.store.count_monthly_users() + current_mau = yield self.store.count_monthly_users() if current_mau >= self.hs.config.max_mau_value: raise AuthError( 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED From 4e6e00152c65ca9d1c53a290890dc0149c13e48b Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 11:48:37 +0100 Subject: [PATCH 046/205] fix known broken test --- tests/storage/test__init__.py | 45 ++++++++++++++++------------------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py index fe6eeeaf10..acc59e4689 100644 --- a/tests/storage/test__init__.py +++ b/tests/storage/test__init__.py @@ -15,7 +15,7 @@ from twisted.internet import defer -import tests.unittest + import tests.utils @@ -33,39 +33,34 @@ class InitTestCase(tests.unittest.TestCase): self.store = hs.get_datastore() self.clock = hs.get_clock() + @defer.inlineCallbacks def test_count_monthly_users(self): - count = self.store.count_monthly_users() + count = yield self.store.count_monthly_users() self.assertEqual(0, count) - self._insert_user_ips("@user:server1") - self._insert_user_ips("@user:server2") + yield self._insert_user_ips("@user:server1") + yield self._insert_user_ips("@user:server2") - count = self.store.count_monthly_users() + count = yield self.store.count_monthly_users() self.assertEqual(2, count) + @defer.inlineCallbacks def _insert_user_ips(self, user): """ Helper function to populate user_ips without using batch insertion infra args: user (str): specify username i.e. @user:server.com """ - try: - txn = self.store.db_conn.cursor() - self.store.database_engine.lock_table(txn, "user_ips") - self.store._simple_upsert_txn( - txn, - table="user_ips", - keyvalues={ - "user_id": user, - "access_token": "access_token", - "ip": "ip", - "user_agent": "user_agent", - "device_id": "device_id", - }, - values={ - "last_seen": self.clock.time_msec(), - }, - lock=False, - ) - finally: - txn.close() + yield self.store._simple_upsert( + table="user_ips", + keyvalues={ + "user_id": user, + "access_token": "access_token", + "ip": "ip", + "user_agent": "user_agent", + "device_id": "device_id", + }, + values={ + "last_seen": self.clock.time_msec(), + } + ) From f9f55599718ba9849b2dee18e253dcaf8f5fcfe7 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 12:03:42 +0100 Subject: [PATCH 047/205] fix comment --- synapse/storage/schema/delta/51/monthly_active_users.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index b3c0e1a676..f2b6d3e31e 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -13,7 +13,7 @@ * limitations under the License. 
*/ --- a table of users who have requested that their details be erased +-- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( user_id TEXT NOT NULL, timestamp BIGINT NOT NULL From 4e5ac901ddd15e8938605c5ac4312be804997ed5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 12:03:57 +0100 Subject: [PATCH 048/205] clean up --- synapse/storage/monthly_active_users.py | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 373e828c0a..03eeea792e 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -31,11 +31,8 @@ class MonthlyActiveUsersStore(SQLBaseStore): defered resolves to int """ def _count_users(txn): - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM monthly_active_users - ) u - """ + sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" + txn.execute(sql) count, = txn.fetchone() return count @@ -59,9 +56,6 @@ class MonthlyActiveUsersStore(SQLBaseStore): lock=False, ) - def clean_out_monthly_active_users(self): - pass - @defer.inlineCallbacks def is_user_monthly_active(self, user_id): """ @@ -79,11 +73,5 @@ class MonthlyActiveUsersStore(SQLBaseStore): retcol="user_id", desc="is_user_monthly_active", ) - # jeff = self.cursor_to_dict(res) - result = False - if user_present: - result = True - defer.returnValue( - result - ) + defer.returnValue(bool(user_present)) From 4b256b92713624018d86f7966d8d2b02122b052c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 1 Aug 2018 13:39:07 +0100 Subject: [PATCH 049/205] _persist_auth_tree no longer returns anything --- synapse/handlers/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 617452be6c..2e3cbe2aab 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -400,7 +400,7 @@ class FederationHandler(BaseHandler): ) try: - event_stream_id, max_stream_id = yield self._persist_auth_tree( + yield self._persist_auth_tree( origin, auth_chain, state, event ) except AuthError as e: From a6d7b749150d1673117f19c6a134ade8b8c2071b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 1 Aug 2018 13:39:14 +0100 Subject: [PATCH 050/205] update docs --- synapse/handlers/federation.py | 8 +++++--- synapse/storage/events.py | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2e3cbe2aab..21e1c48eef 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1438,6 +1438,8 @@ class FederationHandler(BaseHandler): should not depend on one another, e.g. this should be used to persist a bunch of outliers, but not a chunk of individual events that depend on each other for state calculations. + + Notifies about the events where appropriate. """ contexts = yield logcontext.make_deferred_yieldable(defer.gatherResults( [ @@ -1464,7 +1466,8 @@ class FederationHandler(BaseHandler): def _persist_auth_tree(self, origin, auth_events, state, event): """Checks the auth chain is valid (and passes auth checks) for the state and event. Then persists the auth chain and state atomically. - Persists the event seperately. + Persists the event separately. Notifies about the persisted events + where appropriate. Will attempt to fetch missing auth events. 
@@ -1475,8 +1478,7 @@ class FederationHandler(BaseHandler): event (Event) Returns: - 2-tuple of (event_stream_id, max_stream_id) from the persist_event - call for `event` + Deferred """ events_to_context = {} for e in itertools.chain(auth_events, state): diff --git a/synapse/storage/events.py b/synapse/storage/events.py index bbf6e42195..ee9159c102 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -242,7 +242,7 @@ class EventsStore(EventsWorkerStore): which might update the current state etc. Returns: - Deferred[int]: he stream ordering of the latest persisted event + Deferred[int]: the stream ordering of the latest persisted event """ partitioned = {} for event, ctx in events_and_contexts: From 6eed16d8a2335c97675b6b8661869848f397ea29 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 14:02:10 +0100 Subject: [PATCH 051/205] fix test for py3 --- tests/handlers/test_auth.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 440a453082..55eab9e9cf 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -156,6 +156,10 @@ class AuthTestCase(unittest.TestCase): ) # Ensure does not raise exception yield self.auth_handler.get_access_token_for_user_id('user_a') + + self.hs.get_datastore().count_monthly_users = Mock( + return_value=defer.succeed(self.small_number_of_users) + ) yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) From c480c4c962eaf9dd979d35930162d00473635fcf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 14:25:58 +0100 Subject: [PATCH 052/205] fix isort --- tests/storage/test__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py index acc59e4689..f19cb1265c 100644 --- a/tests/storage/test__init__.py +++ b/tests/storage/test__init__.py @@ -15,7 +15,6 @@ from twisted.internet import defer - import tests.utils From da7785147df442eb9cdc1031fa5fea12b7b25334 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 2 Aug 2018 00:54:06 +1000 Subject: [PATCH 053/205] Python 3: Convert some unicode/bytes uses (#3569) --- changelog.d/3569.bugfix | 1 + synapse/api/auth.py | 4 +-- synapse/federation/transport/server.py | 2 +- synapse/handlers/auth.py | 29 ++++++++++++++------ synapse/handlers/register.py | 2 +- synapse/http/server.py | 35 +++++++++++++++++------- synapse/http/servlet.py | 10 ++++++- synapse/rest/client/v1/admin.py | 22 ++++++++++----- synapse/rest/client/v2_alpha/register.py | 12 ++++---- synapse/rest/media/v1/media_storage.py | 2 +- synapse/state.py | 2 +- synapse/storage/events.py | 14 +++++++--- synapse/storage/signatures.py | 2 +- synapse/types.py | 2 +- synapse/util/frozenutils.py | 6 ++-- tests/api/test_auth.py | 35 +++++++++++++----------- tests/utils.py | 9 ++++-- 17 files changed, 122 insertions(+), 67 deletions(-) create mode 100644 changelog.d/3569.bugfix diff --git a/changelog.d/3569.bugfix b/changelog.d/3569.bugfix new file mode 100644 index 0000000000..d77f035ee0 --- /dev/null +++ b/changelog.d/3569.bugfix @@ -0,0 +1 @@ +Unicode passwords are now normalised before hashing, preventing the instance where two different devices or browsers might send a different UTF-8 sequence for the password. 
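To illustrate the normalisation this changelog describes, here is a minimal
standalone sketch (it assumes only the stdlib unicodedata module and the
bcrypt package; the function names and the rounds default are illustrative
rather than Synapse's actual API):

    import unicodedata

    import bcrypt


    def hash_password(password, pepper, rounds=12):
        # NFKC folds equivalent Unicode sequences together: an "é" sent as
        # U+00E9 and as "e" plus combining U+0301 normalise to the same
        # string, so both forms of the password hash to comparable values.
        pw = unicodedata.normalize("NFKC", password)
        return bcrypt.hashpw(
            pw.encode("utf8") + pepper.encode("utf8"),
            bcrypt.gensalt(rounds),
        ).decode("ascii")


    def check_password(password, pepper, stored_hash):
        # Apply the same normalisation before checking, mirroring hashing.
        pw = unicodedata.normalize("NFKC", password)
        return bcrypt.checkpw(
            pw.encode("utf8") + pepper.encode("utf8"),
            stored_hash.encode("utf8"),
        )

The hunks below apply essentially this pattern inside AuthHandler.hash and
AuthHandler.validate_hash.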
diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 073229b4c4..5bbbe8e2e7 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -252,10 +252,10 @@ class Auth(object): if ip_address not in app_service.ip_range_whitelist: defer.returnValue((None, None)) - if "user_id" not in request.args: + if b"user_id" not in request.args: defer.returnValue((app_service.sender, app_service)) - user_id = request.args["user_id"][0] + user_id = request.args[b"user_id"][0].decode('utf8') if app_service.sender == user_id: defer.returnValue((app_service.sender, app_service)) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 3b5ea9515a..eae5f2b427 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -165,7 +165,7 @@ def _parse_auth_header(header_bytes): param_dict = dict(kv.split("=") for kv in params) def strip_quotes(value): - if value.startswith(b"\""): + if value.startswith("\""): return value[1:-1] else: return value diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 402e44cdef..5d03bfa5f7 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -15,6 +15,7 @@ # limitations under the License. import logging +import unicodedata import attr import bcrypt @@ -626,6 +627,7 @@ class AuthHandler(BaseHandler): # special case to check for "password" for the check_password interface # for the auth providers password = login_submission.get("password") + if login_type == LoginType.PASSWORD: if not self._password_enabled: raise SynapseError(400, "Password login has been disabled.") @@ -707,9 +709,10 @@ class AuthHandler(BaseHandler): multiple inexact matches. Args: - user_id (str): complete @user:id + user_id (unicode): complete @user:id + password (unicode): the provided password Returns: - (str) the canonical_user_id, or None if unknown user / bad password + (unicode) the canonical_user_id, or None if unknown user / bad password """ lookupres = yield self._find_user_id_and_pwd_hash(user_id) if not lookupres: @@ -849,14 +852,19 @@ class AuthHandler(BaseHandler): """Computes a secure hash of password. Args: - password (str): Password to hash. + password (unicode): Password to hash. Returns: - Deferred(str): Hashed password. + Deferred(unicode): Hashed password. """ def _do_hash(): - return bcrypt.hashpw(password.encode('utf8') + self.hs.config.password_pepper, - bcrypt.gensalt(self.bcrypt_rounds)) + # Normalise the Unicode in the password + pw = unicodedata.normalize("NFKC", password) + + return bcrypt.hashpw( + pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"), + bcrypt.gensalt(self.bcrypt_rounds), + ).decode('ascii') return make_deferred_yieldable( threads.deferToThreadPool( @@ -868,16 +876,19 @@ class AuthHandler(BaseHandler): """Validates that self.hash(password) == stored_hash. Args: - password (str): Password to hash. - stored_hash (str): Expected hash value. + password (unicode): Password to hash. + stored_hash (unicode): Expected hash value. Returns: Deferred(bool): Whether self.hash(password) == stored_hash. 
""" def _do_validate_hash(): + # Normalise the Unicode in the password + pw = unicodedata.normalize("NFKC", password) + return bcrypt.checkpw( - password.encode('utf8') + self.hs.config.password_pepper, + pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"), stored_hash.encode('utf8') ) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 7caff0cbc8..234f8e8019 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -131,7 +131,7 @@ class RegistrationHandler(BaseHandler): Args: localpart : The local part of the user ID to register. If None, one will be generated. - password (str) : The password to assign to this user so they can + password (unicode) : The password to assign to this user so they can login again. This can be None which means they cannot login again via a password (e.g. the user is an application service user). generate_token (bool): Whether a new access token should be diff --git a/synapse/http/server.py b/synapse/http/server.py index c70fdbdfd2..1940c1c4f4 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -13,12 +13,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + import cgi import collections import logging -import urllib -from six.moves import http_client +from six import PY3 +from six.moves import http_client, urllib from canonicaljson import encode_canonical_json, encode_pretty_printed_json, json @@ -264,6 +265,7 @@ class JsonResource(HttpServer, resource.Resource): self.hs = hs def register_paths(self, method, path_patterns, callback): + method = method.encode("utf-8") # method is bytes on py3 for path_pattern in path_patterns: logger.debug("Registering for %s %s", method, path_pattern.pattern) self.path_regexs.setdefault(method, []).append( @@ -296,8 +298,19 @@ class JsonResource(HttpServer, resource.Resource): # here. If it throws an exception, that is handled by the wrapper # installed by @request_handler. + def _unquote(s): + if PY3: + # On Python 3, unquote is unicode -> unicode + return urllib.parse.unquote(s) + else: + # On Python 2, unquote is bytes -> bytes We need to encode the + # URL again (as it was decoded by _get_handler_for request), as + # ASCII because it's a URL, and then decode it to get the UTF-8 + # characters that were quoted. + return urllib.parse.unquote(s.encode('ascii')).decode('utf8') + kwargs = intern_dict({ - name: urllib.unquote(value).decode("UTF-8") if value else value + name: _unquote(value) if value else value for name, value in group_dict.items() }) @@ -313,9 +326,9 @@ class JsonResource(HttpServer, resource.Resource): request (twisted.web.http.Request): Returns: - Tuple[Callable, dict[str, str]]: callback method, and the dict - mapping keys to path components as specified in the handler's - path match regexp. + Tuple[Callable, dict[unicode, unicode]]: callback method, and the + dict mapping keys to path components as specified in the + handler's path match regexp. 
The callback will normally be a method registered via register_paths, so will return (possibly via Deferred) either @@ -327,7 +340,7 @@ class JsonResource(HttpServer, resource.Resource): # Loop through all the registered callbacks to check if the method # and path regex match for path_entry in self.path_regexs.get(request.method, []): - m = path_entry.pattern.match(request.path) + m = path_entry.pattern.match(request.path.decode('ascii')) if m: # We found a match! return path_entry.callback, m.groupdict() @@ -383,7 +396,7 @@ class RootRedirect(resource.Resource): self.url = path def render_GET(self, request): - return redirectTo(self.url, request) + return redirectTo(self.url.encode('ascii'), request) def getChild(self, name, request): if len(name) == 0: @@ -404,12 +417,14 @@ def respond_with_json(request, code, json_object, send_cors=False, return if pretty_print: - json_bytes = encode_pretty_printed_json(json_object) + "\n" + json_bytes = (encode_pretty_printed_json(json_object) + "\n" + ).encode("utf-8") else: if canonical_json or synapse.events.USE_FROZEN_DICTS: + # canonicaljson already encodes to bytes json_bytes = encode_canonical_json(json_object) else: - json_bytes = json.dumps(json_object) + json_bytes = json.dumps(json_object).encode("utf-8") return respond_with_json_bytes( request, code, json_bytes, diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 882816dc8f..69f7085291 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -171,8 +171,16 @@ def parse_json_value_from_request(request, allow_empty_body=False): if not content_bytes and allow_empty_body: return None + # Decode to Unicode so that simplejson will return Unicode strings on + # Python 2 try: - content = json.loads(content_bytes) + content_unicode = content_bytes.decode('utf8') + except UnicodeDecodeError: + logger.warn("Unable to decode UTF-8") + raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON) + + try: + content = json.loads(content_unicode) except Exception as e: logger.warn("Unable to parse JSON: %s", e) raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON) diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 99f6c6e3c3..80d625eecc 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -18,6 +18,7 @@ import hashlib import hmac import logging +from six import text_type from six.moves import http_client from twisted.internet import defer @@ -131,7 +132,10 @@ class UserRegisterServlet(ClientV1RestServlet): 400, "username must be specified", errcode=Codes.BAD_JSON, ) else: - if (not isinstance(body['username'], str) or len(body['username']) > 512): + if ( + not isinstance(body['username'], text_type) + or len(body['username']) > 512 + ): raise SynapseError(400, "Invalid username") username = body["username"].encode("utf-8") @@ -143,7 +147,10 @@ class UserRegisterServlet(ClientV1RestServlet): 400, "password must be specified", errcode=Codes.BAD_JSON, ) else: - if (not isinstance(body['password'], str) or len(body['password']) > 512): + if ( + not isinstance(body['password'], text_type) + or len(body['password']) > 512 + ): raise SynapseError(400, "Invalid password") password = body["password"].encode("utf-8") @@ -166,17 +173,18 @@ class UserRegisterServlet(ClientV1RestServlet): want_mac.update(b"admin" if admin else b"notadmin") want_mac = want_mac.hexdigest() - if not hmac.compare_digest(want_mac, got_mac): - raise SynapseError( - 403, "HMAC incorrect", - ) + if not 
hmac.compare_digest(want_mac, got_mac.encode('ascii')): + raise SynapseError(403, "HMAC incorrect") # Reuse the parts of RegisterRestServlet to reduce code duplication from synapse.rest.client.v2_alpha.register import RegisterRestServlet + register = RegisterRestServlet(self.hs) (user_id, _) = yield register.registration_handler.register( - localpart=username.lower(), password=password, admin=bool(admin), + localpart=body['username'].lower(), + password=body["password"], + admin=bool(admin), generate_token=False, ) diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index d6cf915d86..2f64155d13 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -193,15 +193,15 @@ class RegisterRestServlet(RestServlet): def on_POST(self, request): body = parse_json_object_from_request(request) - kind = "user" - if "kind" in request.args: - kind = request.args["kind"][0] + kind = b"user" + if b"kind" in request.args: + kind = request.args[b"kind"][0] - if kind == "guest": + if kind == b"guest": ret = yield self._do_guest_registration(body) defer.returnValue(ret) return - elif kind != "user": + elif kind != b"user": raise UnrecognizedRequestError( "Do not understand membership kind: %s" % (kind,) ) @@ -389,8 +389,8 @@ class RegisterRestServlet(RestServlet): assert_params_in_dict(params, ["password"]) desired_username = params.get("username", None) - new_password = params.get("password", None) guest_access_token = params.get("guest_access_token", None) + new_password = params.get("password", None) if desired_username is not None: desired_username = desired_username.lower() diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index b25993fcb5..a6189224ee 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -177,7 +177,7 @@ class MediaStorage(object): if res: with res: consumer = BackgroundFileConsumer( - open(local_path, "w"), self.hs.get_reactor()) + open(local_path, "wb"), self.hs.get_reactor()) yield res.write_to_consumer(consumer) yield consumer.wait() defer.returnValue(local_path) diff --git a/synapse/state.py b/synapse/state.py index 033f55d967..e1092b97a9 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -577,7 +577,7 @@ def _make_state_cache_entry( def _ordered_events(events): def key_func(e): - return -int(e.depth), hashlib.sha1(e.event_id.encode()).hexdigest() + return -int(e.depth), hashlib.sha1(e.event_id.encode('ascii')).hexdigest() return sorted(events, key=key_func) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index c98e524ba1..61223da1a5 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -67,7 +67,13 @@ state_delta_reuse_delta_counter = Counter( def encode_json(json_object): - return frozendict_json_encoder.encode(json_object) + """ + Encode a Python object as JSON and return it in a Unicode string. + """ + out = frozendict_json_encoder.encode(json_object) + if isinstance(out, bytes): + out = out.decode('utf8') + return out class _EventPeristenceQueue(object): @@ -1058,7 +1064,7 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore metadata_json = encode_json( event.internal_metadata.get_dict() - ).decode("UTF-8") + ) sql = ( "UPDATE event_json SET internal_metadata = ?" 
@@ -1172,8 +1178,8 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore "room_id": event.room_id, "internal_metadata": encode_json( event.internal_metadata.get_dict() - ).decode("UTF-8"), - "json": encode_json(event_dict(event)).decode("UTF-8"), + ), + "json": encode_json(event_dict(event)), } for event, _ in events_and_contexts ], diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 470212aa2a..5623391f6e 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -74,7 +74,7 @@ class SignatureWorkerStore(SQLBaseStore): txn (cursor): event_id (str): Id for the Event. Returns: - A dict of algorithm -> hash. + A dict[unicode, bytes] of algorithm -> hash. """ query = ( "SELECT algorithm, hash" diff --git a/synapse/types.py b/synapse/types.py index 08f058f714..41afb27a74 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -137,7 +137,7 @@ class DomainSpecificString( @classmethod def from_string(cls, s): """Parse the string given by 's' into a structure object.""" - if len(s) < 1 or s[0] != cls.SIGIL: + if len(s) < 1 or s[0:1] != cls.SIGIL: raise SynapseError(400, "Expected %s string to start with '%s'" % ( cls.__name__, cls.SIGIL, )) diff --git a/synapse/util/frozenutils.py b/synapse/util/frozenutils.py index 581c6052ac..014edea971 100644 --- a/synapse/util/frozenutils.py +++ b/synapse/util/frozenutils.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from six import string_types +from six import binary_type, text_type from canonicaljson import json from frozendict import frozendict @@ -26,7 +26,7 @@ def freeze(o): if isinstance(o, frozendict): return o - if isinstance(o, string_types): + if isinstance(o, (binary_type, text_type)): return o try: @@ -41,7 +41,7 @@ def unfreeze(o): if isinstance(o, (dict, frozendict)): return dict({k: unfreeze(v) for k, v in o.items()}) - if isinstance(o, string_types): + if isinstance(o, (binary_type, text_type)): return o try: diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 5f158ec4b9..a82d737e71 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -46,7 +46,7 @@ class AuthTestCase(unittest.TestCase): self.auth = Auth(self.hs) self.test_user = "@foo:bar" - self.test_token = "_test_token_" + self.test_token = b"_test_token_" # this is overridden for the appservice tests self.store.get_app_service_by_token = Mock(return_value=None) @@ -61,7 +61,7 @@ class AuthTestCase(unittest.TestCase): self.store.get_user_by_access_token = Mock(return_value=user_info) request = Mock(args={}) - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) self.assertEquals(requester.user.to_string(), self.test_user) @@ -70,7 +70,7 @@ class AuthTestCase(unittest.TestCase): self.store.get_user_by_access_token = Mock(return_value=None) request = Mock(args={}) - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() d = self.auth.get_user_by_req(request) self.failureResultOf(d, AuthError) @@ -98,7 +98,7 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.getClientIP.return_value = "127.0.0.1" - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] 
request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) self.assertEquals(requester.user.to_string(), self.test_user) @@ -115,7 +115,7 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.getClientIP.return_value = "192.168.10.10" - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) self.assertEquals(requester.user.to_string(), self.test_user) @@ -131,7 +131,7 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.getClientIP.return_value = "131.111.8.42" - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() d = self.auth.get_user_by_req(request) self.failureResultOf(d, AuthError) @@ -141,7 +141,7 @@ class AuthTestCase(unittest.TestCase): self.store.get_user_by_access_token = Mock(return_value=None) request = Mock(args={}) - request.args["access_token"] = [self.test_token] + request.args[b"access_token"] = [self.test_token] request.requestHeaders.getRawHeaders = mock_getRawHeaders() d = self.auth.get_user_by_req(request) self.failureResultOf(d, AuthError) @@ -158,7 +158,7 @@ class AuthTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_user_by_req_appservice_valid_token_valid_user_id(self): - masquerading_user_id = "@doppelganger:matrix.org" + masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None, @@ -169,14 +169,17 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.getClientIP.return_value = "127.0.0.1" - request.args["access_token"] = [self.test_token] - request.args["user_id"] = [masquerading_user_id] + request.args[b"access_token"] = [self.test_token] + request.args[b"user_id"] = [masquerading_user_id] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) - self.assertEquals(requester.user.to_string(), masquerading_user_id) + self.assertEquals( + requester.user.to_string(), + masquerading_user_id.decode('utf8') + ) def test_get_user_by_req_appservice_valid_token_bad_user_id(self): - masquerading_user_id = "@doppelganger:matrix.org" + masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None, @@ -187,8 +190,8 @@ class AuthTestCase(unittest.TestCase): request = Mock(args={}) request.getClientIP.return_value = "127.0.0.1" - request.args["access_token"] = [self.test_token] - request.args["user_id"] = [masquerading_user_id] + request.args[b"access_token"] = [self.test_token] + request.args[b"user_id"] = [masquerading_user_id] request.requestHeaders.getRawHeaders = mock_getRawHeaders() d = self.auth.get_user_by_req(request) self.failureResultOf(d, AuthError) @@ -418,7 +421,7 @@ class AuthTestCase(unittest.TestCase): # check the token works request = Mock(args={}) - request.args["access_token"] = [token] + request.args[b"access_token"] = [token.encode('ascii')] request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request, allow_guest=True) self.assertEqual(UserID.from_string(USER_ID), requester.user) @@ -431,7 +434,7 @@ class AuthTestCase(unittest.TestCase): # the token should *not* work now request = 
Mock(args={}) - request.args["access_token"] = [guest_tok] + request.args[b"access_token"] = [guest_tok.encode('ascii')] request.requestHeaders.getRawHeaders = mock_getRawHeaders() with self.assertRaises(AuthError) as cm: diff --git a/tests/utils.py b/tests/utils.py index c3dbff8507..9bff3ff3b9 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -193,7 +193,7 @@ class MockHttpResource(HttpServer): self.prefix = prefix def trigger_get(self, path): - return self.trigger("GET", path, None) + return self.trigger(b"GET", path, None) @patch('twisted.web.http.Request') @defer.inlineCallbacks @@ -227,7 +227,7 @@ class MockHttpResource(HttpServer): headers = {} if federation_auth: - headers[b"Authorization"] = ["X-Matrix origin=test,key=,sig="] + headers[b"Authorization"] = [b"X-Matrix origin=test,key=,sig="] mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers) # return the right path if the event requires it @@ -241,6 +241,9 @@ class MockHttpResource(HttpServer): except Exception: pass + if isinstance(path, bytes): + path = path.decode('utf8') + for (method, pattern, func) in self.callbacks: if http_method != method: continue @@ -249,7 +252,7 @@ class MockHttpResource(HttpServer): if matcher: try: args = [ - urlparse.unquote(u).decode("UTF-8") + urlparse.unquote(u) for u in matcher.groups() ] From c82ccd3027dd9599ac0214adfd351996cf658681 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 11:24:19 +0100 Subject: [PATCH 054/205] Factor out exception handling in federation_client Factor out the error handling from make_membership_event, send_join, and send_leave, so that it can be shared. --- synapse/federation/federation_client.py | 277 +++++++++++++----------- 1 file changed, 148 insertions(+), 129 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 62d7ed13cf..baa9c3586f 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -48,6 +48,13 @@ sent_queries_counter = Counter("synapse_federation_client_sent_queries", "", ["t PDU_RETRY_TIME_MS = 1 * 60 * 1000 +class InvalidResponseError(RuntimeError): + """Helper for _try_destination_list: indicates that the server returned a response + we couldn't parse + """ + pass + + class FederationClient(FederationBase): def __init__(self, hs): super(FederationClient, self).__init__(hs) @@ -458,6 +465,61 @@ class FederationClient(FederationBase): defer.returnValue(signed_auth) @defer.inlineCallbacks + def _try_destination_list(self, description, destinations, callback): + """Try an operation on a series of servers, until it succeeds + + Args: + description (unicode): description of the operation we're doing, for logging + + destinations (Iterable[unicode]): list of server_names to try + + callback (callable): Function to run for each server. Passed a single + argument: the server_name to try. May return a deferred. + + If the callback raises a CodeMessageException with a 300/400 code, + attempts to perform the operation stop immediately and the exception is + reraised. + + Otherwise, if the callback raises an Exception the error is logged and the + next server tried. Normally the stacktrace is logged but this is + suppressed if the exception is an InvalidResponseError. + + Returns: + The [Deferred] result of callback, if it succeeds + + Raises: + CodeMessageException if the chosen remote server returns a 300/400 code. + + RuntimeError if no servers were reachable. 
+ """ + for destination in destinations: + if destination == self.server_name: + continue + + try: + res = yield callback(destination) + defer.returnValue(res) + except InvalidResponseError as e: + logger.warn( + "Failed to %s via %s: %s", + description, destination, e, + ) + except CodeMessageException as e: + if not 500 <= e.code < 600: + raise + else: + logger.warn( + "Failed to %s via %s: %i %s", + description, destination, e.code, e.message, + ) + except Exception: + logger.warn( + "Failed to %s via %s", + description, destination, exc_info=1, + ) + + raise RuntimeError("Failed to %s via any server", description) + def make_membership_event(self, destinations, room_id, user_id, membership, content={},): """ @@ -492,50 +554,35 @@ class FederationClient(FederationBase): "make_membership_event called with membership='%s', must be one of %s" % (membership, ",".join(valid_memberships)) ) - for destination in destinations: - if destination == self.server_name: - continue - try: - ret = yield self.transport_layer.make_membership_event( - destination, room_id, user_id, membership - ) + @defer.inlineCallbacks + def send_request(destination): + ret = yield self.transport_layer.make_membership_event( + destination, room_id, user_id, membership + ) - pdu_dict = ret["event"] + pdu_dict = ret["event"] - logger.debug("Got response to make_%s: %s", membership, pdu_dict) + logger.debug("Got response to make_%s: %s", membership, pdu_dict) - pdu_dict["content"].update(content) + pdu_dict["content"].update(content) - # The protoevent received over the JSON wire may not have all - # the required fields. Lets just gloss over that because - # there's some we never care about - if "prev_state" not in pdu_dict: - pdu_dict["prev_state"] = [] + # The protoevent received over the JSON wire may not have all + # the required fields. Lets just gloss over that because + # there's some we never care about + if "prev_state" not in pdu_dict: + pdu_dict["prev_state"] = [] - ev = builder.EventBuilder(pdu_dict) + ev = builder.EventBuilder(pdu_dict) - defer.returnValue( - (destination, ev) - ) - break - except CodeMessageException as e: - if not 500 <= e.code < 600: - raise - else: - logger.warn( - "Failed to make_%s via %s: %s", - membership, destination, e.message - ) - except Exception as e: - logger.warn( - "Failed to make_%s via %s: %s", - membership, destination, e.message - ) + defer.returnValue( + (destination, ev) + ) - raise RuntimeError("Failed to send to any server.") + return self._try_destination_list( + "make_" + membership, destinations, send_request, + ) - @defer.inlineCallbacks def send_join(self, destinations, pdu): """Sends a join event to one of a list of homeservers. @@ -558,87 +605,70 @@ class FederationClient(FederationBase): Fails with a ``RuntimeError`` if no servers were reachable. 
""" - for destination in destinations: - if destination == self.server_name: - continue + @defer.inlineCallbacks + def send_request(destination): + time_now = self._clock.time_msec() + _, content = yield self.transport_layer.send_join( + destination=destination, + room_id=pdu.room_id, + event_id=pdu.event_id, + content=pdu.get_pdu_json(time_now), + ) - try: - time_now = self._clock.time_msec() - _, content = yield self.transport_layer.send_join( - destination=destination, - room_id=pdu.room_id, - event_id=pdu.event_id, - content=pdu.get_pdu_json(time_now), - ) + logger.debug("Got content: %s", content) - logger.debug("Got content: %s", content) + state = [ + event_from_pdu_json(p, outlier=True) + for p in content.get("state", []) + ] - state = [ - event_from_pdu_json(p, outlier=True) - for p in content.get("state", []) - ] + auth_chain = [ + event_from_pdu_json(p, outlier=True) + for p in content.get("auth_chain", []) + ] - auth_chain = [ - event_from_pdu_json(p, outlier=True) - for p in content.get("auth_chain", []) - ] + pdus = { + p.event_id: p + for p in itertools.chain(state, auth_chain) + } - pdus = { - p.event_id: p - for p in itertools.chain(state, auth_chain) - } + valid_pdus = yield self._check_sigs_and_hash_and_fetch( + destination, list(pdus.values()), + outlier=True, + ) - valid_pdus = yield self._check_sigs_and_hash_and_fetch( - destination, list(pdus.values()), - outlier=True, - ) + valid_pdus_map = { + p.event_id: p + for p in valid_pdus + } - valid_pdus_map = { - p.event_id: p - for p in valid_pdus - } + # NB: We *need* to copy to ensure that we don't have multiple + # references being passed on, as that causes... issues. + signed_state = [ + copy.copy(valid_pdus_map[p.event_id]) + for p in state + if p.event_id in valid_pdus_map + ] - # NB: We *need* to copy to ensure that we don't have multiple - # references being passed on, as that causes... issues. - signed_state = [ - copy.copy(valid_pdus_map[p.event_id]) - for p in state - if p.event_id in valid_pdus_map - ] + signed_auth = [ + valid_pdus_map[p.event_id] + for p in auth_chain + if p.event_id in valid_pdus_map + ] - signed_auth = [ - valid_pdus_map[p.event_id] - for p in auth_chain - if p.event_id in valid_pdus_map - ] + # NB: We *need* to copy to ensure that we don't have multiple + # references being passed on, as that causes... issues. + for s in signed_state: + s.internal_metadata = copy.deepcopy(s.internal_metadata) - # NB: We *need* to copy to ensure that we don't have multiple - # references being passed on, as that causes... issues. 
- for s in signed_state: - s.internal_metadata = copy.deepcopy(s.internal_metadata) + auth_chain.sort(key=lambda e: e.depth) - auth_chain.sort(key=lambda e: e.depth) - - defer.returnValue({ - "state": signed_state, - "auth_chain": signed_auth, - "origin": destination, - }) - except CodeMessageException as e: - if not 500 <= e.code < 600: - raise - else: - logger.exception( - "Failed to send_join via %s: %s", - destination, e.message - ) - except Exception as e: - logger.exception( - "Failed to send_join via %s: %s", - destination, e.message - ) - - raise RuntimeError("Failed to send to any server.") + defer.returnValue({ + "state": signed_state, + "auth_chain": signed_auth, + "origin": destination, + }) + return self._try_destination_list("send_join", destinations, send_request) @defer.inlineCallbacks def send_invite(self, destination, room_id, event_id, pdu): @@ -663,7 +693,6 @@ class FederationClient(FederationBase): defer.returnValue(pdu) - @defer.inlineCallbacks def send_leave(self, destinations, pdu): """Sends a leave event to one of a list of homeservers. @@ -681,34 +710,24 @@ class FederationClient(FederationBase): Deferred: resolves to None. Fails with a ``CodeMessageException`` if the chosen remote server - returns a non-200 code. + returns a 300/400 code. Fails with a ``RuntimeError`` if no servers were reachable. """ - for destination in destinations: - if destination == self.server_name: - continue + @defer.inlineCallbacks + def send_request(destination): + time_now = self._clock.time_msec() + _, content = yield self.transport_layer.send_leave( + destination=destination, + room_id=pdu.room_id, + event_id=pdu.event_id, + content=pdu.get_pdu_json(time_now), + ) - try: - time_now = self._clock.time_msec() - _, content = yield self.transport_layer.send_leave( - destination=destination, - room_id=pdu.room_id, - event_id=pdu.event_id, - content=pdu.get_pdu_json(time_now), - ) + logger.debug("Got content: %s", content) + defer.returnValue(None) - logger.debug("Got content: %s", content) - defer.returnValue(None) - except CodeMessageException: - raise - except Exception as e: - logger.exception( - "Failed to send_leave via %s: %s", - destination, e.message - ) - - raise RuntimeError("Failed to send to any server.") + return self._try_destination_list("send_leave", destinations, send_request) def get_public_rooms(self, destination, limit=None, since_token=None, search_filter=None, include_all_networks=False, From fa7dc889f19f581e81624245ce7820525066eff3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 13:47:07 +0100 Subject: [PATCH 055/205] Be more careful which errors we send back over the C-S API We really shouldn't be sending all CodeMessageExceptions back over the C-S API; it will include things like 401s which we shouldn't proxy. That means that we need to explicitly turn a few HttpResponseExceptions into SynapseErrors in the federation layer. The effect of the latter is that the matrix errcode will get passed through correctly to calling clients, which might help with some of the random M_UNKNOWN errors when trying to join rooms. 
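As a sketch of the error mapping the patch below is aiming for (a
placeholder MatrixError class stands in for synapse's SynapseError here,
so the snippet is self-contained):

    import json


    class MatrixError(Exception):
        # Stand-in for SynapseError: an error carrying a matrix errcode.
        def __init__(self, code, msg, errcode):
            super(MatrixError, self).__init__(msg)
            self.code, self.msg, self.errcode = code, msg, errcode


    def remote_error_to_matrix(code, reason, body):
        # Best effort: if the remote response body parses as a matrix
        # error, reuse its errcode and message; otherwise fall back to
        # M_UNKNOWN with the HTTP reason phrase as the message.
        try:
            j = json.loads(body)
        except ValueError:
            j = {}
        errcode = j.get("errcode", "M_UNKNOWN")
        errmsg = j.get("error", reason)
        return MatrixError(code, errmsg, errcode)

In the federation code this series touches, only 300/400-range responses
get translated and re-raised this way; 5xx responses are logged and the
next server in the list is tried.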
--- synapse/api/errors.py | 11 ---------- synapse/federation/federation_client.py | 29 +++++++++++++++---------- synapse/http/server.py | 14 +++++------- 3 files changed, 22 insertions(+), 32 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 6074df292f..cf48829c8b 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -69,9 +69,6 @@ class CodeMessageException(RuntimeError): self.code = code self.msg = msg - def error_dict(self): - return cs_error(self.msg) - class MatrixCodeMessageException(CodeMessageException): """An error from a general matrix endpoint, eg. from a proxied Matrix API call. @@ -308,14 +305,6 @@ class LimitExceededError(SynapseError): ) -def cs_exception(exception): - if isinstance(exception, CodeMessageException): - return exception.error_dict() - else: - logger.error("Unknown exception type: %s", type(exception)) - return {} - - def cs_error(msg, code=Codes.UNKNOWN, **kwargs): """ Utility method for constructing an error response for client-server interactions. diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index baa9c3586f..0b09f93ca9 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -488,7 +488,7 @@ class FederationClient(FederationBase): The [Deferred] result of callback, if it succeeds Raises: - CodeMessageException if the chosen remote server returns a 300/400 code. + SynapseError if the chosen remote server returns a 300/400 code. RuntimeError if no servers were reachable. """ @@ -504,9 +504,9 @@ class FederationClient(FederationBase): "Failed to %s via %s: %s", description, destination, e, ) - except CodeMessageException as e: + except HttpResponseException as e: if not 500 <= e.code < 600: - raise + raise SynapseError.from_http_response_exception(e) else: logger.warn( "Failed to %s via %s: %i %s", @@ -543,7 +543,7 @@ class FederationClient(FederationBase): Deferred: resolves to a tuple of (origin (str), event (object)) where origin is the remote homeserver which generated the event. - Fails with a ``CodeMessageException`` if the chosen remote server + Fails with a ``SynapseError`` if the chosen remote server returns a 300/400 code. Fails with a ``RuntimeError`` if no servers were reachable. @@ -599,7 +599,7 @@ class FederationClient(FederationBase): giving the serer the event was sent to, ``state`` (?) and ``auth_chain``. - Fails with a ``CodeMessageException`` if the chosen remote server + Fails with a ``SynapseError`` if the chosen remote server returns a 300/400 code. Fails with a ``RuntimeError`` if no servers were reachable. @@ -673,12 +673,17 @@ class FederationClient(FederationBase): @defer.inlineCallbacks def send_invite(self, destination, room_id, event_id, pdu): time_now = self._clock.time_msec() - code, content = yield self.transport_layer.send_invite( - destination=destination, - room_id=room_id, - event_id=event_id, - content=pdu.get_pdu_json(time_now), - ) + try: + code, content = yield self.transport_layer.send_invite( + destination=destination, + room_id=room_id, + event_id=event_id, + content=pdu.get_pdu_json(time_now), + ) + except HttpResponseException as e: + if e.code == 403: + raise SynapseError.from_http_response_exception(e) + raise pdu_dict = content["event"] @@ -709,7 +714,7 @@ class FederationClient(FederationBase): Return: Deferred: resolves to None. - Fails with a ``CodeMessageException`` if the chosen remote server + Fails with a ``SynapseError`` if the chosen remote server returns a 300/400 code. 
Fails with a ``RuntimeError`` if no servers were reachable. diff --git a/synapse/http/server.py b/synapse/http/server.py index 1940c1c4f4..6dacb31037 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -36,7 +36,6 @@ from synapse.api.errors import ( Codes, SynapseError, UnrecognizedRequestError, - cs_exception, ) from synapse.http.request_metrics import requests_counter from synapse.util.caches import intern_dict @@ -77,16 +76,13 @@ def wrap_json_request_handler(h): def wrapped_request_handler(self, request): try: yield h(self, request) - except CodeMessageException as e: + except SynapseError as e: code = e.code - if isinstance(e, SynapseError): - logger.info( - "%s SynapseError: %s - %s", request, code, e.msg - ) - else: - logger.exception(e) + logger.info( + "%s SynapseError: %s - %s", request, code, e.msg + ) respond_with_json( - request, code, cs_exception(e), send_cors=True, + request, code, e.error_dict(), send_cors=True, pretty_print=_request_user_agent_is_curl(request), ) From 018d75a148ced6945aca7b095a272e0edba5aae1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 14:58:16 +0100 Subject: [PATCH 056/205] Refactor code for turning HttpResponseException into SynapseError This commit replaces SynapseError.from_http_response_exception with HttpResponseException.to_synapse_error. The new method actually returns a ProxiedRequestError, which allows us to pass through additional metadata from the API call. --- synapse/api/errors.py | 84 ++++++++++++++--------- synapse/federation/federation_client.py | 4 +- synapse/rest/media/v1/media_repository.py | 2 +- 3 files changed, 56 insertions(+), 34 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index cf48829c8b..7476c90ed3 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -105,38 +105,28 @@ class SynapseError(CodeMessageException): self.errcode, ) - @classmethod - def from_http_response_exception(cls, err): - """Make a SynapseError based on an HTTPResponseException - This is useful when a proxied request has failed, and we need to - decide how to map the failure onto a matrix error to send back to the - client. +class ProxiedRequestError(SynapseError): + """An error from a general matrix endpoint, eg. from a proxied Matrix API call. - An attempt is made to parse the body of the http response as a matrix - error. If that succeeds, the errcode and error message from the body - are used as the errcode and error message in the new synapse error. + Attributes: + errcode (str): Matrix error code e.g 'M_FORBIDDEN' + """ + def __init__(self, code, msg, errcode=Codes.UNKNOWN, additional_fields=None): + super(ProxiedRequestError, self).__init__( + code, msg, errcode + ) + if additional_fields is None: + self._additional_fields = {} + else: + self._additional_fields = dict(additional_fields) - Otherwise, the errcode is set to M_UNKNOWN, and the error message is - set to the reason code from the HTTP response. 
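To make the metadata pass-through concrete, here is an invented example (the retry_after_ms value is made up; error_dict is defined just below):

from synapse.api.errors import Codes, ProxiedRequestError

# Extra keys recovered from a remote Matrix error body survive into
# the dict we eventually send back to our own client.
err = ProxiedRequestError(
    429, "Too Many Requests", Codes.LIMIT_EXCEEDED,
    additional_fields={"retry_after_ms": 2000},
)
# err.error_dict() then yields:
# {"errcode": "M_LIMIT_EXCEEDED", "error": "Too Many Requests",
#  "retry_after_ms": 2000}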
- - Args: - err (HttpResponseException): - - Returns: - SynapseError: - """ - # try to parse the body as json, to get better errcode/msg, but - # default to M_UNKNOWN with the HTTP status as the error text - try: - j = json.loads(err.response) - except ValueError: - j = {} - errcode = j.get('errcode', Codes.UNKNOWN) - errmsg = j.get('error', err.msg) - - res = SynapseError(err.code, errmsg, errcode) - return res + def error_dict(self): + return cs_error( + self.msg, + self.errcode, + **self._additional_fields + ) class ConsentNotGivenError(SynapseError): @@ -361,7 +351,7 @@ class HttpResponseException(CodeMessageException): Represents an HTTP-level failure of an outbound request Attributes: - response (str): body of response + response (bytes): body of response """ def __init__(self, code, msg, response): """ @@ -369,7 +359,39 @@ class HttpResponseException(CodeMessageException): Args: code (int): HTTP status code msg (str): reason phrase from HTTP response status line - response (str): body of response + response (bytes): body of response """ super(HttpResponseException, self).__init__(code, msg) self.response = response + + def to_synapse_error(self): + """Make a SynapseError based on an HTTPResponseException + + This is useful when a proxied request has failed, and we need to + decide how to map the failure onto a matrix error to send back to the + client. + + An attempt is made to parse the body of the http response as a matrix + error. If that succeeds, the errcode and error message from the body + are used as the errcode and error message in the new synapse error. + + Otherwise, the errcode is set to M_UNKNOWN, and the error message is + set to the reason code from the HTTP response. + + Returns: + SynapseError: + """ + # try to parse the body as json, to get better errcode/msg, but + # default to M_UNKNOWN with the HTTP status as the error text + try: + j = json.loads(self.response) + except ValueError: + j = {} + + if not isinstance(j, dict): + j = {} + + errcode = j.pop('errcode', Codes.UNKNOWN) + errmsg = j.pop('error', self.msg) + + return ProxiedRequestError(self.code, errmsg, errcode, j) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 0b09f93ca9..7550e11b6e 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -506,7 +506,7 @@ class FederationClient(FederationBase): ) except HttpResponseException as e: if not 500 <= e.code < 600: - raise SynapseError.from_http_response_exception(e) + raise e.to_synapse_error() else: logger.warn( "Failed to %s via %s: %i %s", @@ -682,7 +682,7 @@ class FederationClient(FederationBase): ) except HttpResponseException as e: if e.code == 403: - raise SynapseError.from_http_response_exception(e) + raise e.to_synapse_error() raise pdu_dict = content["event"] diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 174ad20123..8fb413d825 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -379,7 +379,7 @@ class MediaRepository(object): logger.warn("HTTP error fetching remote media %s/%s: %s", server_name, media_id, e.response) if e.code == twisted.web.http.NOT_FOUND: - raise SynapseError.from_http_response_exception(e) + raise e.to_synapse_error() raise SynapseError(502, "Failed to fetch remote media") except SynapseError: From 01e93f48ed3dd78fda45a37733251659af19dde3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 15:04:50 
+0100 Subject: [PATCH 057/205] Kill off MatrixCodeMessageException This code brings the SimpleHttpClient into line with the MatrixFederationHttpClient by having it raise HttpResponseExceptions when a request fails (rather than trying to parse for matrix errors and maybe raising MatrixCodeMessageException). Then, whenever we were checking for MatrixCodeMessageException and turning them into SynapseErrors, we now need to check for HttpResponseExceptions and call to_synapse_error. --- synapse/api/errors.py | 11 ----- synapse/handlers/identity.py | 25 ++++------- synapse/http/client.py | 61 ++++++++++++-------------- synapse/replication/http/membership.py | 18 ++++---- synapse/replication/http/send_event.py | 10 ++--- 5 files changed, 47 insertions(+), 78 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 7476c90ed3..3568362389 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -70,17 +70,6 @@ class CodeMessageException(RuntimeError): self.msg = msg -class MatrixCodeMessageException(CodeMessageException): - """An error from a general matrix endpoint, eg. from a proxied Matrix API call. - - Attributes: - errcode (str): Matrix error code e.g 'M_FORBIDDEN' - """ - def __init__(self, code, msg, errcode=Codes.UNKNOWN): - super(MatrixCodeMessageException, self).__init__(code, msg) - self.errcode = errcode - - class SynapseError(CodeMessageException): """A base exception type for matrix errors which have an errcode and error message (as well as an HTTP status code). diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 8c8aedb2b8..1d36d967c3 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -26,7 +26,7 @@ from twisted.internet import defer from synapse.api.errors import ( CodeMessageException, Codes, - MatrixCodeMessageException, + HttpResponseException, SynapseError, ) @@ -85,7 +85,6 @@ class IdentityHandler(BaseHandler): ) defer.returnValue(None) - data = {} try: data = yield self.http_client.get_json( "https://%s%s" % ( @@ -94,11 +93,9 @@ class IdentityHandler(BaseHandler): ), {'sid': creds['sid'], 'client_secret': client_secret} ) - except MatrixCodeMessageException as e: + except HttpResponseException as e: logger.info("getValidated3pid failed with Matrix error: %r", e) - raise SynapseError(e.code, e.msg, e.errcode) - except CodeMessageException as e: - data = json.loads(e.msg) + raise e.to_synapse_error() if 'medium' in data: defer.returnValue(data) @@ -136,7 +133,7 @@ class IdentityHandler(BaseHandler): ) logger.debug("bound threepid %r to %s", creds, mxid) except CodeMessageException as e: - data = json.loads(e.msg) + data = json.loads(e.msg) # XXX WAT? 
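The heart of this change is a uniform response-handling rule for SimpleHttpClient, spread across the synapse/http/client.py hunks below. Reduced to a sketch (not the committed code itself):

import json

from synapse.api.errors import HttpResponseException

def handle_json_response(code, phrase, body):
    # 2xx: decode the body as JSON (which may itself raise ValueError);
    # anything else: raise, carrying status code, reason phrase and body.
    if 200 <= code < 300:
        return json.loads(body)
    raise HttpResponseException(code, phrase, body)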
defer.returnValue(data) @defer.inlineCallbacks @@ -209,12 +206,9 @@ class IdentityHandler(BaseHandler): params ) defer.returnValue(data) - except MatrixCodeMessageException as e: - logger.info("Proxied requestToken failed with Matrix error: %r", e) - raise SynapseError(e.code, e.msg, e.errcode) - except CodeMessageException as e: + except HttpResponseException as e: logger.info("Proxied requestToken failed: %r", e) - raise e + raise e.to_synapse_error() @defer.inlineCallbacks def requestMsisdnToken( @@ -244,9 +238,6 @@ class IdentityHandler(BaseHandler): params ) defer.returnValue(data) - except MatrixCodeMessageException as e: - logger.info("Proxied requestToken failed with Matrix error: %r", e) - raise SynapseError(e.code, e.msg, e.errcode) - except CodeMessageException as e: + except HttpResponseException as e: logger.info("Proxied requestToken failed: %r", e) - raise e + raise e.to_synapse_error() diff --git a/synapse/http/client.py b/synapse/http/client.py index 25b6307884..3771e0b3f6 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -39,12 +39,7 @@ from twisted.web.client import ( from twisted.web.http import PotentialDataLoss from twisted.web.http_headers import Headers -from synapse.api.errors import ( - CodeMessageException, - Codes, - MatrixCodeMessageException, - SynapseError, -) +from synapse.api.errors import Codes, HttpResponseException, SynapseError from synapse.http import cancelled_to_request_timed_out_error, redact_uri from synapse.http.endpoint import SpiderEndpoint from synapse.util.async import add_timeout_to_deferred @@ -132,6 +127,11 @@ class SimpleHttpClient(object): Returns: Deferred[object]: parsed json + + Raises: + HttpResponseException: On a non-2xx HTTP response. + + ValueError: if the response was not JSON """ # TODO: Do we ever want to log message contents? @@ -155,7 +155,10 @@ class SimpleHttpClient(object): body = yield make_deferred_yieldable(readBody(response)) - defer.returnValue(json.loads(body)) + if 200 <= response.code < 300: + defer.returnValue(json.loads(body)) + else: + raise HttpResponseException(response.code, response.phrase, body) @defer.inlineCallbacks def post_json_get_json(self, uri, post_json, headers=None): @@ -169,6 +172,11 @@ class SimpleHttpClient(object): Returns: Deferred[object]: parsed json + + Raises: + HttpResponseException: On a non-2xx HTTP response. + + ValueError: if the response was not JSON """ json_str = encode_canonical_json(post_json) @@ -193,9 +201,7 @@ class SimpleHttpClient(object): if 200 <= response.code < 300: defer.returnValue(json.loads(body)) else: - raise self._exceptionFromFailedRequest(response, body) - - defer.returnValue(json.loads(body)) + raise HttpResponseException(response.code, response.phrase, body) @defer.inlineCallbacks def get_json(self, uri, args={}, headers=None): @@ -213,14 +219,12 @@ class SimpleHttpClient(object): Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body as JSON. Raises: - On a non-2xx HTTP response. The response body will be used as the - error message. + HttpResponseException On a non-2xx HTTP response. 
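A hypothetical call site then handles both documented failure modes explicitly; this sketch assumes client is a SimpleHttpClient and uri points at a proxied Matrix endpoint:

from twisted.internet import defer

from synapse.api.errors import HttpResponseException, SynapseError

@defer.inlineCallbacks
def fetch_remote_json(client, uri):
    try:
        info = yield client.get_json(uri)
    except HttpResponseException as e:
        # non-2xx from the remote: convert to a client-facing Matrix error
        raise e.to_synapse_error()
    except ValueError:
        # 2xx, but the body was not valid JSON
        raise SynapseError(502, "Non-JSON response from remote server")
    defer.returnValue(info)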
+ + ValueError: if the response was not JSON """ - try: - body = yield self.get_raw(uri, args, headers=headers) - defer.returnValue(json.loads(body)) - except CodeMessageException as e: - raise self._exceptionFromFailedRequest(e.code, e.msg) + body = yield self.get_raw(uri, args, headers=headers) + defer.returnValue(json.loads(body)) @defer.inlineCallbacks def put_json(self, uri, json_body, args={}, headers=None): @@ -239,7 +243,9 @@ class SimpleHttpClient(object): Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body as JSON. Raises: - On a non-2xx HTTP response. + HttpResponseException On a non-2xx HTTP response. + + ValueError: if the response was not JSON """ if len(args): query_bytes = urllib.urlencode(args, True) @@ -266,10 +272,7 @@ class SimpleHttpClient(object): if 200 <= response.code < 300: defer.returnValue(json.loads(body)) else: - # NB: This is explicitly not json.loads(body)'d because the contract - # of CodeMessageException is a *string* message. Callers can always - # load it into JSON if they want. - raise CodeMessageException(response.code, body) + raise HttpResponseException(response.code, response.phrase, body) @defer.inlineCallbacks def get_raw(self, uri, args={}, headers=None): @@ -287,8 +290,7 @@ class SimpleHttpClient(object): Deferred: Succeeds when we get *any* 2xx HTTP response, with the HTTP body at text. Raises: - On a non-2xx HTTP response. The response body will be used as the - error message. + HttpResponseException on a non-2xx HTTP response. """ if len(args): query_bytes = urllib.urlencode(args, True) @@ -311,16 +313,7 @@ class SimpleHttpClient(object): if 200 <= response.code < 300: defer.returnValue(body) else: - raise CodeMessageException(response.code, body) - - def _exceptionFromFailedRequest(self, response, body): - try: - jsonBody = json.loads(body) - errcode = jsonBody['errcode'] - error = jsonBody['error'] - return MatrixCodeMessageException(response.code, error, errcode) - except (ValueError, KeyError): - return CodeMessageException(response.code, body) + raise HttpResponseException(response.code, response.phrase, body) # XXX: FIXME: This is horribly copy-pasted from matrixfederationclient. # The two should be factored out. diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 6bfc8a5b89..7a3cfb159c 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -18,7 +18,7 @@ import re from twisted.internet import defer -from synapse.api.errors import MatrixCodeMessageException, SynapseError +from synapse.api.errors import HttpResponseException from synapse.http.servlet import RestServlet, parse_json_object_from_request from synapse.types import Requester, UserID from synapse.util.distributor import user_joined_room, user_left_room @@ -56,11 +56,11 @@ def remote_join(client, host, port, requester, remote_room_hosts, try: result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: + except HttpResponseException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. 
(And # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) + raise e.to_synapse_error() defer.returnValue(result) @@ -92,11 +92,11 @@ def remote_reject_invite(client, host, port, requester, remote_room_hosts, try: result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: + except HttpResponseException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. (And # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) + raise e.to_synapse_error() defer.returnValue(result) @@ -131,11 +131,11 @@ def get_or_register_3pid_guest(client, host, port, requester, try: result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: + except HttpResponseException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. (And # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) + raise e.to_synapse_error() defer.returnValue(result) @@ -165,11 +165,11 @@ def notify_user_membership_change(client, host, port, user_id, room_id, change): try: result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: + except HttpResponseException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. (And # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) + raise e.to_synapse_error() defer.returnValue(result) diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 5227bc333d..d3509dc288 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -18,11 +18,7 @@ import re from twisted.internet import defer -from synapse.api.errors import ( - CodeMessageException, - MatrixCodeMessageException, - SynapseError, -) +from synapse.api.errors import CodeMessageException, HttpResponseException from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext from synapse.http.servlet import RestServlet, parse_json_object_from_request @@ -83,11 +79,11 @@ def send_event_to_master(clock, store, client, host, port, requester, event, con # If we timed out we probably don't need to worry about backing # off too much, but lets just wait a little anyway. yield clock.sleep(1) - except MatrixCodeMessageException as e: + except HttpResponseException as e: # We convert to SynapseError as we know that it was a SynapseError # on the master process that we should send to the client. 
(And # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) + raise e.to_synapse_error() defer.returnValue(result) From 38b98e5a98414eeba2d8e3671875f66a7b27645a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 16:07:49 +0100 Subject: [PATCH 058/205] changelog --- changelog.d/3638.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3638.misc diff --git a/changelog.d/3638.misc b/changelog.d/3638.misc new file mode 100644 index 0000000000..9faab15cf2 --- /dev/null +++ b/changelog.d/3638.misc @@ -0,0 +1 @@ +Factor out exception handling in federation_client From 7ff44d92154875c55addc079b0ad7f303c699933 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 16:17:00 +0100 Subject: [PATCH 059/205] improve clarity --- changelog.d/3630.feature | 2 +- synapse/storage/__init__.py | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/changelog.d/3630.feature b/changelog.d/3630.feature index 20398da9e0..8007a04840 100644 --- a/changelog.d/3630.feature +++ b/changelog.d/3630.feature @@ -1 +1 @@ -Blocks registration and sign in if max mau value exceeded +Add ability to limit number of monthly active users on the server diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 12282a8b3d..134e4a80f1 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -268,12 +268,13 @@ class DataStore(RoomMemberStore, RoomStore, return self.runInteraction("count_users", _count_users) def count_monthly_users(self): - """ - Counts the number of users who used this homeserver in the last 30 days + """Counts the number of users who used this homeserver in the last 30 days + This method should be refactored with count_daily_users - the only reason not to is waiting on definition of mau - returns: - defered: resolves to int + + Returns: + Defered[int] """ def _count_monthly_users(txn): thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) From b7f203a56658bdd6d7bf39e82fc1ffd0dae1f61a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 16:17:42 +0100 Subject: [PATCH 060/205] count_monthly_users is now async --- synapse/app/homeserver.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 46325d8521..fba51c26e8 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -519,10 +519,11 @@ def run(hs): # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + @defer.inlineCallbacks def generate_monthly_active_users(): count = 0 if hs.config.limit_usage_by_mau: - count = hs.get_datastore().count_monthly_users() + count = yield hs.get_datastore().count_monthly_users() current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) From 908be65e646ba933d595f3d8874cee69774e7243 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 16:23:31 +0100 Subject: [PATCH 061/205] changelog --- changelog.d/3639.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3639.feature diff --git a/changelog.d/3639.feature b/changelog.d/3639.feature new file mode 100644 index 0000000000..c8c387e219 --- /dev/null +++ b/changelog.d/3639.feature @@ -0,0 +1 @@ +When we fail to join a room over federation, pass the error code back to the client. 
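The change in PATCH 060 above is a standard Twisted refactoring: once count_monthly_users returns a Deferred, its caller has to become a generator as well. A minimal standalone sketch, with store and gauge as invented stand-ins for the datastore and Prometheus gauge:

from twisted.internet import defer

@defer.inlineCallbacks
def refresh_mau_gauge(store, gauge, limit_usage_by_mau):
    # yield the Deferred[int] instead of using the return value directly
    count = 0
    if limit_usage_by_mau:
        count = yield store.count_monthly_users()
    gauge.set(float(count))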
\ No newline at end of file From ec716a35b219d147dee51733b55573952799a549 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 17:54:37 +0100 Subject: [PATCH 062/205] change monthly_active_users table to be a single column --- synapse/storage/monthly_active_users.py | 10 +++------- .../storage/schema/delta/51/monthly_active_users.sql | 4 +--- tests/storage/test_monthly_active_users.py | 6 +++--- 3 files changed, 7 insertions(+), 13 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 03eeea792e..7b3f13aedf 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -38,22 +38,18 @@ class MonthlyActiveUsersStore(SQLBaseStore): return count return self.runInteraction("count_users", _count_users) - def upsert_monthly_active_user(self, user_id): + def insert_monthly_active_user(self, user_id): """ Updates or inserts monthly active user member Arguments: user_id (str): user to add/update """ - return self._simple_upsert( + return self._simple_insert( desc="upsert_monthly_active_user", table="monthly_active_users", - keyvalues={ + values={ "user_id": user_id, }, - values={ - "timestamp": int(self._clock.time_msec()), - }, - lock=False, ) @defer.inlineCallbacks diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index f2b6d3e31e..590b1abab2 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -15,9 +15,7 @@ -- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( - user_id TEXT NOT NULL, - timestamp BIGINT NOT NULL + user_id TEXT NOT NULL ); CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); -CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 9b1ffc6369..7a8432ce69 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -22,7 +22,7 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): count = yield self.mau.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.upsert_monthly_active_user("@user:server") + yield self.mau.insert_monthly_active_user("@user:server") count = yield self.mau.get_monthly_active_count() self.assertEqual(1, count) @@ -34,8 +34,8 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): user_id3 = "@user3:server" result = yield self.mau.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.upsert_monthly_active_user(user_id1) - yield self.mau.upsert_monthly_active_user(user_id2) + yield self.mau.insert_monthly_active_user(user_id1) + yield self.mau.insert_monthly_active_user(user_id2) result = yield self.mau.is_user_monthly_active(user_id1) self.assertTrue(result) result = yield self.mau.is_user_monthly_active(user_id3) From c21d82bab3b32e2f59c9ef09a1a10b337ce45db4 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 23:24:38 +0100 Subject: [PATCH 063/205] normalise reaping query --- synapse/storage/monthly_active_users.py | 41 +++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7b3f13aedf..0741c7fa61 100644 --- 
a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -7,6 +7,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): def __init__(self, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() + self.max_mau_value = hs.config.max_mau_value def reap_monthly_active_users(self): """ @@ -19,8 +20,42 @@ class MonthlyActiveUsersStore(SQLBaseStore): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + + # Query deletes the union of users that have either: + # * not visited in the last 30 days + # * exceeded the total max_mau_value threshold. Where there is + # an excess, more recent users are favoured - this is to cover + # the case where the limit has been step change reduced. + # + sql = """ + DELETE FROM monthly_active_users + WHERE user_id + IN ( + SELECT * FROM ( + SELECT monthly_active_users.user_id + FROM monthly_active_users + LEFT JOIN ( + SELECT user_id, max(last_seen) AS last_seen + FROM user_ips + GROUP BY user_id + ) AS uip ON uip.user_id=monthly_active_users.user_id + ORDER BY uip.last_seen desc LIMIT -1 OFFSET ? + ) + UNION + SELECT * FROM ( + SELECT monthly_active_users.user_id + FROM monthly_active_users + LEFT JOIN ( + SELECT user_id, max(last_seen) AS last_seen + FROM user_ips + GROUP BY user_id + ) AS uip ON uip.user_id=monthly_active_users.user_id + WHERE uip.last_seen < ? + ) + ) + """ + + txn.execute(sql, (self.max_mau_value, thirty_days_ago,)) return self.runInteraction("reap_monthly_active_users", _reap_users) @@ -45,7 +80,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): user_id (str): user to add/update """ return self._simple_insert( - desc="upsert_monthly_active_user", + desc="insert_monthly_active_user", table="monthly_active_users", values={ "user_id": user_id, From 08281fe6b7cdd9620d28d2aa6b725bdb0a351bbc Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 23:26:24 +0100 Subject: [PATCH 064/205] self.db_conn unused --- synapse/storage/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 134e4a80f1..04ff1f8006 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -94,7 +94,6 @@ class DataStore(RoomMemberStore, RoomStore, self._clock = hs.get_clock() self.database_engine = hs.database_engine - self.db_conn = db_conn self._stream_id_gen = StreamIdGenerator( db_conn, "events", "stream_ordering", extra_tables=[("local_invites", "stream_id")] From 165e06703340667dd88eb73dfe299a4d3298b94b Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 10:59:58 +0100 Subject: [PATCH 065/205] Revert "change monthly_active_users table to be a single column" This reverts commit ec716a35b219d147dee51733b55573952799a549. 
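The reaping query in PATCH 063 above leans on an SQLite idiom worth spelling out: a negative LIMIT means "no limit", so LIMIT -1 OFFSET ? selects every row after the first ? rows of the ordered result. Isolated into a minimal sketch (assuming the timestamp column used elsewhere in this series):

# Keep the max_mau_value most recently seen users and delete the rest:
# ORDER BY ... DESC puts the newest entries first, OFFSET ? skips that
# many, and LIMIT -1 ("no limit", SQLite only) takes all that remain.
TRIM_TO_CAP_SQL = """
    DELETE FROM monthly_active_users
    WHERE user_id IN (
        SELECT user_id FROM monthly_active_users
        ORDER BY timestamp DESC
        LIMIT -1 OFFSET ?
    )
"""
# txn.execute(TRIM_TO_CAP_SQL, (max_mau_value,))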
--- synapse/storage/monthly_active_users.py | 10 +++++++--- .../storage/schema/delta/51/monthly_active_users.sql | 4 +++- tests/storage/test_monthly_active_users.py | 6 +++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7b3f13aedf..03eeea792e 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -38,18 +38,22 @@ class MonthlyActiveUsersStore(SQLBaseStore): return count return self.runInteraction("count_users", _count_users) - def insert_monthly_active_user(self, user_id): + def upsert_monthly_active_user(self, user_id): """ Updates or inserts monthly active user member Arguments: user_id (str): user to add/update """ - return self._simple_insert( + return self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", - values={ + keyvalues={ "user_id": user_id, }, + values={ + "timestamp": int(self._clock.time_msec()), + }, + lock=False, ) @defer.inlineCallbacks diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index 590b1abab2..f2b6d3e31e 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -15,7 +15,9 @@ -- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( - user_id TEXT NOT NULL + user_id TEXT NOT NULL, + timestamp BIGINT NOT NULL ); CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); +CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 7a8432ce69..9b1ffc6369 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -22,7 +22,7 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): count = yield self.mau.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.insert_monthly_active_user("@user:server") + yield self.mau.upsert_monthly_active_user("@user:server") count = yield self.mau.get_monthly_active_count() self.assertEqual(1, count) @@ -34,8 +34,8 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): user_id3 = "@user3:server" result = yield self.mau.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.insert_monthly_active_user(user_id1) - yield self.mau.insert_monthly_active_user(user_id2) + yield self.mau.upsert_monthly_active_user(user_id1) + yield self.mau.upsert_monthly_active_user(user_id2) result = yield self.mau.is_user_monthly_active(user_id1) self.assertTrue(result) result = yield self.mau.is_user_monthly_active(user_id3) From 00f99f74b1b875bb7ac6b0623994cabad4a59cc6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 13:47:19 +0100 Subject: [PATCH 066/205] insertion into monthly_active_users --- synapse/api/auth.py | 2 +- synapse/storage/__init__.py | 2 + synapse/storage/client_ips.py | 22 ++++++++- synapse/storage/monthly_active_users.py | 18 ++++--- tests/storage/test_client_ips.py | 66 +++++++++++++++++++++++-- 5 files changed, 99 insertions(+), 11 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 5bbbe8e2e7..d8022bcf8e 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -213,7 +213,7 @@ class Auth(object): default=[b""] )[0] if user and access_token and ip_addr: - 
self.store.insert_client_ip( + yield self.store.insert_client_ip( user_id=user.to_string(), access_token=access_token, ip=ip_addr, diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 04ff1f8006..2120f46ed5 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -39,6 +39,7 @@ from .filtering import FilteringStore from .group_server import GroupServerStore from .keys import KeyStore from .media_repository import MediaRepositoryStore +from .monthly_active_users import MonthlyActiveUsersStore from .openid import OpenIdStore from .presence import PresenceStore, UserPresenceState from .profile import ProfileStore @@ -87,6 +88,7 @@ class DataStore(RoomMemberStore, RoomStore, UserDirectoryStore, GroupServerStore, UserErasureStore, + MonthlyActiveUsersStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index b8cefd43d6..506915a1ef 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -35,6 +35,7 @@ LAST_SEEN_GRANULARITY = 120 * 1000 class ClientIpStore(background_updates.BackgroundUpdateStore): def __init__(self, db_conn, hs): + self.client_ip_last_seen = Cache( name="client_ip_last_seen", keylen=4, @@ -74,6 +75,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): "before", "shutdown", self._update_client_ips_batch ) + @defer.inlineCallbacks def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, now=None): if not now: @@ -84,7 +86,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): last_seen = self.client_ip_last_seen.get(key) except KeyError: last_seen = None - + yield self._populate_monthly_active_users(user_id) # Rate-limited inserts if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY: return @@ -93,6 +95,24 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): self._batch_row_update[key] = (user_agent, device_id, now) + @defer.inlineCallbacks + def _populate_monthly_active_users(self, user_id): + store = self.hs.get_datastore() + print "entering _populate_monthly_active_users" + if self.hs.config.limit_usage_by_mau: + print "self.hs.config.limit_usage_by_mau is TRUE" + is_user_monthly_active = yield store.is_user_monthly_active(user_id) + print "is_user_monthly_active is %r" % is_user_monthly_active + if is_user_monthly_active: + yield store.upsert_monthly_active_user(user_id) + else: + count = yield store.get_monthly_active_count() + print "count is %d" % count + if count < self.hs.config.max_mau_value: + print "count is less than self.hs.config.max_mau_value " + res = yield store.upsert_monthly_active_user(user_id) + print "upsert response is %r" % res + def _update_client_ips_batch(self): def update(): to_update = self._batch_row_update diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 2337438c58..aada9bd2b9 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -4,7 +4,7 @@ from ._base import SQLBaseStore class MonthlyActiveUsersStore(SQLBaseStore): - def __init__(self, hs): + def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() self.max_mau_value = hs.config.max_mau_value @@ -14,24 +14,28 @@ class MonthlyActiveUsersStore(SQLBaseStore): Cleans out monthly active user table to ensure that no stale entries exist. 
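Setting aside the stray debug prints in the client_ips.py hunk above, the gate being introduced reduces to this hedged sketch (store and config stand in for the homeserver's datastore and config):

from twisted.internet import defer

@defer.inlineCallbacks
def populate_monthly_active_users(store, config, user_id):
    # Already-tracked users get their entry refreshed; new users are
    # only admitted while the table is below the configured cap.
    if not config.limit_usage_by_mau:
        return
    is_active = yield store.is_user_monthly_active(user_id)
    if is_active:
        yield store.upsert_monthly_active_user(user_id)
    else:
        count = yield store.get_monthly_active_count()
        if count < config.max_mau_value:
            yield store.upsert_monthly_active_user(user_id)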
Return: - defered, no return type + Defered() """ def _reap_users(txn): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.max_mau_value,)) return self.runInteraction("reap_monthly_active_users", _reap_users) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs - return: - defered resolves to int + Return: + Defered(int): Number of current monthly active users """ def _count_users(txn): sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" @@ -46,6 +50,8 @@ class MonthlyActiveUsersStore(SQLBaseStore): Updates or inserts monthly active user member Arguments: user_id (str): user to add/update + Deferred(bool): True if a new entry was created, False if an + existing one was updated. """ return self._simple_upsert( desc="upsert_monthly_active_user", diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index bd6fda6cb1..e1552510cc 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from mock import Mock from twisted.internet import defer @@ -27,9 +28,9 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() - self.store = hs.get_datastore() - self.clock = hs.get_clock() + self.hs = yield tests.utils.setup_test_homeserver() + self.store = self.hs.get_datastore() + self.clock = self.hs.get_clock() @defer.inlineCallbacks def test_insert_new_client_ip(self): @@ -54,3 +55,62 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): }, r ) + + @defer.inlineCallbacks + def test_disabled_monthly_active_user(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + @defer.inlineCallbacks + def test_adding_monthly_active_user_when_full(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + lots_of_users = 100 + user_id = "@user:server" + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(lots_of_users) + ) + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + @defer.inlineCallbacks + def test_adding_monthly_active_user_when_space(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertTrue(active) + + @defer.inlineCallbacks + def test_updating_monthly_active_user_when_space(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + + active = yield 
self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertTrue(active) From 0a65450d044fb580d789013dcdac48b10c930761 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 11:53:52 +0100 Subject: [PATCH 067/205] Validation for events/rooms in fed requests When we get a federation request which refers to an event id, make sure that said event is in the room the caller claims it is in. (patch supplied by @turt2live) --- synapse/federation/federation_server.py | 1 + synapse/handlers/federation.py | 35 ++++++++++++++++++++++++- synapse/storage/event_federation.py | 29 ++++++++++++++++++++ 3 files changed, 64 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 48f26db67c..10e71c78ce 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -425,6 +425,7 @@ class FederationServer(FederationBase): ret = yield self.handler.on_query_auth( origin, event_id, + room_id, signed_auth, content.get("rejects", []), content.get("missing", []), diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 20fb46fc89..12eeb7c4cd 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1349,6 +1349,9 @@ class FederationHandler(BaseHandler): def get_state_for_pdu(self, room_id, event_id): """Returns the state at the event. i.e. not including said event. """ + + yield self._verify_events_in_room([event_id], room_id) + state_groups = yield self.store.get_state_groups( room_id, [event_id] ) @@ -1391,6 +1394,9 @@ class FederationHandler(BaseHandler): def get_state_ids_for_pdu(self, room_id, event_id): """Returns the state at the event. i.e. not including said event. """ + + yield self._verify_events_in_room([event_id], room_id) + state_groups = yield self.store.get_state_groups_ids( room_id, [event_id] ) @@ -1420,6 +1426,8 @@ class FederationHandler(BaseHandler): if not in_room: raise AuthError(403, "Host not in room.") + yield self._verify_events_in_room(pdu_list, room_id) + events = yield self.store.get_backfill_events( room_id, pdu_list, @@ -1706,8 +1714,17 @@ class FederationHandler(BaseHandler): defer.returnValue(context) @defer.inlineCallbacks - def on_query_auth(self, origin, event_id, remote_auth_chain, rejects, + def on_query_auth(self, origin, event_id, room_id, remote_auth_chain, rejects, missing): + in_room = yield self.auth.check_host_in_room( + room_id, + origin + ) + if not in_room: + raise AuthError(403, "Host not in room.") + + yield self._verify_events_in_room([event_id], room_id) + # Just go through and process each event in `remote_auth_chain`. We # don't want to fall into the trap of `missing` being wrong. for e in remote_auth_chain: @@ -2368,3 +2385,19 @@ class FederationHandler(BaseHandler): ) if "valid" not in response or not response["valid"]: raise AuthError(403, "Third party certificate was invalid") + + @defer.inlineCallbacks + def _verify_events_in_room(self, pdu_ids, room_id): + """Checks whether the given PDU IDs are in the given room or not. 
+ + Args: + pdu_ids (list): list of PDU IDs + room_id (str): the room ID that the PDUs should be in + + Raises: + AuthError: if one or more of the PDUs does not belong to the + given room. + """ + room_ids = yield self.store.get_room_ids_for_events(pdu_ids) + if len(room_ids) != 1 or room_ids[0] != room_id: + raise AuthError(403, "Events must belong to the given room") diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 8d366d1b91..e860fe1a1e 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -295,6 +295,35 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, get_forward_extremeties_for_room_txn ) + def get_room_ids_for_events(self, event_ids): + """Get a list of room IDs for which the given events belong. + + Args: + event_ids (list): the events to look up the room of + + Returns: + list, the room IDs for the events + """ + return self.runInteraction( + "get_room_ids_for_events", + self._get_room_ids_for_events, event_ids + ) + + def _get_room_ids_for_events(self, txn, event_ids): + logger.debug("_get_room_ids_for_events: %s", repr(event_ids)) + + base_sql = ( + "SELECT DISTINCT room_id FROM events" + " WHERE event_id IN (%s)" + ) + + txn.execute( + base_sql % (",".join(["?"] * len(event_ids)),), + event_ids + ) + + return [r[0] for r in txn] + def get_backfill_events(self, room_id, event_list, limit): """Get a list of Events for a given topic that occurred before (and including) the events in event_list. Return a list of max size `limit` From c4ffbecb6856602743198f1a8d4c25a8bb7afd4d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 13:51:05 +0100 Subject: [PATCH 068/205] fix test, update constructor call --- tests/storage/test_monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 9b1ffc6369..d8d25a6069 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -15,7 +15,7 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() - self.mau = MonthlyActiveUsersStore(hs) + self.mau = MonthlyActiveUsersStore(None, hs) @defer.inlineCallbacks def test_can_insert_and_count_mau(self): From 14fa9d4d92eaa242d44a2823bbd9908be2f02d81 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 13:23:48 +0100 Subject: [PATCH 069/205] Avoid extra db lookups Since we're about to look up the events themselves anyway, we can skip the extra db queries here. --- synapse/handlers/federation.py | 38 +++++++++-------------------- synapse/storage/event_federation.py | 30 +---------------------- synapse/storage/events_worker.py | 20 ++++++++++----- 3 files changed, 26 insertions(+), 62 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 12eeb7c4cd..60391d07c4 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1350,7 +1350,9 @@ class FederationHandler(BaseHandler): """Returns the state at the event. i.e. not including said event. 
""" - yield self._verify_events_in_room([event_id], room_id) + event = yield self.store.get_event( + event_id, allow_none=False, check_room_id=room_id, + ) state_groups = yield self.store.get_state_groups( room_id, [event_id] @@ -1362,8 +1364,7 @@ class FederationHandler(BaseHandler): (e.type, e.state_key): e for e in state } - event = yield self.store.get_event(event_id) - if event and event.is_state(): + if event.is_state(): # Get previous state if "replaces_state" in event.unsigned: prev_id = event.unsigned["replaces_state"] @@ -1394,8 +1395,9 @@ class FederationHandler(BaseHandler): def get_state_ids_for_pdu(self, room_id, event_id): """Returns the state at the event. i.e. not including said event. """ - - yield self._verify_events_in_room([event_id], room_id) + event = yield self.store.get_event( + event_id, allow_none=False, check_room_id=room_id, + ) state_groups = yield self.store.get_state_groups_ids( room_id, [event_id] @@ -1405,8 +1407,7 @@ class FederationHandler(BaseHandler): _, state = state_groups.items().pop() results = state - event = yield self.store.get_event(event_id) - if event and event.is_state(): + if event.is_state(): # Get previous state if "replaces_state" in event.unsigned: prev_id = event.unsigned["replaces_state"] @@ -1426,8 +1427,6 @@ class FederationHandler(BaseHandler): if not in_room: raise AuthError(403, "Host not in room.") - yield self._verify_events_in_room(pdu_list, room_id) - events = yield self.store.get_backfill_events( room_id, pdu_list, @@ -1723,7 +1722,9 @@ class FederationHandler(BaseHandler): if not in_room: raise AuthError(403, "Host not in room.") - yield self._verify_events_in_room([event_id], room_id) + event = yield self.store.get_event( + event_id, allow_none=False, check_room_id=room_id + ) # Just go through and process each event in `remote_auth_chain`. We # don't want to fall into the trap of `missing` being wrong. @@ -1734,7 +1735,6 @@ class FederationHandler(BaseHandler): pass # Now get the current auth_chain for the event. - event = yield self.store.get_event(event_id) local_auth_chain = yield self.store.get_auth_chain( [auth_id for auth_id, _ in event.auth_events], include_given=True @@ -2385,19 +2385,3 @@ class FederationHandler(BaseHandler): ) if "valid" not in response or not response["valid"]: raise AuthError(403, "Third party certificate was invalid") - - @defer.inlineCallbacks - def _verify_events_in_room(self, pdu_ids, room_id): - """Checks whether the given PDU IDs are in the given room or not. - - Args: - pdu_ids (list): list of PDU IDs - room_id (str): the room ID that the PDUs should be in - - Raises: - AuthError: if one or more of the PDUs does not belong to the - given room. - """ - room_ids = yield self.store.get_room_ids_for_events(pdu_ids) - if len(room_ids) != 1 or room_ids[0] != room_id: - raise AuthError(403, "Events must belong to the given room") diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index e860fe1a1e..7cd77c1c29 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -295,35 +295,6 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, get_forward_extremeties_for_room_txn ) - def get_room_ids_for_events(self, event_ids): - """Get a list of room IDs for which the given events belong. 
- - Args: - event_ids (list): the events to look up the room of - - Returns: - list, the room IDs for the events - """ - return self.runInteraction( - "get_room_ids_for_events", - self._get_room_ids_for_events, event_ids - ) - - def _get_room_ids_for_events(self, txn, event_ids): - logger.debug("_get_room_ids_for_events: %s", repr(event_ids)) - - base_sql = ( - "SELECT DISTINCT room_id FROM events" - " WHERE event_id IN (%s)" - ) - - txn.execute( - base_sql % (",".join(["?"] * len(event_ids)),), - event_ids - ) - - return [r[0] for r in txn] - def get_backfill_events(self, room_id, event_list, limit): """Get a list of Events for a given topic that occurred before (and including) the events in event_list. Return a list of max size `limit` @@ -372,6 +343,7 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, table="events", keyvalues={ "event_id": event_id, + "room_id": room_id, }, retcol="depth", allow_none=True, diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 67433606c6..6b8a8e908f 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -19,7 +19,7 @@ from canonicaljson import json from twisted.internet import defer -from synapse.api.errors import SynapseError +from synapse.api.errors import NotFoundError # these are only included to make the type annotations work from synapse.events import EventBase # noqa: F401 from synapse.events import FrozenEvent @@ -76,7 +76,7 @@ class EventsWorkerStore(SQLBaseStore): @defer.inlineCallbacks def get_event(self, event_id, check_redacted=True, get_prev_content=False, allow_rejected=False, - allow_none=False): + allow_none=False, check_room_id=None): """Get an event from the database by event_id. Args: @@ -87,7 +87,9 @@ class EventsWorkerStore(SQLBaseStore): include the previous states content in the unsigned field. allow_rejected (bool): If True return rejected events. allow_none (bool): If True, return None if no event found, if - False throw an exception. + False throw a NotFoundError + check_room_id (str|None): if not None, check the room of the found event. + If there is a mismatch, behave as per allow_none. Returns: Deferred : A FrozenEvent. 
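The semantics the new check_room_id argument gives get_event fit in a few lines; this standalone sketch (not the storage-layer code itself) shows the intent:

from synapse.api.errors import NotFoundError

def apply_room_check(event_id, event, check_room_id, allow_none):
    # An event found in the wrong room behaves exactly like a missing
    # event, so callers cannot probe for events in other rooms.
    if event is not None and check_room_id is not None:
        if event.room_id != check_room_id:
            event = None
    if event is None and not allow_none:
        raise NotFoundError("Could not find event %s" % (event_id,))
    return event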
@@ -99,10 +101,16 @@ class EventsWorkerStore(SQLBaseStore): allow_rejected=allow_rejected, ) - if not events and not allow_none: - raise SynapseError(404, "Could not find event %s" % (event_id,)) + event = events[0] if events else None - defer.returnValue(events[0] if events else None) + if event is not None and check_room_id is not None: + if event.room_id != check_room_id: + event = None + + if event is None and not allow_none: + raise NotFoundError("Could not find event %s" % (event_id,)) + + defer.returnValue(event) @defer.inlineCallbacks def get_events(self, event_ids, check_redacted=True, From a0134042922b7d1a128150e1be0d7213c1e9e138 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 14:00:29 +0100 Subject: [PATCH 070/205] changelog --- changelog.d/3641.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3641.bugfix diff --git a/changelog.d/3641.bugfix b/changelog.d/3641.bugfix new file mode 100644 index 0000000000..02181975c0 --- /dev/null +++ b/changelog.d/3641.bugfix @@ -0,0 +1 @@ +Fix a potential event disclosure issue \ No newline at end of file From 0bf5ec0db700f189ba36360ea8424d9761658905 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 15:03:27 +0100 Subject: [PATCH 071/205] Check room visibility for /event/ requests Make sure that the user has permission to view the requested event for /event/{eventId} and /room/{roomId}/event/{eventId} requests. Also check that the event is in the given room for /room/{roomId}/event/{eventId}, for sanity. --- synapse/handlers/events.py | 25 +++++++++++++++++++++---- synapse/rest/client/v1/events.py | 2 +- synapse/rest/client/v1/room.py | 2 +- 3 files changed, 23 insertions(+), 6 deletions(-) diff --git a/synapse/handlers/events.py b/synapse/handlers/events.py index c3f2d7feff..f772e62c28 100644 --- a/synapse/handlers/events.py +++ b/synapse/handlers/events.py @@ -19,10 +19,12 @@ import random from twisted.internet import defer from synapse.api.constants import EventTypes, Membership +from synapse.api.errors import AuthError from synapse.events import EventBase from synapse.events.utils import serialize_event from synapse.types import UserID from synapse.util.logutils import log_function +from synapse.visibility import filter_events_for_client from ._base import BaseHandler @@ -129,11 +131,13 @@ class EventStreamHandler(BaseHandler): class EventHandler(BaseHandler): @defer.inlineCallbacks - def get_event(self, user, event_id): + def get_event(self, user, room_id, event_id): """Retrieve a single specified event. Args: user (synapse.types.UserID): The user requesting the event + room_id (str|None): The expected room id. We'll return None if the + event's room does not match. event_id (str): The event ID to obtain. Returns: dict: An event, or None if there is no event matching this ID. Raises: SynapseError if there was a problem retrieving this event, or AuthError if the user does not have the rights to inspect this event.
""" - event = yield self.store.get_event(event_id) + event = yield self.store.get_event(event_id, check_room_id=room_id) if not event: defer.returnValue(None) return - if hasattr(event, "room_id"): - yield self.auth.check_joined_room(event.room_id, user.to_string()) + users = yield self.store.get_users_in_room(event.room_id) + is_peeking = user.to_string() not in users + + filtered = yield filter_events_for_client( + self.store, + user.to_string(), + [event], + is_peeking=is_peeking + ) + + if not filtered: + raise AuthError( + 403, + "You don't have permission to access that event." + ) defer.returnValue(event) diff --git a/synapse/rest/client/v1/events.py b/synapse/rest/client/v1/events.py index b70c9c2806..0f3a2e8b51 100644 --- a/synapse/rest/client/v1/events.py +++ b/synapse/rest/client/v1/events.py @@ -88,7 +88,7 @@ class EventRestServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, event_id): requester = yield self.auth.get_user_by_req(request) - event = yield self.event_handler.get_event(requester.user, event_id) + event = yield self.event_handler.get_event(requester.user, None, event_id) time_now = self.clock.time_msec() if event: diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 3d62447854..2a679ac830 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -508,7 +508,7 @@ class RoomEventServlet(ClientV1RestServlet): @defer.inlineCallbacks def on_GET(self, request, room_id, event_id): requester = yield self.auth.get_user_by_req(request) - event = yield self.event_handler.get_event(requester.user, event_id) + event = yield self.event_handler.get_event(requester.user, room_id, event_id) time_now = self.clock.time_msec() if event: From 8cefc690c9206c8f0936b4cd6a5b0823d9c7beb6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 15:11:19 +0100 Subject: [PATCH 072/205] changelogs --- changelog.d/3641.bugfix | 2 +- changelog.d/3642.bugfix | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3642.bugfix diff --git a/changelog.d/3641.bugfix b/changelog.d/3641.bugfix index 02181975c0..149349baa2 100644 --- a/changelog.d/3641.bugfix +++ b/changelog.d/3641.bugfix @@ -1 +1 @@ -Fix a potential event disclosure issue \ No newline at end of file +Fix a potential issue where servers could request events for rooms they have not joined. diff --git a/changelog.d/3642.bugfix b/changelog.d/3642.bugfix new file mode 100644 index 0000000000..e2e9b209de --- /dev/null +++ b/changelog.d/3642.bugfix @@ -0,0 +1 @@ +Fix a potential issue where users could see events in private joins before they joined From 14a4e7d5a4e3c90195a907aa9677c058f2932cdb Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 15:31:04 +0100 Subject: [PATCH 073/205] Prepare 0.33.1 --- CHANGES.rst | 10 ++++++++++ changelog.d/3641.bugfix | 1 - changelog.d/3642.bugfix | 1 - synapse/__init__.py | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/3641.bugfix delete mode 100644 changelog.d/3642.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index da6e0eb195..dff195be6e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,13 @@ +Synapse 0.33.1 (2018-08-02) +=========================== + +Bugfixes +-------- + +- Fix a potential issue where servers could request events for rooms they have not joined. 
(`#3641 `_) +- Fix a potential issue where users could see events in private joins before they joined (`#3642 `_) + + Synapse 0.33.0 (2018-07-19) =========================== diff --git a/changelog.d/3641.bugfix b/changelog.d/3641.bugfix deleted file mode 100644 index 149349baa2..0000000000 --- a/changelog.d/3641.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a potential issue where servers could request events for rooms they have not joined. diff --git a/changelog.d/3642.bugfix b/changelog.d/3642.bugfix deleted file mode 100644 index e2e9b209de..0000000000 --- a/changelog.d/3642.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a potential issue where users could see events in private joins before they joined diff --git a/synapse/__init__.py b/synapse/__init__.py index 5c0f2f83aa..1810cb6fcd 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.0" +__version__ = "0.33.1" From db1f33fb363e827d0b9225a9544cb512bbfa2886 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 15:33:53 +0100 Subject: [PATCH 074/205] fix changelog typos --- CHANGES.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index dff195be6e..60be8e7b5d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,7 +5,7 @@ Bugfixes -------- - Fix a potential issue where servers could request events for rooms they have not joined. (`#3641 `_) -- Fix a potential issue where users could see events in private joins before they joined (`#3642 `_) +- Fix a potential issue where users could see events in private rooms before they joined. (`#3642 `_) Synapse 0.33.0 (2018-07-19) From c2a83349f026c964302c6ad50a402c4cd664367f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 2 Aug 2018 15:35:42 +0100 Subject: [PATCH 075/205] changelog: this is a security release --- CHANGES.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 60be8e7b5d..a1cc88fe21 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,8 +1,8 @@ Synapse 0.33.1 (2018-08-02) =========================== -Bugfixes --------- +SECURITY FIXES +-------------- - Fix a potential issue where servers could request events for rooms they have not joined. (`#3641 `_) - Fix a potential issue where users could see events in private rooms before they joined. 
(`#3642 `_) From 9180061b49907da363f2e3bfa0061f913d7ccf7a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 15:55:29 +0100 Subject: [PATCH 076/205] remove unused count_monthly_users --- synapse/storage/__init__.py | 25 -------------- tests/storage/test__init__.py | 65 ----------------------------------- 2 files changed, 90 deletions(-) delete mode 100644 tests/storage/test__init__.py diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 2120f46ed5..23b4a8d76d 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -268,31 +268,6 @@ class DataStore(RoomMemberStore, RoomStore, return self.runInteraction("count_users", _count_users) - def count_monthly_users(self): - """Counts the number of users who used this homeserver in the last 30 days - - This method should be refactored with count_daily_users - the only - reason not to is waiting on definition of mau - - Returns: - Defered[int] - """ - def _count_monthly_users(txn): - thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM user_ips - WHERE last_seen > ? - GROUP BY user_id - ) u - """ - - txn.execute(sql, (thirty_days_ago,)) - count, = txn.fetchone() - return count - - return self.runInteraction("count_monthly_users", _count_monthly_users) - def count_r30_users(self): """ Counts the number of 30 day retained users, defined as:- diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py deleted file mode 100644 index f19cb1265c..0000000000 --- a/tests/storage/test__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from twisted.internet import defer - -import tests.utils - - -class InitTestCase(tests.unittest.TestCase): - def __init__(self, *args, **kwargs): - super(InitTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.DataStore - - @defer.inlineCallbacks - def setUp(self): - hs = yield tests.utils.setup_test_homeserver() - - hs.config.max_mau_value = 50 - hs.config.limit_usage_by_mau = True - self.store = hs.get_datastore() - self.clock = hs.get_clock() - - @defer.inlineCallbacks - def test_count_monthly_users(self): - count = yield self.store.count_monthly_users() - self.assertEqual(0, count) - - yield self._insert_user_ips("@user:server1") - yield self._insert_user_ips("@user:server2") - - count = yield self.store.count_monthly_users() - self.assertEqual(2, count) - - @defer.inlineCallbacks - def _insert_user_ips(self, user): - """ - Helper function to populate user_ips without using batch insertion infra - args: - user (str): specify username i.e. 
@user:server.com - """ - yield self.store._simple_upsert( - table="user_ips", - keyvalues={ - "user_id": user, - "access_token": "access_token", - "ip": "ip", - "user_agent": "user_agent", - "device_id": "device_id", - }, - values={ - "last_seen": self.clock.time_msec(), - } - ) From 74b1d46ad9ae692774f2e9d71cbbe1cea91b4070 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 16:57:35 +0100 Subject: [PATCH 077/205] do mau checks based on monthly_active_users table --- synapse/api/auth.py | 13 ++++++ synapse/handlers/auth.py | 10 ++--- synapse/handlers/register.py | 10 ++--- synapse/storage/client_ips.py | 15 +++---- tests/api/test_auth.py | 31 +++++++++++++- tests/handlers/test_auth.py | 8 ++-- tests/handlers/test_register.py | 71 ++++++++++++++++----------------- 7 files changed, 97 insertions(+), 61 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index d8022bcf8e..943a488339 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -773,3 +773,16 @@ class Auth(object): raise AuthError( 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN ) + + @defer.inlineCallbacks + def check_auth_blocking(self, error): + """Checks if the user should be rejected for some external reason, + such as monthly active user limiting or global disable flag + Args: + error (Error): The error that should be raised if user is to be + blocked + """ + if self.hs.config.limit_usage_by_mau is True: + current_mau = yield self.store.get_monthly_active_count() + if current_mau >= self.hs.config.max_mau_value: + raise error diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 184eef09d0..8f9cff92e8 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -913,12 +913,10 @@ class AuthHandler(BaseHandler): Ensure that if mau blocking is enabled that invalid users cannot log in. 
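The shape of this refactoring is worth spelling out: each caller constructs the error it wants raised (an AuthError for login, a RegistrationError for registration), and a single check_auth_blocking owns the monthly-active-user comparison. Below is a hedged, synchronous sketch of that pattern; the real method is a Twisted inlineCallbacks coroutine, and the config and store objects here are invented stand-ins for hs.config and the datastore.

```
# Synchronous sketch of the check_auth_blocking() pattern, with stand-ins.

class MauLimitError(Exception):
    """Stand-in for the AuthError/RegistrationError the callers pass in."""


class FakeConfig(object):
    limit_usage_by_mau = True
    max_mau_value = 2


class FakeStore(object):
    def __init__(self, current_mau):
        self.current_mau = current_mau

    def get_monthly_active_count(self):
        return self.current_mau


def check_auth_blocking(config, store, error):
    # The caller decides what "blocked" means by passing in the exception
    # to raise; this function only owns the threshold check.
    if config.limit_usage_by_mau:
        if store.get_monthly_active_count() >= config.max_mau_value:
            raise error


check_auth_blocking(FakeConfig(), FakeStore(1), MauLimitError())  # allowed
try:
    check_auth_blocking(FakeConfig(), FakeStore(2), MauLimitError())
except MauLimitError:
    pass  # at the cap: the login or registration is rejected
```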
""" - if self.hs.config.limit_usage_by_mau is True: - current_mau = yield self.store.count_monthly_users() - if current_mau >= self.hs.config.max_mau_value: - raise AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + error = AuthError( + 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + yield self.auth.check_auth_blocking(error) @attr.s diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 289704b241..706ed8c292 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -540,9 +540,7 @@ class RegistrationHandler(BaseHandler): Do not accept registrations if monthly active user limits exceeded and limiting is enabled """ - if self.hs.config.limit_usage_by_mau is True: - current_mau = yield self.store.count_monthly_users() - if current_mau >= self.hs.config.max_mau_value: - raise RegistrationError( - 403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED - ) + error = RegistrationError( + 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + yield self.auth.check_auth_blocking(error) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 506915a1ef..83d64d1563 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -97,21 +97,22 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): @defer.inlineCallbacks def _populate_monthly_active_users(self, user_id): + """Checks on the state of monthly active user limits and optionally + add the user to the monthly active tables + + Args: + user_id(str): the user_id to query + """ + store = self.hs.get_datastore() - print "entering _populate_monthly_active_users" if self.hs.config.limit_usage_by_mau: - print "self.hs.config.limit_usage_by_mau is TRUE" is_user_monthly_active = yield store.is_user_monthly_active(user_id) - print "is_user_monthly_active is %r" % is_user_monthly_active if is_user_monthly_active: yield store.upsert_monthly_active_user(user_id) else: count = yield store.get_monthly_active_count() - print "count is %d" % count if count < self.hs.config.max_mau_value: - print "count is less than self.hs.config.max_mau_value " - res = yield store.upsert_monthly_active_user(user_id) - print "upsert response is %r" % res + yield store.upsert_monthly_active_user(user_id) def _update_client_ips_batch(self): def update(): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a82d737e71..54bdf28663 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ from twisted.internet import defer import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError +from synapse.api.errors import AuthError, Codes from synapse.types import UserID from tests import unittest @@ -444,3 +444,32 @@ class AuthTestCase(unittest.TestCase): self.assertEqual("Guest access token used for regular user", cm.exception.msg) self.store.get_user_by_id.assert_called_with(USER_ID) + + @defer.inlineCallbacks + def test_blocking_mau(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.max_mau_value = 50 + lots_of_users = 100 + small_number_of_users = 1 + + error = AuthError( + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + + # Ensure no error thrown + yield self.auth.check_auth_blocking(error) + + self.hs.config.limit_usage_by_mau = True + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(lots_of_users) + ) + + with self.assertRaises(AuthError): + yield 
self.auth.check_auth_blocking(error) + + # Ensure does not throw an error + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(small_number_of_users) + ) + yield self.auth.check_auth_blocking(error) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 55eab9e9cf..8a9bf2d5fd 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -132,14 +132,14 @@ class AuthTestCase(unittest.TestCase): @defer.inlineCallbacks def test_mau_limits_exceeded(self): self.hs.config.limit_usage_by_mau = True - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) ) with self.assertRaises(AuthError): yield self.auth_handler.get_access_token_for_user_id('user_a') - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) ) with self.assertRaises(AuthError): @@ -151,13 +151,13 @@ class AuthTestCase(unittest.TestCase): def test_mau_limits_not_exceeded(self): self.hs.config.limit_usage_by_mau = True - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.small_number_of_users) ) # Ensure does not raise exception yield self.auth_handler.get_access_token_for_user_id('user_a') - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.small_number_of_users) ) yield self.auth_handler.validate_short_term_login_token_and_get_user_id( diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 0937d71cf6..6b5b8b3772 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -50,6 +50,10 @@ class RegistrationTestCase(unittest.TestCase): self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator) self.hs.handlers = RegistrationHandlers(self.hs) self.handler = self.hs.get_handlers().registration_handler + self.store = self.hs.get_datastore() + self.hs.config.max_mau_value = 50 + self.lots_of_users = 100 + self.small_number_of_users = 1 @defer.inlineCallbacks def test_user_is_created_and_logged_in_if_doesnt_exist(self): @@ -80,51 +84,44 @@ class RegistrationTestCase(unittest.TestCase): self.assertEquals(result_token, 'secret') @defer.inlineCallbacks - def test_cannot_register_when_mau_limits_exceeded(self): - local_part = "someone" - display_name = "someone" - requester = create_requester("@as:test") - store = self.hs.get_datastore() + def test_mau_limits_when_disabled(self): self.hs.config.limit_usage_by_mau = False - self.hs.config.max_mau_value = 50 - lots_of_users = 100 - small_number_users = 1 - - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) - # Ensure does not throw exception - yield self.handler.get_or_create_user(requester, 'a', display_name) + yield self.handler.get_or_create_user("requester", 'a', "display_name") + @defer.inlineCallbacks + def test_get_or_create_user_mau_not_blocked(self): self.hs.config.limit_usage_by_mau = True - - with self.assertRaises(RegistrationError): - yield self.handler.get_or_create_user(requester, 'b', display_name) - - store.count_monthly_users = Mock(return_value=defer.succeed(small_number_users)) - - self._macaroon_mock_generator("another_secret") - + self.store.count_monthly_users = Mock( + 
return_value=defer.succeed(self.small_number_of_users) + ) # Ensure does not throw exception - yield self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil") + yield self.handler.get_or_create_user("@user:server", 'c', "User") - self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + @defer.inlineCallbacks + def test_get_or_create_user_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) with self.assertRaises(RegistrationError): - yield self.handler.register(localpart=local_part) - - self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + yield self.handler.get_or_create_user("requester", 'b', "display_name") + @defer.inlineCallbacks + def test_register_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) with self.assertRaises(RegistrationError): - yield self.handler.register_saml2(local_part) + yield self.handler.register(localpart="local_part") - def _macaroon_mock_generator(self, secret): - """ - Reset macaroon generator in the case where the test creates multiple users - """ - macaroon_generator = Mock( - generate_access_token=Mock(return_value=secret)) - self.hs.get_macaroon_generator = Mock(return_value=macaroon_generator) - self.hs.handlers = RegistrationHandlers(self.hs) - self.handler = self.hs.get_handlers().registration_handler + @defer.inlineCallbacks + def test_register_saml2_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) + with self.assertRaises(RegistrationError): + yield self.handler.register_saml2(localpart="local_part") From 0d25724419c43313f9b8345b07aaebe3eeeeee7e Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Mon, 21 May 2018 14:45:33 +0100 Subject: [PATCH 078/205] Refactor docker locations and README. This addresses #3224 --- README.rst | 2 +- contrib/docker/README.md | 110 +--------------- Dockerfile => docker/Dockerfile | 2 +- docker/README.md | 122 ++++++++++++++++++ .../docker => docker}/conf/homeserver.yaml | 0 {contrib/docker => docker}/conf/log.config | 0 {contrib/docker => docker}/start.py | 0 7 files changed, 126 insertions(+), 110 deletions(-) rename Dockerfile => docker/Dockerfile (90%) create mode 100644 docker/README.md rename {contrib/docker => docker}/conf/homeserver.yaml (100%) rename {contrib/docker => docker}/conf/log.config (100%) rename {contrib/docker => docker}/start.py (100%) diff --git a/README.rst b/README.rst index 5fdfad345f..90bd49c9a1 100644 --- a/README.rst +++ b/README.rst @@ -157,7 +157,7 @@ if you prefer. In case of problems, please see the _`Troubleshooting` section below. -There is an offical synapse image available at https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with the docker-compose file available at `contrib/docker`. Further information on this including configuration options is available in `contrib/docker/README.md`. +There is an offical synapse image available at https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with the docker-compose file available at `contrib/docker`. 
Further information on this including configuration options is available in the README on hub.docker.com. Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a Dockerfile to automate a synapse server in a single Docker image, at https://hub.docker.com/r/avhost/docker-matrix/tags/ diff --git a/contrib/docker/README.md b/contrib/docker/README.md index 562cdaac2b..10b15b0407 100644 --- a/contrib/docker/README.md +++ b/contrib/docker/README.md @@ -1,23 +1,5 @@ # Synapse Docker -The `matrixdotorg/synapse` Docker image will run Synapse as a single process. It does not provide a -database server or a TURN server, you should run these separately. - -If you run a Postgres server, you should simply include it in the same Compose -project or set the proper environment variables and the image will automatically -use that server. - -## Build - -Build the docker image with the `docker-compose build` command. - -You may have a local Python wheel cache available, in which case copy the relevant packages in the ``cache/`` directory at the root of the project. - -## Run - -This image is designed to run either with an automatically generated configuration -file or with a custom configuration that requires manual edition. - ### Automated configuration It is recommended that you use Docker Compose to run your containers, including @@ -54,94 +36,6 @@ Then, customize your configuration and run the server: docker-compose up -d ``` -### Without Compose +### More information -If you do not wish to use Compose, you may still run this image using plain -Docker commands. Note that the following is just a guideline and you may need -to add parameters to the docker run command to account for the network situation -with your postgres database. - -``` -docker run \ - -d \ - --name synapse \ - -v ${DATA_PATH}:/data \ - -e SYNAPSE_SERVER_NAME=my.matrix.host \ - -e SYNAPSE_REPORT_STATS=yes \ - docker.io/matrixdotorg/synapse:latest -``` - -## Volumes - -The image expects a single volume, located at ``/data``, that will hold: - -* temporary files during uploads; -* uploaded media and thumbnails; -* the SQLite database if you do not configure postgres; -* the appservices configuration. - -You are free to use separate volumes depending on storage endpoints at your -disposal. For instance, ``/data/media`` coud be stored on a large but low -performance hdd storage while other files could be stored on high performance -endpoints. - -In order to setup an application service, simply create an ``appservices`` -directory in the data volume and write the application service Yaml -configuration file there. Multiple application services are supported. - -## Environment - -Unless you specify a custom path for the configuration file, a very generic -file will be generated, based on the following environment settings. -These are a good starting point for setting up your own deployment. - -Global settings: - -* ``UID``, the user id Synapse will run as [default 991] -* ``GID``, the group id Synapse will run as [default 991] -* ``SYNAPSE_CONFIG_PATH``, path to a custom config file - -If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file -then customize it manually. No other environment variable is required. - -Otherwise, a dynamic configuration file will be used. The following environment -variables are available for configuration: - -* ``SYNAPSE_SERVER_NAME`` (mandatory), the current server public hostname. 
-* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous - statistics reporting back to the Matrix project which helps us to get funding. -* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if - you run your own TLS-capable reverse proxy). -* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on - the Synapse instance. -* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server. -* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`]. -* ``SYNAPSE_CACHE_FACTOR``, the cache factor [default `0.5`]. -* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public - key in order to enable recaptcha upon registration. -* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private - key in order to enable recaptcha upon registration. -* ``SYNAPSE_TURN_URIS``, set this variable to the coma-separated list of TURN - uris to enable TURN for this homeserver. -* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required. - -Shared secrets, that will be initialized to random values if not set: - -* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registrering users if - registration is disable. -* ``SYNAPSE_MACAROON_SECRET_KEY`` secret for signing access tokens - to the server. - -Database specific values (will use SQLite if not set): - -* `POSTGRES_DB` - The database name for the synapse postgres database. [default: `synapse`] -* `POSTGRES_HOST` - The host of the postgres database if you wish to use postgresql instead of sqlite3. [default: `db` which is useful when using a container on the same docker network in a compose file where the postgres service is called `db`] -* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If this is set then postgres will be used instead of sqlite3.** [default: none] **NOTE**: You are highly encouraged to use postgresql! Please use the compose file to make it easier to deploy. -* `POSTGRES_USER` - The user for the synapse postgres database. [default: `matrix`] - -Mail server specific values (will not send emails if not set): - -* ``SYNAPSE_SMTP_HOST``, hostname to the mail server. -* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default ``25``]. -* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if any. -* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail server if any. +For more information on required environment variables and mounts, see the main docker documentation at `docker/README.md` diff --git a/Dockerfile b/docker/Dockerfile similarity index 90% rename from Dockerfile rename to docker/Dockerfile index 0242be5f68..26fb3a6bff 100644 --- a/Dockerfile +++ b/docker/Dockerfile @@ -22,7 +22,7 @@ RUN cd /synapse \ setuptools \ && mkdir -p /synapse/cache \ && pip install -f /synapse/cache --upgrade --process-dependency-links . \ - && mv /synapse/contrib/docker/start.py /synapse/contrib/docker/conf / \ + && mv /synapse/docker/start.py /synapse/docker/conf / \ && rm -rf \ setup.cfg \ setup.py \ diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000000..f60ea49234 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,122 @@ +# Synapse Docker + +This Docker image will run Synapse as a single process. It does not provide a database +server or a TURN server, you should run these separately. + +## Run + +We do not currently offer a `latest` image, as this has somewhat undefined semantics. 
+We instead release only tagged versions so upgrading between releases is entirely +within your control. + +### Using docker-compose (easier) + +This image is designed to run either with an automatically generated configuration +file or with a custom configuration that requires manual edition. + +An easy way to make use of this image is via docker-compose, see the (https://github.com/matrix-org/synapse/tree/develop/contrib/docker)[contrib] section of the synapse project for examples. + +### Without Compose (harder) + +If you do not wish to use Compose, you may still run this image using plain +Docker commands. Note that the following is just a guideline and you may need +to add parameters to the docker run command to account for the network situation +with your postgres database. + +``` +docker run \ + -d \ + --name synapse \ + -v ${DATA_PATH}:/data \ + -e SYNAPSE_SERVER_NAME=my.matrix.host \ + -e SYNAPSE_REPORT_STATS=yes \ + docker.io/matrixdotorg/synapse:latest +``` + +## Volumes + +The image expects a single volume, located at ``/data``, that will hold: + +* temporary files during uploads; +* uploaded media and thumbnails; +* the SQLite database if you do not configure postgres; +* the appservices configuration. + +You are free to use separate volumes depending on storage endpoints at your +disposal. For instance, ``/data/media`` coud be stored on a large but low +performance hdd storage while other files could be stored on high performance +endpoints. + +In order to setup an application service, simply create an ``appservices`` +directory in the data volume and write the application service Yaml +configuration file there. Multiple application services are supported. + +## Environment + +Unless you specify a custom path for the configuration file, a very generic +file will be generated, based on the following environment settings. +These are a good starting point for setting up your own deployment. + +Global settings: + +* ``UID``, the user id Synapse will run as [default 991] +* ``GID``, the group id Synapse will run as [default 991] +* ``SYNAPSE_CONFIG_PATH``, path to a custom config file + +If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file +then customize it manually. No other environment variable is required. + +Otherwise, a dynamic configuration file will be used. The following environment +variables are available for configuration: + +* ``SYNAPSE_SERVER_NAME`` (mandatory), the current server public hostname. +* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous + statistics reporting back to the Matrix project which helps us to get funding. +* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if + you run your own TLS-capable reverse proxy). +* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on + the Synapse instance. +* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server. +* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`]. +* ``SYNAPSE_CACHE_FACTOR``, the cache factor [default `0.5`]. +* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public + key in order to enable recaptcha upon registration. +* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private + key in order to enable recaptcha upon registration. +* ``SYNAPSE_TURN_URIS``, set this variable to the coma-separated list of TURN + uris to enable TURN for this homeserver. 
+* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required. + +Shared secrets, that will be initialized to random values if not set: + +* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registrering users if + registration is disable. +* ``SYNAPSE_MACAROON_SECRET_KEY`` secret for signing access tokens + to the server. + +Database specific values (will use SQLite if not set): + +* `POSTGRES_DB` - The database name for the synapse postgres database. [default: `synapse`] +* `POSTGRES_HOST` - The host of the postgres database if you wish to use postgresql instead of sqlite3. [default: `db` which is useful when using a container on the same docker network in a compose file where the postgres service is called `db`] +* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If this is set then postgres will be used instead of sqlite3.** [default: none] **NOTE**: You are highly encouraged to use postgresql! Please use the compose file to make it easier to deploy. +* `POSTGRES_USER` - The user for the synapse postgres database. [default: `matrix`] + +Mail server specific values (will not send emails if not set): + +* ``SYNAPSE_SMTP_HOST``, hostname to the mail server. +* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default ``25``]. +* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if any. +* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail server if any. + +## Build + +Build the docker image with the `docker build` command from the root of the synapse repository. + +``` +docker build -t docker.io/matrixdotorg/synapse . -f docker/Dockerfile +``` + +The `-t` option sets the image tag. Official images are tagged `matrixdotorg/synapse:<version>` where `<version>` is the same as the release tag in the synapse git repository. + +You may have a local Python wheel cache available, in which case copy the relevant +packages in the ``cache/`` directory at the root of the project. diff --git a/contrib/docker/conf/homeserver.yaml b/docker/conf/homeserver.yaml similarity index 100% rename from contrib/docker/conf/homeserver.yaml rename to docker/conf/homeserver.yaml diff --git a/contrib/docker/conf/log.config b/docker/conf/log.config similarity index 100% rename from contrib/docker/conf/log.config rename to docker/conf/log.config diff --git a/contrib/docker/start.py b/docker/start.py similarity index 100% rename from contrib/docker/start.py rename to docker/start.py From 26a37f3d4dcd2257005d9afe1d90af470198e111 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 8 Jun 2018 13:35:07 +0100 Subject: [PATCH 079/205] Do not include docker files in python build --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 7076b608d4..1ff98d95df 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -35,3 +35,4 @@ recursive-include changelog.d * prune .github prune demo/etc +prune docker From 1758f4e1c793cd5f219469c9d5c340eea071b6c8 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 8 Jun 2018 13:37:39 +0100 Subject: [PATCH 080/205] Address SPAG issues --- docker/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker/README.md b/docker/README.md index f60ea49234..8303a7fecd 100644 --- a/docker/README.md +++ b/docker/README.md @@ -12,9 +12,11 @@ within your control.
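The environment variables documented in the new docker/README.md above are consumed by the image's start.py entrypoint. As a rough illustration only: the variable names and defaults below come from the README, but the function itself is invented for this sketch and is not the real start.py, which does considerably more (templating the config, generating secrets, and starting Synapse).

```
# Illustrative sketch of turning SYNAPSE_* variables into settings.

MANDATORY = ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS")


def collect_settings(environ):
    missing = [name for name in MANDATORY if name not in environ]
    if missing:
        raise RuntimeError("%s is mandatory" % ", ".join(missing))

    settings = {
        "server_name": environ["SYNAPSE_SERVER_NAME"],
        "report_stats": environ["SYNAPSE_REPORT_STATS"] == "yes",
        # Optional settings fall back to the documented defaults.
        "event_cache_size": environ.get("SYNAPSE_EVENT_CACHE_SIZE", "10K"),
        "postgres_host": environ.get("POSTGRES_HOST", "db"),
    }
    turn_uris = environ.get("SYNAPSE_TURN_URIS")
    settings["turn_uris"] = turn_uris.split(",") if turn_uris else []
    return settings


print(collect_settings({
    "SYNAPSE_SERVER_NAME": "my.matrix.host",
    "SYNAPSE_REPORT_STATS": "yes",
}))
```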
### Using docker-compose (easier) This image is designed to run either with an automatically generated configuration -file or with a custom configuration that requires manual edition. +file or with a custom configuration that requires manual editing. -An easy way to make use of this image is via docker-compose, see the (https://github.com/matrix-org/synapse/tree/develop/contrib/docker)[contrib] section of the synapse project for examples. +An easy way to make use of this image is via docker-compose. See the +(https://github.com/matrix-org/synapse/tree/develop/contrib/docker)[contrib] +section of the synapse project for examples. ### Without Compose (harder) From 489949879ede04f104852f11993c734e9109cd16 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Thu, 2 Aug 2018 18:49:50 +0100 Subject: [PATCH 081/205] Add news entry --- changelog.d/3644.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3644.misc diff --git a/changelog.d/3644.misc b/changelog.d/3644.misc new file mode 100644 index 0000000000..2347fc8500 --- /dev/null +++ b/changelog.d/3644.misc @@ -0,0 +1 @@ +Refactor location of docker build script. From 4c4a123bd0ab21b4e3df008c21dc8727e6230dba Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Thu, 2 Aug 2018 18:53:44 +0100 Subject: [PATCH 082/205] Mention the word "newsfragment" in CONTRIBUTING.rst --- CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 6c295cfbfe..aa2738eea3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -51,7 +51,7 @@ makes it horribly hard to review otherwise. Changelog ~~~~~~~~~ -All changes, even minor ones, need a corresponding changelog +All changes, even minor ones, need a corresponding changelog / newsfragment entry. These are managed by Towncrier (https://github.com/hawkowl/towncrier). From 70baa61e95ae09cc920e79bf4a9b23f278ab9f33 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Thu, 2 Aug 2018 18:58:43 +0100 Subject: [PATCH 083/205] newsfragment --- changelog.d/3645.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3645.misc diff --git a/changelog.d/3645.misc b/changelog.d/3645.misc new file mode 100644 index 0000000000..0fe6b28da1 --- /dev/null +++ b/changelog.d/3645.misc @@ -0,0 +1 @@ +Update CONTRIBUTING to mention newsfragments. From 4ecb4bdac963ae967da810de6134d85b50c999f9 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 22:41:05 +0100 Subject: [PATCH 084/205] wip attempt at caching --- synapse/storage/monthly_active_users.py | 56 +++++++++++++++++++++---- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index aada9bd2b9..19846cefd9 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -1,4 +1,6 @@ from twisted.internet import defer +from synapse.util.caches.descriptors import cachedInlineCallbacks +from synapse.storage.engines import PostgresEngine, Sqlite3Engine from ._base import SQLBaseStore @@ -20,17 +22,53 @@ class MonthlyActiveUsersStore(SQLBaseStore): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? 
- """ - txn.execute(sql, (self.max_mau_value,)) + + if isinstance(self.database_engine, PostgresEngine): + sql = """ + DELETE FROM monthly_active_users + WHERE timestamp < ? + RETURNING user_id + """ + txn.execute(sql, (thirty_days_ago,)) + res = txn.fetchall() + for r in res: + self.is_user_monthly_active.invalidate(r) + + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + RETURNING user_id + """ + txn.execute(sql, (self.max_mau_value,)) + res = txn.fetchall() + for r in res: + self.is_user_monthly_active.invalidate(r) + print r + self.get_monthly_active_count.invalidate() + elif isinstance(self.database_engine, Sqlite3Engine): + sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" + + txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.max_mau_value,)) + + # It seems poor to invalidate the whole cache, but the alternative + # is to select then delete which has it's own problem. + # It seems unlikely that anyone using this feature on large datasets + # would be using sqlite and if they are then there will be + # larger perf issues than this one to encourage an upgrade to postgres. + + self.is_user_monthly_active.invalidate_all() + self.get_monthly_active_count.invalidate_all() return self.runInteraction("reap_monthly_active_users", _reap_users) + @cachedInlineCallbacks(num_args=0) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs @@ -65,7 +103,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): lock=False, ) - @defer.inlineCallbacks + @cachedInlineCallbacks(num_args=1) def is_user_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group From 5e2f7b80844bc8ee401beab494948c093338f404 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 22:47:48 +0100 Subject: [PATCH 085/205] typo --- synapse/storage/monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 19846cefd9..2872ba4cae 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -58,7 +58,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): txn.execute(sql, (self.max_mau_value,)) # It seems poor to invalidate the whole cache, but the alternative - # is to select then delete which has it's own problem. + # is to select then delete which has its own problems. # It seems unlikely that anyone using this feature on large datasets # would be using sqlite and if they are then there will be # larger perf issues than this one to encourage an upgrade to postgres. 
From c0affa7b4ff1e0775a8a5e6d704596b5383112a5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 23:03:01 +0100 Subject: [PATCH 086/205] update generate_monthly_active_users, and reap_monthly_active_users --- synapse/app/homeserver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5f0ca51ac7..7d4ea493bc 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -520,14 +520,14 @@ def run(hs): # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) clock.looping_call( - MonthlyActiveUsersStore(hs).reap_monthly_active_users, 1000 * 60 * 60 + hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) @defer.inlineCallbacks def generate_monthly_active_users(): count = 0 if hs.config.limit_usage_by_mau: - count = yield hs.get_datastore().count_monthly_users() + count = yield hs.get_datastore().get_monthly_active_count() current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) From feacd13932b848546f75a8f646546c3215d35316 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 3 Aug 2018 09:10:41 +0100 Subject: [PATCH 087/205] Update README.rst wrap at 80ish --- README.rst | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 90bd49c9a1..1d602adc7e 100644 --- a/README.rst +++ b/README.rst @@ -157,12 +157,19 @@ if you prefer. In case of problems, please see the _`Troubleshooting` section below. -There is an offical synapse image available at https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with the docker-compose file available at `contrib/docker`. Further information on this including configuration options is available in the README on hub.docker.com. +There is an offical synapse image available at +https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with +the docker-compose file available at `contrib/docker`. Further information on +this including configuration options is available in the README on +hub.docker.com. -Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a Dockerfile to automate a synapse server in a single Docker image, at https://hub.docker.com/r/avhost/docker-matrix/tags/ +Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a +Dockerfile to automate a synapse server in a single Docker image, at +https://hub.docker.com/r/avhost/docker-matrix/tags/ Also, Martin Giess has created an auto-deployment process with vagrant/ansible, -tested with VirtualBox/AWS/DigitalOcean - see https://github.com/EMnify/matrix-synapse-auto-deploy +tested with VirtualBox/AWS/DigitalOcean - see +https://github.com/EMnify/matrix-synapse-auto-deploy for details. Configuring synapse From 637b11b9edd7de93aeabb3688ab2dda6c58015a3 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 3 Aug 2018 09:13:54 +0100 Subject: [PATCH 088/205] Update README.rst Link to contrib/docker --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 1d602adc7e..4c5971d043 100644 --- a/README.rst +++ b/README.rst @@ -159,7 +159,7 @@ In case of problems, please see the _`Troubleshooting` section below. 
There is an offical synapse image available at https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with -the docker-compose file available at `contrib/docker`. Further information on +the docker-compose file available at `contrib/docker `_. Further information on this including configuration options is available in the README on hub.docker.com. From c3f596180f19299aee6735cb9c91e3f61d073ccc Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 3 Aug 2018 09:15:19 +0100 Subject: [PATCH 089/205] Update README.md Link to docker/README.md --- contrib/docker/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/docker/README.md b/contrib/docker/README.md index 10b15b0407..05254e5192 100644 --- a/contrib/docker/README.md +++ b/contrib/docker/README.md @@ -38,4 +38,4 @@ docker-compose up -d ### More information -For more information on required environment variables and mounts, see the main docker documentation at `docker/README.md` +For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md) From 42960aa0475edc59f83d6a47c423d9e5f12e9417 Mon Sep 17 00:00:00 2001 From: Michael Kaye <1917473+michaelkaye@users.noreply.github.com> Date: Fri, 3 Aug 2018 09:16:01 +0100 Subject: [PATCH 090/205] Update README.md Link to contrib/docker --- docker/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index 8303a7fecd..038c78f7c0 100644 --- a/docker/README.md +++ b/docker/README.md @@ -15,7 +15,7 @@ This image is designed to run either with an automatically generated configurati file or with a custom configuration that requires manual editing. An easy way to make use of this image is via docker-compose. See the -(https://github.com/matrix-org/synapse/tree/develop/contrib/docker)[contrib] +[contrib/docker](../contrib/docker) section of the synapse project for examples. 
### Without Compose (harder) From 950807d93a264b6d10ece386d227dc4069f7d0da Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 13:49:53 +0100 Subject: [PATCH 091/205] fix caching and tests --- synapse/app/homeserver.py | 1 - synapse/storage/monthly_active_users.py | 91 ++++++++++------------ tests/storage/test_monthly_active_users.py | 50 ++++++++---- 3 files changed, 80 insertions(+), 62 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 7d4ea493bc..3a67db8b30 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -64,7 +64,6 @@ from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.server import HomeServer from synapse.storage import are_all_users_on_domain from synapse.storage.engines import IncorrectDatabaseSetup, create_engine -from synapse.storage.monthly_active_users import MonthlyActiveUsersStore from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.httpresourcetree import create_resource_tree diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 2872ba4cae..d06c90cbcc 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -1,6 +1,21 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from twisted.internet import defer -from synapse.util.caches.descriptors import cachedInlineCallbacks -from synapse.storage.engines import PostgresEngine, Sqlite3Engine + +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from ._base import SQLBaseStore @@ -9,7 +24,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() - self.max_mau_value = hs.config.max_mau_value + self.hs = hs def reap_monthly_active_users(self): """ @@ -19,62 +34,41 @@ class MonthlyActiveUsersStore(SQLBaseStore): Defered() """ def _reap_users(txn): + thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - if isinstance(self.database_engine, PostgresEngine): - sql = """ - DELETE FROM monthly_active_users - WHERE timestamp < ? - RETURNING user_id - """ - txn.execute(sql, (thirty_days_ago,)) - res = txn.fetchall() - for r in res: - self.is_user_monthly_active.invalidate(r) + sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? - RETURNING user_id - """ - txn.execute(sql, (self.max_mau_value,)) - res = txn.fetchall() - for r in res: - self.is_user_monthly_active.invalidate(r) - print r - self.get_monthly_active_count.invalidate() - elif isinstance(self.database_engine, Sqlite3Engine): - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" 
+ txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.hs.config.max_mau_value,)) - txn.execute(sql, (thirty_days_ago,)) - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? - """ - txn.execute(sql, (self.max_mau_value,)) + res = self.runInteraction("reap_monthly_active_users", _reap_users) + # It seems poor to invalidate the whole cache, Postgres supports + # 'Returning' which would allow me to invalidate only the + # specific users, but sqlite has no way to do this and instead + # I would need to SELECT and the DELETE which without locking + # is racy. + # Have resolved to invalidate the whole cache for now and do + # something about it if and when the perf becomes significant + self.is_user_monthly_active.invalidate_all() + self.get_monthly_active_count.invalidate_all() + return res - # It seems poor to invalidate the whole cache, but the alternative - # is to select then delete which has its own problems. - # It seems unlikely that anyone using this feature on large datasets - # would be using sqlite and if they are then there will be - # larger perf issues than this one to encourage an upgrade to postgres. - - self.is_user_monthly_active.invalidate_all() - self.get_monthly_active_count.invalidate_all() - - return self.runInteraction("reap_monthly_active_users", _reap_users) - - @cachedInlineCallbacks(num_args=0) + @cached(num_args=0) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs Return: Defered(int): Number of current monthly active users """ + def _count_users(txn): sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" @@ -91,7 +85,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): Deferred(bool): True if a new entry was created, False if an existing one was updated. """ - return self._simple_upsert( + self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", keyvalues={ @@ -102,6 +96,8 @@ class MonthlyActiveUsersStore(SQLBaseStore): }, lock=False, ) + self.is_user_monthly_active.invalidate((user_id,)) + self.get_monthly_active_count.invalidate(()) @cachedInlineCallbacks(num_args=1) def is_user_monthly_active(self, user_id): @@ -120,5 +116,4 @@ class MonthlyActiveUsersStore(SQLBaseStore): retcol="user_id", desc="is_user_monthly_active", ) - defer.returnValue(bool(user_present)) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index d8d25a6069..c32109ecc5 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -1,6 +1,19 @@ -from twisted.internet import defer +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
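The reap query above (DELETE ... ORDER BY timestamp desc LIMIT -1 OFFSET ?) relies on sqlite accepting ORDER BY/LIMIT on DELETE, which needs a compile-time option that stock Python sqlite3 builds typically lack. Here is a self-contained sketch of the same keep-the-newest-N behaviour, rewritten with a subselect so it runs against the standard library's sqlite3; the table layout matches the patch, the cutoff value is a stand-in.

```
# Runnable sketch of the two-stage reap against an in-memory database.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE monthly_active_users (user_id TEXT, timestamp BIGINT)"
)
rows = [("@user%d:hs" % i, 1000 + i) for i in range(10)]
conn.executemany("INSERT INTO monthly_active_users VALUES (?, ?)", rows)

max_mau_value = 5
thirty_days_ago = 900  # stand-in cutoff; the real code derives it from the clock

# Stage 1: drop entries older than the window.
conn.execute(
    "DELETE FROM monthly_active_users WHERE timestamp < ?", (thirty_days_ago,)
)

# Stage 2: keep only the newest max_mau_value rows. "LIMIT -1 OFFSET ?" is
# the sqlite idiom for "everything past the first N rows of this ordering".
conn.execute(
    """
    DELETE FROM monthly_active_users WHERE user_id IN (
        SELECT user_id FROM monthly_active_users
        ORDER BY timestamp DESC LIMIT -1 OFFSET ?
    )
    """,
    (max_mau_value,),
)

count = conn.execute("SELECT count(*) FROM monthly_active_users").fetchone()[0]
assert count == max_mau_value
```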
-from synapse.storage.monthly_active_users import MonthlyActiveUsersStore +from twisted.internet import defer import tests.unittest import tests.utils @@ -10,20 +23,19 @@ from tests.utils import setup_test_homeserver class MonthlyActiveUsersTestCase(tests.unittest.TestCase): def __init__(self, *args, **kwargs): super(MonthlyActiveUsersTestCase, self).__init__(*args, **kwargs) - self.mau = None @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() - self.mau = MonthlyActiveUsersStore(None, hs) + self.hs = yield setup_test_homeserver() + self.store = self.hs.get_datastore() @defer.inlineCallbacks def test_can_insert_and_count_mau(self): - count = yield self.mau.get_monthly_active_count() + count = yield self.store.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.upsert_monthly_active_user("@user:server") - count = yield self.mau.get_monthly_active_count() + yield self.store.upsert_monthly_active_user("@user:server") + count = yield self.store.get_monthly_active_count() self.assertEqual(1, count) @@ -32,11 +44,23 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.mau.is_user_monthly_active(user_id1) + result = yield self.store.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.upsert_monthly_active_user(user_id1) - yield self.mau.upsert_monthly_active_user(user_id2) - result = yield self.mau.is_user_monthly_active(user_id1) + yield self.store.upsert_monthly_active_user(user_id1) + yield self.store.upsert_monthly_active_user(user_id2) + result = yield self.store.is_user_monthly_active(user_id1) self.assertTrue(result) - result = yield self.mau.is_user_monthly_active(user_id3) + result = yield self.store.is_user_monthly_active(user_id3) self.assertFalse(result) + + @defer.inlineCallbacks + def test_reap_monthly_active_users(self): + self.hs.config.max_mau_value = 5 + initial_users = 10 + for i in range(initial_users): + yield self.store.upsert_monthly_active_user("@user%d:server" % i) + count = yield self.store.get_monthly_active_count() + self.assertTrue(count, initial_users) + yield self.store.reap_monthly_active_users() + count = yield self.store.get_monthly_active_count() + self.assertTrue(count, initial_users - self.hs.config.max_mau_value) From da90337d8952999f6c2c7dbcbb006b46bce17020 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:05:20 +0100 Subject: [PATCH 092/205] Add ability to limit number of monthly active users on the server --- changelog.d/3633.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3633.feature diff --git a/changelog.d/3633.feature b/changelog.d/3633.feature new file mode 100644 index 0000000000..8007a04840 --- /dev/null +++ b/changelog.d/3633.feature @@ -0,0 +1 @@ +Add ability to limit number of monthly active users on the server From b2aab04d2c6021e6999fdc3c204db63d7c6f9334 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:12:56 +0100 Subject: [PATCH 093/205] fix py3 test failure --- tests/storage/test_client_ips.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index e1552510cc..0cd290176e 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -34,6 +34,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def test_insert_new_client_ip(self): + self.hs.config.max_mau_value = 50 
self.clock.now = 12345678 user_id = "@user:id" yield self.store.insert_client_ip( From 5593ff6773c7556b85514ee172c4b6c06898f35e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:59:17 +0100 Subject: [PATCH 094/205] fix (lots of) py3 test failures --- synapse/config/server.py | 4 ++-- tests/storage/test_client_ips.py | 1 - tests/utils.py | 1 + 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/config/server.py b/synapse/config/server.py index 6a471a0a5e..8fd2319759 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -69,12 +69,12 @@ class ServerConfig(Config): # Options to control access by tracking MAU self.limit_usage_by_mau = config.get("limit_usage_by_mau", False) + self.max_mau_value = 0 if self.limit_usage_by_mau: self.max_mau_value = config.get( "max_mau_value", 0, ) - else: - self.max_mau_value = 0 + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 0cd290176e..e1552510cc 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -34,7 +34,6 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def test_insert_new_client_ip(self): - self.hs.config.max_mau_value = 50 self.clock.now = 12345678 user_id = "@user:id" yield self.store.insert_client_ip( diff --git a/tests/utils.py b/tests/utils.py index 9bff3ff3b9..ec40428e74 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -73,6 +73,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.block_events_without_consent_error = None config.media_storage_providers = [] config.auto_join_rooms = [] + config.limit_usage_by_mau = False # disable user directory updates, because they get done in the # background, which upsets the test runner. From 15c1ae45e52c31d03f162c5b201f0ccafa032885 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 Aug 2018 16:04:29 +0100 Subject: [PATCH 095/205] Docstrings for BaseFederationServlet ... to save me reverse-engineering this stuff again. --- synapse/federation/transport/server.py | 47 ++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index eae5f2b427..93bd899b86 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -190,6 +190,41 @@ def _parse_auth_header(header_bytes): class BaseFederationServlet(object): + """Abstract base class for federation servlet classes. + + The servlet object should have a PATH attribute which takes the form of a regexp to + match against the request path (excluding the /federation/v1 prefix). + + The servlet should also implement one or more of on_GET, on_POST, on_PUT, to match + the appropriate HTTP method. These methods have the signature: + + on_<METHOD>(self, origin, content, query, **kwargs) + + With arguments: + + origin (unicode|None): The authenticated server_name of the calling server, + unless REQUIRE_AUTH is set to False and authentication failed. + + content (unicode|None): decoded json body of the request. None if the + request was a GET. + + query (dict[bytes, list[bytes]]): Query params from the request. url-decoded + (ie, '+' and '%xx' are decoded) but note that it is *not* utf8-decoded + yet.
From 0ca459ea334ff86016bda241c0c823178789c215 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 25 Jul 2018 22:10:39 +0100
Subject: [PATCH 096/205] Basic support for room versioning

This is the first tranche of support for room versioning. It includes:
 * setting the default room version in the config file
 * new room_version param on the createRoom API
 * storing the version of newly-created rooms in the m.room.create event
 * fishing the version of existing rooms out of the m.room.create event

---
 synapse/api/constants.py                    |  6 ++++
 synapse/api/errors.py                       |  2 ++
 synapse/config/server.py                    | 14 +++++++++
 synapse/handlers/room.py                    | 27 ++++++++++++++++-
 synapse/replication/slave/storage/events.py |  2 +-
 synapse/storage/state.py                    | 33 +++++++++++++++++++--
 tests/utils.py                              |  4 +++
 7 files changed, 83 insertions(+), 5 deletions(-)

diff --git a/synapse/api/constants.py b/synapse/api/constants.py
index 4df930c8d1..a27bf3b32d 100644
--- a/synapse/api/constants.py
+++ b/synapse/api/constants.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2017 Vector Creations Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -94,3 +95,8 @@ class RoomCreationPreset(object):
 class ThirdPartyEntityKind(object):
     USER = "user"
     LOCATION = "location"
+
+
+# vdh-test-version is a placeholder to get room versioning support working and tested
+# until we have a working v2.
+KNOWN_ROOM_VERSIONS = {"1", "vdh-test-version"}
diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index b41d595059..477ca07a24 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -56,6 +57,7 @@ class Codes(object): CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" + UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION" class CodeMessageException(RuntimeError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 6a471a0a5e..68ef2789d3 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -16,6 +16,7 @@ import logging +from synapse.api.constants import KNOWN_ROOM_VERSIONS from synapse.http.endpoint import parse_and_validate_server_name from ._base import Config, ConfigError @@ -75,6 +76,16 @@ class ServerConfig(Config): ) else: self.max_mau_value = 0 + + # the version of rooms created by default on this server + self.default_room_version = str(config.get( + "default_room_version", "1", + )) + if self.default_room_version not in KNOWN_ROOM_VERSIONS: + raise ConfigError("Unrecognised value '%s' for default_room_version" % ( + self.default_room_version, + )) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( @@ -249,6 +260,9 @@ class ServerConfig(Config): # (except those sent by local server admins). The default is False. # block_non_admin_invites: True + # The room_version of rooms which are created by default by this server. + # default_room_version: 1 + # Restrict federation to the following whitelist of domains. # N.B. we recommend also firewalling your federation listener to limit # inbound federation traffic as early as possible, rather than relying diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 7b7804d9b2..a526b684e9 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -21,9 +21,16 @@ import math import string from collections import OrderedDict +from six import string_types + from twisted.internet import defer -from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + JoinRules, + RoomCreationPreset, +) from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID from synapse.util import stringutils @@ -99,6 +106,21 @@ class RoomCreationHandler(BaseHandler): if ratelimit: yield self.ratelimit(requester) + room_version = config.get("room_version", self.hs.config.default_room_version) + if not isinstance(room_version, string_types): + raise SynapseError( + 400, + "room_version must be a string", + Codes.BAD_JSON, + ) + + if room_version not in KNOWN_ROOM_VERSIONS: + raise SynapseError( + 400, + "Your homeserver does not support this room version", + Codes.UNSUPPORTED_ROOM_VERSION, + ) + if "room_alias_name" in config: for wchar in string.whitespace: if wchar in config["room_alias_name"]: @@ -184,6 +206,9 @@ class RoomCreationHandler(BaseHandler): creation_content = config.get("creation_content", {}) + # override any attempt to set room versions via the creation_content + creation_content["room_version"] = room_version + room_member_handler = self.hs.get_room_member_handler() yield self._send_events_for_new_room( diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index bdb5eee4af..4830c68f35 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -44,8 +44,8 @@ class 
SlavedEventStore(EventFederationWorkerStore, RoomMemberWorkerStore, EventPushActionsWorkerStore, StreamWorkerStore, - EventsWorkerStore, StateGroupWorkerStore, + EventsWorkerStore, SignatureWorkerStore, UserErasureWorkerStore, BaseSlavedStore): diff --git a/synapse/storage/state.py b/synapse/storage/state.py index b27b3ae144..17b14d464b 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -21,15 +21,17 @@ from six.moves import range from twisted.internet import defer +from synapse.api.constants import EventTypes +from synapse.api.errors import NotFoundError +from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.engines import PostgresEngine +from synapse.storage.events_worker import EventsWorkerStore from synapse.util.caches import get_cache_factor_for, intern_string from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.stringutils import to_ascii -from ._base import SQLBaseStore - logger = logging.getLogger(__name__) @@ -46,7 +48,8 @@ class _GetStateGroupDelta(namedtuple("_GetStateGroupDelta", ("prev_group", "delt return len(self.delta_ids) if self.delta_ids else 0 -class StateGroupWorkerStore(SQLBaseStore): +# this inherits from EventsWorkerStore because it calls self.get_events +class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): """The parts of StateGroupStore that can be called from workers. """ @@ -61,6 +64,30 @@ class StateGroupWorkerStore(SQLBaseStore): "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache") ) + @defer.inlineCallbacks + def get_room_version(self, room_id): + """Get the room_version of a given room + + Args: + room_id (str) + + Returns: + Deferred[str] + + Raises: + NotFoundError if the room is unknown + """ + # for now we do this by looking at the create event. We may want to cache this + # more intelligently in future. + state_ids = yield self.get_current_state_ids(room_id) + create_id = state_ids.get((EventTypes.Create, "")) + + if not create_id: + raise NotFoundError("Unknown room") + + create_event = yield self.get_event(create_id) + defer.returnValue(create_event.content.get("room_version", "1")) + @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): """Get the current state event ids for a room based on the diff --git a/tests/utils.py b/tests/utils.py index 9bff3ff3b9..9e188a8ed4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,10 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] + # we need a sane default_room_version, otherwise attempts to create rooms will + # fail. + config.default_room_version = "1" + # disable user directory updates, because they get done in the # background, which upsets the test runner. config.update_user_directory = False From 0d63d93ca834771324f0c5b9340346ad2113a4b1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 25 Jul 2018 22:25:41 +0100 Subject: [PATCH 097/205] Enforce compatibility when processing make_join requests Reject make_join requests from servers which do not support the room version. Also include the room version in the response. 
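For illustration, the negotiation this patch adds looks roughly like the following on the wire (server names, room and user IDs invented):

    GET /_matrix/federation/v1/make_join/!abc123:example.org/@frank:example.org?ver=1&ver=vdh-test-version

A server whose room version is among the supplied ver values answers with the event template plus the version:

    {
        "room_version": "1",
        "event": { ... m.room.member event template ... }
    }

while a room whose version the caller did not list now yields a 400 with errcode M_INCOMPATIBLE_ROOM_VERSION and the room's version in the error body, rather than an event the joining server cannot validate.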
---
 synapse/api/errors.py                   | 22 ++++++++++++++++++++++
 synapse/federation/federation_server.py | 21 ++++++++++++++++++---
 synapse/federation/transport/server.py  | 24 +++++++++++++++++++++++-
 3 files changed, 63 insertions(+), 4 deletions(-)

diff --git a/synapse/api/errors.py b/synapse/api/errors.py
index 477ca07a24..70400347bc 100644
--- a/synapse/api/errors.py
+++ b/synapse/api/errors.py
@@ -58,6 +58,7 @@ class Codes(object):
     CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
     MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED"
     UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION"
+    INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"


 class CodeMessageException(RuntimeError):
@@ -287,6 +288,27 @@ class LimitExceededError(SynapseError):
         )


+class IncompatibleRoomVersionError(SynapseError):
+    """A server is trying to join a room whose version it does not support."""
+
+    def __init__(self, room_version):
+        super(IncompatibleRoomVersionError, self).__init__(
+            code=400,
+            msg="Your homeserver does not support the features required to "
+                "join this room",
+            errcode=Codes.INCOMPATIBLE_ROOM_VERSION,
+        )
+
+        self._room_version = room_version
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            room_version=self._room_version,
+        )
+
+
 def cs_error(msg, code=Codes.UNKNOWN, **kwargs):
     """ Utility method for constructing an error response for client-server
     interactions.
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index bf89d568af..2b62f687b6 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -27,7 +27,13 @@ from twisted.internet.abstract import isIPAddress
 from twisted.python import failure

 from synapse.api.constants import EventTypes
-from synapse.api.errors import AuthError, FederationError, NotFoundError, SynapseError
+from synapse.api.errors import (
+    AuthError,
+    FederationError,
+    IncompatibleRoomVersionError,
+    NotFoundError,
+    SynapseError,
+)
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
 from synapse.federation.persistence import TransactionActions
@@ -323,12 +329,21 @@ class FederationServer(FederationBase):
         defer.returnValue((200, resp))

     @defer.inlineCallbacks
-    def on_make_join_request(self, origin, room_id, user_id):
+    def on_make_join_request(self, origin, room_id, user_id, supported_versions):
         origin_host, _ = parse_server_name(origin)
         yield self.check_server_matches_acl(origin_host, room_id)
+
+        room_version = yield self.store.get_room_version(room_id)
+        if room_version not in supported_versions:
+            logger.warn("Room version %s not in %s", room_version, supported_versions)
+            raise IncompatibleRoomVersionError(room_version=room_version)
+
         pdu = yield self.handler.on_make_join_request(room_id, user_id)
         time_now = self._clock.time_msec()
-        defer.returnValue({"event": pdu.get_pdu_json(time_now)})
+        defer.returnValue({
+            "event": pdu.get_pdu_json(time_now),
+            "room_version": room_version,
+        })

     @defer.inlineCallbacks
     def on_invite_request(self, origin, content):
diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py
index 93bd899b86..77969a4f38 100644
--- a/synapse/federation/transport/server.py
+++ b/synapse/federation/transport/server.py
@@ -431,9 +431,31 @@ class FederationMakeJoinServlet(BaseFederationServlet):
     PATH = "/make_join/(?P<context>[^/]*)/(?P<user_id>[^/]*)"

     @defer.inlineCallbacks
-    def on_GET(self, origin, content, query, context, user_id):
+    def on_GET(self, origin, _content, query, context, user_id):
+        """
+        Args:
+            origin (unicode): The authenticated server_name of the calling server
+
+            _content (None): (GETs don't have bodies)
+
+            query (dict[bytes, list[bytes]]): Query params from the request.
+
+            **kwargs (dict[unicode, unicode]): the dict mapping keys to path
+                components as specified in the path match regexp.
+
+        Returns:
+            Deferred[(int, object)|None]: either (response code, response object) to
+                return a JSON response, or None if the request has already been handled.
+        """
+        versions = query.get(b'ver')
+        if versions is not None:
+            supported_versions = [v.decode("utf-8") for v in versions]
+        else:
+            supported_versions = ["1"]
+
         content = yield self.handler.on_make_join_request(
             origin, context, user_id,
+            supported_versions=supported_versions,
         )
         defer.returnValue((200, content))

From 3777fa26aaf36245168980054c4eebf96169dd13 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Wed, 1 Aug 2018 15:35:29 +0100
Subject: [PATCH 098/205] sanity check response from make_join

---
 synapse/federation/federation_client.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py
index 7550e11b6e..de4b813a15 100644
--- a/synapse/federation/federation_client.py
+++ b/synapse/federation/federation_client.py
@@ -561,7 +561,9 @@ class FederationClient(FederationBase):
                 destination, room_id, user_id, membership
             )

-            pdu_dict = ret["event"]
+            pdu_dict = ret.get("event", None)
+            if not isinstance(pdu_dict, dict):
+                raise InvalidResponseError("Bad 'event' field in response")

             logger.debug("Got response to make_%s: %s", membership, pdu_dict)
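The next patch centralises the monthly-active-user cap into Auth.check_auth_blocking(). For illustration, once the cap is reached a blocked caller receives a 403 whose JSON body looks roughly like this (sketched from the AuthError raised in the diff; the exact message wording changes later in the series):

    {
        "errcode": "M_MAU_LIMIT_EXCEEDED",
        "error": "MAU Limit Exceeded"
    }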
From e10830e9765eb3897da5af6ffb4809badb8e3009 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Fri, 3 Aug 2018 17:55:50 +0100
Subject: [PATCH 099/205] wip commit - tests failing

---
 synapse/api/auth.py                        |  6 +-
 synapse/storage/client_ips.py              | 21 +------
 synapse/storage/monthly_active_users.py    | 66 ++++++++++++++++------
 tests/storage/test_client_ips.py           | 12 ++--
 tests/storage/test_monthly_active_users.py | 14 ++---
 5 files changed, 66 insertions(+), 53 deletions(-)

diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 943a488339..d8ebbbc6e8 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -775,7 +775,7 @@ class Auth(object):
         )

     @defer.inlineCallbacks
-    def check_auth_blocking(self, error):
+    def check_auth_blocking(self):
         """Checks if the user should be rejected for some external reason,
         such as monthly active user limiting or global disable flag
         Args:
@@ -785,4 +785,6 @@
         if self.hs.config.limit_usage_by_mau is True:
             current_mau = yield self.store.get_monthly_active_count()
             if current_mau >= self.hs.config.max_mau_value:
-                raise error
+                raise AuthError(
+                    403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED
+                )
diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py
index 83d64d1563..2489527f2c 100644
--- a/synapse/storage/client_ips.py
+++ b/synapse/storage/client_ips.py
@@ -86,7 +86,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore):
             last_seen = self.client_ip_last_seen.get(key)
         except KeyError:
             last_seen = None
-        yield self._populate_monthly_active_users(user_id)
+        yield self.populate_monthly_active_users(user_id)
         # Rate-limited inserts
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             return
@@ -95,25 +95,6 @@
         self._batch_row_update[key] = (user_agent, device_id, now)

-    @defer.inlineCallbacks
-    def _populate_monthly_active_users(self, user_id):
-        """Checks on the state of monthly active user limits and optionally
-        add the user to the monthly active tables
-
-        Args:
-            user_id(str): the user_id to query
-        """
-
-        store = self.hs.get_datastore()
-        if self.hs.config.limit_usage_by_mau:
-            is_user_monthly_active = yield store.is_user_monthly_active(user_id)
-            if is_user_monthly_active:
-                yield store.upsert_monthly_active_user(user_id)
-            else:
-                count = yield store.get_monthly_active_count()
-                if count < self.hs.config.max_mau_value:
-                    yield store.upsert_monthly_active_user(user_id)
-
     def _update_client_ips_batch(self):
         def update():
             to_update = self._batch_row_update
diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
index d06c90cbcc..6def6830d0 100644
--- a/synapse/storage/monthly_active_users.py
+++ b/synapse/storage/monthly_active_users.py
@@ -19,6 +19,10 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks

 from ._base import SQLBaseStore

+# Number of msec of granularity to store the monthly_active_user timestamp
+# This means it is not necessary to update the table on every request
+LAST_SEEN_GRANULARITY = 60 * 60 * 1000
+

 class MonthlyActiveUsersStore(SQLBaseStore):
     def __init__(self, dbconn, hs):
@@ -30,8 +34,9 @@ class MonthlyActiveUsersStore(SQLBaseStore):
         """
         Cleans out monthly active user table to ensure that no stale
         entries exist.
-        Return:
-            Defered()
+
+        Returns:
+            Deferred[]
         """
         def _reap_users(txn):
@@ -49,7 +54,7 @@ class MonthlyActiveUsersStore(SQLBaseStore):
         """
         txn.execute(sql, (self.hs.config.max_mau_value,))

-        res = self.runInteraction("reap_monthly_active_users", _reap_users)
+        res = yield self.runInteraction("reap_monthly_active_users", _reap_users)
         # It seems poor to invalidate the whole cache, Postgres supports
         # 'Returning' which would allow me to invalidate only the
         # specific users, but sqlite has no way to do this and instead
@@ -57,16 +62,16 @@ class MonthlyActiveUsersStore(SQLBaseStore):
         # is racy.
         # Have resolved to invalidate the whole cache for now and do
         # something about it if and when the perf becomes significant
-        self.is_user_monthly_active.invalidate_all()
+        self._user_last_seen_monthly_active.invalidate_all()
         self.get_monthly_active_count.invalidate_all()
         return res

     @cached(num_args=0)
     def get_monthly_active_count(self):
-        """
-        Generates current count of monthly active users.abs
-        Return:
-            Defered(int): Number of current monthly active users
+        """Generates current count of monthly active users.
+
+        Returns:
+            Deferred[int]: Number of current monthly active users
         """

         def _count_users(txn):
@@ -82,10 +87,10 @@
             Updates or inserts monthly active user member
             Arguments:
                 user_id (str): user to add/update
-            Deferred(bool): True if a new entry was created, False if an
+            Deferred[bool]: True if a new entry was created, False if an
                 existing one was updated.
""" - self._simple_upsert( + is_insert = self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", keyvalues={ @@ -96,24 +101,49 @@ class MonthlyActiveUsersStore(SQLBaseStore): }, lock=False, ) - self.is_user_monthly_active.invalidate((user_id,)) - self.get_monthly_active_count.invalidate(()) + if is_insert: + self._user_last_seen_monthly_active.invalidate((user_id,)) + self.get_monthly_active_count.invalidate(()) @cachedInlineCallbacks(num_args=1) - def is_user_monthly_active(self, user_id): + def _user_last_seen_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group Arguments: user_id (str): user to add/update Return: - bool : True if user part of group, False otherwise + int : timestamp since last seen, None if never seen + """ - user_present = yield self._simple_select_onecol( + result = yield self._simple_select_onecol( table="monthly_active_users", keyvalues={ "user_id": user_id, }, - retcol="user_id", - desc="is_user_monthly_active", + retcol="timestamp", + desc="_user_last_seen_monthly_active", ) - defer.returnValue(bool(user_present)) + timestamp = None + if len(result) > 0: + timestamp = result[0] + defer.returnValue(timestamp) + + @defer.inlineCallbacks + def populate_monthly_active_users(self, user_id): + """Checks on the state of monthly active user limits and optionally + add the user to the monthly active tables + + Args: + user_id(str): the user_id to query + """ + + if self.hs.config.limit_usage_by_mau: + last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) + now = self.hs.get_clock().time_msec() + + if last_seen_timestamp is None: + count = yield self.get_monthly_active_count() + if count < self.hs.config.max_mau_value: + yield self.upsert_monthly_active_user(user_id) + elif now - last_seen_timestamp > LAST_SEEN_GRANULARITY: + yield self.upsert_monthly_active_user(user_id) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index e1552510cc..7a58c6eb24 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -64,7 +64,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -80,7 +80,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -88,13 +88,13 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) @defer.inlineCallbacks @@ -103,7 +103,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield 
self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( @@ -112,5 +112,5 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index c32109ecc5..0bfd24a7fb 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -40,18 +40,18 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): self.assertEqual(1, count) @defer.inlineCallbacks - def test_is_user_monthly_active(self): + def test__user_last_seen_monthly_active(self): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.store.is_user_monthly_active(user_id1) - self.assertFalse(result) + result = yield self.store._user_last_seen_monthly_active(user_id1) + self.assertFalse(result == 0) yield self.store.upsert_monthly_active_user(user_id1) yield self.store.upsert_monthly_active_user(user_id2) - result = yield self.store.is_user_monthly_active(user_id1) - self.assertTrue(result) - result = yield self.store.is_user_monthly_active(user_id3) - self.assertFalse(result) + result = yield self.store._user_last_seen_monthly_active(user_id1) + self.assertTrue(result > 0) + result = yield self.store._user_last_seen_monthly_active(user_id3) + self.assertFalse(result == 0) @defer.inlineCallbacks def test_reap_monthly_active_users(self): From 16d970189299c2ef83df4107e0cf1054cfb9da42 Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Fri, 3 Aug 2018 19:08:05 +0100 Subject: [PATCH 100/205] Return M_NOT_FOUND when a profile could not be found. (#3596) --- changelog.d/3585.bugfix | 1 + synapse/handlers/profile.py | 89 ++++++++++++++++++++++++------------- 2 files changed, 59 insertions(+), 31 deletions(-) create mode 100644 changelog.d/3585.bugfix diff --git a/changelog.d/3585.bugfix b/changelog.d/3585.bugfix new file mode 100644 index 0000000000..e8ae1d8cb4 --- /dev/null +++ b/changelog.d/3585.bugfix @@ -0,0 +1 @@ +Respond with M_NOT_FOUND when profiles are not found locally or over federation. 
Fixes #3585 diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index cb5c6d587e..9af2e8f869 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -17,7 +17,13 @@ import logging from twisted.internet import defer -from synapse.api.errors import AuthError, CodeMessageException, SynapseError +from synapse.api.errors import ( + AuthError, + CodeMessageException, + Codes, + StoreError, + SynapseError, +) from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import UserID, get_domain_from_id @@ -49,12 +55,17 @@ class ProfileHandler(BaseHandler): def get_profile(self, user_id): target_user = UserID.from_string(user_id) if self.hs.is_mine(target_user): - displayname = yield self.store.get_profile_displayname( - target_user.localpart - ) - avatar_url = yield self.store.get_profile_avatar_url( - target_user.localpart - ) + try: + displayname = yield self.store.get_profile_displayname( + target_user.localpart + ) + avatar_url = yield self.store.get_profile_avatar_url( + target_user.localpart + ) + except StoreError as e: + if e.code == 404: + raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) + raise defer.returnValue({ "displayname": displayname, @@ -74,7 +85,6 @@ class ProfileHandler(BaseHandler): except CodeMessageException as e: if e.code != 404: logger.exception("Failed to get displayname") - raise @defer.inlineCallbacks @@ -85,12 +95,17 @@ class ProfileHandler(BaseHandler): """ target_user = UserID.from_string(user_id) if self.hs.is_mine(target_user): - displayname = yield self.store.get_profile_displayname( - target_user.localpart - ) - avatar_url = yield self.store.get_profile_avatar_url( - target_user.localpart - ) + try: + displayname = yield self.store.get_profile_displayname( + target_user.localpart + ) + avatar_url = yield self.store.get_profile_avatar_url( + target_user.localpart + ) + except StoreError as e: + if e.code == 404: + raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) + raise defer.returnValue({ "displayname": displayname, @@ -103,9 +118,14 @@ class ProfileHandler(BaseHandler): @defer.inlineCallbacks def get_displayname(self, target_user): if self.hs.is_mine(target_user): - displayname = yield self.store.get_profile_displayname( - target_user.localpart - ) + try: + displayname = yield self.store.get_profile_displayname( + target_user.localpart + ) + except StoreError as e: + if e.code == 404: + raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) + raise defer.returnValue(displayname) else: @@ -122,7 +142,6 @@ class ProfileHandler(BaseHandler): except CodeMessageException as e: if e.code != 404: logger.exception("Failed to get displayname") - raise except Exception: logger.exception("Failed to get displayname") @@ -157,10 +176,14 @@ class ProfileHandler(BaseHandler): @defer.inlineCallbacks def get_avatar_url(self, target_user): if self.hs.is_mine(target_user): - avatar_url = yield self.store.get_profile_avatar_url( - target_user.localpart - ) - + try: + avatar_url = yield self.store.get_profile_avatar_url( + target_user.localpart + ) + except StoreError as e: + if e.code == 404: + raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) + raise defer.returnValue(avatar_url) else: try: @@ -213,16 +236,20 @@ class ProfileHandler(BaseHandler): just_field = args.get("field", None) response = {} + try: + if just_field is None or just_field == "displayname": + response["displayname"] = yield self.store.get_profile_displayname( + 
user.localpart + ) - if just_field is None or just_field == "displayname": - response["displayname"] = yield self.store.get_profile_displayname( - user.localpart - ) - - if just_field is None or just_field == "avatar_url": - response["avatar_url"] = yield self.store.get_profile_avatar_url( - user.localpart - ) + if just_field is None or just_field == "avatar_url": + response["avatar_url"] = yield self.store.get_profile_avatar_url( + user.localpart + ) + except StoreError as e: + if e.code == 404: + raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND) + raise defer.returnValue(response) From 886be75ad1bc60e016611b453b9644e8db17a9f1 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 22:29:03 +0100 Subject: [PATCH 101/205] bug fixes --- synapse/handlers/auth.py | 15 ++------------- synapse/handlers/register.py | 8 ++++---- synapse/storage/monthly_active_users.py | 3 +-- tests/api/test_auth.py | 10 +++------- tests/handlers/test_register.py | 1 - 5 files changed, 10 insertions(+), 27 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 8f9cff92e8..7ea8ce9f94 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -520,7 +520,7 @@ class AuthHandler(BaseHandler): """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() # the device *should* have been registered before we got here; however, # it's possible we raced against a DELETE operation. The thing we @@ -734,7 +734,7 @@ class AuthHandler(BaseHandler): @defer.inlineCallbacks def validate_short_term_login_token_and_get_user_id(self, login_token): - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() auth_api = self.hs.get_auth() user_id = None try: @@ -907,17 +907,6 @@ class AuthHandler(BaseHandler): else: return defer.succeed(False) - @defer.inlineCallbacks - def _check_mau_limits(self): - """ - Ensure that if mau blocking is enabled that invalid users cannot - log in. 
- """ - error = AuthError( - 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - yield self.auth.check_auth_blocking(error) - @attr.s class MacaroonGenerator(object): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 706ed8c292..8cf0a36a8f 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -540,7 +540,7 @@ class RegistrationHandler(BaseHandler): Do not accept registrations if monthly active user limits exceeded and limiting is enabled """ - error = RegistrationError( - 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - yield self.auth.check_auth_blocking(error) + try: + yield self.auth.check_auth_blocking() + except AuthError as e: + raise RegistrationError(e.code, e.message, e.errcode) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 6def6830d0..135837507a 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -54,7 +54,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): """ txn.execute(sql, (self.hs.config.max_mau_value,)) - res = yield self.runInteraction("reap_monthly_active_users", _reap_users) + yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports # 'Returning' which would allow me to invalidate only the # specific users, but sqlite has no way to do this and instead @@ -64,7 +64,6 @@ class MonthlyActiveUsersStore(SQLBaseStore): # something about it if and when the perf becomes significant self._user_last_seen_monthly_active.invalidate_all() self.get_monthly_active_count.invalidate_all() - return res @cached(num_args=0) def get_monthly_active_count(self): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 54bdf28663..e963963c73 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -452,12 +452,8 @@ class AuthTestCase(unittest.TestCase): lots_of_users = 100 small_number_of_users = 1 - error = AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - # Ensure no error thrown - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() self.hs.config.limit_usage_by_mau = True @@ -466,10 +462,10 @@ class AuthTestCase(unittest.TestCase): ) with self.assertRaises(AuthError): - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() # Ensure does not throw an error self.store.get_monthly_active_count = Mock( return_value=defer.succeed(small_number_of_users) ) - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 6b5b8b3772..4ea59a58de 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -104,7 +104,6 @@ class RegistrationTestCase(unittest.TestCase): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): yield self.handler.get_or_create_user("requester", 'b', "display_name") From d08296f9f278f39372bb5db99266d627614ecc96 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 23:00:16 +0100 Subject: [PATCH 102/205] remove unused import --- tests/api/test_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index e963963c73..5dc3398300 100644 --- a/tests/api/test_auth.py +++ 
b/tests/api/test_auth.py @@ -21,7 +21,7 @@ from twisted.internet import defer import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError from synapse.types import UserID from tests import unittest From e40a510fbff931b5e6b295351847b95fb8c69e71 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 23:19:13 +0100 Subject: [PATCH 103/205] py3 fix --- synapse/handlers/register.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 8cf0a36a8f..0e16bbe0ee 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -543,4 +543,4 @@ class RegistrationHandler(BaseHandler): try: yield self.auth.check_auth_blocking() except AuthError as e: - raise RegistrationError(e.code, e.message, e.errcode) + raise RegistrationError(e.code, str(e), e.errcode) From 42c6823827a12cdb4374bc6e64d32d4ce4f526ec Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Sat, 4 Aug 2018 22:07:04 +0100 Subject: [PATCH 104/205] disable HS from config --- synapse/api/auth.py | 4 ++++ synapse/api/errors.py | 1 + synapse/config/server.py | 4 ++++ tests/api/test_auth.py | 11 ++++++++++- tests/utils.py | 2 ++ 5 files changed, 21 insertions(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index d8ebbbc6e8..089f166152 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -782,6 +782,10 @@ class Auth(object): error (Error): The error that should be raised if user is to be blocked """ + if self.hs.config.hs_disabled: + raise AuthError( + 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED + ) if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b41d595059..466240248a 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -56,6 +56,7 @@ class Codes(object): CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" + HS_DISABLED = "M_HS_DISABLED" class CodeMessageException(RuntimeError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 8fd2319759..2e1e2f5961 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -75,6 +75,10 @@ class ServerConfig(Config): "max_mau_value", 0, ) + # Options to disable HS + self.hs_disabled = config.get("hs_disabled", False) + self.hs_disabled_message = config.get("hs_disabled_message", "") + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 5dc3398300..fbb96361a8 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ from twisted.internet import defer import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError +from synapse.api.errors import AuthError, Codes from synapse.types import UserID from tests import unittest @@ -469,3 +469,12 @@ class AuthTestCase(unittest.TestCase): return_value=defer.succeed(small_number_of_users) ) yield self.auth.check_auth_blocking() + + @defer.inlineCallbacks + def test_hs_disabled(self): + self.hs.config.hs_disabled = True + self.hs.config.hs_disabled_message = "Reason for being disabled" + with 
self.assertRaises(AuthError) as e: + yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + self.assertEquals(e.exception.code, 403) diff --git a/tests/utils.py b/tests/utils.py index ec40428e74..a0aa38d264 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] config.limit_usage_by_mau = False + config.hs_disabled = False + config.hs_disabled_message = "" # disable user directory updates, because they get done in the # background, which upsets the test runner. From f900d508244b4277065d34dd9a05224fd60d5221 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 Aug 2018 13:45:37 +0100 Subject: [PATCH 105/205] include known room versions in outgoing make_joins --- synapse/federation/federation_client.py | 8 +++++--- synapse/federation/transport/client.py | 5 ++++- synapse/handlers/federation.py | 13 +++++++++++-- synapse/http/matrixfederationclient.py | 7 +++++-- 4 files changed, 25 insertions(+), 8 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index de4b813a15..7ec1d7a889 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -521,7 +521,7 @@ class FederationClient(FederationBase): raise RuntimeError("Failed to %s via any server", description) def make_membership_event(self, destinations, room_id, user_id, membership, - content={},): + content, params): """ Creates an m.room.member event, with context, without participating in the room. @@ -537,8 +537,10 @@ class FederationClient(FederationBase): user_id (str): The user whose membership is being evented. membership (str): The "membership" property of the event. Must be one of "join" or "leave". - content (object): Any additional data to put into the content field + content (dict): Any additional data to put into the content field of the event. + params (dict[str, str|Iterable[str]]): Query parameters to include in the + request. Return: Deferred: resolves to a tuple of (origin (str), event (object)) where origin is the remote homeserver which generated the event. @@ -558,7 +560,7 @@ class FederationClient(FederationBase): @defer.inlineCallbacks def send_request(destination): ret = yield self.transport_layer.make_membership_event( - destination, room_id, user_id, membership + destination, room_id, user_id, membership, params, ) pdu_dict = ret.get("event", None) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 4529d454af..b4fbe2c9d5 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -195,7 +195,7 @@ class TransportLayerClient(object): @defer.inlineCallbacks @log_function - def make_membership_event(self, destination, room_id, user_id, membership): + def make_membership_event(self, destination, room_id, user_id, membership, params): """Asks a remote server to build and sign us a membership event Note that this does not append any events to any graphs. @@ -205,6 +205,8 @@ class TransportLayerClient(object): room_id (str): room to join/leave user_id (str): user to be joined/left membership (str): one of join/leave + params (dict[str, str|Iterable[str]]): Query parameters to include in the + request. Returns: Deferred: Succeeds when we get a 2xx HTTP response. 
The result @@ -241,6 +243,7 @@ class TransportLayerClient(object): content = yield self.client.get_json( destination=destination, path=path, + args=params, retry_on_dns_fail=retry_on_dns_fail, timeout=20000, ignore_backoff=ignore_backoff, diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 533b82c783..0dffd44e22 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -30,7 +30,12 @@ from unpaddedbase64 import decode_base64 from twisted.internet import defer -from synapse.api.constants import EventTypes, Membership, RejectedReason +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + Membership, + RejectedReason, +) from synapse.api.errors import ( AuthError, CodeMessageException, @@ -922,6 +927,9 @@ class FederationHandler(BaseHandler): joinee, "join", content, + params={ + "ver": KNOWN_ROOM_VERSIONS, + }, ) # This shouldn't happen, because the RoomMemberHandler has a @@ -1187,13 +1195,14 @@ class FederationHandler(BaseHandler): @defer.inlineCallbacks def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, - content={},): + content={}, params=None): origin, pdu = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, membership, content, + params=params, ) logger.debug("Got response to make_%s: %s", membership, pdu) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index bf1aa29502..b3f5415aa6 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -439,7 +439,7 @@ class MatrixFederationHttpClient(object): defer.returnValue(json.loads(body)) @defer.inlineCallbacks - def get_json(self, destination, path, args={}, retry_on_dns_fail=True, + def get_json(self, destination, path, args=None, retry_on_dns_fail=True, timeout=None, ignore_backoff=False): """ GETs some json from the given host homeserver and path @@ -447,7 +447,7 @@ class MatrixFederationHttpClient(object): destination (str): The remote server to send the HTTP request to. path (str): The HTTP path. - args (dict): A dictionary used to create query strings, defaults to + args (dict|None): A dictionary used to create query strings, defaults to None. timeout (int): How long to try (in ms) the destination for before giving up. 
None indicates no timeout and that the request will @@ -702,6 +702,9 @@ def check_content_type_is_json(headers): def encode_query_args(args): + if args is None: + return b"" + encoded_args = {} for k, vs in args.items(): if isinstance(vs, string_types): From 051a99c4006a7deb1f9256df0a25c702dcdb451d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 6 Aug 2018 14:29:31 +0100 Subject: [PATCH 106/205] Fix isort --- synapse/replication/http/_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index a7d1b2dabe..4de3825fda 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -21,10 +21,7 @@ from six.moves import urllib from twisted.internet import defer -from synapse.api.errors import ( - CodeMessageException, - HttpResponseException, -) +from synapse.api.errors import CodeMessageException, HttpResponseException from synapse.util.caches.response_cache import ResponseCache from synapse.util.stringutils import random_string From e26dbd82ef5f1d755be9a62165556ebce041af10 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 25 Jul 2018 16:32:05 +0100 Subject: [PATCH 107/205] Add replication APIs for persisting federation events --- synapse/app/federation_reader.py | 8 + synapse/handlers/federation.py | 44 ++++- synapse/replication/http/__init__.py | 3 +- synapse/replication/http/federation.py | 245 +++++++++++++++++++++++++ 4 files changed, 290 insertions(+), 10 deletions(-) create mode 100644 synapse/replication/http/federation.py diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 7af00b8bcf..c512b4be87 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -32,9 +32,13 @@ from synapse.http.site import SynapseSite from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore +from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore from synapse.replication.slave.storage.directory import DirectoryStore from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.keys import SlavedKeyStore +from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore +from synapse.replication.slave.storage.pushers import SlavedPusherStore +from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import TransactionStore from synapse.replication.tcp.client import ReplicationClientHandler @@ -49,6 +53,10 @@ logger = logging.getLogger("synapse.app.federation_reader") class FederationReaderSlavedStore( + SlavedApplicationServiceStore, + SlavedPusherStore, + SlavedPushRuleStore, + SlavedReceiptsStore, SlavedEventStore, SlavedKeyStore, RoomStore, diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 533b82c783..0524dec942 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,6 +44,8 @@ from synapse.crypto.event_signing import ( compute_event_signature, ) from synapse.events.validator import EventValidator +from synapse.replication.http.federation import send_federation_events_to_master +from synapse.replication.http.membership import notify_user_membership_change from synapse.state import resolve_events_with_factory from 
synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError @@ -86,6 +88,8 @@ class FederationHandler(BaseHandler): self.spam_checker = hs.get_spam_checker() self.event_creation_handler = hs.get_event_creation_handler() self._server_notices_mxid = hs.config.server_notices_mxid + self.config = hs.config + self.http_client = hs.get_simple_http_client() # When joining a room we need to queue any events for that room up self.room_queues = {} @@ -2288,7 +2292,7 @@ class FederationHandler(BaseHandler): for revocation. """ try: - response = yield self.hs.get_simple_http_client().get_json( + response = yield self.http_client.get_json( url, {"public_key": public_key} ) @@ -2313,14 +2317,25 @@ class FederationHandler(BaseHandler): Returns: Deferred """ - max_stream_id = yield self.store.persist_events( - event_and_contexts, - backfilled=backfilled, - ) + if self.config.worker_app: + yield send_federation_events_to_master( + clock=self.hs.get_clock(), + store=self.store, + client=self.http_client, + host=self.config.worker_replication_host, + port=self.config.worker_replication_http_port, + event_and_contexts=event_and_contexts, + backfilled=backfilled + ) + else: + max_stream_id = yield self.store.persist_events( + event_and_contexts, + backfilled=backfilled, + ) - if not backfilled: # Never notify for backfilled events - for event, _ in event_and_contexts: - self._notify_persisted_event(event, max_stream_id) + if not backfilled: # Never notify for backfilled events + for event, _ in event_and_contexts: + self._notify_persisted_event(event, max_stream_id) def _notify_persisted_event(self, event, max_stream_id): """Checks to see if notifier/pushers should be notified about the @@ -2359,9 +2374,20 @@ class FederationHandler(BaseHandler): ) def _clean_room_for_join(self, room_id): + # TODO move this out to master return self.store.clean_room_for_join(room_id) def user_joined_room(self, user, room_id): """Called when a new user has joined the room """ - return user_joined_room(self.distributor, user, room_id) + if self.config.worker_app: + return notify_user_membership_change( + client=self.http_client, + host=self.config.worker_replication_host, + port=self.config.worker_replication_http_port, + room_id=room_id, + user_id=user.to_string(), + change="joined", + ) + else: + return user_joined_room(self.distributor, user, room_id) diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index 589ee94c66..19f214281e 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. from synapse.http.server import JsonResource -from synapse.replication.http import membership, send_event +from synapse.replication.http import federation, membership, send_event REPLICATION_PREFIX = "/_synapse/replication" @@ -27,3 +27,4 @@ class ReplicationRestResource(JsonResource): def register_servlets(self, hs): send_event.register_servlets(hs, self) membership.register_servlets(hs, self) + federation.register_servlets(hs, self) diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py new file mode 100644 index 0000000000..f39aaa89be --- /dev/null +++ b/synapse/replication/http/federation.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from twisted.internet import defer
+
+from synapse.api.constants import EventTypes, Membership
+from synapse.events import FrozenEvent
+from synapse.events.snapshot import EventContext
+from synapse.http.servlet import parse_json_object_from_request
+from synapse.replication.http._base import ReplicationEndpoint
+from synapse.types import UserID
+from synapse.util.logcontext import run_in_background
+from synapse.util.metrics import Measure
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint):
+    """Handles events newly received from federation, including persisting and
+    notifying.
+
+    The API looks like:
+
+        POST /_synapse/replication/fed_send_events/:txn_id
+
+        {
+            "events": [{
+                "event": { .. serialized event .. },
+                "internal_metadata": { .. serialized internal_metadata .. },
+                "rejected_reason": ..,   // The event.rejected_reason field
+                "context": { .. serialized event context .. },
+            }],
+            "backfilled": false
+        }
+    """
+
+    NAME = "fed_send_events"
+    PATH_ARGS = ()
+
+    def __init__(self, hs):
+        super(ReplicationFederationSendEventsRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.is_mine_id = hs.is_mine_id
+        self.notifier = hs.get_notifier()
+        self.pusher_pool = hs.get_pusherpool()
+
+    @defer.inlineCallbacks
+    @staticmethod
+    def _serialize_payload(store, event_and_contexts, backfilled):
+        """
+        Args:
+            store
+            event_and_contexts (list[tuple[FrozenEvent, EventContext]])
+            backfilled (bool): Whether or not the events are the result of
+                backfilling
+        """
+        event_payloads = []
+        for event, context in event_and_contexts:
+            serialized_context = yield context.serialize(event, store)
+
+            event_payloads.append({
+                "event": event.get_pdu_json(),
+                "internal_metadata": event.internal_metadata.get_dict(),
+                "rejected_reason": event.rejected_reason,
+                "context": serialized_context,
+            })
+
+        payload = {
+            "events": event_payloads,
+            "backfilled": backfilled,
+        }
+
+        defer.returnValue(payload)
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request):
+        with Measure(self.clock, "repl_fed_send_events_parse"):
+            content = parse_json_object_from_request(request)
+
+            backfilled = content["backfilled"]
+
+            event_payloads = content["events"]
+
+            event_and_contexts = []
+            for event_payload in event_payloads:
+                event_dict = event_payload["event"]
+                internal_metadata = event_payload["internal_metadata"]
+                rejected_reason = event_payload["rejected_reason"]
+                event = FrozenEvent(event_dict, internal_metadata, rejected_reason)
+
+                context = yield EventContext.deserialize(
+                    self.store, event_payload["context"],
+                )
+
+                event_and_contexts.append((event, context))
+
+        logger.info(
+            "Got %d events from federation",
+            len(event_and_contexts),
+        )
+
+        max_stream_id = yield self.store.persist_events(
+            event_and_contexts,
+            backfilled=backfilled
+        )
+
+        if not backfilled:
+            for event, _ in event_and_contexts:
+                self._notify_persisted_event(event, max_stream_id)
+
+        defer.returnValue((200, {}))
+
+    def _notify_persisted_event(self, event, max_stream_id):
+        extra_users = []
+        if event.type == EventTypes.Member:
+            target_user_id = event.state_key
+
+            # We notify for memberships if its an invite for one of our
+            # users
+            if event.internal_metadata.is_outlier():
+                if event.membership != Membership.INVITE:
+                    if not self.is_mine_id(target_user_id):
+                        return
+
+            target_user = UserID.from_string(target_user_id)
+            extra_users.append(target_user)
+        elif event.internal_metadata.is_outlier():
+            return
+
+        event_stream_id = event.internal_metadata.stream_ordering
+        self.notifier.on_new_room_event(
+            event, event_stream_id, max_stream_id,
+            extra_users=extra_users
+        )
+
+        run_in_background(
+            self.pusher_pool.on_new_notifications,
+            event_stream_id, max_stream_id,
+        )
+
+
+class ReplicationFederationSendEduRestServlet(ReplicationEndpoint):
+    """Handles EDUs newly received from federation, including persisting and
+    notifying.
+    """
+
+    NAME = "fed_send_edu"
+    PATH_ARGS = ("edu_type",)
+
+    def __init__(self, hs):
+        super(ReplicationFederationSendEduRestServlet, self).__init__(hs)
+
+        self.store = hs.get_datastore()
+        self.clock = hs.get_clock()
+        self.registry = hs.get_federation_registry()
+
+    @staticmethod
+    def _serialize_payload(edu_type, origin, content):
+        return {
+            "origin": origin,
+            "content": content,
+        }
+
+    @defer.inlineCallbacks
+    def _handle_request(self, request, edu_type):
+        with Measure(self.clock, "repl_fed_send_edu_parse"):
+            content = parse_json_object_from_request(request)
+
+            origin = content["origin"]
+            edu_content = content["content"]
+
+        logger.info(
+            "Got %r edu from %s",
+            edu_type, origin,
+        )
+
+        result = yield self.registry.on_edu(edu_type, origin, edu_content)
+
+        defer.returnValue((200, result))
+
+
+class ReplicationGetQueryRestServlet(ReplicationEndpoint):
+    """Handle responding to queries from federation.
From a3f5bf79a0fc0ea6d59069945f53717a3e9c6581 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 26 Jul 2018 11:44:22 +0100
Subject: [PATCH 108/205] Add EDU/query handling over replication

---
 synapse/federation/federation_server.py | 43 +++++++++++++++++++++++++
 synapse/handlers/federation.py          | 24 +++++++-------
 synapse/replication/http/federation.py  |  2 +-
 synapse/server.py                       |  6 +++-
 4 files changed, 62 insertions(+), 13 deletions(-)

diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py
index bf89d568af..941e30a596 100644
--- a/synapse/federation/federation_server.py
+++ b/synapse/federation/federation_server.py
@@ -33,6 +33,10 @@ from synapse.federation.federation_base import FederationBase, event_from_pdu_js
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.replication.http.federation import (
+    ReplicationFederationSendEduRestServlet,
+    ReplicationGetQueryRestServlet,
+)
 from synapse.types import get_domain_from_id
 from synapse.util import async
 from synapse.util.caches.response_cache import ResponseCache
@@ -745,6 +749,8 @@ class FederationHandlerRegistry(object):
         if edu_type in self.edu_handlers:
             raise KeyError("Already have an EDU handler for %s" % (edu_type,))
 
+        logger.info("Registering federation EDU handler for %r", edu_type)
+
         self.edu_handlers[edu_type] = handler
 
     def register_query_handler(self, query_type, handler):
@@ -763,6 +769,8 @@ class FederationHandlerRegistry(object):
                 "Already have a Query handler for %s" % (query_type,)
             )
 
+        logger.info("Registering federation query handler for %r", query_type)
+
         self.query_handlers[query_type] = handler
 
     @defer.inlineCallbacks
@@ -785,3 +793,38 @@ class FederationHandlerRegistry(object):
             raise NotFoundError("No handler for Query type '%s'" % (query_type,))
 
         return handler(args)
+
+
+class ReplicationFederationHandlerRegistry(FederationHandlerRegistry):
+    def __init__(self, hs):
+        self.config = hs.config
+        self.http_client = hs.get_simple_http_client()
+        self.clock = hs.get_clock()
+
+        self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
+        self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
+
+        super(ReplicationFederationHandlerRegistry, self).__init__()
+
+    def on_edu(self, edu_type, origin, content):
+        handler = 
self.edu_handlers.get(edu_type) + if handler: + return super(ReplicationFederationHandlerRegistry, self).on_edu( + edu_type, origin, content, + ) + + return self._send_edu( + edu_type=edu_type, + origin=origin, + content=content, + ) + + def on_query(self, query_type, args): + handler = self.query_handlers.get(query_type) + if handler: + return handler(args) + + return self._get_query_client( + query_type=query_type, + args=args, + ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0524dec942..d2cbb12df3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,8 +44,10 @@ from synapse.crypto.event_signing import ( compute_event_signature, ) from synapse.events.validator import EventValidator -from synapse.replication.http.federation import send_federation_events_to_master -from synapse.replication.http.membership import notify_user_membership_change +from synapse.replication.http.federation import ( + ReplicationFederationSendEventsRestServlet, +) +from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet from synapse.state import resolve_events_with_factory from synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError @@ -91,6 +93,13 @@ class FederationHandler(BaseHandler): self.config = hs.config self.http_client = hs.get_simple_http_client() + self._send_events_to_master = ( + ReplicationFederationSendEventsRestServlet.make_client(hs) + ) + self._notify_user_membership_change = ( + ReplicationUserJoinedLeftRoomRestServlet.make_client(hs) + ) + # When joining a room we need to queue any events for that room up self.room_queues = {} self._room_pdu_linearizer = Linearizer("fed_room_pdu") @@ -2318,12 +2327,8 @@ class FederationHandler(BaseHandler): Deferred """ if self.config.worker_app: - yield send_federation_events_to_master( - clock=self.hs.get_clock(), + yield self._send_events_to_master( store=self.store, - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, event_and_contexts=event_and_contexts, backfilled=backfilled ) @@ -2381,10 +2386,7 @@ class FederationHandler(BaseHandler): """Called when a new user has joined the room """ if self.config.worker_app: - return notify_user_membership_change( - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_user_membership_change( room_id=room_id, user_id=user.to_string(), change="joined", diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index f39aaa89be..3fa7bd64c7 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -59,8 +59,8 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): self.notifier = hs.get_notifier() self.pusher_pool = hs.get_pusherpool() - @defer.inlineCallbacks @staticmethod + @defer.inlineCallbacks def _serialize_payload(store, event_and_contexts, backfilled): """ Args: diff --git a/synapse/server.py b/synapse/server.py index 140be9ebe8..26228d8c72 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -36,6 +36,7 @@ from synapse.federation.federation_client import FederationClient from synapse.federation.federation_server import ( FederationHandlerRegistry, FederationServer, + ReplicationFederationHandlerRegistry, ) from synapse.federation.send_queue import FederationRemoteSendQueue from synapse.federation.transaction_queue 
import TransactionQueue
@@ -423,7 +424,10 @@ class HomeServer(object):
         return RoomMemberMasterHandler(self)
 
     def build_federation_registry(self):
-        return FederationHandlerRegistry()
+        if self.config.worker_app:
+            return ReplicationFederationHandlerRegistry(self)
+        else:
+            return FederationHandlerRegistry()
 
     def build_server_notices_manager(self):
         if self.config.worker_app:
From 1e2bed9656a4826ed7fdc82bc096df775bc38baf Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 26 Jul 2018 11:45:12 +0100
Subject: [PATCH 109/205] Import all functions from TransactionStore

---
 synapse/replication/slave/storage/transactions.py | 13 ++-----------
 1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py
index 9c9a5eadd9..cd6416c47c 100644
--- a/synapse/replication/slave/storage/transactions.py
+++ b/synapse/replication/slave/storage/transactions.py
@@ -13,19 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from synapse.storage import DataStore
 from synapse.storage.transactions import TransactionStore
 
 from ._base import BaseSlavedStore
 
 
-class TransactionStore(BaseSlavedStore):
-    get_destination_retry_timings = TransactionStore.__dict__[
-        "get_destination_retry_timings"
-    ]
-    _get_destination_retry_timings = DataStore._get_destination_retry_timings.__func__
-    set_destination_retry_timings = DataStore.set_destination_retry_timings.__func__
-    _set_destination_retry_timings = DataStore._set_destination_retry_timings.__func__
-
-    prep_send_transaction = DataStore.prep_send_transaction.__func__
-    delivered_txn = DataStore.delivered_txn.__func__
+class TransactionStore(TransactionStore, BaseSlavedStore):
+    pass
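
The two-line class above replaces hand-copied methods with plain
inheritance: Python's method resolution order finds every method on the
real TransactionStore first, while BaseSlavedStore keeps providing the
slaved-store plumbing. A toy illustration of the same shape (invented
class names, not Synapse code):

    # Toy model of "class TransactionStore(TransactionStore, BaseSlavedStore)":
    # the subclass inherits everything from the real store, and anything the
    # store lacks is looked up later in the MRO on the slaved base class.
    class RealTransactionStore(object):
        def get_destination_retry_timings(self, dest):
            return {"destination": dest, "retry_interval": 0}

    class SlavedBase(object):
        def stream_positions(self):
            return {"caches": 0}

    class SlavedTransactionStore(RealTransactionStore, SlavedBase):
        pass

    s = SlavedTransactionStore()
    print(s.get_destination_retry_timings("example.org"))  # from the real store
    print(s.stream_positions())                            # from the slaved base
    print([c.__name__ for c in SlavedTransactionStore.__mro__])
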
From 62ace05c4521caeed023e5b9dadd993afe5c154f Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Thu, 26 Jul 2018 13:31:59 +0100
Subject: [PATCH 110/205] Move necessary storage functions to worker classes

---
 synapse/storage/events.py        | 82 -------------------------------
 synapse/storage/events_worker.py | 84 ++++++++++++++++++++++++++++++++
 synapse/storage/room.py          | 32 ++++++------
 3 files changed, 100 insertions(+), 98 deletions(-)

diff --git a/synapse/storage/events.py b/synapse/storage/events.py
index e8e5a0fe44..5374796fd7 100644
--- a/synapse/storage/events.py
+++ b/synapse/storage/events.py
@@ -1430,88 +1430,6 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore
             (event.event_id, event.redacts)
         )
 
-    @defer.inlineCallbacks
-    def have_events_in_timeline(self, event_ids):
-        """Given a list of event ids, check if we have already processed and
-        stored them as non outliers.
-        """
-        rows = yield self._simple_select_many_batch(
-            table="events",
-            retcols=("event_id",),
-            column="event_id",
-            iterable=list(event_ids),
-            keyvalues={"outlier": False},
-            desc="have_events_in_timeline",
-        )
-
-        defer.returnValue(set(r["event_id"] for r in rows))
-
-    @defer.inlineCallbacks
-    def have_seen_events(self, event_ids):
-        """Given a list of event ids, check if we have already processed them.
-
-        Args:
-            event_ids (iterable[str]):
-
-        Returns:
-            Deferred[set[str]]: The events we have already seen.
-        """
-        results = set()
-
-        def have_seen_events_txn(txn, chunk):
-            sql = (
-                "SELECT event_id FROM events as e WHERE e.event_id IN (%s)"
-                % (",".join("?" * len(chunk)), )
-            )
-            txn.execute(sql, chunk)
-            for (event_id, ) in txn:
-                results.add(event_id)
-
-        # break the input up into chunks of 100
-        input_iterator = iter(event_ids)
-        for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)),
-                          []):
-            yield self.runInteraction(
-                "have_seen_events",
-                have_seen_events_txn,
-                chunk,
-            )
-        defer.returnValue(results)
-
-    def get_seen_events_with_rejections(self, event_ids):
-        """Given a list of event ids, check if we rejected them.
-
-        Args:
-            event_ids (list[str])
-
-        Returns:
-            Deferred[dict[str, str|None):
-                Has an entry for each event id we already have seen. Maps to
-                the rejected reason string if we rejected the event, else maps
-                to None.
-        """
-        if not event_ids:
-            return defer.succeed({})
-
-        def f(txn):
-            sql = (
-                "SELECT e.event_id, reason FROM events as e "
-                "LEFT JOIN rejections as r ON e.event_id = r.event_id "
-                "WHERE e.event_id = ?"
-            )
-
-            res = {}
-            for event_id in event_ids:
-                txn.execute(sql, (event_id,))
-                row = txn.fetchone()
-                if row:
-                    _, rejected = row
-                    res[event_id] = rejected
-
-            return res
-
-        return self.runInteraction("get_rejection_reasons", f)
-
     @defer.inlineCallbacks
     def count_daily_messages(self):
         """
diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py
index 9b4cfeb899..e1dc2c62fd 100644
--- a/synapse/storage/events_worker.py
+++ b/synapse/storage/events_worker.py
@@ -12,7 +12,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import itertools
 import logging
+
 from collections import namedtuple
 
 from canonicaljson import json
@@ -442,3 +444,85 @@ class EventsWorkerStore(SQLBaseStore):
         self._get_event_cache.prefill((original_ev.event_id,), cache_entry)
 
         defer.returnValue(cache_entry)
+
+    @defer.inlineCallbacks
+    def have_events_in_timeline(self, event_ids):
+        """Given a list of event ids, check if we have already processed and
+        stored them as non outliers.
+        """
+        rows = yield self._simple_select_many_batch(
+            table="events",
+            retcols=("event_id",),
+            column="event_id",
+            iterable=list(event_ids),
+            keyvalues={"outlier": False},
+            desc="have_events_in_timeline",
+        )
+
+        defer.returnValue(set(r["event_id"] for r in rows))
+
+    @defer.inlineCallbacks
+    def have_seen_events(self, event_ids):
+        """Given a list of event ids, check if we have already processed them.
+
+        Args:
+            event_ids (iterable[str]):
+
+        Returns:
+            Deferred[set[str]]: The events we have already seen.
+        """
+        results = set()
+
+        def have_seen_events_txn(txn, chunk):
+            sql = (
+                "SELECT event_id FROM events as e WHERE e.event_id IN (%s)"
+                % (",".join("?" * len(chunk)), )
+            )
+            txn.execute(sql, chunk)
+            for (event_id, ) in txn:
+                results.add(event_id)
+
+        # break the input up into chunks of 100
+        input_iterator = iter(event_ids)
+        for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)),
+                          []):
+            yield self.runInteraction(
+                "have_seen_events",
+                have_seen_events_txn,
+                chunk,
+            )
+        defer.returnValue(results)
+
+    def get_seen_events_with_rejections(self, event_ids):
+        """Given a list of event ids, check if we rejected them.
+
+        Args:
+            event_ids (list[str])
+
+        Returns:
+            Deferred[dict[str, str|None]]:
+                Has an entry for each event id we already have seen. Maps to
+                the rejected reason string if we rejected the event, else maps
+                to None.
+ """ + if not event_ids: + return defer.succeed({}) + + def f(txn): + sql = ( + "SELECT e.event_id, reason FROM events as e " + "LEFT JOIN rejections as r ON e.event_id = r.event_id " + "WHERE e.event_id = ?" + ) + + res = {} + for event_id in event_ids: + txn.execute(sql, (event_id,)) + row = txn.fetchone() + if row: + _, rejected = row + res[event_id] = rejected + + return res + + return self.runInteraction("get_rejection_reasons", f) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 3147fb6827..3378fc77d1 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -41,6 +41,22 @@ RatelimitOverride = collections.namedtuple( class RoomWorkerStore(SQLBaseStore): + def get_room(self, room_id): + """Retrieve a room. + + Args: + room_id (str): The ID of the room to retrieve. + Returns: + A namedtuple containing the room information, or an empty list. + """ + return self._simple_select_one( + table="rooms", + keyvalues={"room_id": room_id}, + retcols=("room_id", "is_public", "creator"), + desc="get_room", + allow_none=True, + ) + def get_public_room_ids(self): return self._simple_select_onecol( table="rooms", @@ -215,22 +231,6 @@ class RoomStore(RoomWorkerStore, SearchStore): logger.error("store_room with room_id=%s failed: %s", room_id, e) raise StoreError(500, "Problem creating room.") - def get_room(self, room_id): - """Retrieve a room. - - Args: - room_id (str): The ID of the room to retrieve. - Returns: - A namedtuple containing the room information, or an empty list. - """ - return self._simple_select_one( - table="rooms", - keyvalues={"room_id": room_id}, - retcols=("room_id", "is_public", "creator"), - desc="get_room", - allow_none=True, - ) - @defer.inlineCallbacks def set_room_is_public(self, room_id, is_public): def set_room_is_public_txn(txn, next_id): From 96a9a296455e2c08be4e0375cf784e4113fa51e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Jul 2018 13:35:36 +0100 Subject: [PATCH 111/205] Pull in necessary stores in federation_reader --- synapse/app/federation_reader.py | 2 ++ synapse/storage/events_worker.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index c512b4be87..0c557a8242 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -36,6 +36,7 @@ from synapse.replication.slave.storage.appservice import SlavedApplicationServic from synapse.replication.slave.storage.directory import DirectoryStore from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.keys import SlavedKeyStore +from synapse.replication.slave.storage.profile import SlavedProfileStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore @@ -53,6 +54,7 @@ logger = logging.getLogger("synapse.app.federation_reader") class FederationReaderSlavedStore( + SlavedProfileStore, SlavedApplicationServiceStore, SlavedPusherStore, SlavedPushRuleStore, diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index e1dc2c62fd..59822178ff 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -14,7 +14,6 @@ # limitations under the License. 
import itertools import logging - from collections import namedtuple from canonicaljson import json From 19a17068f1bc98a1556ff618b544b5fbf57eeba0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 Aug 2018 15:15:19 +0100 Subject: [PATCH 112/205] Check m.room.create for sane room_versions --- synapse/event_auth.py | 10 +++++++++- synapse/federation/federation_client.py | 26 ++++++++++++++++++++++--- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index b32f64e729..6baeccca38 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -20,7 +20,7 @@ from signedjson.key import decode_verify_key_bytes from signedjson.sign import SignatureVerifyException, verify_signed_json from unpaddedbase64 import decode_base64 -from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, EventSizeError, SynapseError from synapse.types import UserID, get_domain_from_id @@ -83,6 +83,14 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): 403, "Creation event's room_id domain does not match sender's" ) + + room_version = event.content.get("room_version", "1") + if room_version not in KNOWN_ROOM_VERSIONS: + raise AuthError( + 403, + "room appears to have unsupported version %s" % ( + room_version, + )) # FIXME logger.debug("Allowing! %s", event) return diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 7ec1d7a889..c9f3c2d352 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -25,7 +25,7 @@ from prometheus_client import Counter from twisted.internet import defer -from synapse.api.constants import Membership +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership from synapse.api.errors import ( CodeMessageException, FederationDeniedError, @@ -518,7 +518,7 @@ class FederationClient(FederationBase): description, destination, exc_info=1, ) - raise RuntimeError("Failed to %s via any server", description) + raise RuntimeError("Failed to %s via any server" % (description, )) def make_membership_event(self, destinations, room_id, user_id, membership, content, params): @@ -609,6 +609,26 @@ class FederationClient(FederationBase): Fails with a ``RuntimeError`` if no servers were reachable. """ + def check_authchain_validity(signed_auth_chain): + for e in signed_auth_chain: + if e.type == EventTypes.Create: + create_event = e + break + else: + raise InvalidResponseError( + "no %s in auth chain" % (EventTypes.Create,), + ) + + # the room version should be sane. + room_version = create_event.content.get("room_version", "1") + if room_version not in KNOWN_ROOM_VERSIONS: + # This shouldn't be possible, because the remote server should have + # rejected the join attempt during make_join. 
+                raise InvalidResponseError(
+                    "room appears to have unsupported version %s" % (
+                        room_version,
+                    ))
+
         @defer.inlineCallbacks
         def send_request(destination):
             time_now = self._clock.time_msec()
@@ -665,7 +685,7 @@ class FederationClient(FederationBase):
                 for s in signed_state:
                     s.internal_metadata = copy.deepcopy(s.internal_metadata)
 
-                auth_chain.sort(key=lambda e: e.depth)
+                check_authchain_validity(signed_auth)
 
                 defer.returnValue({
                     "state": signed_state,
From 7f3f1085612a3c46a5e3c2a6a376b42024f6b6cb Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Mon, 6 Aug 2018 16:30:52 +0100
Subject: [PATCH 113/205] changelog

---
 changelog.d/3654.feature | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changelog.d/3654.feature

diff --git a/changelog.d/3654.feature b/changelog.d/3654.feature
new file mode 100644
index 0000000000..35c95580bc
--- /dev/null
+++ b/changelog.d/3654.feature
@@ -0,0 +1 @@
+Basic support for room versioning
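
For context on the check the two patches above add: the m.room.create
event carries an optional "room_version" key in its content, and rooms
created before versioning existed have no such key, so absence defaults
to "1". A self-contained sketch of that convention (the version set here
is illustrative, not the real constant from synapse.api.constants):

    # Minimal sketch of the room-version sanity check, assuming a known set
    # of {"1"} purely for illustration.
    KNOWN_ROOM_VERSIONS = {"1"}

    def check_room_version(create_event_content):
        # pre-versioning rooms omit the key entirely
        room_version = create_event_content.get("room_version", "1")
        if room_version not in KNOWN_ROOM_VERSIONS:
            raise ValueError(
                "room appears to have unsupported version %s" % (room_version,)
            )

    check_room_version({})                        # ok: defaults to "1"
    check_room_version({"room_version": "1"})     # ok
    # check_room_version({"room_version": "99"})  # would raise
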
From 16d78be315fd71d8bdb39af53317b3f7dd1dbb32 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 17:51:15 +0100
Subject: [PATCH 114/205] make use of _simple_select_one_onecol, improved comments

---
 synapse/storage/monthly_active_users.py            | 19 +++++++++++--------
 .../schema/delta/51/monthly_active_users.sql       |  4 ++++
 2 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
index 135837507a..c28c698c6c 100644
--- a/synapse/storage/monthly_active_users.py
+++ b/synapse/storage/monthly_active_users.py
@@ -15,7 +15,7 @@
 
 from twisted.internet import defer
 
-from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
+from synapse.util.caches.descriptors import cached
 
 from ._base import SQLBaseStore
 
@@ -104,7 +104,7 @@ class MonthlyActiveUsersStore(SQLBaseStore):
             self._user_last_seen_monthly_active.invalidate((user_id,))
             self.get_monthly_active_count.invalidate(())
 
-    @cachedInlineCallbacks(num_args=1)
+    @cached(num_args=1)
     def _user_last_seen_monthly_active(self, user_id):
         """
             Checks if a given user is part of the monthly active user group
@@ -114,18 +114,16 @@ class MonthlyActiveUsersStore(SQLBaseStore):
                 int : timestamp since last seen, None if never seen
 
         """
-        result = yield self._simple_select_onecol(
+
+        return(self._simple_select_one_onecol(
             table="monthly_active_users",
             keyvalues={
                 "user_id": user_id,
            },
             retcol="timestamp",
+            allow_none=True,
             desc="_user_last_seen_monthly_active",
-        )
-        timestamp = None
-        if len(result) > 0:
-            timestamp = result[0]
-        defer.returnValue(timestamp)
+        ))
 
     @defer.inlineCallbacks
     def populate_monthly_active_users(self, user_id):
@@ -140,6 +138,11 @@ class MonthlyActiveUsersStore(SQLBaseStore):
         last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id)
         now = self.hs.get_clock().time_msec()
 
+        # We want to reduce the total number of db writes, and are happy
+        # to trade accuracy of timestamp in order to lighten load. This means
+        # we always insert new users (where the MAU threshold has not been
+        # reached), but only update if we have not previously seen the user
+        # for LAST_SEEN_GRANULARITY ms.
         if last_seen_timestamp is None:
             count = yield self.get_monthly_active_count()
             if count < self.hs.config.max_mau_value:
diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql
index f2b6d3e31e..10aac90ce1 100644
--- a/synapse/storage/schema/delta/51/monthly_active_users.sql
+++ b/synapse/storage/schema/delta/51/monthly_active_users.sql
@@ -16,6 +16,10 @@
 -- a table of monthly active users, for use where blocking based on mau limits
 CREATE TABLE monthly_active_users (
     user_id TEXT NOT NULL,
+    -- Last time we saw the user. Not guaranteed to be accurate due to rate
+    -- limiting on updates. Granularity of updates governed by
+    -- synapse.storage.monthly_active_users.LAST_SEEN_GRANULARITY.
+    -- Measured in ms since epoch.
     timestamp BIGINT NOT NULL
 );
From 1911c037cbace498c0827eb5abe6ed3f64acb671 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 18:01:46 +0100
Subject: [PATCH 115/205] update comments to reflect new sig

---
 synapse/api/auth.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index d8ebbbc6e8..91b23ff1d7 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -778,10 +778,7 @@ class Auth(object):
     def check_auth_blocking(self):
         """Checks if the user should be rejected for some external reason,
         such as monthly active user limiting or global disable flag
-        Args:
-            error (Error): The error that should be raised if user is to be
-                blocked
-        """
+        """
         if self.hs.config.limit_usage_by_mau is True:
             current_mau = yield self.store.get_monthly_active_count()
             if current_mau >= self.hs.config.max_mau_value:
From e54794f5b658992627eab5400b13199128eec0a1 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 21:55:54 +0100
Subject: [PATCH 116/205] Fix postgres compatibility bug

---
 synapse/storage/monthly_active_users.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
index c28c698c6c..abe1e6bb99 100644
--- a/synapse/storage/monthly_active_users.py
+++ b/synapse/storage/monthly_active_users.py
@@ -43,14 +43,25 @@ class MonthlyActiveUsersStore(SQLBaseStore):
             thirty_days_ago = (
                 int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
             )
-
+            # Purge stale users
             sql = "DELETE FROM monthly_active_users WHERE timestamp < ?"
-
             txn.execute(sql, (thirty_days_ago,))
+
+            # If the MAU user count still exceeds the MAU threshold, then delete
+            # on a least recently active basis.
+            # Note it is not possible to write this query using OFFSET due to
+            # incompatibilities in how sqlite and postgres support the feature.
+            # sqlite requires 'LIMIT -1 OFFSET ?' (the LIMIT must be present),
+            # while Postgres does not require 'LIMIT' but does not support
+            # negative LIMIT values. So there is no way to write it that both
+            # can support.
             sql = """
                 DELETE FROM monthly_active_users
-                ORDER BY timestamp desc
-                LIMIT -1 OFFSET ?
+                WHERE user_id NOT IN (
+                    SELECT user_id FROM monthly_active_users
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                )
                 """
             txn.execute(sql, (self.hs.config.max_mau_value,))
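
The comment in the hunk above explains why the delete is phrased with a
subquery. A quick standalone check of that query's behaviour, runnable
against an in-memory sqlite database (table contents invented for the
demo; the same SQL shape is the point of the patch):

    # Demonstrates the portable "keep only the N most recent rows" delete
    # used in reap_monthly_active_users.
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE monthly_active_users (user_id TEXT, timestamp BIGINT)"
    )
    conn.executemany(
        "INSERT INTO monthly_active_users VALUES (?, ?)",
        [("@u%d:test" % i, i) for i in range(5)],
    )
    max_mau_value = 3  # stand-in for hs.config.max_mau_value
    conn.execute(
        """
        DELETE FROM monthly_active_users
        WHERE user_id NOT IN (
            SELECT user_id FROM monthly_active_users
            ORDER BY timestamp DESC
            LIMIT ?
        )
        """,
        (max_mau_value,),
    )
    rows = conn.execute(
        "SELECT user_id FROM monthly_active_users ORDER BY timestamp"
    ).fetchall()
    print(rows)  # only the three most recently seen users remain
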
From 7daa8a78c5183a46ae462c0718939ca52fa0130f Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 22:55:05 +0100
Subject: [PATCH 117/205] load mau limit threepids

---
 synapse/config/server.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/synapse/config/server.py b/synapse/config/server.py
index 8fd2319759..cacac253ab 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -74,6 +74,9 @@ class ServerConfig(Config):
         self.max_mau_value = config.get(
             "max_mau_value", 0,
         )
+        self.mau_limits_reserved_threepids = config.get(
+            "mau_limit_reserved_threepid", []
+        )
 
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
From fbe255f9a4c46fe82a394b9efab6ad8811ae71e0 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 23:24:54 +0100
Subject: [PATCH 118/205] add default mau_limits_reserved_threepids

---
 tests/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/utils.py b/tests/utils.py
index ec40428e74..baeed5d600 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -74,6 +74,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None
         config.media_storage_providers = []
         config.auto_join_rooms = []
         config.limit_usage_by_mau = False
+        config.mau_limits_reserved_threepids = []
 
         # disable user directory updates, because they get done in the
         # background, which upsets the test runner.
From a74b25faaaf47b4696cb30207ef2eae6a3a5d8c7 Mon Sep 17 00:00:00 2001
From: Neil Johnson
Date: Mon, 6 Aug 2018 23:25:25 +0100
Subject: [PATCH 119/205] WIP building out mau reserved users

---
 synapse/storage/monthly_active_users.py | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py
index abe1e6bb99..6a37d6fc22 100644
--- a/synapse/storage/monthly_active_users.py
+++ b/synapse/storage/monthly_active_users.py
@@ -19,16 +19,30 @@ from synapse.util.caches.descriptors import cached
 
 from ._base import SQLBaseStore
 
+
 # Number of msec of granularity to store the monthly_active_user timestamp
 # This means it is not necessary to update the table on every request
 LAST_SEEN_GRANULARITY = 60 * 60 * 1000
 
 
 class MonthlyActiveUsersStore(SQLBaseStore):
+    @defer.inlineCallbacks
     def __init__(self, dbconn, hs):
         super(MonthlyActiveUsersStore, self).__init__(None, hs)
         self._clock = hs.get_clock()
         self.hs = hs
+        threepids = self.hs.config.mau_limits_reserved_threepids
+        self.reserved_user_ids = set()
+        for tp in threepids:
+            user_id = yield hs.get_datastore().get_user_id_by_threepid(
+                tp["medium"], tp["address"]
+            )
+            if user_id:
+                self.reserved_user_ids.add(user_id)
+            else:
+                logger.warning(
+                    "mau limit reserved threepid %s not found in db" % tp
+                )
 
     def reap_monthly_active_users(self):
         """
@@ -78,7 +92,7 @@ class MonthlyActiveUsersStore(SQLBaseStore):
 
     @cached(num_args=0)
     def get_monthly_active_count(self):
-        """Generates current count of monthly active users.abs
+        """Generates current count of monthly active users
 
         Returns:
             Defered[int]: Number of current monthly active users
From ca9bc1f4feaaabce48595ee2e387529f5bd365e6 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Tue, 7 Aug 2018 10:00:03 +0100
Subject: [PATCH 120/205] Fix occasional glitches in the
 synapse_event_persisted_position metric

Every so often this metric glitched to a negative number. I'm assuming it
was due to backfilled events.
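
Background for the fix below: as its own comment notes, backfilled events
are persisted with negative stream orderings, so unconditionally taking
the last event of a persisted chunk can drag the gauge below zero. A toy
model of the guard being added (values invented):

    # Toy model of the fix: only live (non-backfilled) chunks advance the
    # persisted-position gauge, since backfill uses negative orderings.
    event_persisted_position = 0

    def persist_chunk(stream_orderings, backfilled):
        global event_persisted_position
        if not backfilled:
            # mirrors synapse.metrics.event_persisted_position.set(...)
            event_persisted_position = stream_orderings[-1]

    persist_chunk([101, 102, 103], backfilled=False)
    persist_chunk([-5, -4], backfilled=True)  # must not glitch the gauge to -4
    print(event_persisted_position)  # still 103
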
--- synapse/storage/events.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index e8e5a0fe44..ce32e8fefd 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -485,9 +485,14 @@ class EventsStore(EventFederationStore, EventsWorkerStore, BackgroundUpdateStore new_forward_extremeties=new_forward_extremeties, ) persist_event_counter.inc(len(chunk)) - synapse.metrics.event_persisted_position.set( - chunk[-1][0].internal_metadata.stream_ordering, - ) + + if not backfilled: + # backfilled events have negative stream orderings, so we don't + # want to set the event_persisted_position to that. + synapse.metrics.event_persisted_position.set( + chunk[-1][0].internal_metadata.stream_ordering, + ) + for event, context in chunk: if context.app_service: origin_type = "local" From 865d07cd381214c38628cc4ad4200a5cdd45f5d3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 10:02:01 +0100 Subject: [PATCH 121/205] changelog --- changelog.d/3658.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3658.bugfix diff --git a/changelog.d/3658.bugfix b/changelog.d/3658.bugfix new file mode 100644 index 0000000000..556011a150 --- /dev/null +++ b/changelog.d/3658.bugfix @@ -0,0 +1 @@ +Fix occasional glitches in the synapse_event_persisted_position metric From 3b9662339b77b7aeafa9cf6282adfbcc13b10fed Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 7 Aug 2018 21:10:52 +1000 Subject: [PATCH 122/205] version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 1810cb6fcd..a14d578e36 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.33.1" +__version__ = "0.33.2" From cf8ef11f357ec520751ba021326c666683f9e6bd Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 7 Aug 2018 21:11:24 +1000 Subject: [PATCH 123/205] changelog --- CHANGES.md | 64 ++++++++++++++++++++++++++++++++++++++++ changelog.d/2952.bugfix | 1 - changelog.d/2970.feature | 1 - changelog.d/3331.feature | 1 - changelog.d/3350.misc | 1 - changelog.d/3367.misc | 1 - changelog.d/3384.misc | 1 - changelog.d/3391.bugfix | 1 - changelog.d/3415.misc | 0 changelog.d/3460.misc | 0 changelog.d/3514.bugfix | 1 - changelog.d/3520.bugfix | 1 - changelog.d/3543.misc | 1 - changelog.d/3548.bugfix | 1 - changelog.d/3552.misc | 1 - changelog.d/3553.feature | 1 - changelog.d/3554.feature | 1 - changelog.d/3555.feature | 1 - changelog.d/3556.feature | 1 - changelog.d/3559.misc | 1 - changelog.d/3562.misc | 0 changelog.d/3567.feature | 1 - changelog.d/3569.bugfix | 1 - changelog.d/3570.bugfix | 1 - changelog.d/3571.misc | 1 - changelog.d/3572.misc | 1 - changelog.d/3577.misc | 0 changelog.d/3579.misc | 1 - changelog.d/3581.misc | 1 - changelog.d/3582.misc | 1 - changelog.d/3584.misc | 1 - changelog.d/3585.bugfix | 1 - changelog.d/3586.misc | 1 - changelog.d/3587.misc | 1 - changelog.d/3590.misc | 1 - changelog.d/3591.misc | 1 - changelog.d/3592.misc | 1 - changelog.d/3595.misc | 1 - changelog.d/3597.feature | 1 - changelog.d/3601.bugfix | 1 - changelog.d/3604.feature | 1 - changelog.d/3605.bugfix | 1 - changelog.d/3606.misc | 1 - changelog.d/3607.bugfix | 1 - changelog.d/3609.misc | 1 - changelog.d/3610.feature | 1 - changelog.d/3612.misc | 1 - changelog.d/3613.misc | 1 - changelog.d/3614.misc | 1 - changelog.d/3616.misc | 1 - changelog.d/3621.misc | 1 - changelog.d/3626.bugfix | 1 - changelog.d/3628.misc | 1 - changelog.d/3630.feature | 1 - changelog.d/3634.misc | 1 - changelog.d/3638.misc | 1 - changelog.d/3639.feature | 1 - changelog.d/3644.misc | 1 - changelog.d/3645.misc | 1 - 59 files changed, 64 insertions(+), 54 deletions(-) delete mode 100644 changelog.d/2952.bugfix delete mode 100644 changelog.d/2970.feature delete mode 100644 changelog.d/3331.feature delete mode 100644 changelog.d/3350.misc delete mode 100644 changelog.d/3367.misc delete mode 100644 changelog.d/3384.misc delete mode 100644 changelog.d/3391.bugfix delete mode 100644 changelog.d/3415.misc delete mode 100644 changelog.d/3460.misc delete mode 100644 changelog.d/3514.bugfix delete mode 100644 changelog.d/3520.bugfix delete mode 100644 changelog.d/3543.misc delete mode 100644 changelog.d/3548.bugfix delete mode 100644 changelog.d/3552.misc delete mode 100644 changelog.d/3553.feature delete mode 100644 changelog.d/3554.feature delete mode 100644 changelog.d/3555.feature delete mode 100644 changelog.d/3556.feature delete mode 100644 changelog.d/3559.misc delete mode 100644 changelog.d/3562.misc delete mode 100644 changelog.d/3567.feature delete mode 100644 changelog.d/3569.bugfix delete mode 100644 changelog.d/3570.bugfix delete mode 100644 changelog.d/3571.misc delete mode 100644 changelog.d/3572.misc delete mode 100644 changelog.d/3577.misc delete mode 100644 changelog.d/3579.misc delete mode 100644 changelog.d/3581.misc delete mode 100644 changelog.d/3582.misc delete mode 100644 changelog.d/3584.misc delete mode 100644 changelog.d/3585.bugfix delete mode 100644 changelog.d/3586.misc delete mode 100644 changelog.d/3587.misc delete mode 100644 changelog.d/3590.misc delete mode 100644 changelog.d/3591.misc delete mode 100644 changelog.d/3592.misc delete mode 100644 
changelog.d/3595.misc delete mode 100644 changelog.d/3597.feature delete mode 100644 changelog.d/3601.bugfix delete mode 100644 changelog.d/3604.feature delete mode 100644 changelog.d/3605.bugfix delete mode 100644 changelog.d/3606.misc delete mode 100644 changelog.d/3607.bugfix delete mode 100644 changelog.d/3609.misc delete mode 100644 changelog.d/3610.feature delete mode 100644 changelog.d/3612.misc delete mode 100644 changelog.d/3613.misc delete mode 100644 changelog.d/3614.misc delete mode 100644 changelog.d/3616.misc delete mode 100644 changelog.d/3621.misc delete mode 100644 changelog.d/3626.bugfix delete mode 100644 changelog.d/3628.misc delete mode 100644 changelog.d/3630.feature delete mode 100644 changelog.d/3634.misc delete mode 100644 changelog.d/3638.misc delete mode 100644 changelog.d/3639.feature delete mode 100644 changelog.d/3644.misc delete mode 100644 changelog.d/3645.misc diff --git a/CHANGES.md b/CHANGES.md index a4c3ce31ae..83a48c8b26 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,67 @@ +Synapse 0.33.2 (2018-08-07) +=========================== + +Features +-------- + +- add support for the lazy_loaded_members filter as per MSC1227 ([\#2970](https://github.com/matrix-org/synapse/issues/2970>)) +- add support for the include_redundant_members filter param as per MSC1227 ([\#3331](https://github.com/matrix-org/synapse/issues/3331>)) +- Add metrics to track resource usage by background processes ([\#3553](https://github.com/matrix-org/synapse/issues/3553>), [\#3556](https://github.com/matrix-org/synapse/issues/3556>), [\#3604](https://github.com/matrix-org/synapse/issues/3604>), [\#3610](https://github.com/matrix-org/synapse/issues/3610>)) +- Add `code` label to `synapse_http_server_response_time_seconds` prometheus metric ([\#3554](https://github.com/matrix-org/synapse/issues/3554>)) +- Add support for client_reader to handle more APIs ([\#3555](https://github.com/matrix-org/synapse/issues/3555>), [\#3597](https://github.com/matrix-org/synapse/issues/3597>)) +- make the /context API filter & lazy-load aware as per MSC1227 ([\#3567](https://github.com/matrix-org/synapse/issues/3567>)) +- Add ability to limit number of monthly active users on the server ([\#3630](https://github.com/matrix-org/synapse/issues/3630>)) +- When we fail to join a room over federation, pass the error code back to the client. ([\#3639](https://github.com/matrix-org/synapse/issues/3639>)) + + +Bugfixes +-------- + +- Make /directory/list API return 404 for room not found instead of 400 ([\#2952](https://github.com/matrix-org/synapse/issues/2952>)) +- Default inviter_display_name to mxid for email invites ([\#3391](https://github.com/matrix-org/synapse/issues/3391>)) +- Don't generate TURN credentials if no TURN config options are set ([\#3514](https://github.com/matrix-org/synapse/issues/3514>)) +- Correctly announce deleted devices over federation ([\#3520](https://github.com/matrix-org/synapse/issues/3520>)) +- Catch failures saving metrics captured by Measure, and instead log the faulty metrics information for further analysis. ([\#3548](https://github.com/matrix-org/synapse/issues/3548>)) +- Unicode passwords are now normalised before hashing, preventing the instance where two different devices or browsers might send a different UTF-8 sequence for the password. 
([\#3569](https://github.com/matrix-org/synapse/issues/3569>)) +- Fix potential stack overflow and deadlock under heavy load ([\#3570](https://github.com/matrix-org/synapse/issues/3570>)) +- Respond with M_NOT_FOUND when profiles are not found locally or over federation. Fixes #3585 ([\#3585](https://github.com/matrix-org/synapse/issues/3585>)) +- Fix failure to persist events over federation under load ([\#3601](https://github.com/matrix-org/synapse/issues/3601>)) +- Fix updating of cached remote profiles ([\#3605](https://github.com/matrix-org/synapse/issues/3605>)) +- Fix 'tuple index out of range' error ([\#3607](https://github.com/matrix-org/synapse/issues/3607>)) +- Only import secrets when available (fix for py < 3.6) ([\#3626](https://github.com/matrix-org/synapse/issues/3626>)) + + +Internal Changes +---------------- + +- Remove redundant checks on who_forgot_in_room ([\#3350](https://github.com/matrix-org/synapse/issues/3350>)) +- Remove unnecessary event re-signing hacks ([\#3367](https://github.com/matrix-org/synapse/issues/3367>)) +- Rewrite cache list decorator ([\#3384](https://github.com/matrix-org/synapse/issues/3384>)) +- ([\#3415](https://github.com/matrix-org/synapse/issues/3415>), [\#3460](https://github.com/matrix-org/synapse/issues/3460>), [\#3562](https://github.com/matrix-org/synapse/issues/3562>), [\#3577](https://github.com/matrix-org/synapse/issues/3577>)) +- Improve Dockerfile and docker-compose instructions ([\#3543](https://github.com/matrix-org/synapse/issues/3543>)) +- Release notes are now in the Markdown format. ([\#3552](https://github.com/matrix-org/synapse/issues/3552>)) +- add config for pep8 ([\#3559](https://github.com/matrix-org/synapse/issues/3559>)) +- Merge Linearizer and Limiter ([\#3571](https://github.com/matrix-org/synapse/issues/3571>), [\#3572](https://github.com/matrix-org/synapse/issues/3572>)) +- Lazily load state on master process when using workers to reduce DB consumption ([\#3579](https://github.com/matrix-org/synapse/issues/3579>), [\#3581](https://github.com/matrix-org/synapse/issues/3581>), [\#3582](https://github.com/matrix-org/synapse/issues/3582>), [\#3584](https://github.com/matrix-org/synapse/issues/3584>)) +- Fixes and optimisations for resolve_state_groups ([\#3586](https://github.com/matrix-org/synapse/issues/3586>)) +- Improve logging for exceptions when handling PDUs ([\#3587](https://github.com/matrix-org/synapse/issues/3587>)) +- Add some measure blocks to persist_events ([\#3590](https://github.com/matrix-org/synapse/issues/3590>)) +- Fix some random logcontext leaks. ([\#3591](https://github.com/matrix-org/synapse/issues/3591>), [\#3606](https://github.com/matrix-org/synapse/issues/3606>)) +- Speed up calculating state deltas in persist_event loop ([\#3592](https://github.com/matrix-org/synapse/issues/3592>)) +- Attempt to reduce amount of state pulled out of DB during persist_events ([\#3595](https://github.com/matrix-org/synapse/issues/3595>)) +- Fix a documentation typo in on_make_leave_request ([\#3609](https://github.com/matrix-org/synapse/issues/3609>)) +- Make EventStore inherit from EventFederationStore ([\#3612](https://github.com/matrix-org/synapse/issues/3612>)) +- Remove some redundant joins on event_edges.room_id ([\#3613](https://github.com/matrix-org/synapse/issues/3613>)) +- Stop populating events.content ([\#3614](https://github.com/matrix-org/synapse/issues/3614>)) +- Update the /send_leave path registration to use event_id rather than a transaction ID. 
([\#3616](https://github.com/matrix-org/synapse/issues/3616>)) +- Refactor FederationHandler to move DB writes into separate functions ([\#3621](https://github.com/matrix-org/synapse/issues/3621>)) +- Remove unused field "pdu_failures" from transactions. ([\#3628](https://github.com/matrix-org/synapse/issues/3628>)) +- rename replication_layer to federation_client ([\#3634](https://github.com/matrix-org/synapse/issues/3634>)) +- Factor out exception handling in federation_client ([\#3638](https://github.com/matrix-org/synapse/issues/3638>)) +- Refactor location of docker build script. ([\#3644](https://github.com/matrix-org/synapse/issues/3644>)) +- Update CONTRIBUTING to mention newsfragments. ([\#3645](https://github.com/matrix-org/synapse/issues/3645>)) + + Synapse 0.33.1 (2018-08-02) =========================== diff --git a/changelog.d/2952.bugfix b/changelog.d/2952.bugfix deleted file mode 100644 index 07a3e48304..0000000000 --- a/changelog.d/2952.bugfix +++ /dev/null @@ -1 +0,0 @@ -Make /directory/list API return 404 for room not found instead of 400 diff --git a/changelog.d/2970.feature b/changelog.d/2970.feature deleted file mode 100644 index 5eb928563f..0000000000 --- a/changelog.d/2970.feature +++ /dev/null @@ -1 +0,0 @@ -add support for the lazy_loaded_members filter as per MSC1227 diff --git a/changelog.d/3331.feature b/changelog.d/3331.feature deleted file mode 100644 index e574b9bcc3..0000000000 --- a/changelog.d/3331.feature +++ /dev/null @@ -1 +0,0 @@ -add support for the include_redundant_members filter param as per MSC1227 diff --git a/changelog.d/3350.misc b/changelog.d/3350.misc deleted file mode 100644 index 3713cd6d63..0000000000 --- a/changelog.d/3350.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant checks on who_forgot_in_room \ No newline at end of file diff --git a/changelog.d/3367.misc b/changelog.d/3367.misc deleted file mode 100644 index 1f21ddea48..0000000000 --- a/changelog.d/3367.misc +++ /dev/null @@ -1 +0,0 @@ -Remove unnecessary event re-signing hacks \ No newline at end of file diff --git a/changelog.d/3384.misc b/changelog.d/3384.misc deleted file mode 100644 index 5d56c876d9..0000000000 --- a/changelog.d/3384.misc +++ /dev/null @@ -1 +0,0 @@ -Rewrite cache list decorator diff --git a/changelog.d/3391.bugfix b/changelog.d/3391.bugfix deleted file mode 100644 index 88eeb50df2..0000000000 --- a/changelog.d/3391.bugfix +++ /dev/null @@ -1 +0,0 @@ -Default inviter_display_name to mxid for email invites \ No newline at end of file diff --git a/changelog.d/3415.misc b/changelog.d/3415.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3460.misc b/changelog.d/3460.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3514.bugfix b/changelog.d/3514.bugfix deleted file mode 100644 index 460fe24ac9..0000000000 --- a/changelog.d/3514.bugfix +++ /dev/null @@ -1 +0,0 @@ -Don't generate TURN credentials if no TURN config options are set diff --git a/changelog.d/3520.bugfix b/changelog.d/3520.bugfix deleted file mode 100644 index 9278cb3708..0000000000 --- a/changelog.d/3520.bugfix +++ /dev/null @@ -1 +0,0 @@ -Correctly announce deleted devices over federation diff --git a/changelog.d/3543.misc b/changelog.d/3543.misc deleted file mode 100644 index d231d17749..0000000000 --- a/changelog.d/3543.misc +++ /dev/null @@ -1 +0,0 @@ -Improve Dockerfile and docker-compose instructions diff --git a/changelog.d/3548.bugfix b/changelog.d/3548.bugfix deleted file mode 100644 index 38dc3b1232..0000000000 --- 
a/changelog.d/3548.bugfix +++ /dev/null @@ -1 +0,0 @@ -Catch failures saving metrics captured by Measure, and instead log the faulty metrics information for further analysis. diff --git a/changelog.d/3552.misc b/changelog.d/3552.misc deleted file mode 100644 index 709c3282b4..0000000000 --- a/changelog.d/3552.misc +++ /dev/null @@ -1 +0,0 @@ -Release notes are now in the Markdown format. diff --git a/changelog.d/3553.feature b/changelog.d/3553.feature deleted file mode 100644 index 77a294cb9f..0000000000 --- a/changelog.d/3553.feature +++ /dev/null @@ -1 +0,0 @@ -Add metrics to track resource usage by background processes diff --git a/changelog.d/3554.feature b/changelog.d/3554.feature deleted file mode 100644 index b00397872c..0000000000 --- a/changelog.d/3554.feature +++ /dev/null @@ -1 +0,0 @@ -Add `code` label to `synapse_http_server_response_time_seconds` prometheus metric diff --git a/changelog.d/3555.feature b/changelog.d/3555.feature deleted file mode 100644 index ea4a85e0ae..0000000000 --- a/changelog.d/3555.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for client_reader to handle more APIs diff --git a/changelog.d/3556.feature b/changelog.d/3556.feature deleted file mode 100644 index 77a294cb9f..0000000000 --- a/changelog.d/3556.feature +++ /dev/null @@ -1 +0,0 @@ -Add metrics to track resource usage by background processes diff --git a/changelog.d/3559.misc b/changelog.d/3559.misc deleted file mode 100644 index 26df859e45..0000000000 --- a/changelog.d/3559.misc +++ /dev/null @@ -1 +0,0 @@ -add config for pep8 diff --git a/changelog.d/3562.misc b/changelog.d/3562.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3567.feature b/changelog.d/3567.feature deleted file mode 100644 index c74c1f57a9..0000000000 --- a/changelog.d/3567.feature +++ /dev/null @@ -1 +0,0 @@ -make the /context API filter & lazy-load aware as per MSC1227 diff --git a/changelog.d/3569.bugfix b/changelog.d/3569.bugfix deleted file mode 100644 index d77f035ee0..0000000000 --- a/changelog.d/3569.bugfix +++ /dev/null @@ -1 +0,0 @@ -Unicode passwords are now normalised before hashing, preventing the instance where two different devices or browsers might send a different UTF-8 sequence for the password. 
diff --git a/changelog.d/3570.bugfix b/changelog.d/3570.bugfix deleted file mode 100644 index cec5158a99..0000000000 --- a/changelog.d/3570.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix potential stack overflow and deadlock under heavy load \ No newline at end of file diff --git a/changelog.d/3571.misc b/changelog.d/3571.misc deleted file mode 100644 index 8908324e68..0000000000 --- a/changelog.d/3571.misc +++ /dev/null @@ -1 +0,0 @@ -Merge Linearizer and Limiter diff --git a/changelog.d/3572.misc b/changelog.d/3572.misc deleted file mode 100644 index 8908324e68..0000000000 --- a/changelog.d/3572.misc +++ /dev/null @@ -1 +0,0 @@ -Merge Linearizer and Limiter diff --git a/changelog.d/3577.misc b/changelog.d/3577.misc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/changelog.d/3579.misc b/changelog.d/3579.misc deleted file mode 100644 index 2374dc0c44..0000000000 --- a/changelog.d/3579.misc +++ /dev/null @@ -1 +0,0 @@ -Lazily load state on master process when using workers to reduce DB consumption diff --git a/changelog.d/3581.misc b/changelog.d/3581.misc deleted file mode 100644 index 2374dc0c44..0000000000 --- a/changelog.d/3581.misc +++ /dev/null @@ -1 +0,0 @@ -Lazily load state on master process when using workers to reduce DB consumption diff --git a/changelog.d/3582.misc b/changelog.d/3582.misc deleted file mode 100644 index 2374dc0c44..0000000000 --- a/changelog.d/3582.misc +++ /dev/null @@ -1 +0,0 @@ -Lazily load state on master process when using workers to reduce DB consumption diff --git a/changelog.d/3584.misc b/changelog.d/3584.misc deleted file mode 100644 index 2374dc0c44..0000000000 --- a/changelog.d/3584.misc +++ /dev/null @@ -1 +0,0 @@ -Lazily load state on master process when using workers to reduce DB consumption diff --git a/changelog.d/3585.bugfix b/changelog.d/3585.bugfix deleted file mode 100644 index e8ae1d8cb4..0000000000 --- a/changelog.d/3585.bugfix +++ /dev/null @@ -1 +0,0 @@ -Respond with M_NOT_FOUND when profiles are not found locally or over federation. Fixes #3585 diff --git a/changelog.d/3586.misc b/changelog.d/3586.misc deleted file mode 100644 index e853e2481b..0000000000 --- a/changelog.d/3586.misc +++ /dev/null @@ -1 +0,0 @@ -Fixes and optimisations for resolve_state_groups diff --git a/changelog.d/3587.misc b/changelog.d/3587.misc deleted file mode 100644 index 75a3479910..0000000000 --- a/changelog.d/3587.misc +++ /dev/null @@ -1 +0,0 @@ -Improve logging for exceptions when handling PDUs \ No newline at end of file diff --git a/changelog.d/3590.misc b/changelog.d/3590.misc deleted file mode 100644 index 0f1688fd0f..0000000000 --- a/changelog.d/3590.misc +++ /dev/null @@ -1 +0,0 @@ -Add some measure blocks to persist_events diff --git a/changelog.d/3591.misc b/changelog.d/3591.misc deleted file mode 100644 index f0137766a0..0000000000 --- a/changelog.d/3591.misc +++ /dev/null @@ -1 +0,0 @@ -Fix some random logcontext leaks. 
\ No newline at end of file diff --git a/changelog.d/3592.misc b/changelog.d/3592.misc deleted file mode 100644 index 60129569c2..0000000000 --- a/changelog.d/3592.misc +++ /dev/null @@ -1 +0,0 @@ -Speed up calculating state deltas in persist_event loop diff --git a/changelog.d/3595.misc b/changelog.d/3595.misc deleted file mode 100644 index 85903504cc..0000000000 --- a/changelog.d/3595.misc +++ /dev/null @@ -1 +0,0 @@ -Attempt to reduce amount of state pulled out of DB during persist_events diff --git a/changelog.d/3597.feature b/changelog.d/3597.feature deleted file mode 100644 index ea4a85e0ae..0000000000 --- a/changelog.d/3597.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for client_reader to handle more APIs diff --git a/changelog.d/3601.bugfix b/changelog.d/3601.bugfix deleted file mode 100644 index 1678b261d0..0000000000 --- a/changelog.d/3601.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix failure to persist events over federation under load diff --git a/changelog.d/3604.feature b/changelog.d/3604.feature deleted file mode 100644 index 77a294cb9f..0000000000 --- a/changelog.d/3604.feature +++ /dev/null @@ -1 +0,0 @@ -Add metrics to track resource usage by background processes diff --git a/changelog.d/3605.bugfix b/changelog.d/3605.bugfix deleted file mode 100644 index 786da546eb..0000000000 --- a/changelog.d/3605.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix updating of cached remote profiles diff --git a/changelog.d/3606.misc b/changelog.d/3606.misc deleted file mode 100644 index f0137766a0..0000000000 --- a/changelog.d/3606.misc +++ /dev/null @@ -1 +0,0 @@ -Fix some random logcontext leaks. \ No newline at end of file diff --git a/changelog.d/3607.bugfix b/changelog.d/3607.bugfix deleted file mode 100644 index 7ad64593b8..0000000000 --- a/changelog.d/3607.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix 'tuple index out of range' error \ No newline at end of file diff --git a/changelog.d/3609.misc b/changelog.d/3609.misc deleted file mode 100644 index 5b9566d076..0000000000 --- a/changelog.d/3609.misc +++ /dev/null @@ -1 +0,0 @@ -Fix a documentation typo in on_make_leave_request diff --git a/changelog.d/3610.feature b/changelog.d/3610.feature deleted file mode 100644 index 77a294cb9f..0000000000 --- a/changelog.d/3610.feature +++ /dev/null @@ -1 +0,0 @@ -Add metrics to track resource usage by background processes diff --git a/changelog.d/3612.misc b/changelog.d/3612.misc deleted file mode 100644 index f90d2f2ff5..0000000000 --- a/changelog.d/3612.misc +++ /dev/null @@ -1 +0,0 @@ -Make EventStore inherit from EventFederationStore diff --git a/changelog.d/3613.misc b/changelog.d/3613.misc deleted file mode 100644 index d9378f6b49..0000000000 --- a/changelog.d/3613.misc +++ /dev/null @@ -1 +0,0 @@ -Remove some redundant joins on event_edges.room_id diff --git a/changelog.d/3614.misc b/changelog.d/3614.misc deleted file mode 100644 index 356f28471e..0000000000 --- a/changelog.d/3614.misc +++ /dev/null @@ -1 +0,0 @@ -Stop populating events.content diff --git a/changelog.d/3616.misc b/changelog.d/3616.misc deleted file mode 100644 index 04629faa36..0000000000 --- a/changelog.d/3616.misc +++ /dev/null @@ -1 +0,0 @@ -Update the /send_leave path registration to use event_id rather than a transaction ID. 
diff --git a/changelog.d/3621.misc b/changelog.d/3621.misc deleted file mode 100644 index 83e5fc8aa1..0000000000 --- a/changelog.d/3621.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor FederationHandler to move DB writes into separate functions diff --git a/changelog.d/3626.bugfix b/changelog.d/3626.bugfix deleted file mode 100644 index 9a4b878986..0000000000 --- a/changelog.d/3626.bugfix +++ /dev/null @@ -1 +0,0 @@ -Only import secrets when available (fix for py < 3.6) diff --git a/changelog.d/3628.misc b/changelog.d/3628.misc deleted file mode 100644 index 1aebefbe18..0000000000 --- a/changelog.d/3628.misc +++ /dev/null @@ -1 +0,0 @@ -Remove unused field "pdu_failures" from transactions. diff --git a/changelog.d/3630.feature b/changelog.d/3630.feature deleted file mode 100644 index 8007a04840..0000000000 --- a/changelog.d/3630.feature +++ /dev/null @@ -1 +0,0 @@ -Add ability to limit number of monthly active users on the server diff --git a/changelog.d/3634.misc b/changelog.d/3634.misc deleted file mode 100644 index 2cd6af91ff..0000000000 --- a/changelog.d/3634.misc +++ /dev/null @@ -1 +0,0 @@ -rename replication_layer to federation_client diff --git a/changelog.d/3638.misc b/changelog.d/3638.misc deleted file mode 100644 index 9faab15cf2..0000000000 --- a/changelog.d/3638.misc +++ /dev/null @@ -1 +0,0 @@ -Factor out exception handling in federation_client diff --git a/changelog.d/3639.feature b/changelog.d/3639.feature deleted file mode 100644 index c8c387e219..0000000000 --- a/changelog.d/3639.feature +++ /dev/null @@ -1 +0,0 @@ -When we fail to join a room over federation, pass the error code back to the client. \ No newline at end of file diff --git a/changelog.d/3644.misc b/changelog.d/3644.misc deleted file mode 100644 index 2347fc8500..0000000000 --- a/changelog.d/3644.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor location of docker build script. diff --git a/changelog.d/3645.misc b/changelog.d/3645.misc deleted file mode 100644 index 0fe6b28da1..0000000000 --- a/changelog.d/3645.misc +++ /dev/null @@ -1 +0,0 @@ -Update CONTRIBUTING to mention newsfragments. From 848431be1d586a6c91a9defcc09e30e817961768 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 7 Aug 2018 21:13:22 +1000 Subject: [PATCH 124/205] fix for rc1 --- CHANGES.md | 4 ++-- synapse/__init__.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 83a48c8b26..b9caf34c0d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,5 @@ -Synapse 0.33.2 (2018-08-07) -=========================== +Synapse 0.33.2rc1 (2018-08-07) +============================== Features -------- diff --git a/synapse/__init__.py b/synapse/__init__.py index a14d578e36..807d78d79f 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.33.2" +__version__ = "0.33.2rc1" From 23cd86554ecde6fc85ef842c3768eacfb5cd6f91 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 7 Aug 2018 21:20:42 +1000 Subject: [PATCH 125/205] fix changelog --- CHANGES.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index b9caf34c0d..299780e9ec 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -12,6 +12,7 @@ Features - make the /context API filter & lazy-load aware as per MSC1227 ([\#3567](https://github.com/matrix-org/synapse/issues/3567>)) - Add ability to limit number of monthly active users on the server ([\#3630](https://github.com/matrix-org/synapse/issues/3630>)) - When we fail to join a room over federation, pass the error code back to the client. ([\#3639](https://github.com/matrix-org/synapse/issues/3639>)) +- Add a new /admin/register API for non-interactively creating users. ([\#3415](https://github.com/matrix-org/synapse/issues/3415>)) Bugfixes @@ -37,7 +38,9 @@ Internal Changes - Remove redundant checks on who_forgot_in_room ([\#3350](https://github.com/matrix-org/synapse/issues/3350>)) - Remove unnecessary event re-signing hacks ([\#3367](https://github.com/matrix-org/synapse/issues/3367>)) - Rewrite cache list decorator ([\#3384](https://github.com/matrix-org/synapse/issues/3384>)) -- ([\#3415](https://github.com/matrix-org/synapse/issues/3415>), [\#3460](https://github.com/matrix-org/synapse/issues/3460>), [\#3562](https://github.com/matrix-org/synapse/issues/3562>), [\#3577](https://github.com/matrix-org/synapse/issues/3577>)) +- Move v1-only REST APIs into their own module. ([\#3460](https://github.com/matrix-org/synapse/issues/3460>)) +- Replace more instances of Python 2-only iteritems and itervalues uses. ([\#3562](https://github.com/matrix-org/synapse/issues/3562>)) +- Refactor EventContext to accept state during init ([\#3577](https://github.com/matrix-org/synapse/issues/3577>)) - Improve Dockerfile and docker-compose instructions ([\#3543](https://github.com/matrix-org/synapse/issues/3543>)) - Release notes are now in the Markdown format. 
([\#3552](https://github.com/matrix-org/synapse/issues/3552>)) - add config for pep8 ([\#3559](https://github.com/matrix-org/synapse/issues/3559>)) From 9b92720d88f27c2961d4c0e078cfb2205f248dec Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 00:41:23 +0100 Subject: [PATCH 126/205] fix event lag graph --- contrib/grafana/synapse.json | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index 94a1de58f4..c58612594a 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -54,7 +54,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1533026624326, + "iteration": 1533598785368, "links": [ { "asDropdown": true, @@ -4629,7 +4629,7 @@ "h": 9, "w": 12, "x": 0, - "y": 11 + "y": 29 }, "id": 67, "legend": { @@ -4655,11 +4655,11 @@ "steppedLine": false, "targets": [ { - "expr": " synapse_event_persisted_position{instance=\"$instance\"} - ignoring(index, job, name) group_right(instance) synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "expr": " synapse_event_persisted_position{instance=\"$instance\",job=\"synapse\"} - ignoring(index, job, name) group_right() synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "{{job}}-{{index}}", + "legendFormat": "{{job}}-{{index}} ", "refId": "A" } ], @@ -4697,7 +4697,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -4710,7 +4714,7 @@ "h": 9, "w": 12, "x": 12, - "y": 11 + "y": 29 }, "id": 71, "legend": { @@ -4778,7 +4782,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } } ], "title": "Event processing loop positions", @@ -4957,5 +4965,5 @@ "timezone": "", "title": "Synapse", "uid": "000000012", - "version": 125 + "version": 127 } \ No newline at end of file From 3523f5432a79659ac365dc2ff1212aa1a3707d8e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 12:51:57 +0100 Subject: [PATCH 127/205] Don't expose default_room_version as config opt --- synapse/api/constants.py | 3 +++ synapse/config/server.py | 14 -------------- synapse/handlers/room.py | 3 ++- 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index a27bf3b32d..b0da506f6d 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -97,6 +97,9 @@ class ThirdPartyEntityKind(object): LOCATION = "location" +# the version we will give rooms which are created on this server +DEFAULT_ROOM_VERSION = "1" + # vdh-test-version is a placeholder to get room versioning support working and tested # until we have a working v2. 
KNOWN_ROOM_VERSIONS = {"1", "vdh-test-version"} diff --git a/synapse/config/server.py b/synapse/config/server.py index 68ef2789d3..6a471a0a5e 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -16,7 +16,6 @@ import logging -from synapse.api.constants import KNOWN_ROOM_VERSIONS from synapse.http.endpoint import parse_and_validate_server_name from ._base import Config, ConfigError @@ -76,16 +75,6 @@ class ServerConfig(Config): ) else: self.max_mau_value = 0 - - # the version of rooms created by default on this server - self.default_room_version = str(config.get( - "default_room_version", "1", - )) - if self.default_room_version not in KNOWN_ROOM_VERSIONS: - raise ConfigError("Unrecognised value '%s' for default_room_version" % ( - self.default_room_version, - )) - # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( @@ -260,9 +249,6 @@ class ServerConfig(Config): # (except those sent by local server admins). The default is False. # block_non_admin_invites: True - # The room_version of rooms which are created by default by this server. - # default_room_version: 1 - # Restrict federation to the following whitelist of domains. # N.B. we recommend also firewalling your federation listener to limit # inbound federation traffic as early as possible, rather than relying diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a526b684e9..6a17c42238 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -26,6 +26,7 @@ from six import string_types from twisted.internet import defer from synapse.api.constants import ( + DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, @@ -106,7 +107,7 @@ class RoomCreationHandler(BaseHandler): if ratelimit: yield self.ratelimit(requester) - room_version = config.get("room_version", self.hs.config.default_room_version) + room_version = config.get("room_version", DEFAULT_ROOM_VERSION) if not isinstance(room_version, string_types): raise SynapseError( 400, From e8eba2b4e3a99d35f08c96205ebb18211adddcb9 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 7 Aug 2018 17:49:43 +0100 Subject: [PATCH 128/205] implement reserved users for mau limits --- synapse/app/homeserver.py | 6 +++ synapse/config/server.py | 2 +- synapse/storage/monthly_active_users.py | 45 +++++++++++++---- tests/storage/test_monthly_active_users.py | 59 +++++++++++++++++++++- 4 files changed, 99 insertions(+), 13 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3a67db8b30..a4a65e7286 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -518,6 +518,8 @@ def run(hs): # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + + # monthly active user limiting functionality clock.looping_call( hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) @@ -530,9 +532,13 @@ def run(hs): current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) + hs.get_datastore().initialise_reserved_users( + hs.config.mau_limits_reserved_threepids + ) generate_monthly_active_users() if hs.config.limit_usage_by_mau: clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) + # End of monthly active user settings if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") diff --git a/synapse/config/server.py b/synapse/config/server.py index 
cacac253ab..114d7a9815 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -75,7 +75,7 @@ class ServerConfig(Config): "max_mau_value", 0, ) self.mau_limits_reserved_threepids = config.get( - "mau_limit_reserved_threepid", [] + "mau_limit_reserved_threepids", [] ) # FIXME: federation_domain_whitelist needs sytests diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 6a37d6fc22..168f564ed5 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging from twisted.internet import defer @@ -19,6 +20,7 @@ from synapse.util.caches.descriptors import cached from ._base import SQLBaseStore +logger = logging.getLogger(__name__) # Number of msec of granularity to store the monthly_active_user timestamp # This means it is not necessary to update the table on every request @@ -26,24 +28,31 @@ LAST_SEEN_GRANULARITY = 60 * 60 * 1000 class MonthlyActiveUsersStore(SQLBaseStore): - @defer.inlineCallbacks def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() self.hs = hs - threepids = self.hs.config.mau_limits_reserved_threepids - self.reserved_user_ids = set() + self.reserved_users = () + + @defer.inlineCallbacks + def initialise_reserved_users(self, threepids): + # TODO Why can't I do this in init? + store = self.hs.get_datastore() + reserved_user_list = [] for tp in threepids: - user_id = yield hs.get_datastore().get_user_id_by_threepid( + user_id = yield store.get_user_id_by_threepid( tp["medium"], tp["address"] ) if user_id: - self.reserved_user_ids.add(user_id) + self.upsert_monthly_active_user(user_id) + reserved_user_list.append(user_id) else: logger.warning( "mau limit reserved threepid %s not found in db" % tp ) + self.reserved_users = tuple(reserved_user_list) + @defer.inlineCallbacks def reap_monthly_active_users(self): """ Cleans out monthly active user table to ensure that no stale @@ -58,8 +67,20 @@ class MonthlyActiveUsersStore(SQLBaseStore): int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) # Purge stale users - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + + # questionmarks is a hack to overcome sqlite not supporting + # tuples in 'WHERE IN %s' + questionmarks = '?' * len(self.reserved_users) + query_args = [thirty_days_ago] + query_args.extend(self.reserved_users) + + sql = """ + DELETE FROM monthly_active_users + WHERE timestamp < ? + AND user_id NOT IN ({}) + """.format(','.join(questionmarks)) + + txn.execute(sql, query_args) # If MAU user count still exceeds the MAU threshold, then delete on # a least recently active basis. @@ -69,6 +90,8 @@ class MonthlyActiveUsersStore(SQLBaseStore): # While Postgres does not require 'LIMIT', but also does not support # negative LIMIT values. So there is no way to write it that both can # support + query_args = [self.hs.config.max_mau_value] + query_args.extend(self.reserved_users) sql = """ DELETE FROM monthly_active_users WHERE user_id NOT IN ( @@ -76,8 +99,9 @@ class MonthlyActiveUsersStore(SQLBaseStore): ORDER BY timestamp DESC LIMIT ? 
) - """ - txn.execute(sql, (self.hs.config.max_mau_value,)) + AND user_id NOT IN ({}) + """.format(','.join(questionmarks)) + txn.execute(sql, query_args) yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports @@ -136,7 +160,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): Arguments: user_id (str): user to add/update Return: - int : timestamp since last seen, None if never seen + Deferred[int] : timestamp since last seen, None if never seen """ @@ -158,7 +182,6 @@ class MonthlyActiveUsersStore(SQLBaseStore): Args: user_id(str): the user_id to query """ - if self.hs.config.limit_usage_by_mau: last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) now = self.hs.get_clock().time_msec() diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 0bfd24a7fb..cbd480cd42 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -19,6 +19,8 @@ import tests.unittest import tests.utils from tests.utils import setup_test_homeserver +FORTY_DAYS = 40 * 24 * 60 * 60 + class MonthlyActiveUsersTestCase(tests.unittest.TestCase): def __init__(self, *args, **kwargs): @@ -29,6 +31,56 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): self.hs = yield setup_test_homeserver() self.store = self.hs.get_datastore() + @defer.inlineCallbacks + def test_initialise_reserved_users(self): + + user1 = "@user1:server" + user1_email = "user1@matrix.org" + user2 = "@user2:server" + user2_email = "user2@matrix.org" + threepids = [ + {'medium': 'email', 'address': user1_email}, + {'medium': 'email', 'address': user2_email} + ] + user_num = len(threepids) + + yield self.store.register( + user_id=user1, + token="123", + password_hash=None) + + yield self.store.register( + user_id=user2, + token="456", + password_hash=None) + + now = int(self.hs.get_clock().time_msec()) + yield self.store.user_add_threepid(user1, "email", user1_email, now, now) + yield self.store.user_add_threepid(user2, "email", user2_email, now, now) + yield self.store.initialise_reserved_users(threepids) + + active_count = yield self.store.get_monthly_active_count() + + # Test total counts + self.assertEquals(active_count, user_num) + + # Test user is marked as active + + timestamp = yield self.store._user_last_seen_monthly_active(user1) + self.assertTrue(timestamp) + timestamp = yield self.store._user_last_seen_monthly_active(user2) + self.assertTrue(timestamp) + + # Test that users are never removed from the db. 
+ self.hs.config.max_mau_value = 0 + + self.hs.get_clock().advance_time(FORTY_DAYS) + + yield self.store.reap_monthly_active_users() + + active_count = yield self.store.get_monthly_active_count() + self.assertEquals(active_count, user_num) + @defer.inlineCallbacks def test_can_insert_and_count_mau(self): count = yield self.store.get_monthly_active_count() @@ -63,4 +115,9 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): self.assertTrue(count, initial_users) yield self.store.reap_monthly_active_users() count = yield self.store.get_monthly_active_count() - self.assertTrue(count, initial_users - self.hs.config.max_mau_value) + self.assertEquals(count, initial_users - self.hs.config.max_mau_value) + + self.hs.get_clock().advance_time(FORTY_DAYS) + yield self.store.reap_monthly_active_users() + count = yield self.store.get_monthly_active_count() + self.assertEquals(count, 0) From ef3589063adddd1eee89f136e6a54250cce414a1 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 7 Aug 2018 17:59:27 +0100 Subject: [PATCH 129/205] prevent total number of reserved users being too large --- synapse/storage/monthly_active_users.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 168f564ed5..54de5a8686 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -39,7 +39,9 @@ class MonthlyActiveUsersStore(SQLBaseStore): # TODO Why can't I do this in init? store = self.hs.get_datastore() reserved_user_list = [] - for tp in threepids: + + # Do not add more reserved users than the total allowable number + for tp in threepids[:self.hs.config.max_mau_value]: user_id = yield store.get_user_id_by_threepid( tp["medium"], tp["address"] ) From 53bca4690b5c94fb1506dbc628ce2ef0f770745f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 19:09:48 +0100 Subject: [PATCH 130/205] more metrics for the federation and appservice senders --- synapse/federation/transaction_queue.py | 10 +++++++++- synapse/handlers/appservice.py | 10 ++++++++++ synapse/metrics/__init__.py | 13 +++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 78f9d40a3a..f603c8a368 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -26,6 +26,8 @@ from synapse.api.errors import FederationDeniedError, HttpResponseException from synapse.handlers.presence import format_user_presence_state, get_interested_remotes from synapse.metrics import ( LaterGauge, + event_processing_loop_counter, + event_processing_loop_room_count, events_processed_counter, sent_edus_counter, sent_transactions_counter, @@ -253,7 +255,13 @@ class TransactionQueue(object): synapse.metrics.event_processing_last_ts.labels( "federation_sender").set(ts) - events_processed_counter.inc(len(events)) + events_processed_counter.inc(len(events)) + + event_processing_loop_room_count.labels( + "federation_sender" + ).inc(len(events_by_room)) + + event_processing_loop_counter.labels("federation_sender").inc() synapse.metrics.event_processing_positions.labels( "federation_sender").set(next_token) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index ee41aed69e..f0f89af7dc 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -23,6 +23,10 @@ from twisted.internet import defer import synapse from 
synapse.api.constants import EventTypes +from synapse.metrics import ( + event_processing_loop_counter, + event_processing_loop_room_count, +) from synapse.metrics.background_process_metrics import run_as_background_process from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.metrics import Measure @@ -136,6 +140,12 @@ class ApplicationServicesHandler(object): events_processed_counter.inc(len(events)) + event_processing_loop_room_count.labels( + "appservice_sender" + ).inc(len(events_by_room)) + + event_processing_loop_counter.labels("appservice_sender").inc() + synapse.metrics.event_processing_lag.labels( "appservice_sender").set(now - ts) synapse.metrics.event_processing_last_ts.labels( diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index a9158fc066..b7cb3a730a 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -174,6 +174,19 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions events_processed_counter = Counter("synapse_federation_client_events_processed", "") +event_processing_loop_counter = Counter( + "synapse_event_processing_loop", + "Event processing loop iterations", + ["name"], +) + +event_processing_loop_room_count = Counter( + "synapse_event_processing_loop_room_count", + "Rooms seen per event processing loop iteration", + ["name"], +) + + # Used to track where various components have processed in the event stream, # e.g. federation sending, appservice sending, etc. event_processing_positions = Gauge("synapse_event_processing_positions", "", ["name"]) From bab94da79cb71cc483184c2732707e40f7a708ce Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 22:11:45 +0100 Subject: [PATCH 131/205] fix metric name --- synapse/metrics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index b7cb3a730a..550f8443f7 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -175,7 +175,7 @@ sent_transactions_counter = Counter("synapse_federation_client_sent_transactions events_processed_counter = Counter("synapse_federation_client_events_processed", "") event_processing_loop_counter = Counter( - "synapse_event_processing_loop", + "synapse_event_processing_loop_count", "Event processing loop iterations", ["name"], ) From e6d73b858231567abfe47b05d3e04376c9016b1a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 22:14:35 +0100 Subject: [PATCH 132/205] changelog --- changelog.d/3664.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3664.feature diff --git a/changelog.d/3664.feature b/changelog.d/3664.feature new file mode 100644 index 0000000000..184dde9939 --- /dev/null +++ b/changelog.d/3664.feature @@ -0,0 +1 @@ +Add some metrics for the appservice and federation event sending loops From 501141763245647f41636621867ad1bacc47e6b5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Aug 2018 10:29:58 +0100 Subject: [PATCH 133/205] Fixup logging and docstrings --- synapse/replication/http/_base.py | 6 +++-- synapse/replication/http/membership.py | 36 ++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 4de3825fda..619ddab540 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -151,7 +151,7 @@ class ReplicationEndpoint(object): if e.code != 504 or 
not cls.RETRY_ON_TIMEOUT: raise - logger.warn("send_federation_events_to_master request timed out") + logger.warn("%s request timed out", cls.NAME) # If we timed out we probably don't need to worry about backing # off too much, but lets just wait a little anyway. @@ -190,7 +190,9 @@ class ReplicationEndpoint(object): http_server.register_paths(method, [pattern], handler) def _cached_handler(self, request, txn_id, **kwargs): - """Wraps `_handle_request` the responses should be cached. + """Called on new incoming requests when caching is enabled. Checks + if their is a cached response for the request and returns that, + otherwise calls `_handle_request` and caches its response. """ # We just use the txn_id here, but we probably also want to use the # other PATH_ARGS as well. diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 8ad83e8421..e58bebf12a 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -27,6 +27,16 @@ logger = logging.getLogger(__name__) class ReplicationRemoteJoinRestServlet(ReplicationEndpoint): """Does a remote join for the given user to the given room + + Request format: + + POST /_synapse/replication/remote_join/:room_id/:user_id + + { + "requester": ..., + "remote_room_hosts": [...], + "content": { ... } + } """ NAME = "remote_join" @@ -85,6 +95,15 @@ class ReplicationRemoteJoinRestServlet(ReplicationEndpoint): class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): """Rejects the invite for the user and room. + + Request format: + + POST /_synapse/replication/remote_reject_invite/:room_id/:user_id + + { + "requester": ..., + "remote_room_hosts": [...], + } """ NAME = "remote_reject_invite" @@ -153,6 +172,17 @@ class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): """Gets/creates a guest account for given 3PID. + + Request format: + + POST /_synapse/replication/get_or_register_3pid_guest/ + + { + "requester": ..., + "medium": ..., + "address": ..., + "inviter_user_id": ... + } """ NAME = "get_or_register_3pid_guest" @@ -206,6 +236,12 @@ class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): """Notifies that a user has joined or left the room + + Request format: + + POST /_synapse/replication/membership_change/:room_id/:user_id/:change + + {} """ NAME = "membership_change" From bebe325e6cdd83f7bacd8ad71f6cdd23273c88db Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Aug 2018 10:35:47 +0100 Subject: [PATCH 134/205] Rename POST param to METHOD --- synapse/replication/http/_base.py | 34 +++++++++++++++++--------- synapse/replication/http/send_event.py | 1 - 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 619ddab540..53a0fd459a 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -40,8 +40,8 @@ class ReplicationEndpoint(object): /_synapse/replication/send_event/:event_id/:txn_id - For POST requests the payload is serialized to json and sent as the body, - while for GET requests the payload is added as query parameters. See + For POST/PUT requests the payload is serialized to json and sent as the + body, while for GET requests the payload is added as query parameters. See `_serialize_payload` for details. Incoming requests are handled by overriding `_handle_request`. 
Servers @@ -55,8 +55,9 @@ class ReplicationEndpoint(object): PATH_ARGS (tuple[str]): A list of parameters to be added to the path. Adding parameters to the path (rather than payload) can make it easier to follow along in the log files. - POST (bool): True to use POST request with JSON body, or false to use - GET requests with query params. + METHOD (str): The method of the HTTP request, defaults to POST. Can be + one of POST, PUT or GET. If GET then the payload is sent as query + parameters rather than a JSON body. CACHE (bool): Whether server should cache the result of the request/ If true then transparently adds a txn_id to all requests, and `_handle_request` must return a Deferred. @@ -69,7 +70,7 @@ class ReplicationEndpoint(object): NAME = abc.abstractproperty() PATH_ARGS = abc.abstractproperty() - POST = True + METHOD = "POST" CACHE = True RETRY_ON_TIMEOUT = True @@ -80,6 +81,8 @@ class ReplicationEndpoint(object): timeout_ms=30 * 60 * 1000, ) + assert self.METHOD in ("PUT", "POST", "GET") + @abc.abstractmethod def _serialize_payload(**kwargs): """Static method that is called when creating a request. @@ -90,9 +93,9 @@ class ReplicationEndpoint(object): argument list. Returns: - Deferred[dict]|dict: If POST request then dictionary must be JSON - serialisable, otherwise must be appropriate for adding as query - args. + Deferred[dict]|dict: If POST/PUT request then dictionary must be + JSON serialisable, otherwise must be appropriate for adding as + query args. """ return {} @@ -130,10 +133,18 @@ class ReplicationEndpoint(object): txn_id = random_string(10) url_args.append(txn_id) - if cls.POST: + if cls.METHOD == "POST": request_func = client.post_json_get_json - else: + elif cls.METHOD == "PUT": + request_func = client.put_json + elif cls.METHOD == "GET": request_func = client.get_json + else: + # We have already asserted in the constructor that a + # compatible was picked, but lets be paranoid. 
+ raise Exception( + "Unknown METHOD on %s replication endpoint" % (cls.NAME,) + ) uri = "http://%s:%s/_synapse/replication/%s/%s" % ( host, port, cls.NAME, "/".join(url_args) @@ -174,8 +185,7 @@ class ReplicationEndpoint(object): url_args = list(self.PATH_ARGS) method = "GET" handler = self._handle_request - if self.POST: - method = "POST" + method = self.METHOD if self.CACHE: handler = self._cached_handler diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 50810d94cb..5b52c91650 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -47,7 +47,6 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): """ NAME = "send_event" PATH_ARGS = ("event_id",) - POST = True def __init__(self, hs): super(ReplicationSendEventRestServlet, self).__init__(hs) From 7f3d897e7af7c5227fc27355ead082055a8c0ecf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 11:46:23 +0100 Subject: [PATCH 135/205] mock config.max_mau_value --- tests/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/utils.py b/tests/utils.py index baeed5d600..e7894819c0 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] config.limit_usage_by_mau = False + config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] # disable user directory updates, because they get done in the From 360ba89c50ea5cbf824e54f04d536b89b57f3304 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Aug 2018 11:54:55 +0100 Subject: [PATCH 136/205] Don't fail requests to unbind 3pids for non supporting ID servers Older identity servers may not support the unbind 3pid request, so we shouldn't fail the requests if we received one of 400/404/501. The request still fails if we receive e.g. 500 responses, allowing clients to retry requests on transient identity server errors that otherwise do support the API. Fixes #3661 --- changelog.d/3661.bugfix | 1 + synapse/handlers/auth.py | 20 ++++++++++++++--- synapse/handlers/deactivate_account.py | 13 +++++++++-- synapse/handlers/identity.py | 30 +++++++++++++++++-------- synapse/rest/client/v1/admin.py | 11 +++++++-- synapse/rest/client/v2_alpha/account.py | 22 ++++++++++++++---- 6 files changed, 77 insertions(+), 20 deletions(-) create mode 100644 changelog.d/3661.bugfix diff --git a/changelog.d/3661.bugfix b/changelog.d/3661.bugfix new file mode 100644 index 0000000000..f2b4703d80 --- /dev/null +++ b/changelog.d/3661.bugfix @@ -0,0 +1 @@ +Fix bug on deleting 3pid when using identity servers that don't support unbind API diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 184eef09d0..da17e73fdd 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -828,12 +828,26 @@ class AuthHandler(BaseHandler): @defer.inlineCallbacks def delete_threepid(self, user_id, medium, address): + """Attempts to unbind the 3pid on the identity servers and deletes it + from the local database. + + Args: + user_id (str) + medium (str) + address (str) + + Returns: + Deferred[bool]: Returns True if successfully unbound the 3pid on + the identity server, False if identity server doesn't support the + unbind API. 
+ """ + # 'Canonicalise' email addresses as per above if medium == 'email': address = address.lower() identity_handler = self.hs.get_handlers().identity_handler - yield identity_handler.unbind_threepid( + result = yield identity_handler.try_unbind_threepid( user_id, { 'medium': medium, @@ -841,10 +855,10 @@ class AuthHandler(BaseHandler): }, ) - ret = yield self.store.user_delete_threepid( + yield self.store.user_delete_threepid( user_id, medium, address, ) - defer.returnValue(ret) + defer.returnValue(result) def _save_session(self, session): # TODO: Persistent storage diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b3c5a9ee64..b078df4a76 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -51,7 +51,8 @@ class DeactivateAccountHandler(BaseHandler): erase_data (bool): whether to GDPR-erase the user's data Returns: - Deferred + Deferred[bool]: True if identity server supports removing + threepids, otherwise False. """ # FIXME: Theoretically there is a race here wherein user resets # password using threepid. @@ -60,16 +61,22 @@ class DeactivateAccountHandler(BaseHandler): # leave the user still active so they can try again. # Ideally we would prevent password resets and then do this in the # background thread. + + # This will be set to false if the identity server doesn't support + # unbinding + identity_server_supports_unbinding = True + threepids = yield self.store.user_get_threepids(user_id) for threepid in threepids: try: - yield self._identity_handler.unbind_threepid( + result = yield self._identity_handler.try_unbind_threepid( user_id, { 'medium': threepid['medium'], 'address': threepid['address'], }, ) + identity_server_supports_unbinding &= result except Exception: # Do we want this to be a fatal error or should we carry on? 
logger.exception("Failed to remove threepid from ID server") @@ -103,6 +110,8 @@ class DeactivateAccountHandler(BaseHandler): # parts users from rooms (if it isn't already running) self._start_user_parting() + defer.returnValue(identity_server_supports_unbinding) + def _start_user_parting(self): """ Start the process that goes through the table of users diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 1d36d967c3..a0f5fecc96 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -137,15 +137,19 @@ class IdentityHandler(BaseHandler): defer.returnValue(data) @defer.inlineCallbacks - def unbind_threepid(self, mxid, threepid): - """ - Removes a binding from an identity server + def try_unbind_threepid(self, mxid, threepid): + """Removes a binding from an identity server + Args: mxid (str): Matrix user ID of binding to be removed threepid (dict): Dict with medium & address of binding to be removed + Raises: + SynapseError: If we failed to contact the identity server + Returns: - Deferred[bool]: True on success, otherwise False + Deferred[bool]: True on success, otherwise False if the identity + server doesn't support unbinding """ logger.debug("unbinding threepid %r from %s", threepid, mxid) if not self.trusted_id_servers: @@ -175,11 +179,19 @@ class IdentityHandler(BaseHandler): content=content, destination_is=id_server, ) - yield self.http_client.post_json_get_json( - url, - content, - headers, - ) + try: + yield self.http_client.post_json_get_json( + url, + content, + headers, + ) + except HttpResponseException as e: + if e.code in (400, 404, 501,): + # The remote server probably doesn't support unbinding (yet) + defer.returnValue(False) + else: + raise SynapseError(502, "Failed to contact identity server") + defer.returnValue(True) @defer.inlineCallbacks diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 80d625eecc..ad536ab570 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -391,10 +391,17 @@ class DeactivateAccountRestServlet(ClientV1RestServlet): if not is_admin: raise AuthError(403, "You are not a server admin") - yield self._deactivate_account_handler.deactivate_account( + result = yield self._deactivate_account_handler.deactivate_account( target_user_id, erase, ) - defer.returnValue((200, {})) + if result: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class ShutdownRoomRestServlet(ClientV1RestServlet): diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index eeae466d82..372648cafd 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -209,10 +209,17 @@ class DeactivateAccountRestServlet(RestServlet): yield self.auth_handler.validate_user_via_ui_auth( requester, body, self.hs.get_ip_from_request(request), ) - yield self._deactivate_account_handler.deactivate_account( + result = yield self._deactivate_account_handler.deactivate_account( requester.user.to_string(), erase, ) - defer.returnValue((200, {})) + if result: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class EmailThreepidRequestTokenRestServlet(RestServlet): @@ -364,7 +371,7 @@ class ThreepidDeleteRestServlet(RestServlet): user_id = 
requester.user.to_string() try: - yield self.auth_handler.delete_threepid( + ret = yield self.auth_handler.delete_threepid( user_id, body['medium'], body['address'] ) except Exception: @@ -374,7 +381,14 @@ class ThreepidDeleteRestServlet(RestServlet): logger.exception("Failed to remove threepid") raise SynapseError(500, "Failed to remove threepid") - defer.returnValue((200, {})) + if ret: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class WhoamiRestServlet(RestServlet): From d92675a4861eab5f5eec0e8a39a7505d5bda850d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 12:06:42 +0100 Subject: [PATCH 137/205] Ability to whitelist specific threepids against monthly active user limiting --- changelog.d/3662.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3662.feature diff --git a/changelog.d/3662.feature b/changelog.d/3662.feature new file mode 100644 index 0000000000..daacef086d --- /dev/null +++ b/changelog.d/3662.feature @@ -0,0 +1 @@ +Ability to whitelist specific threepids against monthly active user limiting From 312ae747466f7c60c0148b75446f357263330148 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 13:33:16 +0100 Subject: [PATCH 138/205] typos --- synapse/storage/monthly_active_users.py | 4 ++-- synapse/storage/schema/delta/51/monthly_active_users.sql | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index abe1e6bb99..8b3beaf26a 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -50,7 +50,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): # If MAU user count still exceeds the MAU threshold, then delete on # a least recently active basis. # Note it is not possible to write this query using OFFSET due to - # incompatibilities in how sqlite an postgres support the feature. + # incompatibilities in how sqlite and postgres support the feature. # sqlite requires 'LIMIT -1 OFFSET ?', the LIMIT must be present # While Postgres does not require 'LIMIT', but also does not support # negative LIMIT values. So there is no way to write it that both can @@ -78,7 +78,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): @cached(num_args=0) def get_monthly_active_count(self): - """Generates current count of monthly active users.abs + """Generates current count of monthly active users Returns: Defered[int]: Number of current monthly active users diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index 10aac90ce1..c9d537d5a3 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -18,7 +18,7 @@ CREATE TABLE monthly_active_users ( user_id TEXT NOT NULL, -- Last time we saw the user. Not guaranteed to be accurate due to rate limiting -- on updates, Granularity of updates governed by - -- syanpse.storage.monthly_active_users.LAST_SEEN_GRANULARITY + -- synapse.storage.monthly_active_users.LAST_SEEN_GRANULARITY -- Measured in ms since epoch. 
timestamp BIGINT NOT NULL ); From 54685d294dd15bb8b9a9928b8d6f371ae4236e25 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 15:38:54 +0100 Subject: [PATCH 139/205] Ability to disable client/server Synapse via conf toggle --- changelog.d/3655.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3655.feature diff --git a/changelog.d/3655.feature b/changelog.d/3655.feature new file mode 100644 index 0000000000..1134e549e7 --- /dev/null +++ b/changelog.d/3655.feature @@ -0,0 +1 @@ +Ability to disable client/server Synapse via conf toggle From 839a317c9627ff0d61f91504a2cfca275f31d7b2 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 17:39:04 +0100 Subject: [PATCH 140/205] fix pep8 too many lines --- synapse/api/errors.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 69e6ffb5a3..dc3bed5fcb 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -62,7 +62,6 @@ class Codes(object): INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION" - class CodeMessageException(RuntimeError): """An exception with integer code and message string attributes. From e92fb00f32c63de6ea50ba1cbbadf74060ea143d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 17:54:49 +0100 Subject: [PATCH 141/205] sync auth blocking --- synapse/handlers/sync.py | 16 +++++++++----- tests/handlers/test_sync.py | 42 +++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 5 deletions(-) create mode 100644 tests/handlers/test_sync.py diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index dff1f67dcb..f748d9afb0 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -191,6 +191,7 @@ class SyncHandler(object): self.clock = hs.get_clock() self.response_cache = ResponseCache(hs, "sync") self.state = hs.get_state_handler() + self.auth = hs.get_auth() # ExpiringCache((User, Device)) -> LruCache(state_key => event_id) self.lazy_loaded_members_cache = ExpiringCache( @@ -198,18 +199,23 @@ class SyncHandler(object): max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE, ) + @defer.inlineCallbacks def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0, full_state=False): """Get the sync for a client if we have new data for it now. Otherwise wait for new data to arrive on the server. If the timeout expires, then return an empty sync result. Returns: - A Deferred SyncResult. + Deferred[SyncResult] """ - return self.response_cache.wrap( - sync_config.request_key, - self._wait_for_sync_for_user, - sync_config, since_token, timeout, full_state, + yield self.auth.check_auth_blocking() + + defer.returnValue( + self.response_cache.wrap( + sync_config.request_key, + self._wait_for_sync_for_user, + sync_config, since_token, timeout, full_state, + ) ) @defer.inlineCallbacks diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py new file mode 100644 index 0000000000..3b1b4d4923 --- /dev/null +++ b/tests/handlers/test_sync.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +from twisted.internet import defer + +import tests.unittest +import tests.utils +from tests.utils import setup_test_homeserver +from synapse.handlers.sync import SyncHandler, SyncConfig +from synapse.types import UserID + + +class SyncTestCase(tests.unittest.TestCase): + """ Tests Sync Handler. """ + + @defer.inlineCallbacks + def setUp(self): + self.hs = yield setup_test_homeserver() + self.sync_handler = SyncHandler(self.hs) + + @defer.inlineCallbacks + def test_wait_for_sync_for_user_auth_blocking(self): + sync_config = SyncConfig( + user=UserID("@user","server"), + filter_collection=None, + is_guest=False, + request_key="request_key", + device_id="device_id", + ) + res = yield self.sync_handler.wait_for_sync_for_user(sync_config) + print res From d5c0ce4cadb4848da174bf5a5563de0ebba4790f Mon Sep 17 00:00:00 2001 From: Jeroen Date: Wed, 8 Aug 2018 19:25:01 +0200 Subject: [PATCH 142/205] updated docstring for ServerContextFactory --- synapse/crypto/context_factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 569377e653..29a75e1873 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -26,7 +26,7 @@ logger = logging.getLogger(__name__) class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming - connections and to make connections to remote servers.""" + connections.""" def __init__(self, config): self._context = SSL.Context(SSL.SSLv23_METHOD) From 2511f3f8a082585e680dad200069dec77b066a6a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 9 Aug 2018 12:22:01 +1000 Subject: [PATCH 143/205] Test fixes for Python 3 (#3647) --- changelog.d/3647.misc | 1 + tests/handlers/test_typing.py | 4 +++- tests/rest/client/test_transactions.py | 4 ++-- tests/rest/client/v1/test_admin.py | 10 +++++----- tests/rest/client/v1/test_profile.py | 8 ++++---- tests/rest/client/v1/utils.py | 10 +++++----- tests/rest/client/v2_alpha/test_filter.py | 22 ++++++++++----------- tests/rest/client/v2_alpha/test_register.py | 14 ++++++------- tests/rest/client/v2_alpha/test_sync.py | 4 ++-- tests/server.py | 22 +++++++++++++++++++-- tests/storage/test_event_federation.py | 2 +- tests/storage/test_state.py | 2 +- tests/test_server.py | 11 ++++------- tests/utils.py | 9 +++++---- 14 files changed, 71 insertions(+), 52 deletions(-) create mode 100644 changelog.d/3647.misc diff --git a/changelog.d/3647.misc b/changelog.d/3647.misc new file mode 100644 index 0000000000..dbc66dae60 --- /dev/null +++ b/changelog.d/3647.misc @@ -0,0 +1 @@ +Tests now correctly execute on Python 3. 
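The test changes that follow are mostly the standard Python 2 to Python 3 porting patterns: text is encoded to bytes before hashing or being sent as an HTTP body, exception text is read via e.args[0] now that e.message is gone, and dict views are materialised before indexing. A condensed sketch of those patterns (the function names here are illustrative, not Synapse APIs):

import hashlib
import json

def hash_password(password):
    # hashlib requires bytes on Python 3, so encode text first.
    return hashlib.md5(password.encode('utf8')).hexdigest()

def error_message(exc):
    # BaseException.message was removed in Python 3; the first
    # positional argument is the portable way to read it.
    return exc.args[0]

def first_state_group(group_ids):
    # dict.keys() returns a non-indexable view on Python 3, so
    # materialise it as a list before taking the first element.
    return list(group_ids.keys())[0]

def json_body(result):
    # HTTP response bodies are bytes; decode before parsing as JSON.
    return json.loads(result['body'].decode('utf8'))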
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 2c263af1a3..f422cf3c5a 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -48,7 +48,9 @@ def _expect_edu(destination, edu_type, content, origin="test"): def _make_edu_json(origin, edu_type, content): - return json.dumps(_expect_edu("test", edu_type, content, origin=origin)) + return json.dumps( + _expect_edu("test", edu_type, content, origin=origin) + ).encode('utf8') class TypingNotificationsTestCase(unittest.TestCase): diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index 34e68ae82f..d46c27e7e9 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -85,7 +85,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase): try: yield self.cache.fetch_or_execute(self.mock_key, cb) except Exception as e: - self.assertEqual(e.message, "boo") + self.assertEqual(e.args[0], "boo") self.assertIs(LoggingContext.current_context(), test_context) res = yield self.cache.fetch_or_execute(self.mock_key, cb) @@ -111,7 +111,7 @@ class HttpTransactionCacheTestCase(unittest.TestCase): try: yield self.cache.fetch_or_execute(self.mock_key, cb) except Exception as e: - self.assertEqual(e.message, "boo") + self.assertEqual(e.args[0], "boo") self.assertIs(LoggingContext.current_context(), test_context) res = yield self.cache.fetch_or_execute(self.mock_key, cb) diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py index 8c90145601..fb28883d30 100644 --- a/tests/rest/client/v1/test_admin.py +++ b/tests/rest/client/v1/test_admin.py @@ -140,7 +140,7 @@ class UserRegisterTestCase(unittest.TestCase): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -168,7 +168,7 @@ class UserRegisterTestCase(unittest.TestCase): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -195,7 +195,7 @@ class UserRegisterTestCase(unittest.TestCase): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -253,7 +253,7 @@ class UserRegisterTestCase(unittest.TestCase): self.assertEqual('Invalid username', channel.json_body["error"]) # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": b"abcd\x00"}) + body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"}) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -289,7 +289,7 @@ class UserRegisterTestCase(unittest.TestCase): self.assertEqual('Invalid password', channel.json_body["error"]) # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": "a", "password": b"abcd\x00"}) + body = json.dumps({"nonce": nonce(), "username": "a", "password": u"abcd\u0000"}) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index d71cc8e0db..0516ce3cfb 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -80,7 +80,7 @@ class ProfileTestCase(unittest.TestCase): (code, response) = yield self.mock_resource.trigger( "PUT", 
"/profile/%s/displayname" % (myid), - '{"displayname": "Frank Jr."}' + b'{"displayname": "Frank Jr."}' ) self.assertEquals(200, code) @@ -95,7 +95,7 @@ class ProfileTestCase(unittest.TestCase): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/displayname" % ("@4567:test"), - '{"displayname": "Frank Jr."}' + b'{"displayname": "Frank Jr."}' ) self.assertTrue( @@ -122,7 +122,7 @@ class ProfileTestCase(unittest.TestCase): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/displayname" % ("@opaque:elsewhere"), - '{"displayname":"bob"}' + b'{"displayname":"bob"}' ) self.assertTrue( @@ -151,7 +151,7 @@ class ProfileTestCase(unittest.TestCase): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/avatar_url" % (myid), - '{"avatar_url": "http://my.server/pic.gif"}' + b'{"avatar_url": "http://my.server/pic.gif"}' ) self.assertEquals(200, code) diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index 41de8e0762..e3bc5f378d 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -105,7 +105,7 @@ class RestTestCase(unittest.TestCase): "password": "test", "type": "m.login.password" })) - self.assertEquals(200, code) + self.assertEquals(200, code, msg=response) defer.returnValue(response) @defer.inlineCallbacks @@ -149,14 +149,14 @@ class RestHelper(object): def create_room_as(self, room_creator, is_public=True, tok=None): temp_id = self.auth_user_id self.auth_user_id = room_creator - path = b"/_matrix/client/r0/createRoom" + path = "/_matrix/client/r0/createRoom" content = {} if not is_public: content["visibility"] = "private" if tok: - path = path + b"?access_token=%s" % tok.encode('ascii') + path = path + "?access_token=%s" % tok - request, channel = make_request(b"POST", path, json.dumps(content).encode('utf8')) + request, channel = make_request("POST", path, json.dumps(content).encode('utf8')) request.render(self.resource) wait_until_result(self.hs.get_reactor(), channel) @@ -205,7 +205,7 @@ class RestHelper(object): data = {"membership": membership} request, channel = make_request( - b"PUT", path.encode('ascii'), json.dumps(data).encode('utf8') + "PUT", path, json.dumps(data).encode('utf8') ) request.render(self.resource) diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index e890f0feac..de33b10a5f 100644 --- a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -33,7 +33,7 @@ PATH_PREFIX = "/_matrix/client/v2_alpha" class FilterTestCase(unittest.TestCase): - USER_ID = b"@apple:test" + USER_ID = "@apple:test" EXAMPLE_FILTER = {"room": {"timeline": {"types": ["m.room.message"]}}} EXAMPLE_FILTER_JSON = b'{"room": {"timeline": {"types": ["m.room.message"]}}}' TO_REGISTER = [filter] @@ -72,8 +72,8 @@ class FilterTestCase(unittest.TestCase): def test_add_filter(self): request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID), + "POST", + "/_matrix/client/r0/user/%s/filter" % (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -87,8 +87,8 @@ class FilterTestCase(unittest.TestCase): def test_add_filter_for_other_user(self): request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (b"@watermelon:test"), + "POST", + "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -101,8 +101,8 @@ class FilterTestCase(unittest.TestCase): _is_mine = 
self.hs.is_mine self.hs.is_mine = lambda target_user: False request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID), + "POST", + "/_matrix/client/r0/user/%s/filter" % (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -119,7 +119,7 @@ class FilterTestCase(unittest.TestCase): self.clock.advance(1) filter_id = filter_id.result request, channel = make_request( - b"GET", b"/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id) + "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -129,7 +129,7 @@ class FilterTestCase(unittest.TestCase): def test_get_filter_non_existant(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -141,7 +141,7 @@ class FilterTestCase(unittest.TestCase): # in errors.py def test_get_filter_invalid_id(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -151,7 +151,7 @@ class FilterTestCase(unittest.TestCase): # No ID also returns an invalid_id error def test_get_filter_no_id(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index e004d8fc73..f6293f11a8 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -81,7 +81,7 @@ class RegisterRestServletTestCase(unittest.TestCase): "access_token": token, "home_server": self.hs.hostname, } - self.assertDictContainsSubset(det_data, json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) def test_POST_appservice_registration_invalid(self): self.appservice = None # no application service exists @@ -102,7 +102,7 @@ class RegisterRestServletTestCase(unittest.TestCase): self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Invalid password" + channel.json_body["error"], "Invalid password" ) def test_POST_bad_username(self): @@ -113,7 +113,7 @@ class RegisterRestServletTestCase(unittest.TestCase): self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Invalid username" + channel.json_body["error"], "Invalid username" ) def test_POST_user_valid(self): @@ -140,7 +140,7 @@ class RegisterRestServletTestCase(unittest.TestCase): "device_id": device_id, } self.assertEquals(channel.result["code"], b"200", channel.result) - self.assertDictContainsSubset(det_data, json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) self.auth_handler.get_login_tuple_for_user_id( user_id, device_id=device_id, initial_device_display_name=None ) @@ -158,7 +158,7 @@ class RegisterRestServletTestCase(unittest.TestCase): self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - 
json.loads(channel.result["body"])["error"], + channel.json_body["error"], "Registration has been disabled", ) @@ -178,7 +178,7 @@ class RegisterRestServletTestCase(unittest.TestCase): "device_id": "guest_device", } self.assertEquals(channel.result["code"], b"200", channel.result) - self.assertDictContainsSubset(det_data, json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) def test_POST_disabled_guest_registration(self): self.hs.config.allow_guest_access = False @@ -189,5 +189,5 @@ class RegisterRestServletTestCase(unittest.TestCase): self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Guest access is disabled" + channel.json_body["error"], "Guest access is disabled" ) diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 03ec3993b2..bafc0d1df0 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -32,7 +32,7 @@ PATH_PREFIX = "/_matrix/client/v2_alpha" class FilterTestCase(unittest.TestCase): - USER_ID = b"@apple:test" + USER_ID = "@apple:test" TO_REGISTER = [sync] def setUp(self): @@ -68,7 +68,7 @@ class FilterTestCase(unittest.TestCase): r.register_servlets(self.hs, self.resource) def test_sync_argless(self): - request, channel = make_request(b"GET", b"/_matrix/client/r0/sync") + request, channel = make_request("GET", "/_matrix/client/r0/sync") request.render(self.resource) wait_until_result(self.clock, channel) diff --git a/tests/server.py b/tests/server.py index c611dd6059..e249668d21 100644 --- a/tests/server.py +++ b/tests/server.py @@ -11,6 +11,7 @@ from twisted.python.failure import Failure from twisted.test.proto_helpers import MemoryReactorClock from synapse.http.site import SynapseRequest +from synapse.util import Clock from tests.utils import setup_test_homeserver as _sth @@ -28,7 +29,13 @@ class FakeChannel(object): def json_body(self): if not self.result: raise Exception("No result yet.") - return json.loads(self.result["body"]) + return json.loads(self.result["body"].decode('utf8')) + + @property + def code(self): + if not self.result: + raise Exception("No result yet.") + return int(self.result["code"]) def writeHeaders(self, version, code, reason, headers): self.result["version"] = version @@ -79,11 +86,16 @@ def make_request(method, path, content=b""): Make a web request using the given method and path, feed it the content, and return the Request and the Channel underneath. 
""" + if not isinstance(method, bytes): + method = method.encode('ascii') + + if not isinstance(path, bytes): + path = path.encode('ascii') # Decorate it to be the full path if not path.startswith(b"/_matrix"): path = b"/_matrix/client/r0/" + path - path = path.replace("//", "/") + path = path.replace(b"//", b"/") if isinstance(content, text_type): content = content.encode('utf8') @@ -191,3 +203,9 @@ def setup_test_homeserver(*args, **kwargs): clock.threadpool = ThreadPool() pool.threadpool = ThreadPool() return d + + +def get_clock(): + clock = ThreadedMemoryReactorClock() + hs_clock = Clock(clock) + return (clock, hs_clock) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 30683e7888..69412c5aad 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -49,7 +49,7 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase): 'INSERT INTO event_reference_hashes ' '(event_id, algorithm, hash) ' "VALUES (?, 'sha256', ?)" - ), (event_id, 'ffff')) + ), (event_id, b'ffff')) for i in range(0, 11): yield self.store.runInteraction("insert", insert_event, i) diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 7a76d67b8c..f7871cd426 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -176,7 +176,7 @@ class StateStoreTestCase(tests.unittest.TestCase): room_id = self.room.to_string() group_ids = yield self.store.get_state_groups_ids(room_id, [e5.event_id]) - group = group_ids.keys()[0] + group = list(group_ids.keys())[0] # test _get_some_state_from_cache correctly filters out members with types=[] (state_dict, is_all) = yield self.store._get_some_state_from_cache( diff --git a/tests/test_server.py b/tests/test_server.py index 7e063c0290..fc396226ea 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -1,4 +1,3 @@ -import json import re from twisted.internet.defer import Deferred @@ -104,9 +103,8 @@ class JsonResourceTests(unittest.TestCase): request.render(res) self.assertEqual(channel.result["code"], b'403') - reply_body = json.loads(channel.result["body"]) - self.assertEqual(reply_body["error"], "Forbidden!!one!") - self.assertEqual(reply_body["errcode"], "M_FORBIDDEN") + self.assertEqual(channel.json_body["error"], "Forbidden!!one!") + self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN") def test_no_handler(self): """ @@ -126,6 +124,5 @@ class JsonResourceTests(unittest.TestCase): request.render(res) self.assertEqual(channel.result["code"], b'400') - reply_body = json.loads(channel.result["body"]) - self.assertEqual(reply_body["error"], "Unrecognized request") - self.assertEqual(reply_body["errcode"], "M_UNRECOGNIZED") + self.assertEqual(channel.json_body["error"], "Unrecognized request") + self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") diff --git a/tests/utils.py b/tests/utils.py index 151db4b890..5d49692c58 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -153,8 +153,9 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None # Need to let the HS build an auth handler and then mess with it # because AuthHandler's constructor requires the HS, so we can't make one # beforehand and pass it in to the HS's constructor (chicken / egg) - hs.get_auth_handler().hash = lambda p: hashlib.md5(p).hexdigest() - hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h + hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest() + 
hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5( + p.encode('utf8')).hexdigest() == h fed = kargs.get("resource_for_federation", None) if fed: @@ -227,8 +228,8 @@ class MockHttpResource(HttpServer): mock_content.configure_mock(**config) mock_request.content = mock_content - mock_request.method = http_method - mock_request.uri = path + mock_request.method = http_method.encode('ascii') + mock_request.uri = path.encode('ascii') mock_request.getClientIP.return_value = "-" From 62564797f5f9c6b1295a98a9742ae226b87a135e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 09:56:10 +0100 Subject: [PATCH 144/205] Fixup wording and remove dead code --- changelog.d/3632.misc | 1 + synapse/replication/http/_base.py | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc index e69de29bb2..9d64bbe83b 100644 --- a/changelog.d/3632.misc +++ b/changelog.d/3632.misc @@ -0,0 +1 @@ +Refactor HTTP replication endpoints to reduce code duplication diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 53a0fd459a..5e5376cf58 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -183,7 +183,6 @@ class ReplicationEndpoint(object): """ url_args = list(self.PATH_ARGS) - method = "GET" handler = self._handle_request method = self.METHOD @@ -201,7 +200,7 @@ class ReplicationEndpoint(object): def _cached_handler(self, request, txn_id, **kwargs): """Called on new incoming requests when caching is enabled. Checks - if their is a cached response for the request and returns that, + if there is a cached response for the request and returns that, otherwise calls `_handle_request` and caches its response. """ # We just use the txn_id here, but we probably also want to use the From bf7598f582cdfbd7db0fed55afce28bcc51a4801 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:09:56 +0100 Subject: [PATCH 145/205] Log when a 3pid/unbind request fails --- synapse/handlers/identity.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index a0f5fecc96..5feb3f22a6 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -188,8 +188,10 @@ class IdentityHandler(BaseHandler): except HttpResponseException as e: if e.code in (400, 404, 501,): # The remote server probably doesn't support unbinding (yet) + logger.warn("Received %d response while unbinding threepid", e.code) defer.returnValue(False) else: + logger.error("Failed to unbind threepid on identity server: %s", e) raise SynapseError(502, "Failed to contact identity server") defer.returnValue(True) From 3d6f9fba19f0aa32a700a4ccc949ad329135eb71 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 9 Aug 2018 19:19:30 +1000 Subject: [PATCH 146/205] fix the changelog & template --- CHANGES.md | 98 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 50 insertions(+), 50 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 299780e9ec..8317e44f37 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,65 +4,65 @@ Synapse 0.33.2rc1 (2018-08-07) Features -------- -- add support for the lazy_loaded_members filter as per MSC1227 ([\#2970](https://github.com/matrix-org/synapse/issues/2970>)) -- add support for the include_redundant_members filter param as per MSC1227 ([\#3331](https://github.com/matrix-org/synapse/issues/3331>)) -- Add metrics to track resource usage by background processes
([\#3553](https://github.com/matrix-org/synapse/issues/3553>), [\#3556](https://github.com/matrix-org/synapse/issues/3556>), [\#3604](https://github.com/matrix-org/synapse/issues/3604>), [\#3610](https://github.com/matrix-org/synapse/issues/3610>)) -- Add `code` label to `synapse_http_server_response_time_seconds` prometheus metric ([\#3554](https://github.com/matrix-org/synapse/issues/3554>)) -- Add support for client_reader to handle more APIs ([\#3555](https://github.com/matrix-org/synapse/issues/3555>), [\#3597](https://github.com/matrix-org/synapse/issues/3597>)) -- make the /context API filter & lazy-load aware as per MSC1227 ([\#3567](https://github.com/matrix-org/synapse/issues/3567>)) -- Add ability to limit number of monthly active users on the server ([\#3630](https://github.com/matrix-org/synapse/issues/3630>)) -- When we fail to join a room over federation, pass the error code back to the client. ([\#3639](https://github.com/matrix-org/synapse/issues/3639>)) -- Add a new /admin/register API for non-interactively creating users. ([\#3415](https://github.com/matrix-org/synapse/issues/3415>)) +- add support for the lazy_loaded_members filter as per MSC1227 ([\#2970](https://github.com/matrix-org/synapse/issues/2970)) +- add support for the include_redundant_members filter param as per MSC1227 ([\#3331](https://github.com/matrix-org/synapse/issues/3331)) +- Add metrics to track resource usage by background processes ([\#3553](https://github.com/matrix-org/synapse/issues/3553), [\#3556](https://github.com/matrix-org/synapse/issues/3556), [\#3604](https://github.com/matrix-org/synapse/issues/3604), [\#3610](https://github.com/matrix-org/synapse/issues/3610)) +- Add `code` label to `synapse_http_server_response_time_seconds` prometheus metric ([\#3554](https://github.com/matrix-org/synapse/issues/3554)) +- Add support for client_reader to handle more APIs ([\#3555](https://github.com/matrix-org/synapse/issues/3555), [\#3597](https://github.com/matrix-org/synapse/issues/3597)) +- make the /context API filter & lazy-load aware as per MSC1227 ([\#3567](https://github.com/matrix-org/synapse/issues/3567)) +- Add ability to limit number of monthly active users on the server ([\#3630](https://github.com/matrix-org/synapse/issues/3630)) +- When we fail to join a room over federation, pass the error code back to the client. ([\#3639](https://github.com/matrix-org/synapse/issues/3639)) +- Add a new /admin/register API for non-interactively creating users. ([\#3415](https://github.com/matrix-org/synapse/issues/3415)) Bugfixes -------- -- Make /directory/list API return 404 for room not found instead of 400 ([\#2952](https://github.com/matrix-org/synapse/issues/2952>)) -- Default inviter_display_name to mxid for email invites ([\#3391](https://github.com/matrix-org/synapse/issues/3391>)) -- Don't generate TURN credentials if no TURN config options are set ([\#3514](https://github.com/matrix-org/synapse/issues/3514>)) -- Correctly announce deleted devices over federation ([\#3520](https://github.com/matrix-org/synapse/issues/3520>)) -- Catch failures saving metrics captured by Measure, and instead log the faulty metrics information for further analysis. ([\#3548](https://github.com/matrix-org/synapse/issues/3548>)) -- Unicode passwords are now normalised before hashing, preventing the instance where two different devices or browsers might send a different UTF-8 sequence for the password. 
([\#3569](https://github.com/matrix-org/synapse/issues/3569>)) -- Fix potential stack overflow and deadlock under heavy load ([\#3570](https://github.com/matrix-org/synapse/issues/3570>)) -- Respond with M_NOT_FOUND when profiles are not found locally or over federation. Fixes #3585 ([\#3585](https://github.com/matrix-org/synapse/issues/3585>)) -- Fix failure to persist events over federation under load ([\#3601](https://github.com/matrix-org/synapse/issues/3601>)) -- Fix updating of cached remote profiles ([\#3605](https://github.com/matrix-org/synapse/issues/3605>)) -- Fix 'tuple index out of range' error ([\#3607](https://github.com/matrix-org/synapse/issues/3607>)) -- Only import secrets when available (fix for py < 3.6) ([\#3626](https://github.com/matrix-org/synapse/issues/3626>)) +- Make /directory/list API return 404 for room not found instead of 400 ([\#2952](https://github.com/matrix-org/synapse/issues/2952)) +- Default inviter_display_name to mxid for email invites ([\#3391](https://github.com/matrix-org/synapse/issues/3391)) +- Don't generate TURN credentials if no TURN config options are set ([\#3514](https://github.com/matrix-org/synapse/issues/3514)) +- Correctly announce deleted devices over federation ([\#3520](https://github.com/matrix-org/synapse/issues/3520)) +- Catch failures saving metrics captured by Measure, and instead log the faulty metrics information for further analysis. ([\#3548](https://github.com/matrix-org/synapse/issues/3548)) +- Unicode passwords are now normalised before hashing, preventing the instance where two different devices or browsers might send a different UTF-8 sequence for the password. ([\#3569](https://github.com/matrix-org/synapse/issues/3569)) +- Fix potential stack overflow and deadlock under heavy load ([\#3570](https://github.com/matrix-org/synapse/issues/3570)) +- Respond with M_NOT_FOUND when profiles are not found locally or over federation. Fixes #3585 ([\#3585](https://github.com/matrix-org/synapse/issues/3585)) +- Fix failure to persist events over federation under load ([\#3601](https://github.com/matrix-org/synapse/issues/3601)) +- Fix updating of cached remote profiles ([\#3605](https://github.com/matrix-org/synapse/issues/3605)) +- Fix 'tuple index out of range' error ([\#3607](https://github.com/matrix-org/synapse/issues/3607)) +- Only import secrets when available (fix for py < 3.6) ([\#3626](https://github.com/matrix-org/synapse/issues/3626)) Internal Changes ---------------- -- Remove redundant checks on who_forgot_in_room ([\#3350](https://github.com/matrix-org/synapse/issues/3350>)) -- Remove unnecessary event re-signing hacks ([\#3367](https://github.com/matrix-org/synapse/issues/3367>)) -- Rewrite cache list decorator ([\#3384](https://github.com/matrix-org/synapse/issues/3384>)) -- Move v1-only REST APIs into their own module. ([\#3460](https://github.com/matrix-org/synapse/issues/3460>)) -- Replace more instances of Python 2-only iteritems and itervalues uses. ([\#3562](https://github.com/matrix-org/synapse/issues/3562>)) -- Refactor EventContext to accept state during init ([\#3577](https://github.com/matrix-org/synapse/issues/3577>)) -- Improve Dockerfile and docker-compose instructions ([\#3543](https://github.com/matrix-org/synapse/issues/3543>)) -- Release notes are now in the Markdown format. 
([\#3552](https://github.com/matrix-org/synapse/issues/3552>)) -- add config for pep8 ([\#3559](https://github.com/matrix-org/synapse/issues/3559>)) -- Merge Linearizer and Limiter ([\#3571](https://github.com/matrix-org/synapse/issues/3571>), [\#3572](https://github.com/matrix-org/synapse/issues/3572>)) -- Lazily load state on master process when using workers to reduce DB consumption ([\#3579](https://github.com/matrix-org/synapse/issues/3579>), [\#3581](https://github.com/matrix-org/synapse/issues/3581>), [\#3582](https://github.com/matrix-org/synapse/issues/3582>), [\#3584](https://github.com/matrix-org/synapse/issues/3584>)) -- Fixes and optimisations for resolve_state_groups ([\#3586](https://github.com/matrix-org/synapse/issues/3586>)) -- Improve logging for exceptions when handling PDUs ([\#3587](https://github.com/matrix-org/synapse/issues/3587>)) -- Add some measure blocks to persist_events ([\#3590](https://github.com/matrix-org/synapse/issues/3590>)) -- Fix some random logcontext leaks. ([\#3591](https://github.com/matrix-org/synapse/issues/3591>), [\#3606](https://github.com/matrix-org/synapse/issues/3606>)) -- Speed up calculating state deltas in persist_event loop ([\#3592](https://github.com/matrix-org/synapse/issues/3592>)) -- Attempt to reduce amount of state pulled out of DB during persist_events ([\#3595](https://github.com/matrix-org/synapse/issues/3595>)) -- Fix a documentation typo in on_make_leave_request ([\#3609](https://github.com/matrix-org/synapse/issues/3609>)) -- Make EventStore inherit from EventFederationStore ([\#3612](https://github.com/matrix-org/synapse/issues/3612>)) -- Remove some redundant joins on event_edges.room_id ([\#3613](https://github.com/matrix-org/synapse/issues/3613>)) -- Stop populating events.content ([\#3614](https://github.com/matrix-org/synapse/issues/3614>)) -- Update the /send_leave path registration to use event_id rather than a transaction ID. ([\#3616](https://github.com/matrix-org/synapse/issues/3616>)) -- Refactor FederationHandler to move DB writes into separate functions ([\#3621](https://github.com/matrix-org/synapse/issues/3621>)) -- Remove unused field "pdu_failures" from transactions. ([\#3628](https://github.com/matrix-org/synapse/issues/3628>)) -- rename replication_layer to federation_client ([\#3634](https://github.com/matrix-org/synapse/issues/3634>)) -- Factor out exception handling in federation_client ([\#3638](https://github.com/matrix-org/synapse/issues/3638>)) -- Refactor location of docker build script. ([\#3644](https://github.com/matrix-org/synapse/issues/3644>)) -- Update CONTRIBUTING to mention newsfragments. ([\#3645](https://github.com/matrix-org/synapse/issues/3645>)) +- Remove redundant checks on who_forgot_in_room ([\#3350](https://github.com/matrix-org/synapse/issues/3350)) +- Remove unnecessary event re-signing hacks ([\#3367](https://github.com/matrix-org/synapse/issues/3367)) +- Rewrite cache list decorator ([\#3384](https://github.com/matrix-org/synapse/issues/3384)) +- Move v1-only REST APIs into their own module. ([\#3460](https://github.com/matrix-org/synapse/issues/3460)) +- Replace more instances of Python 2-only iteritems and itervalues uses. ([\#3562](https://github.com/matrix-org/synapse/issues/3562)) +- Refactor EventContext to accept state during init ([\#3577](https://github.com/matrix-org/synapse/issues/3577)) +- Improve Dockerfile and docker-compose instructions ([\#3543](https://github.com/matrix-org/synapse/issues/3543)) +- Release notes are now in the Markdown format. 
([\#3552](https://github.com/matrix-org/synapse/issues/3552)) +- add config for pep8 ([\#3559](https://github.com/matrix-org/synapse/issues/3559)) +- Merge Linearizer and Limiter ([\#3571](https://github.com/matrix-org/synapse/issues/3571), [\#3572](https://github.com/matrix-org/synapse/issues/3572)) +- Lazily load state on master process when using workers to reduce DB consumption ([\#3579](https://github.com/matrix-org/synapse/issues/3579), [\#3581](https://github.com/matrix-org/synapse/issues/3581), [\#3582](https://github.com/matrix-org/synapse/issues/3582), [\#3584](https://github.com/matrix-org/synapse/issues/3584)) +- Fixes and optimisations for resolve_state_groups ([\#3586](https://github.com/matrix-org/synapse/issues/3586)) +- Improve logging for exceptions when handling PDUs ([\#3587](https://github.com/matrix-org/synapse/issues/3587)) +- Add some measure blocks to persist_events ([\#3590](https://github.com/matrix-org/synapse/issues/3590)) +- Fix some random logcontext leaks. ([\#3591](https://github.com/matrix-org/synapse/issues/3591), [\#3606](https://github.com/matrix-org/synapse/issues/3606)) +- Speed up calculating state deltas in persist_event loop ([\#3592](https://github.com/matrix-org/synapse/issues/3592)) +- Attempt to reduce amount of state pulled out of DB during persist_events ([\#3595](https://github.com/matrix-org/synapse/issues/3595)) +- Fix a documentation typo in on_make_leave_request ([\#3609](https://github.com/matrix-org/synapse/issues/3609)) +- Make EventStore inherit from EventFederationStore ([\#3612](https://github.com/matrix-org/synapse/issues/3612)) +- Remove some redundant joins on event_edges.room_id ([\#3613](https://github.com/matrix-org/synapse/issues/3613)) +- Stop populating events.content ([\#3614](https://github.com/matrix-org/synapse/issues/3614)) +- Update the /send_leave path registration to use event_id rather than a transaction ID. ([\#3616](https://github.com/matrix-org/synapse/issues/3616)) +- Refactor FederationHandler to move DB writes into separate functions ([\#3621](https://github.com/matrix-org/synapse/issues/3621)) +- Remove unused field "pdu_failures" from transactions. ([\#3628](https://github.com/matrix-org/synapse/issues/3628)) +- rename replication_layer to federation_client ([\#3634](https://github.com/matrix-org/synapse/issues/3634)) +- Factor out exception handling in federation_client ([\#3638](https://github.com/matrix-org/synapse/issues/3638)) +- Refactor location of docker build script. ([\#3644](https://github.com/matrix-org/synapse/issues/3644)) +- Update CONTRIBUTING to mention newsfragments. 
([\#3645](https://github.com/matrix-org/synapse/issues/3645)) Synapse 0.33.1 (2018-08-02) diff --git a/pyproject.toml b/pyproject.toml index f4d6f0c6bb..dd099dc9c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ package = "synapse" filename = "CHANGES.md" directory = "changelog.d" - issue_format = "[\\#{issue}](https://github.com/matrix-org/synapse/issues/{issue}>)" + issue_format = "[\\#{issue}](https://github.com/matrix-org/synapse/issues/{issue})" [[tool.towncrier.type]] directory = "feature" From dafe90a6be17f045f1e27ed8703320e6b0f1dbe8 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 9 Aug 2018 19:20:41 +1000 Subject: [PATCH 147/205] 0.33.2 --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 807d78d79f..a14d578e36 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.2rc1" +__version__ = "0.33.2" From 67dbe4c89944cf5957f806ad63f60880e2242b23 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 9 Aug 2018 19:21:07 +1000 Subject: [PATCH 148/205] 0.33.2 changelog --- CHANGES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 8317e44f37..a299110a6b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 0.33.2 (2018-08-09) +=========================== + +No significant changes. + + Synapse 0.33.2rc1 (2018-08-07) ============================== From 72d1902bbeaa029e7003aecaeaf9bc5a41d17646 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:23:49 +0100 Subject: [PATCH 149/205] Fixup doc comments --- synapse/federation/federation_server.py | 11 +++++++++++ synapse/replication/http/federation.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index d23c1cf13b..3bc26c4ab4 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -811,6 +811,13 @@ class FederationHandlerRegistry(object): class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): + """A FederationHandlerRegistry for worker processes. + + When receiving EDUs or queries it will check if an appropriate handler has + been registered on the worker; if there isn't one, it calls off to the + master process.
+ """ + def __init__(self, hs): self.config = hs.config self.http_client = hs.get_simple_http_client() @@ -822,6 +829,8 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): super(ReplicationFederationHandlerRegistry, self).__init__() def on_edu(self, edu_type, origin, content): + """Overrides FederationHandlerRegistry + """ handler = self.edu_handlers.get(edu_type) if handler: return super(ReplicationFederationHandlerRegistry, self).on_edu( @@ -835,6 +844,8 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): ) def on_query(self, query_type, args): + """Overrides FederationHandlerRegistry + """ handler = self.query_handlers.get(query_type) if handler: return handler(args) diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 3fa7bd64c7..3e6cbbf5a1 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -157,6 +157,15 @@ class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): """Handles EDUs newly received from federation, including persisting and notifying. + + Request format: + + POST /_synapse/replication/fed_send_edu/:edu_type/:txn_id + + { + "origin": ..., + "content: { ... } + } """ NAME = "fed_send_edu" @@ -196,6 +205,14 @@ class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): class ReplicationGetQueryRestServlet(ReplicationEndpoint): """Handle responding to queries from federation. + + Request format: + + POST /_synapse/replication/fed_query/:query_type + + { + "args": { ... } + } """ NAME = "fed_query" From b179537f2a51f4de52e2625939cc32eeba75cd6b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:29:48 +0100 Subject: [PATCH 150/205] Move clean_room_for_join to master --- synapse/handlers/federation.py | 16 ++++++++++-- synapse/replication/http/federation.py | 35 ++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 37d2307d0a..acabca1d25 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -50,6 +50,7 @@ from synapse.crypto.event_signing import ( ) from synapse.events.validator import EventValidator from synapse.replication.http.federation import ( + ReplicationCleanRoomRestServlet, ReplicationFederationSendEventsRestServlet, ) from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet @@ -104,6 +105,9 @@ class FederationHandler(BaseHandler): self._notify_user_membership_change = ( ReplicationUserJoinedLeftRoomRestServlet.make_client(hs) ) + self._clean_room_for_join_client = ( + ReplicationCleanRoomRestServlet.make_client(hs) + ) # When joining a room we need to queue any events for that room up self.room_queues = {} @@ -2388,8 +2392,16 @@ class FederationHandler(BaseHandler): ) def _clean_room_for_join(self, room_id): - # TODO move this out to master - return self.store.clean_room_for_join(room_id) + """Called to clean up any data in DB for a given room, ready for the + server to join the room. 
+ + Args: + room_id (str) + """ + if self.config.worker_app: + return self._clean_room_for_join_client(room_id) + else: + return self.store.clean_room_for_join(room_id) def user_joined_room(self, user, room_id): """Called when a new user has joined the room diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 3e6cbbf5a1..7b0b1cd32e 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -256,7 +256,42 @@ class ReplicationGetQueryRestServlet(ReplicationEndpoint): defer.returnValue((200, result)) +class ReplicationCleanRoomRestServlet(ReplicationEndpoint): + """Called to clean up any data in DB for a given room, ready for the + server to join the room. + + Request format: + + POST /_synapse/replication/fed_cleanup_room/:room_id/:txn_id + + {} + """ + + NAME = "fed_cleanup_room" + PATH_ARGS = ("room_id",) + + def __init__(self, hs): + super(ReplicationCleanRoomRestServlet, self).__init__(hs) + + self.store = hs.get_datastore() + + @staticmethod + def _serialize_payload(room_id, args): + """ + Args: + room_id (str) + """ + return {} + + @defer.inlineCallbacks + def _handle_request(self, request, room_id): + yield self.store.clean_room_for_join(room_id) + + defer.returnValue((200, {})) + + def register_servlets(hs, http_server): ReplicationFederationSendEventsRestServlet(hs).register(http_server) ReplicationFederationSendEduRestServlet(hs).register(http_server) ReplicationGetQueryRestServlet(hs).register(http_server) + ReplicationCleanRoomRestServlet(hs).register(http_server) From 8876ce7f77ecdf543cc23fb8333e9020791e9376 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:30:50 +0100 Subject: [PATCH 151/205] Newsfile --- changelog.d/3653.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3653.feature diff --git a/changelog.d/3653.feature b/changelog.d/3653.feature new file mode 100644 index 0000000000..6c5422994f --- /dev/null +++ b/changelog.d/3653.feature @@ -0,0 +1 @@ +Support more federation endpoints on workers From 5c6226707d2588c9f2669512adabc00687295445 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:33:55 +0100 Subject: [PATCH 152/205] Update docs/workers.rst --- docs/workers.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/workers.rst b/docs/workers.rst index c5b37c3ded..ac9efb621f 100644 --- a/docs/workers.rst +++ b/docs/workers.rst @@ -173,10 +173,23 @@ endpoints matching the following regular expressions:: ^/_matrix/federation/v1/backfill/ ^/_matrix/federation/v1/get_missing_events/ ^/_matrix/federation/v1/publicRooms + ^/_matrix/federation/v1/query/ + ^/_matrix/federation/v1/make_join/ + ^/_matrix/federation/v1/make_leave/ + ^/_matrix/federation/v1/send_join/ + ^/_matrix/federation/v1/send_leave/ + ^/_matrix/federation/v1/invite/ + ^/_matrix/federation/v1/query_auth/ + ^/_matrix/federation/v1/event_auth/ + ^/_matrix/federation/v1/exchange_third_party_invite/ + ^/_matrix/federation/v1/send/ The above endpoints should all be routed to the federation_reader worker by the reverse-proxy configuration. +The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single +instance.
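The ReplicationCleanRoomRestServlet added in patch 150 above follows the series' generic ReplicationEndpoint pattern: the subclass declares NAME and PATH_ARGS, anything not in PATH_ARGS travels in the JSON body via _serialize_payload, the real work happens in _handle_request on the master, and make_client(hs) builds the worker-side stub that POSTs to /_synapse/replication/<NAME>/... A minimal sketch of the calling side, modelled on _clean_room_for_join above (the handler class and attribute names here are illustrative, not from the Synapse tree):

    from twisted.internet import defer

    from synapse.replication.http.federation import ReplicationCleanRoomRestServlet


    class ExampleHandler(object):
        """Hypothetical handler showing the worker/master split."""

        def __init__(self, hs):
            self.config = hs.config
            self.store = hs.get_datastore()
            # Build the client stub; calling it performs an HTTP POST to
            # the master process's replication listener.
            self._clean_room_client = ReplicationCleanRoomRestServlet.make_client(hs)

        @defer.inlineCallbacks
        def clean_room(self, room_id):
            if self.config.worker_app:
                # On a worker: ask the master to do the DB write.
                yield self._clean_room_client(room_id)
            else:
                # On the master: write to the DB directly.
                yield self.store.clean_room_for_join(room_id)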
+ ``synapse.app.federation_sender`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 69ce057ea613f425d5ef6ace03d0019a8e4fdf49 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 12:26:27 +0100 Subject: [PATCH 153/205] block sync if auth checks fail --- synapse/handlers/sync.py | 12 +++++------- tests/handlers/test_sync.py | 19 +++++++++++++------ 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f748d9afb0..776ddca638 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -209,14 +209,12 @@ class SyncHandler(object): Deferred[SyncResult] """ yield self.auth.check_auth_blocking() - - defer.returnValue( - self.response_cache.wrap( - sync_config.request_key, - self._wait_for_sync_for_user, - sync_config, since_token, timeout, full_state, - ) + res = yield self.response_cache.wrap( + sync_config.request_key, + self._wait_for_sync_for_user, + sync_config, since_token, timeout, full_state, ) + defer.returnValue(res) @defer.inlineCallbacks def _wait_for_sync_for_user(self, sync_config, since_token, timeout, diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 3b1b4d4923..497e4bd933 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -14,11 +14,14 @@ # limitations under the License. from twisted.internet import defer +from synapse.api.errors import AuthError +from synapse.api.filtering import DEFAULT_FILTER_COLLECTION +from synapse.handlers.sync import SyncConfig, SyncHandler +from synapse.types import UserID + import tests.unittest import tests.utils from tests.utils import setup_test_homeserver -from synapse.handlers.sync import SyncHandler, SyncConfig -from synapse.types import UserID class SyncTestCase(tests.unittest.TestCase): @@ -32,11 +35,15 @@ class SyncTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def test_wait_for_sync_for_user_auth_blocking(self): sync_config = SyncConfig( - user=UserID("@user","server"), - filter_collection=None, + user=UserID("@user", "server"), + filter_collection=DEFAULT_FILTER_COLLECTION, is_guest=False, request_key="request_key", device_id="device_id", ) - res = yield self.sync_handler.wait_for_sync_for_user(sync_config) - print res + # Ensure that an exception is not thrown + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.hs.config.hs_disabled = True + + with self.assertRaises(AuthError): + yield self.sync_handler.wait_for_sync_for_user(sync_config) From d967653705854dac98cde06cb3de54113e704e60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 9 Aug 2018 13:21:30 +0200 Subject: [PATCH 154/205] update docker base-image to alpine 3.8 --- changelog.d/3669.misc | 1 + docker/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3669.misc diff --git a/changelog.d/3669.misc b/changelog.d/3669.misc new file mode 100644 index 0000000000..fc579ddc60 --- /dev/null +++ b/changelog.d/3669.misc @@ -0,0 +1 @@ +Update docker base image from alpine 3.7 to 3.8. 
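Patch 153 above reworks wait_for_sync_for_user so that the result of response_cache.wrap is yielded inside the function rather than handed back as an unresolved deferred; together with the up-front check_auth_blocking call, an AuthError now surfaces from this function itself, which is what the new test asserts. A minimal sketch of that shape (the function and argument names here are illustrative, assuming an auth object with check_auth_blocking() as in the patch):

    from twisted.internet import defer


    @defer.inlineCallbacks
    def guarded_request(auth, handle):
        # check_auth_blocking() raises AuthError (e.g. with HS_DISABLED or
        # MAU_LIMIT_EXCEEDED) before any real work happens.
        yield auth.check_auth_blocking()

        # Yield the deferred instead of handing it back unresolved, so
        # that failures from handle() also surface inside this function.
        result = yield handle()
        defer.returnValue(result)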
diff --git a/docker/Dockerfile b/docker/Dockerfile index 26fb3a6bff..777976217d 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/python:2-alpine3.7 +FROM docker.io/python:2-alpine3.8 RUN apk add --no-cache --virtual .nacl_deps \ build-base \ From c6b28fb4791d88e064cd88686dd32e7ed7834525 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 12:45:58 +0100 Subject: [PATCH 155/205] Where server is disabled, block ability for locked out users to read new messages --- changelog.d/3670.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3670.feature diff --git a/changelog.d/3670.feature b/changelog.d/3670.feature new file mode 100644 index 0000000000..ba00f2d2ec --- /dev/null +++ b/changelog.d/3670.feature @@ -0,0 +1 @@ +Where server is disabled, block ability for locked out users to read new messages From 09cf13089858902f3cdcb49b9f9bc3d214ba6337 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:39:12 +0100 Subject: [PATCH 156/205] only block on sync where user is not part of the mau cohort --- synapse/api/auth.py | 13 ++++++++++-- synapse/handlers/sync.py | 7 ++++++- tests/handlers/test_sync.py | 40 ++++++++++++++++++++++++++++--------- 3 files changed, 48 insertions(+), 12 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9c62ec4374..170039fc82 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -775,17 +775,26 @@ class Auth(object): ) @defer.inlineCallbacks - def check_auth_blocking(self): + def check_auth_blocking(self, user_id=None): """Checks if the user should be rejected for some external reason, such as monthly active user limiting or global disable flag + + Args: + user_id(str): If present, checks for presence against existing MAU cohort """ if self.hs.config.hs_disabled: raise AuthError( 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED ) if self.hs.config.limit_usage_by_mau is True: + # If the user is already part of the MAU cohort + if user_id: + timestamp = yield self.store._user_last_seen_monthly_active(user_id) + if timestamp: + return + # Else if there is no room in the MAU bucket, bail current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 776ddca638..d3b26a4106 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -208,7 +208,12 @@ class SyncHandler(object): Returns: Deferred[SyncResult] """ - yield self.auth.check_auth_blocking() + # If the user is not part of the mau group, then check that limits have + # not been exceeded (if not part of the group by this point, almost certain + # auth_blocking will occur) + user_id = sync_config.user.to_string() + yield self.auth.check_auth_blocking(user_id) + res = yield self.response_cache.wrap( sync_config.request_key, self._wait_for_sync_for_user, diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 497e4bd933..b95a8743a7 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from twisted.internet import defer +from synapse.api.errors import AuthError, Codes -from synapse.api.errors import AuthError from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.handlers.sync import SyncConfig, SyncHandler from synapse.types import UserID @@ -31,19 +31,41 @@ class SyncTestCase(tests.unittest.TestCase): def setUp(self): self.hs = yield setup_test_homeserver() self.sync_handler = SyncHandler(self.hs) + self.store = self.hs.get_datastore() @defer.inlineCallbacks def test_wait_for_sync_for_user_auth_blocking(self): - sync_config = SyncConfig( - user=UserID("@user", "server"), + + user_id1 = "@user1:server" + user_id2 = "@user2:server" + sync_config = self._generate_sync_config(user_id1) + + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 1 + + # Check that the happy case does not throw errors + yield self.store.upsert_monthly_active_user(user_id1) + yield self.sync_handler.wait_for_sync_for_user(sync_config) + + # Test that global lock works + self.hs.config.hs_disabled = True + with self.assertRaises(AuthError) as e: + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + + self.hs.config.hs_disabled = False + + sync_config = self._generate_sync_config(user_id2) + + with self.assertRaises(AuthError) as e: + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + + def _generate_sync_config(self, user_id): + return SyncConfig( + user=UserID(user_id.split(":")[0][1:], user_id.split(":")[1]), filter_collection=DEFAULT_FILTER_COLLECTION, is_guest=False, request_key="request_key", device_id="device_id", ) - # Ensure that an exception is not thrown - yield self.sync_handler.wait_for_sync_for_user(sync_config) - self.hs.config.hs_disabled = True - - with self.assertRaises(AuthError): - yield self.sync_handler.wait_for_sync_for_user(sync_config) From 04df7142598b531e8e400611e3f92b21afeabab6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:41:52 +0100 Subject: [PATCH 157/205] fix imports --- tests/handlers/test_sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index b95a8743a7..cfd37f3138 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from twisted.internet import defer -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError, Codes from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.handlers.sync import SyncConfig, SyncHandler from synapse.types import UserID From c1f9dec92ac8abaa693cc591438087d8282c6844 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:43:26 +0100 Subject: [PATCH 158/205] fix errant parenthesis --- synapse/api/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 170039fc82..df6022ff69 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -797,4 +797,4 @@ class Auth(object): if current_mau >= self.hs.config.max_mau_value: raise AuthError( 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + ) From 885ea9c602ca4002385a6d73c94c35e0958de809 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 18:02:12 +0100 Subject: [PATCH 159/205] rename _user_last_seen_monthly_active --- synapse/api/auth.py | 2 +- synapse/storage/monthly_active_users.py | 10 +++++----- tests/storage/test_client_ips.py | 12 ++++++------ tests/storage/test_monthly_active_users.py | 13 +++++++------ 4 files changed, 19 insertions(+), 18 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index df6022ff69..c31c6a6a08 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -789,7 +789,7 @@ class Auth(object): if self.hs.config.limit_usage_by_mau is True: # If the user is already part of the MAU cohort if user_id: - timestamp = yield self.store._user_last_seen_monthly_active(user_id) + timestamp = yield self.store.user_last_seen_monthly_active(user_id) if timestamp: return # Else if there is no room in the MAU bucket, bail diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index d47dcef3a0..07211432af 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -113,7 +113,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): # is racy. 
# Have resolved to invalidate the whole cache for now and do # something about it if and when the perf becomes significant - self._user_last_seen_monthly_active.invalidate_all() + self.user_last_seen_monthly_active.invalidate_all() self.get_monthly_active_count.invalidate_all() @cached(num_args=0) @@ -152,11 +152,11 @@ class MonthlyActiveUsersStore(SQLBaseStore): lock=False, ) if is_insert: - self._user_last_seen_monthly_active.invalidate((user_id,)) + self.user_last_seen_monthly_active.invalidate((user_id,)) self.get_monthly_active_count.invalidate(()) @cached(num_args=1) - def _user_last_seen_monthly_active(self, user_id): + def user_last_seen_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group Arguments: @@ -173,7 +173,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): }, retcol="timestamp", allow_none=True, - desc="_user_last_seen_monthly_active", + desc="user_last_seen_monthly_active", )) @defer.inlineCallbacks @@ -185,7 +185,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): user_id(str): the user_id to query """ if self.hs.config.limit_usage_by_mau: - last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) + last_seen_timestamp = yield self.user_last_seen_monthly_active(user_id) now = self.hs.get_clock().time_msec() # We want to reduce to the total number of db writes, and are happy diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 7a58c6eb24..6d2bb3058e 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -64,7 +64,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -80,7 +80,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -88,13 +88,13 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertTrue(active) @defer.inlineCallbacks @@ -103,7 +103,7 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( @@ -112,5 +112,5 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertTrue(active) diff --git 
a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index cbd480cd42..be74c30218 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -66,9 +66,9 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): # Test user is marked as active - timestamp = yield self.store._user_last_seen_monthly_active(user1) + timestamp = yield self.store.user_last_seen_monthly_active(user1) self.assertTrue(timestamp) - timestamp = yield self.store._user_last_seen_monthly_active(user2) + timestamp = yield self.store.user_last_seen_monthly_active(user2) self.assertTrue(timestamp) # Test that users are never removed from the db. @@ -92,17 +92,18 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): self.assertEqual(1, count) @defer.inlineCallbacks - def test__user_last_seen_monthly_active(self): + def test_user_last_seen_monthly_active(self): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.store._user_last_seen_monthly_active(user_id1) + + result = yield self.store.user_last_seen_monthly_active(user_id1) self.assertFalse(result == 0) yield self.store.upsert_monthly_active_user(user_id1) yield self.store.upsert_monthly_active_user(user_id2) - result = yield self.store._user_last_seen_monthly_active(user_id1) + result = yield self.store.user_last_seen_monthly_active(user_id1) self.assertTrue(result > 0) - result = yield self.store._user_last_seen_monthly_active(user_id3) + result = yield self.store.user_last_seen_monthly_active(user_id3) self.assertFalse(result == 0) @defer.inlineCallbacks From 64899341dc8988b40b86dd49034aae27ddcb8c44 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Thu, 9 Aug 2018 21:04:22 +0200 Subject: [PATCH 160/205] include private functions from twisted --- synapse/crypto/context_factory.py | 37 +++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 29a75e1873..08c41a92b6 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -16,10 +16,10 @@ import logging from zope.interface import implementer from OpenSSL import SSL, crypto -from twisted.internet._idna import _idnaBytes -from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors +from twisted.internet._sslverify import _defaultCurveName from twisted.internet.interfaces import IOpenSSLClientConnectionCreator from twisted.internet.ssl import CertificateOptions, ContextFactory +from twisted.python.failure import Failure logger = logging.getLogger(__name__) @@ -53,6 +53,39 @@ class ServerContextFactory(ContextFactory): return self._context +def _idnaBytes(text): + """ + Convert some text typed by a human into some ASCII bytes. This is a + copy of twisted.internet._idna._idnaBytes. For documentation, see the + twisted documentation. + """ + try: + import idna + except ImportError: + return text.encode("idna") + else: + return idna.encode(text) + + +def _tolerateErrors(wrapped): + """ + Wrap up an info_callback for pyOpenSSL so that if something goes wrong + the error is immediately logged and the connection is dropped if possible. + This is a copy of twisted.internet._sslverify._tolerateErrors. For + documentation, see the twisted documentation. 
+ """ + + def infoCallback(connection, where, ret): + try: + return wrapped(connection, where, ret) + except: # noqa: E722, taken from the twisted implementation + f = Failure() + logger.exception("Error during info_callback") + connection.get_app_data().failVerification(f) + + return infoCallback + + @implementer(IOpenSSLClientConnectionCreator) class ClientTLSOptions(object): """ From 2e9c73e8cab5af901957f5809f3be415d6fc0908 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Thu, 9 Aug 2018 21:31:26 +0200 Subject: [PATCH 161/205] more generic conversion of str/bytes to unicode --- synapse/crypto/context_factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 08c41a92b6..1a391adec1 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -123,6 +123,6 @@ class ClientTLSOptionsFactory(object): def get_options(self, host): return ClientTLSOptions( - unicode(host), + host.decode('utf-8'), CertificateOptions(verify=False).getContext() ) From 638d35ef082cb7f24d43fcb585c8bae52645bd6b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Aug 2018 10:59:09 +0100 Subject: [PATCH 162/205] Fix linearizer cancellation on twisted < 18.7 Turns out that cancellation of inlineDeferreds didn't really work properly until Twisted 18.7. This commit refactors Linearizer.queue to avoid inlineCallbacks. --- synapse/util/async.py | 111 ++++++++++++++++++++++++++---------------- 1 file changed, 68 insertions(+), 43 deletions(-) diff --git a/synapse/util/async.py b/synapse/util/async.py index a7094e2fb4..9b3f2f4b96 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -188,62 +188,30 @@ class Linearizer(object): # things blocked from executing. self.key_to_defer = {} - @defer.inlineCallbacks def queue(self, key): + # we avoid doing defer.inlineCallbacks here, so that cancellation works correctly. + # (https://twistedmatrix.com/trac/ticket/4632 meant that cancellations were not + # propagated inside inlineCallbacks until Twisted 18.7) entry = self.key_to_defer.setdefault(key, [0, collections.OrderedDict()]) # If the number of things executing is greater than the maximum # then add a deferred to the list of blocked items - # When on of the things currently executing finishes it will callback + # When one of the things currently executing finishes it will callback # this item so that it can continue executing. if entry[0] >= self.max_count: - new_defer = defer.Deferred() - entry[1][new_defer] = 1 - - logger.info( - "Waiting to acquire linearizer lock %r for key %r", self.name, key, - ) - try: - yield make_deferred_yieldable(new_defer) - except Exception as e: - if isinstance(e, CancelledError): - logger.info( - "Cancelling wait for linearizer lock %r for key %r", - self.name, key, - ) - else: - logger.warn( - "Unexpected exception waiting for linearizer lock %r for key %r", - self.name, key, - ) - - # we just have to take ourselves back out of the queue. - del entry[1][new_defer] - raise - - logger.info("Acquired linearizer lock %r for key %r", self.name, key) - entry[0] += 1 - - # if the code holding the lock completes synchronously, then it - # will recursively run the next claimant on the list. That can - # relatively rapidly lead to stack exhaustion. This is essentially - # the same problem as http://twistedmatrix.com/trac/ticket/9304. 
- # - # In order to break the cycle, we add a cheeky sleep(0) here to - # ensure that we fall back to the reactor between each iteration. - # - # (This needs to happen while we hold the lock, and the context manager's exit - # code must be synchronous, so this is the only sensible place.) - yield self._clock.sleep(0) - + res = self._await_lock(key) else: logger.info( "Acquired uncontended linearizer lock %r for key %r", self.name, key, ) entry[0] += 1 + res = defer.succeed(None) + + # once we successfully get the lock, we need to return a context manager which + # will release the lock. @contextmanager - def _ctx_manager(): + def _ctx_manager(_): try: yield finally: @@ -264,7 +232,64 @@ class Linearizer(object): # map. del self.key_to_defer[key] - defer.returnValue(_ctx_manager()) + res.addCallback(_ctx_manager) + return res + + def _await_lock(self, key): + """Helper for queue: adds a deferred to the queue + + Assumes that we've already checked that we've reached the limit of the number + of lock-holders we allow. Creates a new deferred which is added to the list, and + adds some management around cancellations. + + Returns the deferred, which will callback once we have secured the lock. + + """ + entry = self.key_to_defer[key] + + logger.info( + "Waiting to acquire linearizer lock %r for key %r", self.name, key, + ) + + new_defer = make_deferred_yieldable(defer.Deferred()) + entry[1][new_defer] = 1 + + def cb(_r): + logger.info("Acquired linearizer lock %r for key %r", self.name, key) + entry[0] += 1 + + # if the code holding the lock completes synchronously, then it + # will recursively run the next claimant on the list. That can + # relatively rapidly lead to stack exhaustion. This is essentially + # the same problem as http://twistedmatrix.com/trac/ticket/9304. + # + # In order to break the cycle, we add a cheeky sleep(0) here to + # ensure that we fall back to the reactor between each iteration. + # + # (This needs to happen while we hold the lock, and the context manager's exit + # code must be synchronous, so this is the only sensible place.) + return self._clock.sleep(0) + + def eb(e): + logger.info("defer %r got err %r", new_defer, e) + if isinstance(e, CancelledError): + logger.info( + "Cancelling wait for linearizer lock %r for key %r", + self.name, key, + ) + + else: + logger.warn( + "Unexpected exception waiting for linearizer lock %r for key %r", + self.name, key, + ) + + # we just have to take ourselves back out of the queue. + del entry[1][new_defer] + return e + + new_defer.addCallbacks(cb, eb) + return new_defer class ReadWriteLock(object): From 178ab76ac0e804ad519800f270e121b241139246 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Aug 2018 11:05:23 +0100 Subject: [PATCH 163/205] changelog --- changelog.d/3676.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3676.bugfix diff --git a/changelog.d/3676.bugfix b/changelog.d/3676.bugfix new file mode 100644 index 0000000000..7b23a2773a --- /dev/null +++ b/changelog.d/3676.bugfix @@ -0,0 +1 @@ +Make the tests pass on Twisted < 18.7.0 From c75b71a397d91c013a65a7f57d2c13199cf4c204 Mon Sep 17 00:00:00 2001 From: Andrej Shadura Date: Fri, 10 Aug 2018 13:22:39 +0200 Subject: [PATCH 164/205] Use recaptcha_ajax.js directly from Google The script recaptcha_ajax.js contains minified non-free code which we probably cannot redistribute. Since it talks to Google servers anyway, it is better to just download it from Google directly instead of shipping it. This fixes #1932. 
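The Linearizer refactor in patch 162 above leaves the call-site contract untouched: queue(key) still returns a deferred that, once the lock is held, fires with a context manager that releases it, while making cancellation of a queued waiter behave on Twisted < 18.7 because queue no longer relies on inlineCallbacks. A short usage sketch under those assumptions (the key and the body of the critical section are illustrative; the name kwarg is inferred from the log messages in the patch):

    from twisted.internet import defer

    from synapse.util.async import Linearizer

    linearizer = Linearizer(name="example")


    @defer.inlineCallbacks
    def serialised_write(room_id, do_write):
        # queue() resolves once we hold the lock for room_id; exiting the
        # context manager releases it and wakes the next waiter.
        with (yield linearizer.queue(room_id)):
            yield do_write(room_id)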
--- changelog.d/1932.bugfix | 1 + synapse/static/client/register/index.html | 2 +- .../client/register/js/recaptcha_ajax.js | 195 ------------------ 3 files changed, 2 insertions(+), 196 deletions(-) create mode 100644 changelog.d/1932.bugfix delete mode 100644 synapse/static/client/register/js/recaptcha_ajax.js diff --git a/changelog.d/1932.bugfix b/changelog.d/1932.bugfix new file mode 100644 index 0000000000..caa551627b --- /dev/null +++ b/changelog.d/1932.bugfix @@ -0,0 +1 @@ +Don’t ship recaptcha_ajax.js, use it directly from Google diff --git a/synapse/static/client/register/index.html b/synapse/static/client/register/index.html index 600b3ee41e..886f2edd1f 100644 --- a/synapse/static/client/register/index.html +++ b/synapse/static/client/register/index.html @@ -4,7 +4,7 @@ - + diff --git a/synapse/static/client/register/js/recaptcha_ajax.js b/synapse/static/client/register/js/recaptcha_ajax.js deleted file mode 100644 index d0e71e5b88..0000000000 --- a/synapse/static/client/register/js/recaptcha_ajax.js +++ /dev/null @@ -1,195 +0,0 @@ -(function(){var h,k=this,l=function(a){return void 0!==a},ba=function(){},n=function(a){var b=typeof a;if("object"==b)if(a){if(a instanceof Array)return"array";if(a instanceof Object)return b;var c=Object.prototype.toString.call(a);if("[object Window]"==c)return"object";if("[object Array]"==c||"number"==typeof a.length&&"undefined"!=typeof a.splice&&"undefined"!=typeof a.propertyIsEnumerable&&!a.propertyIsEnumerable("splice"))return"array";if("[object Function]"==c||"undefined"!=typeof a.call&&"undefined"!=typeof a.propertyIsEnumerable&& -!a.propertyIsEnumerable("call"))return"function"}else return"null";else if("function"==b&&"undefined"==typeof a.call)return"object";return b},p=function(a){return"array"==n(a)},ca=function(a){var b=n(a);return"array"==b||"object"==b&&"number"==typeof a.length},q=function(a){return"string"==typeof a},r=function(a){return"function"==n(a)},da=function(a){var b=typeof a;return"object"==b&&null!=a||"function"==b},ea=function(a,b,c){return a.call.apply(a.bind,arguments)},fa=function(a,b,c){if(!a)throw Error(); -if(2
 [195 deleted lines of minified third-party JavaScript: Google's legacy
 reCAPTCHA v1 client library (recaptcha_ajax.js). The blob bundles
 Closure-library runtime helpers, the widget's inline CSS themes and HTML
 templates ("CleanCss", "CleanHtml", "VertCss", the AdChoices overlay), and
 the widget's UI strings ("Get a visual challenge", "Type the text:",
 "Privacy & Terms", ...) translated into roughly sixty locales, from af to zu.]