From 3d605853c8e649ab4b3f91fb0a32cc77ef05d71f Mon Sep 17 00:00:00 2001 From: Jeroen Date: Sun, 24 Jun 2018 22:38:43 +0200 Subject: [PATCH 001/173] send SNI for federation requests --- synapse/app/client_reader.py | 2 ++ synapse/app/event_creator.py | 2 ++ synapse/app/federation_reader.py | 2 ++ synapse/app/federation_sender.py | 2 ++ synapse/app/frontend_proxy.py | 2 ++ synapse/app/homeserver.py | 2 ++ synapse/app/media_repository.py | 2 ++ synapse/app/user_dir.py | 2 ++ synapse/config/tls.py | 9 +++++++ synapse/crypto/context_factory.py | 34 +++++++++++++++++++++++++- synapse/crypto/keyclient.py | 4 +-- synapse/crypto/keyring.py | 4 +-- synapse/http/endpoint.py | 11 ++++----- synapse/http/matrixfederationclient.py | 4 +-- tests/utils.py | 2 ++ 15 files changed, 71 insertions(+), 13 deletions(-) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index 654ddb8414d5..1c885e321cdb 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -153,11 +153,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = ClientReaderServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 441467093a4b..ccbdd8b2df75 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -171,11 +171,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = EventCreatorServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index b2415cc6712f..7fd20322f4ec 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -142,11 +142,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = FederationReaderServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 13d2b7005310..67dffaccbb03 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -185,11 +185,13 @@ def start(config_options): config.send_federation = True tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ps = FederationSenderServer( config.server_name, db_config=config.database_config, 
tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index d2bae4ad0316..d5638087a0e8 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -209,11 +209,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = FrontendProxyServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index ae5fc751d550..d48d68bc7eaa 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -321,6 +321,7 @@ def setup(config_options): events.USE_FROZEN_DICTS = config.use_frozen_dicts tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) database_engine = create_engine(config.database_config) config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection @@ -329,6 +330,7 @@ def setup(config_options): config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 19a682cce30f..cc3f31a4384e 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -156,11 +156,13 @@ def start(config_options): database_engine = create_engine(config.database_config) tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ss = MediaRepositoryServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index f5726e3df6eb..51e7917ce248 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -213,11 +213,13 @@ def start(config_options): config.update_user_directory = True tls_server_context_factory = context_factory.ServerContextFactory(config) + tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config) ps = UserDirectoryServer( config.server_name, db_config=config.database_config, tls_server_context_factory=tls_server_context_factory, + tls_client_options_factory=tls_client_options_factory, config=config, version_string="Synapse/" + get_version_string(synapse), database_engine=database_engine, diff --git a/synapse/config/tls.py b/synapse/config/tls.py index b66154bc7ced..4e7d1bd93ea9 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,6 +47,8 @@ def read_config(self, config): self.tls_fingerprints = config["tls_fingerprints"] + 
self.tls_ignore_certificate_validation = config.get("tls_ignore_certificate_validation", False) + # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( @@ -73,6 +75,8 @@ def default_config(self, config_dir_path, server_name, **kwargs): tls_private_key_path = base_key_name + ".tls.key" tls_dh_params_path = base_key_name + ".tls.dh" + tls_ignore_certificate_validation = False + return """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse @@ -117,6 +121,11 @@ def default_config(self, config_dir_path, server_name, **kwargs): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] + + # Ignore certificate validation for TLS client connections to other + # homeservers using federation. Don't enable this in a production + # environment, unless you know what you are doing! + tls_ignore_certificate_validation: %(tls_ignore_certificate_validation)s """ % locals() def read_tls_certificate(self, cert_path): diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 0397f73ab4f0..297a5fb045df 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -14,7 +14,8 @@ from twisted.internet import ssl from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName +from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, OpenSSLCertificateOptions, \ + optionsForClientTLS import logging @@ -48,3 +49,34 @@ def configure_context(context, config): def getContext(self): return self._context + + +class ClientTLSOptionsNoCertVerification(ClientTLSOptions): + """Redefinition of ClientTLSOptions to completely ignore certificate + validation. Should be kept in sync with the original class in Twisted. 
+ This version of ClientTLSOptions is only intended for development use.""" + + def __init__(self, *args, **kwargs): + super(ClientTLSOptionsNoCertVerification, self).__init__(*args, **kwargs) + + def do_nothing(*_args, **_kwargs): + pass + + self._ctx.set_info_callback(do_nothing) + + +class ClientTLSOptionsFactory(object): + """Factory for Twisted ClientTLSOptions that are used to make connections + to remote servers for federation.""" + + def __init__(self, config): + self._ignore_certificate_validation = config.tls_ignore_certificate_validation + + def get_options(self, host): + if self._ignore_certificate_validation: + return ClientTLSOptionsNoCertVerification( + unicode(host), + OpenSSLCertificateOptions(verify=False).getContext() + ) + else: + return optionsForClientTLS(unicode(host)) diff --git a/synapse/crypto/keyclient.py b/synapse/crypto/keyclient.py index f1fd488b9057..8e48f8a9cff9 100644 --- a/synapse/crypto/keyclient.py +++ b/synapse/crypto/keyclient.py @@ -28,14 +28,14 @@ @defer.inlineCallbacks -def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1): +def fetch_server_key(server_name, tls_client_options_factory, path=KEY_API_V1): """Fetch the keys for a remote server.""" factory = SynapseKeyClientFactory() factory.path = path factory.host = server_name endpoint = matrix_federation_endpoint( - reactor, server_name, ssl_context_factory, timeout=30 + reactor, server_name, tls_client_options_factory, timeout=30 ) for i in range(5): diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 9b17ef0a08f0..9a1bb211fbc3 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -510,7 +510,7 @@ def get_server_verify_key_v2_direct(self, server_name, key_ids): continue (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_server_context_factory, + server_name, self.tls_client_options_factory, path=(b"/_matrix/key/v2/server/%s" % ( urllib.quote(requested_key_id), )).encode("ascii"), @@ -653,7 +653,7 @@ def get_server_verify_key_v1_direct(self, server_name, key_ids): # Try to fetch the key from the remote server. (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_server_context_factory + server_name, self.hs.tls_client_options_factory ) # Check the response. diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index 87a482650df1..e783f95719fc 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -26,7 +26,6 @@ logger = logging.getLogger(__name__) - SERVER_CACHE = {} # our record of an individual server which can be tried to reach a destination. @@ -38,15 +37,15 @@ ) -def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, +def matrix_federation_endpoint(reactor, destination, tls_client_options_factory=None, timeout=None): """Construct an endpoint for the given matrix destination. Args: reactor: Twisted reactor. destination (bytes): The name of the server to connect to. - ssl_context_factory (twisted.internet.ssl.ContextFactory): Factory - which generates SSL contexts to use for TLS. + tls_client_options_factory (synapse.crypto.context_factory.ClientTLSOptionsFactory): + Factory which generates TLS options for client connections. 
timeout (int): connection timeout in seconds """ @@ -59,13 +58,13 @@ def matrix_federation_endpoint(reactor, destination, ssl_context_factory=None, if timeout is not None: endpoint_kw_args.update(timeout=timeout) - if ssl_context_factory is None: + if tls_client_options_factory is None: transport_endpoint = HostnameEndpoint default_port = 8008 else: def transport_endpoint(reactor, host, port, timeout): return wrapClientTLS( - ssl_context_factory, + tls_client_options_factory.get_options(unicode(host)), HostnameEndpoint(reactor, host, port, timeout=timeout)) default_port = 8448 diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 4e0399e7629a..66796a202f4e 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -62,14 +62,14 @@ class MatrixFederationEndpointFactory(object): def __init__(self, hs): - self.tls_server_context_factory = hs.tls_server_context_factory + self.tls_client_options_factory = hs.tls_client_options_factory def endpointForURI(self, uri): destination = uri.netloc return matrix_federation_endpoint( reactor, destination, timeout=10, - ssl_context_factory=self.tls_server_context_factory + tls_client_options_factory = self.tls_client_options_factory ) diff --git a/tests/utils.py b/tests/utils.py index 189fd2711cc7..0b501e0786c3 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -114,6 +114,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None database_engine=db_engine, room_list_handler=object(), tls_server_context_factory=Mock(), + tls_client_options_factory=Mock(), reactor=reactor, **kargs ) @@ -134,6 +135,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None database_engine=db_engine, room_list_handler=object(), tls_server_context_factory=Mock(), + tls_client_options_factory=Mock(), **kargs ) From 07b4f88de9ee534629db29aa4fc398607e7d866e Mon Sep 17 00:00:00 2001 From: Jeroen Date: Mon, 25 Jun 2018 12:31:16 +0200 Subject: [PATCH 002/173] formatting changes for pep8 --- synapse/config/tls.py | 4 +++- synapse/crypto/context_factory.py | 4 ++-- synapse/http/endpoint.py | 3 ++- synapse/http/matrixfederationclient.py | 2 +- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index 4e7d1bd93ea9..b09dc986ab29 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,7 +47,9 @@ def read_config(self, config): self.tls_fingerprints = config["tls_fingerprints"] - self.tls_ignore_certificate_validation = config.get("tls_ignore_certificate_validation", False) + self.tls_ignore_certificate_validation = config.get( + "tls_ignore_certificate_validation", False + ) # Check that our own certificate is included in the list of fingerprints # and include it if it is not. 
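As a usage sketch (not part of the patch series): with patch 001 applied, these pieces combine per outbound federation connection roughly as follows, where `config` stands in for the parsed TLS config carrying the new flag, and the destination host is an invented example.

    from twisted.internet import reactor
    from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS

    from synapse.crypto.context_factory import ClientTLSOptionsFactory

    # One factory per homeserver, built from the TLS config.
    tls_client_options_factory = ClientTLSOptionsFactory(config)

    # Per-destination TLS options wrap a plain TCP endpoint, mirroring what
    # synapse/http/endpoint.py does for port 8448. With the ignore flag set,
    # certificate validation is skipped (development use only).
    tls_endpoint = wrapClientTLS(
        tls_client_options_factory.get_options(u"remote.example.com"),
        HostnameEndpoint(reactor, "remote.example.com", 8448, timeout=30),
    )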
diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 297a5fb045df..415899e62bdd 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -14,8 +14,8 @@ from twisted.internet import ssl from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, OpenSSLCertificateOptions, \ - optionsForClientTLS +from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, \ + OpenSSLCertificateOptions, optionsForClientTLS import logging diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index e783f95719fc..abf486208418 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -44,7 +44,8 @@ def matrix_federation_endpoint(reactor, destination, tls_client_options_factory= Args: reactor: Twisted reactor. destination (bytes): The name of the server to connect to. - tls_client_options_factory (synapse.crypto.context_factory.ClientTLSOptionsFactory): + tls_client_options_factory + (synapse.crypto.context_factory.ClientTLSOptionsFactory): Factory which generates TLS options for client connections. timeout (int): connection timeout in seconds """ diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 66796a202f4e..b48d05fcd2f0 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -69,7 +69,7 @@ def endpointForURI(self, uri): return matrix_federation_endpoint( reactor, destination, timeout=10, - tls_client_options_factory = self.tls_client_options_factory + tls_client_options_factory=self.tls_client_options_factory ) From b7f34ee348c9f064d98922dfdbe24a3cf18ca00a Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 20:41:05 +0200 Subject: [PATCH 003/173] allow self-signed certificates --- synapse/config/tls.py | 11 ------ synapse/crypto/context_factory.py | 58 +++++++++++++++++++------------ synapse/http/endpoint.py | 2 +- 3 files changed, 36 insertions(+), 35 deletions(-) diff --git a/synapse/config/tls.py b/synapse/config/tls.py index b09dc986ab29..b66154bc7ced 100644 --- a/synapse/config/tls.py +++ b/synapse/config/tls.py @@ -47,10 +47,6 @@ def read_config(self, config): self.tls_fingerprints = config["tls_fingerprints"] - self.tls_ignore_certificate_validation = config.get( - "tls_ignore_certificate_validation", False - ) - # Check that our own certificate is included in the list of fingerprints # and include it if it is not. x509_certificate_bytes = crypto.dump_certificate( @@ -77,8 +73,6 @@ def default_config(self, config_dir_path, server_name, **kwargs): tls_private_key_path = base_key_name + ".tls.key" tls_dh_params_path = base_key_name + ".tls.dh" - tls_ignore_certificate_validation = False - return """\ # PEM encoded X509 certificate for TLS. # You can replace the self-signed certificate that synapse @@ -123,11 +117,6 @@ def default_config(self, config_dir_path, server_name, **kwargs): # tls_fingerprints: [] # tls_fingerprints: [{"sha256": ""}] - - # Ignore certificate validation for TLS client connections to other - # homeservers using federation. Don't enable this in a production - # environment, unless you know what you are doing! 
- tls_ignore_certificate_validation: %(tls_ignore_certificate_validation)s """ % locals() def read_tls_certificate(self, cert_path): diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 415899e62bdd..e93afbf975c8 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -11,18 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import logging -from twisted.internet import ssl +import idna from OpenSSL import SSL, crypto -from twisted.internet._sslverify import _defaultCurveName, ClientTLSOptions, \ - OpenSSLCertificateOptions, optionsForClientTLS - -import logging +from twisted.internet.ssl import ContextFactory, CertificateOptions +from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors +from twisted.internet.interfaces import IOpenSSLClientConnectionCreator +from zope.interface import implementer logger = logging.getLogger(__name__) -class ServerContextFactory(ssl.ContextFactory): +class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming connections and to make connections to remote servers.""" @@ -51,18 +52,31 @@ def getContext(self): return self._context -class ClientTLSOptionsNoCertVerification(ClientTLSOptions): - """Redefinition of ClientTLSOptions to completely ignore certificate - validation. Should be kept in sync with the original class in Twisted. - This version of ClientTLSOptions is only intended for development use.""" +@implementer(IOpenSSLClientConnectionCreator) +class ClientTLSOptions(object): + """ + Client creator for TLS without certificate identity verification. This is a + copy of twisted.internet._sslverify.ClientTLSOptions with the identity + verification left out. For documentation, see the twisted documentation. 
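+
+    The certificate checks are dropped, but the info callback is kept so
+    that the SNI hostname is still sent at TLS handshake start.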
+ """ - def __init__(self, *args, **kwargs): - super(ClientTLSOptionsNoCertVerification, self).__init__(*args, **kwargs) + def __init__(self, hostname, ctx): + self._ctx = ctx + self._hostname = hostname + self._hostnameBytes = idna.encode(hostname) + ctx.set_info_callback( + _tolerateErrors(self._identityVerifyingInfoCallback) + ) - def do_nothing(*_args, **_kwargs): - pass + def clientConnectionForTLS(self, tlsProtocol): + context = self._ctx + connection = SSL.Connection(context, None) + connection.set_app_data(tlsProtocol) + return connection - self._ctx.set_info_callback(do_nothing) + def _identityVerifyingInfoCallback(self, connection, where, ret): + if where & SSL.SSL_CB_HANDSHAKE_START: + connection.set_tlsext_host_name(self._hostnameBytes) class ClientTLSOptionsFactory(object): @@ -70,13 +84,11 @@ class ClientTLSOptionsFactory(object): to remote servers for federation.""" def __init__(self, config): - self._ignore_certificate_validation = config.tls_ignore_certificate_validation + # We don't use config options yet + pass def get_options(self, host): - if self._ignore_certificate_validation: - return ClientTLSOptionsNoCertVerification( - unicode(host), - OpenSSLCertificateOptions(verify=False).getContext() - ) - else: - return optionsForClientTLS(unicode(host)) + return ClientTLSOptions( + unicode(host), + CertificateOptions(verify=False).getContext() + ) diff --git a/synapse/http/endpoint.py b/synapse/http/endpoint.py index abf486208418..39432da4527a 100644 --- a/synapse/http/endpoint.py +++ b/synapse/http/endpoint.py @@ -65,7 +65,7 @@ def matrix_federation_endpoint(reactor, destination, tls_client_options_factory= else: def transport_endpoint(reactor, host, port, timeout): return wrapClientTLS( - tls_client_options_factory.get_options(unicode(host)), + tls_client_options_factory.get_options(host), HostnameEndpoint(reactor, host, port, timeout=timeout)) default_port = 8448 From 26651b0d6a55788ff7ee62eda2051f7a85aa45e7 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 21:10:45 +0200 Subject: [PATCH 004/173] towncrier changelog --- changelog.d/1491.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/1491.feature diff --git a/changelog.d/1491.feature b/changelog.d/1491.feature new file mode 100644 index 000000000000..77b6d6ca09ea --- /dev/null +++ b/changelog.d/1491.feature @@ -0,0 +1 @@ +Add support for the SNI extension to federation TLS connections \ No newline at end of file From 95341a8f6f9c3b7ebcb3a428de65b18740234f3e Mon Sep 17 00:00:00 2001 From: Jeroen Date: Tue, 26 Jun 2018 21:15:14 +0200 Subject: [PATCH 005/173] take idna implementation from twisted --- synapse/crypto/context_factory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index e93afbf975c8..57694e18b0a5 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -13,8 +13,8 @@ # limitations under the License. 
import logging -import idna from OpenSSL import SSL, crypto +from twisted.internet._idna import _idnaBytes from twisted.internet.ssl import ContextFactory, CertificateOptions from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors from twisted.internet.interfaces import IOpenSSLClientConnectionCreator @@ -63,7 +63,7 @@ class ClientTLSOptions(object): def __init__(self, hostname, ctx): self._ctx = ctx self._hostname = hostname - self._hostnameBytes = idna.encode(hostname) + self._hostnameBytes = _idnaBytes(hostname) ctx.set_info_callback( _tolerateErrors(self._identityVerifyingInfoCallback) ) From 8e3f75b39abb846687ac905d8e31d7f41372e5d7 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Fri, 27 Jul 2018 12:17:31 +0200 Subject: [PATCH 006/173] fix accidental removal of hs --- synapse/crypto/keyring.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 762e4b8014e7..30e2742102d6 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -512,7 +512,7 @@ def get_server_verify_key_v2_direct(self, server_name, key_ids): continue (response, tls_certificate) = yield fetch_server_key( - server_name, self.tls_client_options_factory, + server_name, self.hs.tls_client_options_factory, path=(b"/_matrix/key/v2/server/%s" % ( urllib.quote(requested_key_id), )).encode("ascii"), From 2903e65affdcbd77ec794b0f998afb9e24f50215 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Sun, 29 Jul 2018 19:47:08 +0200 Subject: [PATCH 007/173] fix isort --- synapse/crypto/context_factory.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 57694e18b0a5..569377e65363 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -13,12 +13,13 @@ # limitations under the License. import logging +from zope.interface import implementer + from OpenSSL import SSL, crypto from twisted.internet._idna import _idnaBytes -from twisted.internet.ssl import ContextFactory, CertificateOptions from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors from twisted.internet.interfaces import IOpenSSLClientConnectionCreator -from zope.interface import implementer +from twisted.internet.ssl import CertificateOptions, ContextFactory logger = logging.getLogger(__name__) From d81602b75afc2c39314da1f9a21be436134e5361 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 13:52:49 +0100 Subject: [PATCH 008/173] Add helper base class for generating new replication endpoints This will hopefully reduce the boiler plate required to implement new internal HTTP requests. --- synapse/replication/http/_base.py | 208 ++++++++++++++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 synapse/replication/http/_base.py diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py new file mode 100644 index 000000000000..24f00d95fce8 --- /dev/null +++ b/synapse/replication/http/_base.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import logging
+import re
+
+from six.moves import urllib
+
+from twisted.internet import defer
+
+from synapse.api.errors import (
+    CodeMessageException,
+    MatrixCodeMessageException,
+    SynapseError,
+)
+from synapse.util.caches.response_cache import ResponseCache
+from synapse.util.stringutils import random_string
+
+logger = logging.getLogger(__name__)
+
+
+class ReplicationEndpoint(object):
+    """Helper base class for defining new replication HTTP endpoints.
+
+    This creates an endpoint under `/_synapse/replication/:NAME/:PATH_ARGS..`
+    (with a `/:txn_id` suffix for cached requests), where NAME is a name,
+    PATH_ARGS are a tuple of parameters to be encoded in the URL.
+
+    For example, if `NAME` is "send_event" and `PATH_ARGS` is `("event_id",)`,
+    with `CACHE` set to true then this generates an endpoint:
+
+        /_synapse/replication/send_event/:event_id/:txn_id
+
+    For POST requests the payload is serialized to json and sent as the body,
+    while for GET requests the payload is added as query parameters. See
+    `_serialize_payload` for details.
+
+    Incoming requests are handled by overriding `_handle_request`. Servers
+    must call `register` to register the path with the HTTP server.
+
+    Requests can be sent by calling the client returned by `make_client`.
+
+    Attributes:
+        NAME (str): A name for the endpoint, added to the path as well as used
+            in logging and metrics.
+        PATH_ARGS (tuple[str]): A list of parameters to be added to the path.
+            Adding parameters to the path (rather than payload) can make it
+            easier to follow along in the log files.
+        POST (bool): True to use POST request with JSON body, or False to use
+            GET requests with query params.
+        CACHE (bool): Whether the server should cache the result of the
+            request. If true then transparently adds a txn_id to all requests,
+            and `_handle_request` must return a Deferred.
+        RETRY_ON_TIMEOUT (bool): Whether or not to retry the request when a 504
+            is received.
+    """
+
+    __metaclass__ = abc.ABCMeta
+
+    NAME = abc.abstractproperty()
+    PATH_ARGS = abc.abstractproperty()
+
+    POST = True
+    CACHE = True
+    RETRY_ON_TIMEOUT = True
+
+    def __init__(self, hs):
+        if self.CACHE:
+            self.response_cache = ResponseCache(
+                hs, "repl." + self.NAME,
+                timeout_ms=30 * 60 * 1000,
+            )
+
+    @abc.abstractmethod
+    def _serialize_payload(**kwargs):
+        """Static method that is called when creating a request.
+
+        Concrete implementations should have explicit parameters (rather than
+        kwargs) so that an appropriate exception is raised if the client is
+        called with unexpected parameters. All PATH_ARGS must appear in the
+        argument list.
+
+        Returns:
+            Deferred[dict]|dict: If POST request then dictionary must be JSON
+            serialisable, otherwise must be appropriate for adding as query
+            args.
+        """
+        return {}
+
+    @abc.abstractmethod
+    def _handle_request(self, request, **kwargs):
+        """Handle incoming request.
+
+        This is called with the request object and PATH_ARGS.
+
+        Returns:
+            Deferred[dict]: A JSON serialisable dict to be used as response
+            body of request.
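+            (the implementations in this series in fact resolve to an
+            `(http_status, dict)` tuple, which the servlet code also accepts)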
+ """ + pass + + @classmethod + def make_client(cls, hs): + """Create a client that makes requests. + + Returns a callable that accepts the same parameters as `_serialize_payload`. + """ + clock = hs.get_clock() + host = hs.config.worker_replication_host + port = hs.config.worker_replication_http_port + + client = hs.get_simple_http_client() + + @defer.inlineCallbacks + def send_request(**kwargs): + data = yield cls._serialize_payload(**kwargs) + + url_args = [urllib.parse.quote(kwargs[name]) for name in cls.PATH_ARGS] + + if cls.CACHE: + txn_id = random_string(10) + url_args.append(txn_id) + + if cls.POST: + request_func = client.post_json_get_json + else: + request_func = client.get_json + + uri = "http://%s:%s/_synapse/replication/%s/%s" % ( + host, port, cls.NAME, "/".join(url_args) + ) + + try: + # We keep retrying the same request for timeouts. This is so that we + # have a good idea that the request has either succeeded or failed on + # the master, and so whether we should clean up or not. + while True: + try: + result = yield request_func(uri, data) + break + except CodeMessageException as e: + if e.code != 504 or not cls.RETRY_ON_TIMEOUT: + raise + + logger.warn("send_federation_events_to_master request timed out") + + # If we timed out we probably don't need to worry about backing + # off too much, but lets just wait a little anyway. + yield clock.sleep(1) + except MatrixCodeMessageException as e: + # We convert to SynapseError as we know that it was a SynapseError + # on the master process that we should send to the client. (And + # importantly, not stack traces everywhere) + raise SynapseError(e.code, e.msg, e.errcode) + + defer.returnValue(result) + + return send_request + + def register(self, http_server): + """Called by the server to register this as a handler to the + appropriate path. + """ + + url_args = list(self.PATH_ARGS) + method = "GET" + handler = self._handle_request + if self.POST: + method = "POST" + + if self.CACHE: + handler = self._cached_handler + url_args.append("txn_id") + + args = "/".join("(?P<%s>[^/]+)" % (arg,) for arg in url_args) + pattern = re.compile("^/_synapse/replication/%s/%s$" % ( + self.NAME, + args + )) + + http_server.register_paths(method, [pattern], handler) + + def _cached_handler(self, request, txn_id, **kwargs): + """Wraps `_handle_request` the responses should be cached. + """ + # We just use the txn_id here, but we probably also want to use the + # other PATH_ARGS as well. 
+ + assert self.CACHE + + return self.response_cache.wrap( + txn_id, + self._handle_request, + request, **kwargs + ) From 729b672823132f413800a10f5fa8cac1f9b99008 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 13:53:54 +0100 Subject: [PATCH 009/173] Use new helper base class for ReplicationSendEventRestServlet --- synapse/handlers/message.py | 11 +-- synapse/replication/http/send_event.py | 115 ++++++++----------------- 2 files changed, 40 insertions(+), 86 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 39d77247786c..bcb093ba3ea7 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -30,7 +30,7 @@ from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator -from synapse.replication.http.send_event import send_event_to_master +from synapse.replication.http.send_event import ReplicationSendEventRestServlet from synapse.types import RoomAlias, UserID from synapse.util.async import Linearizer from synapse.util.frozenutils import frozendict_json_encoder @@ -171,7 +171,7 @@ def __init__(self, hs): self.notifier = hs.get_notifier() self.config = hs.config - self.http_client = hs.get_simple_http_client() + self.send_event_to_master = ReplicationSendEventRestServlet.make_client(hs) # This is only used to get at ratelimit function, and maybe_kick_guest_users self.base_handler = BaseHandler(hs) @@ -559,12 +559,9 @@ def handle_new_client_event( try: # If we're a worker we need to hit out to the master. if self.config.worker_app: - yield send_event_to_master( - clock=self.hs.get_clock(), + yield self.send_event_to_master( + event_id=event.event_id, store=self.store, - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, requester=requester, event=event, context=context, diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 5227bc333d19..50810d94cb64 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -14,90 +14,26 @@ # limitations under the License. 
import logging -import re from twisted.internet import defer -from synapse.api.errors import ( - CodeMessageException, - MatrixCodeMessageException, - SynapseError, -) from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext -from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint from synapse.types import Requester, UserID -from synapse.util.caches.response_cache import ResponseCache from synapse.util.metrics import Measure logger = logging.getLogger(__name__) -@defer.inlineCallbacks -def send_event_to_master(clock, store, client, host, port, requester, event, context, - ratelimit, extra_users): - """Send event to be handled on the master - - Args: - clock (synapse.util.Clock) - store (DataStore) - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - event (FrozenEvent) - context (EventContext) - ratelimit (bool) - extra_users (list(UserID)): Any extra users to notify about event - """ - uri = "http://%s:%s/_synapse/replication/send_event/%s" % ( - host, port, event.event_id, - ) - - serialized_context = yield context.serialize(event, store) - - payload = { - "event": event.get_pdu_json(), - "internal_metadata": event.internal_metadata.get_dict(), - "rejected_reason": event.rejected_reason, - "context": serialized_context, - "requester": requester.serialize(), - "ratelimit": ratelimit, - "extra_users": [u.to_string() for u in extra_users], - } - - try: - # We keep retrying the same request for timeouts. This is so that we - # have a good idea that the request has either succeeded or failed on - # the master, and so whether we should clean up or not. - while True: - try: - result = yield client.put_json(uri, payload) - break - except CodeMessageException as e: - if e.code != 504: - raise - - logger.warn("send_event request timed out") - - # If we timed out we probably don't need to worry about backing - # off too much, but lets just wait a little anyway. - yield clock.sleep(1) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -class ReplicationSendEventRestServlet(RestServlet): +class ReplicationSendEventRestServlet(ReplicationEndpoint): """Handles events newly created on workers, including persisting and notifying. The API looks like: - POST /_synapse/replication/send_event/:event_id + POST /_synapse/replication/send_event/:event_id/:txn_id { "event": { .. serialized event .. 
}, @@ -109,27 +45,48 @@ class ReplicationSendEventRestServlet(RestServlet): "extra_users": [], } """ - PATTERNS = [re.compile("^/_synapse/replication/send_event/(?P[^/]+)$")] + NAME = "send_event" + PATH_ARGS = ("event_id",) + POST = True def __init__(self, hs): - super(ReplicationSendEventRestServlet, self).__init__() + super(ReplicationSendEventRestServlet, self).__init__(hs) self.event_creation_handler = hs.get_event_creation_handler() self.store = hs.get_datastore() self.clock = hs.get_clock() - # The responses are tiny, so we may as well cache them for a while - self.response_cache = ResponseCache(hs, "send_event", timeout_ms=30 * 60 * 1000) + @staticmethod + @defer.inlineCallbacks + def _serialize_payload(event_id, store, event, context, requester, + ratelimit, extra_users): + """ + Args: + event_id (str) + store (DataStore) + requester (Requester) + event (FrozenEvent) + context (EventContext) + ratelimit (bool) + extra_users (list(UserID)): Any extra users to notify about event + """ + + serialized_context = yield context.serialize(event, store) + + payload = { + "event": event.get_pdu_json(), + "internal_metadata": event.internal_metadata.get_dict(), + "rejected_reason": event.rejected_reason, + "context": serialized_context, + "requester": requester.serialize(), + "ratelimit": ratelimit, + "extra_users": [u.to_string() for u in extra_users], + } - def on_PUT(self, request, event_id): - return self.response_cache.wrap( - event_id, - self._handle_request, - request - ) + defer.returnValue(payload) @defer.inlineCallbacks - def _handle_request(self, request): + def _handle_request(self, request, event_id): with Measure(self.clock, "repl_send_event_parse"): content = parse_json_object_from_request(request) From 443da003bc46da8d6e46403cfa31ee6a4e4da230 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 14:31:51 +0100 Subject: [PATCH 010/173] Use new helper base class for membership requests --- synapse/handlers/room_member_worker.py | 41 ++-- synapse/replication/http/membership.py | 262 +++++++++---------------- 2 files changed, 108 insertions(+), 195 deletions(-) diff --git a/synapse/handlers/room_member_worker.py b/synapse/handlers/room_member_worker.py index 22d8b4b0d346..acc6eb8099fb 100644 --- a/synapse/handlers/room_member_worker.py +++ b/synapse/handlers/room_member_worker.py @@ -20,16 +20,24 @@ from synapse.api.errors import SynapseError from synapse.handlers.room_member import RoomMemberHandler from synapse.replication.http.membership import ( - get_or_register_3pid_guest, - notify_user_membership_change, - remote_join, - remote_reject_invite, + ReplicationRegister3PIDGuestRestServlet as Repl3PID, + ReplicationRemoteJoinRestServlet as ReplRemoteJoin, + ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, + ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft, ) logger = logging.getLogger(__name__) class RoomMemberWorkerHandler(RoomMemberHandler): + def __init__(self, hs): + super(RoomMemberWorkerHandler, self).__init__(hs) + + self._get_register_3pid_client = Repl3PID.make_client(hs) + self._remote_join_client = ReplRemoteJoin.make_client(hs) + self._remote_reject_client = ReplRejectInvite.make_client(hs) + self._notify_change_client = ReplJoinedLeft.make_client(hs) + @defer.inlineCallbacks def _remote_join(self, requester, remote_room_hosts, room_id, user, content): """Implements RoomMemberHandler._remote_join @@ -37,10 +45,7 @@ def _remote_join(self, requester, remote_room_hosts, room_id, user, content): if len(remote_room_hosts) == 0: raise 
SynapseError(404, "No known servers") - ret = yield remote_join( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + ret = yield self._remote_join_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -55,10 +60,7 @@ def _remote_join(self, requester, remote_room_hosts, room_id, user, content): def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target): """Implements RoomMemberHandler._remote_reject_invite """ - return remote_reject_invite( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._remote_reject_client( requester=requester, remote_room_hosts=remote_room_hosts, room_id=room_id, @@ -68,10 +70,7 @@ def _remote_reject_invite(self, requester, remote_room_hosts, room_id, target): def _user_joined_room(self, target, room_id): """Implements RoomMemberHandler._user_joined_room """ - return notify_user_membership_change( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="joined", @@ -80,10 +79,7 @@ def _user_joined_room(self, target, room_id): def _user_left_room(self, target, room_id): """Implements RoomMemberHandler._user_left_room """ - return notify_user_membership_change( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_change_client( user_id=target.to_string(), room_id=room_id, change="left", @@ -92,10 +88,7 @@ def _user_left_room(self, target, room_id): def get_or_register_3pid_guest(self, requester, medium, address, inviter_user_id): """Implements RoomMemberHandler.get_or_register_3pid_guest """ - return get_or_register_3pid_guest( - self.simple_http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._get_register_3pid_client( requester=requester, medium=medium, address=address, diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py index 6bfc8a5b89d4..8ad83e8421c6 100644 --- a/synapse/replication/http/membership.py +++ b/synapse/replication/http/membership.py @@ -14,182 +14,53 @@ # limitations under the License. 
import logging -import re from twisted.internet import defer -from synapse.api.errors import MatrixCodeMessageException, SynapseError -from synapse.http.servlet import RestServlet, parse_json_object_from_request +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint from synapse.types import Requester, UserID from synapse.util.distributor import user_joined_room, user_left_room logger = logging.getLogger(__name__) -@defer.inlineCallbacks -def remote_join(client, host, port, requester, remote_room_hosts, - room_id, user_id, content): - """Ask the master to do a remote join for the given user to the given room - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - remote_room_hosts (list[str]): Servers to try and join via - room_id (str) - user_id (str) - content (dict): The event content to use for the join event - - Returns: - Deferred - """ - uri = "http://%s:%s/_synapse/replication/remote_join" % (host, port) - - payload = { - "requester": requester.serialize(), - "remote_room_hosts": remote_room_hosts, - "room_id": room_id, - "user_id": user_id, - "content": content, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def remote_reject_invite(client, host, port, requester, remote_room_hosts, - room_id, user_id): - """Ask master to reject the invite for the user and room. - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - remote_room_hosts (list[str]): Servers to try and reject via - room_id (str) - user_id (str) - - Returns: - Deferred - """ - uri = "http://%s:%s/_synapse/replication/remote_reject_invite" % (host, port) - - payload = { - "requester": requester.serialize(), - "remote_room_hosts": remote_room_hosts, - "room_id": room_id, - "user_id": user_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def get_or_register_3pid_guest(client, host, port, requester, - medium, address, inviter_user_id): - """Ask the master to get/create a guest account for given 3PID. - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication - requester (Requester) - medium (str) - address (str) - inviter_user_id (str): The user ID who is trying to invite the - 3PID - - Returns: - Deferred[(str, str)]: A 2-tuple of `(user_id, access_token)` of the - 3PID guest account. 
- """ - - uri = "http://%s:%s/_synapse/replication/get_or_register_3pid_guest" % (host, port) - - payload = { - "requester": requester.serialize(), - "medium": medium, - "address": address, - "inviter_user_id": inviter_user_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - - -@defer.inlineCallbacks -def notify_user_membership_change(client, host, port, user_id, room_id, change): - """Notify master that a user has joined or left the room - - Args: - client (SimpleHttpClient) - host (str): host of master - port (int): port on master listening for HTTP replication. - user_id (str) - room_id (str) - change (str): Either "join" or "left" - - Returns: - Deferred +class ReplicationRemoteJoinRestServlet(ReplicationEndpoint): + """Does a remote join for the given user to the given room """ - assert change in ("joined", "left") - - uri = "http://%s:%s/_synapse/replication/user_%s_room" % (host, port, change) - - payload = { - "user_id": user_id, - "room_id": room_id, - } - - try: - result = yield client.post_json_get_json(uri, payload) - except MatrixCodeMessageException as e: - # We convert to SynapseError as we know that it was a SynapseError - # on the master process that we should send to the client. (And - # importantly, not stack traces everywhere) - raise SynapseError(e.code, e.msg, e.errcode) - defer.returnValue(result) - -class ReplicationRemoteJoinRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/remote_join$")] + NAME = "remote_join" + PATH_ARGS = ("room_id", "user_id",) def __init__(self, hs): - super(ReplicationRemoteJoinRestServlet, self).__init__() + super(ReplicationRemoteJoinRestServlet, self).__init__(hs) self.federation_handler = hs.get_handlers().federation_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, room_id, user_id, remote_room_hosts, + content): + """ + Args: + requester(Requester) + room_id (str) + user_id (str) + remote_room_hosts (list[str]): Servers to try and join via + content(dict): The event content to use for the join event + """ + return { + "requester": requester.serialize(), + "remote_room_hosts": remote_room_hosts, + "content": content, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request, room_id, user_id): content = parse_json_object_from_request(request) remote_room_hosts = content["remote_room_hosts"] - room_id = content["room_id"] - user_id = content["user_id"] event_content = content["content"] requester = Requester.deserialize(self.store, content["requester"]) @@ -212,23 +83,39 @@ def on_POST(self, request): defer.returnValue((200, {})) -class ReplicationRemoteRejectInviteRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/remote_reject_invite$")] +class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint): + """Rejects the invite for the user and room. 
+ """ + + NAME = "remote_reject_invite" + PATH_ARGS = ("room_id", "user_id",) def __init__(self, hs): - super(ReplicationRemoteRejectInviteRestServlet, self).__init__() + super(ReplicationRemoteRejectInviteRestServlet, self).__init__(hs) self.federation_handler = hs.get_handlers().federation_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, room_id, user_id, remote_room_hosts): + """ + Args: + requester(Requester) + room_id (str) + user_id (str) + remote_room_hosts (list[str]): Servers to try and reject via + """ + return { + "requester": requester.serialize(), + "remote_room_hosts": remote_room_hosts, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request, room_id, user_id): content = parse_json_object_from_request(request) remote_room_hosts = content["remote_room_hosts"] - room_id = content["room_id"] - user_id = content["user_id"] requester = Requester.deserialize(self.store, content["requester"]) @@ -264,18 +151,39 @@ def on_POST(self, request): defer.returnValue((200, ret)) -class ReplicationRegister3PIDGuestRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/get_or_register_3pid_guest$")] +class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint): + """Gets/creates a guest account for given 3PID. + """ + + NAME = "get_or_register_3pid_guest" + PATH_ARGS = () def __init__(self, hs): - super(ReplicationRegister3PIDGuestRestServlet, self).__init__() + super(ReplicationRegister3PIDGuestRestServlet, self).__init__(hs) self.registeration_handler = hs.get_handlers().registration_handler self.store = hs.get_datastore() self.clock = hs.get_clock() + @staticmethod + def _serialize_payload(requester, medium, address, inviter_user_id): + """ + Args: + requester(Requester) + medium (str) + address (str) + inviter_user_id (str): The user ID who is trying to invite the + 3PID + """ + return { + "requester": requester.serialize(), + "medium": medium, + "address": address, + "inviter_user_id": inviter_user_id, + } + @defer.inlineCallbacks - def on_POST(self, request): + def _handle_request(self, request): content = parse_json_object_from_request(request) medium = content["medium"] @@ -296,23 +204,35 @@ def on_POST(self, request): defer.returnValue((200, ret)) -class ReplicationUserJoinedLeftRoomRestServlet(RestServlet): - PATTERNS = [re.compile("^/_synapse/replication/user_(?Pjoined|left)_room$")] +class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint): + """Notifies that a user has joined or left the room + """ + + NAME = "membership_change" + PATH_ARGS = ("room_id", "user_id", "change") + CACHE = False # No point caching as should return instantly. 
def __init__(self, hs): - super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__() + super(ReplicationUserJoinedLeftRoomRestServlet, self).__init__(hs) self.registeration_handler = hs.get_handlers().registration_handler self.store = hs.get_datastore() self.clock = hs.get_clock() self.distributor = hs.get_distributor() - def on_POST(self, request, change): - content = parse_json_object_from_request(request) + @staticmethod + def _serialize_payload(room_id, user_id, change): + """ + Args: + room_id (str) + user_id (str) + change (str): Either "joined" or "left" + """ + assert change in ("joined", "left",) - user_id = content["user_id"] - room_id = content["room_id"] + return {} + def _handle_request(self, request, room_id, user_id, change): logger.info("user membership change: %s in %s", user_id, room_id) user = UserID.from_string(user_id) From 16bd63f32f97a25252bb438413b79f6bdf1e5c1e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 31 Jul 2018 14:48:43 +0100 Subject: [PATCH 011/173] Newsfile --- changelog.d/3632.misc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 changelog.d/3632.misc diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc new file mode 100644 index 000000000000..e69de29bb2d1 From 6ef983ce5cc0a1cd7323ac82c8eed41d72ff3a99 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 31 Jul 2018 16:36:24 +0100 Subject: [PATCH 012/173] api into monthly_active_users table --- synapse/app/homeserver.py | 4 + synapse/storage/monthly_active_users.py | 89 +++++++++++++++++++ synapse/storage/prepare_database.py | 2 +- .../delta/50/make_event_content_nullable.py | 2 +- .../schema/delta/51/monthly_active_users.sql | 23 +++++ tests/storage/test_monthly_active_users.py | 42 +++++++++ 6 files changed, 160 insertions(+), 2 deletions(-) create mode 100644 synapse/storage/monthly_active_users.py create mode 100644 synapse/storage/schema/delta/51/monthly_active_users.sql create mode 100644 tests/storage/test_monthly_active_users.py diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 57b815d777d7..79772fa61a8a 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -62,6 +62,7 @@ from synapse.server import HomeServer from synapse.storage import are_all_users_on_domain from synapse.storage.engines import IncorrectDatabaseSetup, create_engine +from synapse.storage.monthly_active_users import MonthlyActiveUsersStore from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.httpresourcetree import create_resource_tree @@ -511,6 +512,9 @@ def generate_user_daily_visit_stats(): # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + clock.looping_call( + MonthlyActiveUsersStore(hs).reap_monthly_active_users, 1000 * 60 * 60 + ) if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py new file mode 100644 index 000000000000..373e828c0ae7 --- /dev/null +++ b/synapse/storage/monthly_active_users.py @@ -0,0 +1,89 @@ +from twisted.internet import defer + +from ._base import SQLBaseStore + + +class MonthlyActiveUsersStore(SQLBaseStore): + def __init__(self, hs): + super(MonthlyActiveUsersStore, self).__init__(None, hs) + self._clock = hs.get_clock() + + def reap_monthly_active_users(self): + """ + Cleans out 
the monthly active user table to ensure that no stale
+        entries exist.
+        Returns:
+            Deferred
+        """
+        def _reap_users(txn):
+            thirty_days_ago = (
+                int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
+            )
+            sql = "DELETE FROM monthly_active_users WHERE timestamp < ?"
+            txn.execute(sql, (thirty_days_ago,))
+
+        return self.runInteraction("reap_monthly_active_users", _reap_users)
+
+    def get_monthly_active_count(self):
+        """
+        Generates current count of monthly active users.
+        return:
+            defered resolves to int
+        """
+        def _count_users(txn):
+            sql = """
+                SELECT COALESCE(count(*), 0) FROM (
+                    SELECT user_id FROM monthly_active_users
+                ) u
+            """
+            txn.execute(sql)
+            count, = txn.fetchone()
+            return count
+        return self.runInteraction("count_users", _count_users)
+
+    def upsert_monthly_active_user(self, user_id):
+        """
+        Updates or inserts monthly active user member
+        Arguments:
+            user_id (str): user to add/update
+        """
+        return self._simple_upsert(
+            desc="upsert_monthly_active_user",
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            values={
+                "timestamp": int(self._clock.time_msec()),
+            },
+            lock=False,
+        )
+
+    def clean_out_monthly_active_users(self):
+        pass
+
+    @defer.inlineCallbacks
+    def is_user_monthly_active(self, user_id):
+        """
+        Checks if a given user is part of the monthly active user group
+        Arguments:
+            user_id (str): user to add/update
+        Returns:
+            bool: True if user part of group, False otherwise
+        """
+        user_present = yield self._simple_select_onecol(
+            table="monthly_active_users",
+            keyvalues={
+                "user_id": user_id,
+            },
+            retcol="user_id",
+            desc="is_user_monthly_active",
+        )
+        # jeff = self.cursor_to_dict(res)
+        result = False
+        if user_present:
+            result = True
+
+        defer.returnValue(
+            result
+        )
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index b290f834b3d2..b364719312d6 100644
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -25,7 +25,7 @@
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 50
+SCHEMA_VERSION = 51
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
diff --git a/synapse/storage/schema/delta/50/make_event_content_nullable.py b/synapse/storage/schema/delta/50/make_event_content_nullable.py
index 7d27342e3994..6dd467b6c533 100644
--- a/synapse/storage/schema/delta/50/make_event_content_nullable.py
+++ b/synapse/storage/schema/delta/50/make_event_content_nullable.py
@@ -88,5 +88,5 @@ def run_upgrade(cur, database_engine, *args, **kwargs):
         "UPDATE sqlite_master SET sql=? WHERE tbl_name='events' AND type='table'",
         (sql, ),
     )
-    cur.execute("PRAGMA schema_version=%i" % (oldver+1,))
+    cur.execute("PRAGMA schema_version=%i" % (oldver + 1,))
     cur.execute("PRAGMA writable_schema=OFF")
diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql
new file mode 100644
index 000000000000..b3c0e1a67621
--- /dev/null
+++ b/synapse/storage/schema/delta/51/monthly_active_users.sql
@@ -0,0 +1,23 @@
+/* Copyright 2018 New Vector Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- a table of users who have requested that their details be erased +CREATE TABLE monthly_active_users ( + user_id TEXT NOT NULL, + timestamp BIGINT NOT NULL +); + +CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); +CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py new file mode 100644 index 000000000000..9b1ffc6369ee --- /dev/null +++ b/tests/storage/test_monthly_active_users.py @@ -0,0 +1,42 @@ +from twisted.internet import defer + +from synapse.storage.monthly_active_users import MonthlyActiveUsersStore + +import tests.unittest +import tests.utils +from tests.utils import setup_test_homeserver + + +class MonthlyActiveUsersTestCase(tests.unittest.TestCase): + def __init__(self, *args, **kwargs): + super(MonthlyActiveUsersTestCase, self).__init__(*args, **kwargs) + self.mau = None + + @defer.inlineCallbacks + def setUp(self): + hs = yield setup_test_homeserver() + self.mau = MonthlyActiveUsersStore(hs) + + @defer.inlineCallbacks + def test_can_insert_and_count_mau(self): + count = yield self.mau.get_monthly_active_count() + self.assertEqual(0, count) + + yield self.mau.upsert_monthly_active_user("@user:server") + count = yield self.mau.get_monthly_active_count() + + self.assertEqual(1, count) + + @defer.inlineCallbacks + def test_is_user_monthly_active(self): + user_id1 = "@user1:server" + user_id2 = "@user2:server" + user_id3 = "@user3:server" + result = yield self.mau.is_user_monthly_active(user_id1) + self.assertFalse(result) + yield self.mau.upsert_monthly_active_user(user_id1) + yield self.mau.upsert_monthly_active_user(user_id2) + result = yield self.mau.is_user_monthly_active(user_id1) + self.assertTrue(result) + result = yield self.mau.is_user_monthly_active(user_id3) + self.assertFalse(result) From f9f55599718ba9849b2dee18e253dcaf8f5fcfe7 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 12:03:42 +0100 Subject: [PATCH 013/173] fix comment --- synapse/storage/schema/delta/51/monthly_active_users.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index b3c0e1a67621..f2b6d3e31e4e 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -13,7 +13,7 @@ * limitations under the License. 
*/ --- a table of users who have requested that their details be erased +-- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( user_id TEXT NOT NULL, timestamp BIGINT NOT NULL From 4e5ac901ddd15e8938605c5ac4312be804997ed5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 12:03:57 +0100 Subject: [PATCH 014/173] clean up --- synapse/storage/monthly_active_users.py | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 373e828c0ae7..03eeea792e22 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -31,11 +31,8 @@ def get_monthly_active_count(self): defered resolves to int """ def _count_users(txn): - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM monthly_active_users - ) u - """ + sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" + txn.execute(sql) count, = txn.fetchone() return count @@ -59,9 +56,6 @@ def upsert_monthly_active_user(self, user_id): lock=False, ) - def clean_out_monthly_active_users(self): - pass - @defer.inlineCallbacks def is_user_monthly_active(self, user_id): """ @@ -79,11 +73,5 @@ def is_user_monthly_active(self, user_id): retcol="user_id", desc="is_user_monthly_active", ) - # jeff = self.cursor_to_dict(res) - result = False - if user_present: - result = True - defer.returnValue( - result - ) + defer.returnValue(bool(user_present)) From ec716a35b219d147dee51733b55573952799a549 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 17:54:37 +0100 Subject: [PATCH 015/173] change monthly_active_users table to be a single column --- synapse/storage/monthly_active_users.py | 10 +++------- .../storage/schema/delta/51/monthly_active_users.sql | 4 +--- tests/storage/test_monthly_active_users.py | 6 +++--- 3 files changed, 7 insertions(+), 13 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 03eeea792e22..7b3f13aedf17 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -38,22 +38,18 @@ def _count_users(txn): return count return self.runInteraction("count_users", _count_users) - def upsert_monthly_active_user(self, user_id): + def insert_monthly_active_user(self, user_id): """ Updates or inserts monthly active user member Arguments: user_id (str): user to add/update """ - return self._simple_upsert( + return self._simple_insert( desc="upsert_monthly_active_user", table="monthly_active_users", - keyvalues={ - "user_id": user_id, - }, values={ - "timestamp": int(self._clock.time_msec()), + "user_id": user_id, }, - lock=False, ) @defer.inlineCallbacks diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index f2b6d3e31e4e..590b1abab2f9 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -15,9 +15,7 @@ -- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( - user_id TEXT NOT NULL, - timestamp BIGINT NOT NULL + user_id TEXT NOT NULL ); CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); -CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py 
b/tests/storage/test_monthly_active_users.py index 9b1ffc6369ee..7a8432ce6942 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -22,7 +22,7 @@ def test_can_insert_and_count_mau(self): count = yield self.mau.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.upsert_monthly_active_user("@user:server") + yield self.mau.insert_monthly_active_user("@user:server") count = yield self.mau.get_monthly_active_count() self.assertEqual(1, count) @@ -34,8 +34,8 @@ def test_is_user_monthly_active(self): user_id3 = "@user3:server" result = yield self.mau.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.upsert_monthly_active_user(user_id1) - yield self.mau.upsert_monthly_active_user(user_id2) + yield self.mau.insert_monthly_active_user(user_id1) + yield self.mau.insert_monthly_active_user(user_id2) result = yield self.mau.is_user_monthly_active(user_id1) self.assertTrue(result) result = yield self.mau.is_user_monthly_active(user_id3) From c21d82bab3b32e2f59c9ef09a1a10b337ce45db4 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 23:24:38 +0100 Subject: [PATCH 016/173] normalise reaping query --- synapse/storage/monthly_active_users.py | 41 +++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7b3f13aedf17..0741c7fa614e 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -7,6 +7,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): def __init__(self, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() + self.max_mau_value = hs.config.max_mau_value def reap_monthly_active_users(self): """ @@ -19,8 +20,42 @@ def _reap_users(txn): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + + # Query deletes the union of users that have either: + # * not visited in the last 30 days + # * exceeded the total max_mau_value threshold. Where there is + # an excess, more recent users are favoured - this is to cover + # the case where the limit has been step change reduced. + # + sql = """ + DELETE FROM monthly_active_users + WHERE user_id + IN ( + SELECT * FROM ( + SELECT monthly_active_users.user_id + FROM monthly_active_users + LEFT JOIN ( + SELECT user_id, max(last_seen) AS last_seen + FROM user_ips + GROUP BY user_id + ) AS uip ON uip.user_id=monthly_active_users.user_id + ORDER BY uip.last_seen desc LIMIT -1 OFFSET ? + ) + UNION + SELECT * FROM ( + SELECT monthly_active_users.user_id + FROM monthly_active_users + LEFT JOIN ( + SELECT user_id, max(last_seen) AS last_seen + FROM user_ips + GROUP BY user_id + ) AS uip ON uip.user_id=monthly_active_users.user_id + WHERE uip.last_seen < ? 
+ ) + ) + """ + + txn.execute(sql, (self.max_mau_value, thirty_days_ago,)) return self.runInteraction("reap_monthly_active_users", _reap_users) @@ -45,7 +80,7 @@ def insert_monthly_active_user(self, user_id): user_id (str): user to add/update """ return self._simple_insert( - desc="upsert_monthly_active_user", + desc="insert_monthly_active_user", table="monthly_active_users", values={ "user_id": user_id, From 08281fe6b7cdd9620d28d2aa6b725bdb0a351bbc Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 1 Aug 2018 23:26:24 +0100 Subject: [PATCH 017/173] self.db_conn unused --- synapse/storage/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 134e4a80f1f0..04ff1f8006a2 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -94,7 +94,6 @@ def __init__(self, db_conn, hs): self._clock = hs.get_clock() self.database_engine = hs.database_engine - self.db_conn = db_conn self._stream_id_gen = StreamIdGenerator( db_conn, "events", "stream_ordering", extra_tables=[("local_invites", "stream_id")] From 165e06703340667dd88eb73dfe299a4d3298b94b Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 10:59:58 +0100 Subject: [PATCH 018/173] Revert "change monthly_active_users table to be a single column" This reverts commit ec716a35b219d147dee51733b55573952799a549. --- synapse/storage/monthly_active_users.py | 10 +++++++--- .../storage/schema/delta/51/monthly_active_users.sql | 4 +++- tests/storage/test_monthly_active_users.py | 6 +++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7b3f13aedf17..03eeea792e22 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -38,18 +38,22 @@ def _count_users(txn): return count return self.runInteraction("count_users", _count_users) - def insert_monthly_active_user(self, user_id): + def upsert_monthly_active_user(self, user_id): """ Updates or inserts monthly active user member Arguments: user_id (str): user to add/update """ - return self._simple_insert( + return self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", - values={ + keyvalues={ "user_id": user_id, }, + values={ + "timestamp": int(self._clock.time_msec()), + }, + lock=False, ) @defer.inlineCallbacks diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index 590b1abab2f9..f2b6d3e31e4e 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -15,7 +15,9 @@ -- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( - user_id TEXT NOT NULL + user_id TEXT NOT NULL, + timestamp BIGINT NOT NULL ); CREATE UNIQUE INDEX monthly_active_users_users ON monthly_active_users(user_id); +CREATE INDEX monthly_active_users_time_stamp ON monthly_active_users(timestamp); diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 7a8432ce6942..9b1ffc6369ee 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -22,7 +22,7 @@ def test_can_insert_and_count_mau(self): count = yield self.mau.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.insert_monthly_active_user("@user:server") + yield 
self.mau.upsert_monthly_active_user("@user:server") count = yield self.mau.get_monthly_active_count() self.assertEqual(1, count) @@ -34,8 +34,8 @@ def test_is_user_monthly_active(self): user_id3 = "@user3:server" result = yield self.mau.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.insert_monthly_active_user(user_id1) - yield self.mau.insert_monthly_active_user(user_id2) + yield self.mau.upsert_monthly_active_user(user_id1) + yield self.mau.upsert_monthly_active_user(user_id2) result = yield self.mau.is_user_monthly_active(user_id1) self.assertTrue(result) result = yield self.mau.is_user_monthly_active(user_id3) From 00f99f74b1b875bb7ac6b0623994cabad4a59cc6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 13:47:19 +0100 Subject: [PATCH 019/173] insertion into monthly_active_users --- synapse/api/auth.py | 2 +- synapse/storage/__init__.py | 2 + synapse/storage/client_ips.py | 22 ++++++++- synapse/storage/monthly_active_users.py | 18 ++++--- tests/storage/test_client_ips.py | 66 +++++++++++++++++++++++-- 5 files changed, 99 insertions(+), 11 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 5bbbe8e2e71f..d8022bcf8e95 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -213,7 +213,7 @@ def get_user_by_req(self, request, allow_guest=False, rights="access"): default=[b""] )[0] if user and access_token and ip_addr: - self.store.insert_client_ip( + yield self.store.insert_client_ip( user_id=user.to_string(), access_token=access_token, ip=ip_addr, diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 04ff1f8006a2..2120f46ed5be 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -39,6 +39,7 @@ from .group_server import GroupServerStore from .keys import KeyStore from .media_repository import MediaRepositoryStore +from .monthly_active_users import MonthlyActiveUsersStore from .openid import OpenIdStore from .presence import PresenceStore, UserPresenceState from .profile import ProfileStore @@ -87,6 +88,7 @@ class DataStore(RoomMemberStore, RoomStore, UserDirectoryStore, GroupServerStore, UserErasureStore, + MonthlyActiveUsersStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index b8cefd43d6df..506915a1ef65 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -35,6 +35,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): def __init__(self, db_conn, hs): + self.client_ip_last_seen = Cache( name="client_ip_last_seen", keylen=4, @@ -74,6 +75,7 @@ def __init__(self, db_conn, hs): "before", "shutdown", self._update_client_ips_batch ) + @defer.inlineCallbacks def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, now=None): if not now: @@ -84,7 +86,7 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, last_seen = self.client_ip_last_seen.get(key) except KeyError: last_seen = None - + yield self._populate_monthly_active_users(user_id) # Rate-limited inserts if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY: return @@ -93,6 +95,24 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, self._batch_row_update[key] = (user_agent, device_id, now) + @defer.inlineCallbacks + def _populate_monthly_active_users(self, user_id): + store = self.hs.get_datastore() + print "entering _populate_monthly_active_users" + if self.hs.config.limit_usage_by_mau: + print 
"self.hs.config.limit_usage_by_mau is TRUE" + is_user_monthly_active = yield store.is_user_monthly_active(user_id) + print "is_user_monthly_active is %r" % is_user_monthly_active + if is_user_monthly_active: + yield store.upsert_monthly_active_user(user_id) + else: + count = yield store.get_monthly_active_count() + print "count is %d" % count + if count < self.hs.config.max_mau_value: + print "count is less than self.hs.config.max_mau_value " + res = yield store.upsert_monthly_active_user(user_id) + print "upsert response is %r" % res + def _update_client_ips_batch(self): def update(): to_update = self._batch_row_update diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 2337438c583b..aada9bd2b954 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -4,7 +4,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): - def __init__(self, hs): + def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() self.max_mau_value = hs.config.max_mau_value @@ -14,24 +14,28 @@ def reap_monthly_active_users(self): Cleans out monthly active user table to ensure that no stale entries exist. Return: - defered, no return type + Defered() """ def _reap_users(txn): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.max_mau_value,)) return self.runInteraction("reap_monthly_active_users", _reap_users) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs - return: - defered resolves to int + Return: + Defered(int): Number of current monthly active users """ def _count_users(txn): sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" @@ -46,6 +50,8 @@ def upsert_monthly_active_user(self, user_id): Updates or inserts monthly active user member Arguments: user_id (str): user to add/update + Deferred(bool): True if a new entry was created, False if an + existing one was updated. """ return self._simple_upsert( desc="upsert_monthly_active_user", diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index bd6fda6cb12f..e1552510cc50 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from mock import Mock from twisted.internet import defer @@ -27,9 +28,9 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() - self.store = hs.get_datastore() - self.clock = hs.get_clock() + self.hs = yield tests.utils.setup_test_homeserver() + self.store = self.hs.get_datastore() + self.clock = self.hs.get_clock() @defer.inlineCallbacks def test_insert_new_client_ip(self): @@ -54,3 +55,62 @@ def test_insert_new_client_ip(self): }, r ) + + @defer.inlineCallbacks + def test_disabled_monthly_active_user(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + @defer.inlineCallbacks + def test_adding_monthly_active_user_when_full(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + lots_of_users = 100 + user_id = "@user:server" + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(lots_of_users) + ) + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + @defer.inlineCallbacks + def test_adding_monthly_active_user_when_space(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertTrue(active) + + @defer.inlineCallbacks + def test_updating_monthly_active_user_when_space(self): + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 50 + user_id = "@user:server" + + active = yield self.store.is_user_monthly_active(user_id) + self.assertFalse(active) + + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + yield self.store.insert_client_ip( + user_id, "access_token", "ip", "user_agent", "device_id", + ) + active = yield self.store.is_user_monthly_active(user_id) + self.assertTrue(active) From c4ffbecb6856602743198f1a8d4c25a8bb7afd4d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 13:51:05 +0100 Subject: [PATCH 020/173] fix test, update constructor call --- tests/storage/test_monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 9b1ffc6369ee..d8d25a606911 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -15,7 +15,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() - self.mau = MonthlyActiveUsersStore(hs) + self.mau = MonthlyActiveUsersStore(None, hs) @defer.inlineCallbacks def test_can_insert_and_count_mau(self): From 9180061b49907da363f2e3bfa0061f913d7ccf7a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 15:55:29 +0100 Subject: [PATCH 021/173] remove unused count_monthly_users --- synapse/storage/__init__.py | 25 -------------- tests/storage/test__init__.py | 65 ----------------------------------- 2 files changed, 90 
deletions(-) delete mode 100644 tests/storage/test__init__.py diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 2120f46ed5be..23b4a8d76d04 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -268,31 +268,6 @@ def _count_users(txn): return self.runInteraction("count_users", _count_users) - def count_monthly_users(self): - """Counts the number of users who used this homeserver in the last 30 days - - This method should be refactored with count_daily_users - the only - reason not to is waiting on definition of mau - - Returns: - Defered[int] - """ - def _count_monthly_users(txn): - thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) - sql = """ - SELECT COALESCE(count(*), 0) FROM ( - SELECT user_id FROM user_ips - WHERE last_seen > ? - GROUP BY user_id - ) u - """ - - txn.execute(sql, (thirty_days_ago,)) - count, = txn.fetchone() - return count - - return self.runInteraction("count_monthly_users", _count_monthly_users) - def count_r30_users(self): """ Counts the number of 30 day retained users, defined as:- diff --git a/tests/storage/test__init__.py b/tests/storage/test__init__.py deleted file mode 100644 index f19cb1265ca0..000000000000 --- a/tests/storage/test__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 New Vector Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from twisted.internet import defer - -import tests.utils - - -class InitTestCase(tests.unittest.TestCase): - def __init__(self, *args, **kwargs): - super(InitTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.DataStore - - @defer.inlineCallbacks - def setUp(self): - hs = yield tests.utils.setup_test_homeserver() - - hs.config.max_mau_value = 50 - hs.config.limit_usage_by_mau = True - self.store = hs.get_datastore() - self.clock = hs.get_clock() - - @defer.inlineCallbacks - def test_count_monthly_users(self): - count = yield self.store.count_monthly_users() - self.assertEqual(0, count) - - yield self._insert_user_ips("@user:server1") - yield self._insert_user_ips("@user:server2") - - count = yield self.store.count_monthly_users() - self.assertEqual(2, count) - - @defer.inlineCallbacks - def _insert_user_ips(self, user): - """ - Helper function to populate user_ips without using batch insertion infra - args: - user (str): specify username i.e. 
@user:server.com - """ - yield self.store._simple_upsert( - table="user_ips", - keyvalues={ - "user_id": user, - "access_token": "access_token", - "ip": "ip", - "user_agent": "user_agent", - "device_id": "device_id", - }, - values={ - "last_seen": self.clock.time_msec(), - } - ) From 74b1d46ad9ae692774f2e9d71cbbe1cea91b4070 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 16:57:35 +0100 Subject: [PATCH 022/173] do mau checks based on monthly_active_users table --- synapse/api/auth.py | 13 ++++++ synapse/handlers/auth.py | 10 ++--- synapse/handlers/register.py | 10 ++--- synapse/storage/client_ips.py | 15 +++---- tests/api/test_auth.py | 31 +++++++++++++- tests/handlers/test_auth.py | 8 ++-- tests/handlers/test_register.py | 71 ++++++++++++++++----------------- 7 files changed, 97 insertions(+), 61 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index d8022bcf8e95..943a488339bb 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -773,3 +773,16 @@ def check_in_room_or_world_readable(self, room_id, user_id): raise AuthError( 403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN ) + + @defer.inlineCallbacks + def check_auth_blocking(self, error): + """Checks if the user should be rejected for some external reason, + such as monthly active user limiting or global disable flag + Args: + error (Error): The error that should be raised if user is to be + blocked + """ + if self.hs.config.limit_usage_by_mau is True: + current_mau = yield self.store.get_monthly_active_count() + if current_mau >= self.hs.config.max_mau_value: + raise error diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 184eef09d03b..8f9cff92e84b 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -913,12 +913,10 @@ def _check_mau_limits(self): Ensure that if mau blocking is enabled that invalid users cannot log in. 
""" - if self.hs.config.limit_usage_by_mau is True: - current_mau = yield self.store.count_monthly_users() - if current_mau >= self.hs.config.max_mau_value: - raise AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + error = AuthError( + 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + yield self.auth.check_auth_blocking(error) @attr.s diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 289704b24162..706ed8c292db 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -540,9 +540,7 @@ def _check_mau_limits(self): Do not accept registrations if monthly active user limits exceeded and limiting is enabled """ - if self.hs.config.limit_usage_by_mau is True: - current_mau = yield self.store.count_monthly_users() - if current_mau >= self.hs.config.max_mau_value: - raise RegistrationError( - 403, "MAU Limit Exceeded", Codes.MAU_LIMIT_EXCEEDED - ) + error = RegistrationError( + 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + yield self.auth.check_auth_blocking(error) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 506915a1ef65..83d64d1563c2 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -97,21 +97,22 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, @defer.inlineCallbacks def _populate_monthly_active_users(self, user_id): + """Checks on the state of monthly active user limits and optionally + add the user to the monthly active tables + + Args: + user_id(str): the user_id to query + """ + store = self.hs.get_datastore() - print "entering _populate_monthly_active_users" if self.hs.config.limit_usage_by_mau: - print "self.hs.config.limit_usage_by_mau is TRUE" is_user_monthly_active = yield store.is_user_monthly_active(user_id) - print "is_user_monthly_active is %r" % is_user_monthly_active if is_user_monthly_active: yield store.upsert_monthly_active_user(user_id) else: count = yield store.get_monthly_active_count() - print "count is %d" % count if count < self.hs.config.max_mau_value: - print "count is less than self.hs.config.max_mau_value " - res = yield store.upsert_monthly_active_user(user_id) - print "upsert response is %r" % res + yield store.upsert_monthly_active_user(user_id) def _update_client_ips_batch(self): def update(): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a82d737e71da..54bdf28663f9 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError +from synapse.api.errors import AuthError, Codes from synapse.types import UserID from tests import unittest @@ -444,3 +444,32 @@ def get_user(tok): self.assertEqual("Guest access token used for regular user", cm.exception.msg) self.store.get_user_by_id.assert_called_with(USER_ID) + + @defer.inlineCallbacks + def test_blocking_mau(self): + self.hs.config.limit_usage_by_mau = False + self.hs.config.max_mau_value = 50 + lots_of_users = 100 + small_number_of_users = 1 + + error = AuthError( + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) + + # Ensure no error thrown + yield self.auth.check_auth_blocking(error) + + self.hs.config.limit_usage_by_mau = True + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(lots_of_users) + ) + + with self.assertRaises(AuthError): + yield self.auth.check_auth_blocking(error) + + # 
Ensure does not throw an error + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(small_number_of_users) + ) + yield self.auth.check_auth_blocking(error) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 55eab9e9cf9e..8a9bf2d5fd02 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -132,14 +132,14 @@ def test_mau_limits_disabled(self): @defer.inlineCallbacks def test_mau_limits_exceeded(self): self.hs.config.limit_usage_by_mau = True - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) ) with self.assertRaises(AuthError): yield self.auth_handler.get_access_token_for_user_id('user_a') - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) ) with self.assertRaises(AuthError): @@ -151,13 +151,13 @@ def test_mau_limits_exceeded(self): def test_mau_limits_not_exceeded(self): self.hs.config.limit_usage_by_mau = True - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.small_number_of_users) ) # Ensure does not raise exception yield self.auth_handler.get_access_token_for_user_id('user_a') - self.hs.get_datastore().count_monthly_users = Mock( + self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.small_number_of_users) ) yield self.auth_handler.validate_short_term_login_token_and_get_user_id( diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 0937d71cf60f..6b5b8b3772a4 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -50,6 +50,10 @@ def setUp(self): self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator) self.hs.handlers = RegistrationHandlers(self.hs) self.handler = self.hs.get_handlers().registration_handler + self.store = self.hs.get_datastore() + self.hs.config.max_mau_value = 50 + self.lots_of_users = 100 + self.small_number_of_users = 1 @defer.inlineCallbacks def test_user_is_created_and_logged_in_if_doesnt_exist(self): @@ -80,51 +84,44 @@ def test_if_user_exists(self): self.assertEquals(result_token, 'secret') @defer.inlineCallbacks - def test_cannot_register_when_mau_limits_exceeded(self): - local_part = "someone" - display_name = "someone" - requester = create_requester("@as:test") - store = self.hs.get_datastore() + def test_mau_limits_when_disabled(self): self.hs.config.limit_usage_by_mau = False - self.hs.config.max_mau_value = 50 - lots_of_users = 100 - small_number_users = 1 - - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) - # Ensure does not throw exception - yield self.handler.get_or_create_user(requester, 'a', display_name) + yield self.handler.get_or_create_user("requester", 'a', "display_name") + @defer.inlineCallbacks + def test_get_or_create_user_mau_not_blocked(self): self.hs.config.limit_usage_by_mau = True - - with self.assertRaises(RegistrationError): - yield self.handler.get_or_create_user(requester, 'b', display_name) - - store.count_monthly_users = Mock(return_value=defer.succeed(small_number_users)) - - self._macaroon_mock_generator("another_secret") - + self.store.count_monthly_users = Mock( + return_value=defer.succeed(self.small_number_of_users) + ) # Ensure does not throw exception - yield 
self.handler.get_or_create_user("@neil:matrix.org", 'c', "Neil") + yield self.handler.get_or_create_user("@user:server", 'c', "User") - self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + @defer.inlineCallbacks + def test_get_or_create_user_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) with self.assertRaises(RegistrationError): - yield self.handler.register(localpart=local_part) + yield self.handler.get_or_create_user("requester", 'b', "display_name") - self._macaroon_mock_generator("another another secret") - store.count_monthly_users = Mock(return_value=defer.succeed(lots_of_users)) + @defer.inlineCallbacks + def test_register_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) + with self.assertRaises(RegistrationError): + yield self.handler.register(localpart="local_part") + @defer.inlineCallbacks + def test_register_saml2_mau_blocked(self): + self.hs.config.limit_usage_by_mau = True + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.lots_of_users) + ) with self.assertRaises(RegistrationError): - yield self.handler.register_saml2(local_part) - - def _macaroon_mock_generator(self, secret): - """ - Reset macaroon generator in the case where the test creates multiple users - """ - macaroon_generator = Mock( - generate_access_token=Mock(return_value=secret)) - self.hs.get_macaroon_generator = Mock(return_value=macaroon_generator) - self.hs.handlers = RegistrationHandlers(self.hs) - self.handler = self.hs.get_handlers().registration_handler + yield self.handler.register_saml2(localpart="local_part") From 4ecb4bdac963ae967da810de6134d85b50c999f9 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 22:41:05 +0100 Subject: [PATCH 023/173] wip attempt at caching --- synapse/storage/monthly_active_users.py | 56 +++++++++++++++++++++---- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index aada9bd2b954..19846cefd9f9 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -1,4 +1,6 @@ from twisted.internet import defer +from synapse.util.caches.descriptors import cachedInlineCallbacks +from synapse.storage.engines import PostgresEngine, Sqlite3Engine from ._base import SQLBaseStore @@ -20,17 +22,53 @@ def _reap_users(txn): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? - """ - txn.execute(sql, (self.max_mau_value,)) + + if isinstance(self.database_engine, PostgresEngine): + sql = """ + DELETE FROM monthly_active_users + WHERE timestamp < ? + RETURNING user_id + """ + txn.execute(sql, (thirty_days_ago,)) + res = txn.fetchall() + for r in res: + self.is_user_monthly_active.invalidate(r) + + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? 
+ RETURNING user_id + """ + txn.execute(sql, (self.max_mau_value,)) + res = txn.fetchall() + for r in res: + self.is_user_monthly_active.invalidate(r) + print r + self.get_monthly_active_count.invalidate() + elif isinstance(self.database_engine, Sqlite3Engine): + sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" + + txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.max_mau_value,)) + + # It seems poor to invalidate the whole cache, but the alternative + # is to select then delete which has it's own problem. + # It seems unlikely that anyone using this feature on large datasets + # would be using sqlite and if they are then there will be + # larger perf issues than this one to encourage an upgrade to postgres. + + self.is_user_monthly_active.invalidate_all() + self.get_monthly_active_count.invalidate_all() return self.runInteraction("reap_monthly_active_users", _reap_users) + @cachedInlineCallbacks(num_args=0) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs @@ -65,7 +103,7 @@ def upsert_monthly_active_user(self, user_id): lock=False, ) - @defer.inlineCallbacks + @cachedInlineCallbacks(num_args=1) def is_user_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group From 5e2f7b80844bc8ee401beab494948c093338f404 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 22:47:48 +0100 Subject: [PATCH 024/173] typo --- synapse/storage/monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 19846cefd9f9..2872ba4cae1b 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -58,7 +58,7 @@ def _reap_users(txn): txn.execute(sql, (self.max_mau_value,)) # It seems poor to invalidate the whole cache, but the alternative - # is to select then delete which has it's own problem. + # is to select then delete which has its own problems. # It seems unlikely that anyone using this feature on large datasets # would be using sqlite and if they are then there will be # larger perf issues than this one to encourage an upgrade to postgres. 
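[Editor's note: patches 023 and 024 above wrestle with keeping the cached MAU count and the per-user activity flag consistent with writes, and patch 026 below settles on per-key invalidation for upserts plus whole-cache invalidation for the reaper. The sketch below restates that trade-off with plain sqlite3 and a hand-rolled dict cache in place of Synapse's @cached/@cachedInlineCallbacks descriptors; the table name and queries mirror the schema above, while the class and everything else are invented for illustration.]

    import sqlite3
    import time

    class MauCacheSketch(object):
        def __init__(self, conn):
            self.conn = conn
            self._count = None   # caches get_monthly_active_count()
            self._active = {}    # user_id -> bool, caches is_user_monthly_active()

        def get_monthly_active_count(self):
            if self._count is None:
                row = self.conn.execute(
                    "SELECT COALESCE(count(*), 0) FROM monthly_active_users"
                ).fetchone()
                self._count = row[0]
            return self._count

        def is_user_monthly_active(self, user_id):
            if user_id not in self._active:
                row = self.conn.execute(
                    "SELECT user_id FROM monthly_active_users WHERE user_id = ?",
                    (user_id,),
                ).fetchone()
                self._active[user_id] = row is not None
            return self._active[user_id]

        def upsert_monthly_active_user(self, user_id):
            self.conn.execute(
                "INSERT OR REPLACE INTO monthly_active_users (user_id, timestamp)"
                " VALUES (?, ?)",
                (user_id, int(time.time() * 1000)),
            )
            # A write names the one user it touched, so only that entry and
            # the count need dropping: the invalidate((user_id,)) pattern.
            self._active.pop(user_id, None)
            self._count = None

        def reap_monthly_active_users(self, before_ts):
            self.conn.execute(
                "DELETE FROM monthly_active_users WHERE timestamp < ?",
                (before_ts,),
            )
            # Without DELETE ... RETURNING there is no way to know which rows
            # were removed, so the whole cache has to go: the sqlite fallback
            # the commit message describes.
            self._active.clear()
            self._count = None

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE monthly_active_users"
        " (user_id TEXT NOT NULL, timestamp BIGINT NOT NULL)"
    )
    conn.execute(
        "CREATE UNIQUE INDEX monthly_active_users_users"
        " ON monthly_active_users(user_id)"
    )
    store = MauCacheSketch(conn)
    store.upsert_monthly_active_user("@user:server")
    assert store.get_monthly_active_count() == 1
    assert store.is_user_monthly_active("@user:server")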
From c0affa7b4ff1e0775a8a5e6d704596b5383112a5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 2 Aug 2018 23:03:01 +0100 Subject: [PATCH 025/173] update generate_monthly_active_users, and reap_monthly_active_users --- synapse/app/homeserver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 5f0ca51ac76d..7d4ea493bcde 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -520,14 +520,14 @@ def generate_user_daily_visit_stats(): # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) clock.looping_call( - MonthlyActiveUsersStore(hs).reap_monthly_active_users, 1000 * 60 * 60 + hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) @defer.inlineCallbacks def generate_monthly_active_users(): count = 0 if hs.config.limit_usage_by_mau: - count = yield hs.get_datastore().count_monthly_users() + count = yield hs.get_datastore().get_monthly_active_count() current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) From 950807d93a264b6d10ece386d227dc4069f7d0da Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 13:49:53 +0100 Subject: [PATCH 026/173] fix caching and tests --- synapse/app/homeserver.py | 1 - synapse/storage/monthly_active_users.py | 91 ++++++++++------------ tests/storage/test_monthly_active_users.py | 50 ++++++++---- 3 files changed, 80 insertions(+), 62 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 7d4ea493bcde..3a67db8b30db 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -64,7 +64,6 @@ from synapse.server import HomeServer from synapse.storage import are_all_users_on_domain from synapse.storage.engines import IncorrectDatabaseSetup, create_engine -from synapse.storage.monthly_active_users import MonthlyActiveUsersStore from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.httpresourcetree import create_resource_tree diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 2872ba4cae1b..d06c90cbcc83 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -1,6 +1,21 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from twisted.internet import defer -from synapse.util.caches.descriptors import cachedInlineCallbacks -from synapse.storage.engines import PostgresEngine, Sqlite3Engine + +from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from ._base import SQLBaseStore @@ -9,7 +24,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() - self.max_mau_value = hs.config.max_mau_value + self.hs = hs def reap_monthly_active_users(self): """ @@ -19,62 +34,41 @@ def reap_monthly_active_users(self): Defered() """ def _reap_users(txn): + thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - if isinstance(self.database_engine, PostgresEngine): - sql = """ - DELETE FROM monthly_active_users - WHERE timestamp < ? - RETURNING user_id - """ - txn.execute(sql, (thirty_days_ago,)) - res = txn.fetchall() - for r in res: - self.is_user_monthly_active.invalidate(r) - - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? - RETURNING user_id - """ - txn.execute(sql, (self.max_mau_value,)) - res = txn.fetchall() - for r in res: - self.is_user_monthly_active.invalidate(r) - print r - self.get_monthly_active_count.invalidate() - elif isinstance(self.database_engine, Sqlite3Engine): - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" + sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) - sql = """ - DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? - """ - txn.execute(sql, (self.max_mau_value,)) + txn.execute(sql, (thirty_days_ago,)) + sql = """ + DELETE FROM monthly_active_users + ORDER BY timestamp desc + LIMIT -1 OFFSET ? + """ + txn.execute(sql, (self.hs.config.max_mau_value,)) - # It seems poor to invalidate the whole cache, but the alternative - # is to select then delete which has its own problems. - # It seems unlikely that anyone using this feature on large datasets - # would be using sqlite and if they are then there will be - # larger perf issues than this one to encourage an upgrade to postgres. + res = self.runInteraction("reap_monthly_active_users", _reap_users) + # It seems poor to invalidate the whole cache, Postgres supports + # 'Returning' which would allow me to invalidate only the + # specific users, but sqlite has no way to do this and instead + # I would need to SELECT and the DELETE which without locking + # is racy. + # Have resolved to invalidate the whole cache for now and do + # something about it if and when the perf becomes significant + self.is_user_monthly_active.invalidate_all() + self.get_monthly_active_count.invalidate_all() + return res - self.is_user_monthly_active.invalidate_all() - self.get_monthly_active_count.invalidate_all() - - return self.runInteraction("reap_monthly_active_users", _reap_users) - - @cachedInlineCallbacks(num_args=0) + @cached(num_args=0) def get_monthly_active_count(self): """ Generates current count of monthly active users.abs Return: Defered(int): Number of current monthly active users """ + def _count_users(txn): sql = "SELECT COALESCE(count(*), 0) FROM monthly_active_users" @@ -91,7 +85,7 @@ def upsert_monthly_active_user(self, user_id): Deferred(bool): True if a new entry was created, False if an existing one was updated. 
""" - return self._simple_upsert( + self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", keyvalues={ @@ -102,6 +96,8 @@ def upsert_monthly_active_user(self, user_id): }, lock=False, ) + self.is_user_monthly_active.invalidate((user_id,)) + self.get_monthly_active_count.invalidate(()) @cachedInlineCallbacks(num_args=1) def is_user_monthly_active(self, user_id): @@ -120,5 +116,4 @@ def is_user_monthly_active(self, user_id): retcol="user_id", desc="is_user_monthly_active", ) - defer.returnValue(bool(user_present)) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index d8d25a606911..c32109ecc5d9 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -1,6 +1,19 @@ -from twisted.internet import defer +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -from synapse.storage.monthly_active_users import MonthlyActiveUsersStore +from twisted.internet import defer import tests.unittest import tests.utils @@ -10,20 +23,19 @@ class MonthlyActiveUsersTestCase(tests.unittest.TestCase): def __init__(self, *args, **kwargs): super(MonthlyActiveUsersTestCase, self).__init__(*args, **kwargs) - self.mau = None @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() - self.mau = MonthlyActiveUsersStore(None, hs) + self.hs = yield setup_test_homeserver() + self.store = self.hs.get_datastore() @defer.inlineCallbacks def test_can_insert_and_count_mau(self): - count = yield self.mau.get_monthly_active_count() + count = yield self.store.get_monthly_active_count() self.assertEqual(0, count) - yield self.mau.upsert_monthly_active_user("@user:server") - count = yield self.mau.get_monthly_active_count() + yield self.store.upsert_monthly_active_user("@user:server") + count = yield self.store.get_monthly_active_count() self.assertEqual(1, count) @@ -32,11 +44,23 @@ def test_is_user_monthly_active(self): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.mau.is_user_monthly_active(user_id1) + result = yield self.store.is_user_monthly_active(user_id1) self.assertFalse(result) - yield self.mau.upsert_monthly_active_user(user_id1) - yield self.mau.upsert_monthly_active_user(user_id2) - result = yield self.mau.is_user_monthly_active(user_id1) + yield self.store.upsert_monthly_active_user(user_id1) + yield self.store.upsert_monthly_active_user(user_id2) + result = yield self.store.is_user_monthly_active(user_id1) self.assertTrue(result) - result = yield self.mau.is_user_monthly_active(user_id3) + result = yield self.store.is_user_monthly_active(user_id3) self.assertFalse(result) + + @defer.inlineCallbacks + def test_reap_monthly_active_users(self): + self.hs.config.max_mau_value = 5 + initial_users = 10 + for i in range(initial_users): + yield self.store.upsert_monthly_active_user("@user%d:server" % i) + count = yield self.store.get_monthly_active_count() + 
self.assertTrue(count, initial_users) + yield self.store.reap_monthly_active_users() + count = yield self.store.get_monthly_active_count() + self.assertTrue(count, initial_users - self.hs.config.max_mau_value) From da90337d8952999f6c2c7dbcbb006b46bce17020 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:05:20 +0100 Subject: [PATCH 027/173] Add ability to limit number of monthly active users on the server --- changelog.d/3633.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3633.feature diff --git a/changelog.d/3633.feature b/changelog.d/3633.feature new file mode 100644 index 000000000000..8007a048405b --- /dev/null +++ b/changelog.d/3633.feature @@ -0,0 +1 @@ +Add ability to limit number of monthly active users on the server From b2aab04d2c6021e6999fdc3c204db63d7c6f9334 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:12:56 +0100 Subject: [PATCH 028/173] fix py3 test failure --- tests/storage/test_client_ips.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index e1552510cc50..0cd290176ea0 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -34,6 +34,7 @@ def setUp(self): @defer.inlineCallbacks def test_insert_new_client_ip(self): + self.hs.config.max_mau_value = 50 self.clock.now = 12345678 user_id = "@user:id" yield self.store.insert_client_ip( From 5593ff6773c7556b85514ee172c4b6c06898f35e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 14:59:17 +0100 Subject: [PATCH 029/173] fix (lots of) py3 test failures --- synapse/config/server.py | 4 ++-- tests/storage/test_client_ips.py | 1 - tests/utils.py | 1 + 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/config/server.py b/synapse/config/server.py index 6a471a0a5ec7..8fd23197590c 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -69,12 +69,12 @@ def read_config(self, config): # Options to control access by tracking MAU self.limit_usage_by_mau = config.get("limit_usage_by_mau", False) + self.max_mau_value = 0 if self.limit_usage_by_mau: self.max_mau_value = config.get( "max_mau_value", 0, ) - else: - self.max_mau_value = 0 + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 0cd290176ea0..e1552510cc50 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -34,7 +34,6 @@ def setUp(self): @defer.inlineCallbacks def test_insert_new_client_ip(self): - self.hs.config.max_mau_value = 50 self.clock.now = 12345678 user_id = "@user:id" yield self.store.insert_client_ip( diff --git a/tests/utils.py b/tests/utils.py index 9bff3ff3b9b5..ec40428e7438 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -73,6 +73,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.block_events_without_consent_error = None config.media_storage_providers = [] config.auto_join_rooms = [] + config.limit_usage_by_mau = False # disable user directory updates, because they get done in the # background, which upsets the test runner. From 15c1ae45e52c31d03f162c5b201f0ccafa032885 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 3 Aug 2018 16:04:29 +0100 Subject: [PATCH 030/173] Docstrings for BaseFederationServlet ... to save me reverse-engineering this stuff again. 
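[Editor's note: the patch below spells out the callback contract for federation servlets. For orientation, a subclass written against that contract might look roughly like the following; the class name, PATH and response body are invented, and a real servlet may well return a Deferred rather than a plain tuple.]

    class RoomPingServlet(BaseFederationServlet):
        # Hypothetical servlet, for illustration only.
        PATH = "/ping/(?P<room_id>[^/]*)"

        def on_GET(self, origin, content, query, room_id):
            # origin: the authenticated server_name of the caller, or None
            #         when REQUIRE_AUTH is False and authentication failed
            # content: None, because GET requests carry no JSON body
            # query: dict[bytes, list[bytes]] of url-decoded query params
            # room_id: captured by the named group in PATH
            return 200, {"room_id": room_id, "pinged_by": origin}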
--- synapse/federation/transport/server.py | 47 ++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index eae5f2b427bb..93bd899b865c 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -190,6 +190,41 @@ def strip_quotes(value): class BaseFederationServlet(object): + """Abstract base class for federation servlet classes. + + The servlet object should have a PATH attribute which takes the form of a regexp to + match against the request path (excluding the /federation/v1 prefix). + + The servlet should also implement one or more of on_GET, on_POST, on_PUT, to match + the appropriate HTTP method. These methods have the signature: + + on_(self, origin, content, query, **kwargs) + + With arguments: + + origin (unicode|None): The authenticated server_name of the calling server, + unless REQUIRE_AUTH is set to False and authentication failed. + + content (unicode|None): decoded json body of the request. None if the + request was a GET. + + query (dict[bytes, list[bytes]]): Query params from the request. url-decoded + (ie, '+' and '%xx' are decoded) but note that it is *not* utf8-decoded + yet. + + **kwargs (dict[unicode, unicode]): the dict mapping keys to path + components as specified in the path match regexp. + + Returns: + Deferred[(int, object)|None]: either (response code, response object) to + return a JSON response, or None if the request has already been handled. + + Raises: + SynapseError: to return an error code + + Exception: other exceptions will be caught, logged, and a 500 will be + returned. + """ REQUIRE_AUTH = True def __init__(self, handler, authenticator, ratelimiter, server_name): @@ -204,6 +239,18 @@ def _wrap(self, func): @defer.inlineCallbacks @functools.wraps(func) def new_func(request, *args, **kwargs): + """ A callback which can be passed to HttpServer.RegisterPaths + + Args: + request (twisted.web.http.Request): + *args: unused? + **kwargs (dict[unicode, unicode]): the dict mapping keys to path + components as specified in the path match regexp. + + Returns: + Deferred[(int, object)|None]: (response code, response object) as returned + by the callback method. None if the request has already been handled. + """ content = None if request.method in ["PUT", "POST"]: # TODO: Handle other method types? other content types? From 0ca459ea334ff86016bda241c0c823178789c215 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 25 Jul 2018 22:10:39 +0100 Subject: [PATCH 031/173] Basic support for room versioning This is the first tranche of support for room versioning. 
It includes: * setting the default room version in the config file * new room_version param on the createRoom API * storing the version of newly-created rooms in the m.room.create event * fishing the version of existing rooms out of the m.room.create event --- synapse/api/constants.py | 6 ++++ synapse/api/errors.py | 2 ++ synapse/config/server.py | 14 +++++++++ synapse/handlers/room.py | 27 ++++++++++++++++- synapse/replication/slave/storage/events.py | 2 +- synapse/storage/state.py | 33 +++++++++++++++++++-- tests/utils.py | 4 +++ 7 files changed, 83 insertions(+), 5 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 4df930c8d1b1..a27bf3b32d92 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd +# Copyright 2018 New Vector Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -94,3 +95,8 @@ class RoomCreationPreset(object): class ThirdPartyEntityKind(object): USER = "user" LOCATION = "location" + + +# vdh-test-version is a placeholder to get room versioning support working and tested +# until we have a working v2. +KNOWN_ROOM_VERSIONS = {"1", "vdh-test-version"} diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b41d595059a3..477ca07a242b 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -56,6 +57,7 @@ class Codes(object): CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" + UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION" class CodeMessageException(RuntimeError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 6a471a0a5ec7..68ef2789d3c9 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -16,6 +16,7 @@ import logging +from synapse.api.constants import KNOWN_ROOM_VERSIONS from synapse.http.endpoint import parse_and_validate_server_name from ._base import Config, ConfigError @@ -75,6 +76,16 @@ def read_config(self, config): ) else: self.max_mau_value = 0 + + # the version of rooms created by default on this server + self.default_room_version = str(config.get( + "default_room_version", "1", + )) + if self.default_room_version not in KNOWN_ROOM_VERSIONS: + raise ConfigError("Unrecognised value '%s' for default_room_version" % ( + self.default_room_version, + )) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( @@ -249,6 +260,9 @@ def default_config(self, server_name, **kwargs): # (except those sent by local server admins). The default is False. # block_non_admin_invites: True + # The room_version of rooms which are created by default by this server. + # default_room_version: 1 + # Restrict federation to the following whitelist of domains. # N.B. 
we recommend also firewalling your federation listener to limit # inbound federation traffic as early as possible, rather than relying diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 7b7804d9b2cc..a526b684e9dd 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -21,9 +21,16 @@ import string from collections import OrderedDict +from six import string_types + from twisted.internet import defer -from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + JoinRules, + RoomCreationPreset, +) from synapse.api.errors import AuthError, Codes, StoreError, SynapseError from synapse.types import RoomAlias, RoomID, RoomStreamToken, StreamToken, UserID from synapse.util import stringutils @@ -99,6 +106,21 @@ def create_room(self, requester, config, ratelimit=True, if ratelimit: yield self.ratelimit(requester) + room_version = config.get("room_version", self.hs.config.default_room_version) + if not isinstance(room_version, string_types): + raise SynapseError( + 400, + "room_version must be a string", + Codes.BAD_JSON, + ) + + if room_version not in KNOWN_ROOM_VERSIONS: + raise SynapseError( + 400, + "Your homeserver does not support this room version", + Codes.UNSUPPORTED_ROOM_VERSION, + ) + if "room_alias_name" in config: for wchar in string.whitespace: if wchar in config["room_alias_name"]: @@ -184,6 +206,9 @@ def create_room(self, requester, config, ratelimit=True, creation_content = config.get("creation_content", {}) + # override any attempt to set room versions via the creation_content + creation_content["room_version"] = room_version + room_member_handler = self.hs.get_room_member_handler() yield self._send_events_for_new_room( diff --git a/synapse/replication/slave/storage/events.py b/synapse/replication/slave/storage/events.py index bdb5eee4afc5..4830c68f3575 100644 --- a/synapse/replication/slave/storage/events.py +++ b/synapse/replication/slave/storage/events.py @@ -44,8 +44,8 @@ class SlavedEventStore(EventFederationWorkerStore, RoomMemberWorkerStore, EventPushActionsWorkerStore, StreamWorkerStore, - EventsWorkerStore, StateGroupWorkerStore, + EventsWorkerStore, SignatureWorkerStore, UserErasureWorkerStore, BaseSlavedStore): diff --git a/synapse/storage/state.py b/synapse/storage/state.py index b27b3ae14458..17b14d464baf 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -21,15 +21,17 @@ from twisted.internet import defer +from synapse.api.constants import EventTypes +from synapse.api.errors import NotFoundError +from synapse.storage._base import SQLBaseStore from synapse.storage.background_updates import BackgroundUpdateStore from synapse.storage.engines import PostgresEngine +from synapse.storage.events_worker import EventsWorkerStore from synapse.util.caches import get_cache_factor_for, intern_string from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.dictionary_cache import DictionaryCache from synapse.util.stringutils import to_ascii -from ._base import SQLBaseStore - logger = logging.getLogger(__name__) @@ -46,7 +48,8 @@ def __len__(self): return len(self.delta_ids) if self.delta_ids else 0 -class StateGroupWorkerStore(SQLBaseStore): +# this inherits from EventsWorkerStore because it calls self.get_events +class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): """The parts of StateGroupStore that can be called from workers. 
""" @@ -61,6 +64,30 @@ def __init__(self, db_conn, hs): "*stateGroupCache*", 500000 * get_cache_factor_for("stateGroupCache") ) + @defer.inlineCallbacks + def get_room_version(self, room_id): + """Get the room_version of a given room + + Args: + room_id (str) + + Returns: + Deferred[str] + + Raises: + NotFoundError if the room is unknown + """ + # for now we do this by looking at the create event. We may want to cache this + # more intelligently in future. + state_ids = yield self.get_current_state_ids(room_id) + create_id = state_ids.get((EventTypes.Create, "")) + + if not create_id: + raise NotFoundError("Unknown room") + + create_event = yield self.get_event(create_id) + defer.returnValue(create_event.content.get("room_version", "1")) + @cached(max_entries=100000, iterable=True) def get_current_state_ids(self, room_id): """Get the current state event ids for a room based on the diff --git a/tests/utils.py b/tests/utils.py index 9bff3ff3b9b5..9e188a8ed4e9 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,10 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] + # we need a sane default_room_version, otherwise attempts to create rooms will + # fail. + config.default_room_version = "1" + # disable user directory updates, because they get done in the # background, which upsets the test runner. config.update_user_directory = False From 0d63d93ca834771324f0c5b9340346ad2113a4b1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 25 Jul 2018 22:25:41 +0100 Subject: [PATCH 032/173] Enforce compatibility when processing make_join requests Reject make_join requests from servers which do not support the room version. Also include the room version in the response. --- synapse/api/errors.py | 22 ++++++++++++++++++++++ synapse/federation/federation_server.py | 21 ++++++++++++++++++--- synapse/federation/transport/server.py | 24 +++++++++++++++++++++++- 3 files changed, 63 insertions(+), 4 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 477ca07a242b..70400347bc08 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -58,6 +58,7 @@ class Codes(object): CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION" + INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION" class CodeMessageException(RuntimeError): @@ -287,6 +288,27 @@ def error_dict(self): ) +class IncompatibleRoomVersionError(SynapseError): + """A server is trying to join a room whose version it does not support.""" + + def __init__(self, room_version): + super(IncompatibleRoomVersionError, self).__init__( + code=400, + msg="Your homeserver does not support the features required to " + "join this room", + errcode=Codes.INCOMPATIBLE_ROOM_VERSION, + ) + + self._room_version = room_version + + def error_dict(self): + return cs_error( + self.msg, + self.errcode, + room_version=self._room_version, + ) + + def cs_error(msg, code=Codes.UNKNOWN, **kwargs): """ Utility method for constructing an error response for client-server interactions. 
diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index bf89d568afed..2b62f687b683 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -27,7 +27,13 @@ from twisted.python import failure from synapse.api.constants import EventTypes -from synapse.api.errors import AuthError, FederationError, NotFoundError, SynapseError +from synapse.api.errors import ( + AuthError, + FederationError, + IncompatibleRoomVersionError, + NotFoundError, + SynapseError, +) from synapse.crypto.event_signing import compute_event_signature from synapse.federation.federation_base import FederationBase, event_from_pdu_json from synapse.federation.persistence import TransactionActions @@ -323,12 +329,21 @@ def on_query_request(self, query_type, args): defer.returnValue((200, resp)) @defer.inlineCallbacks - def on_make_join_request(self, origin, room_id, user_id): + def on_make_join_request(self, origin, room_id, user_id, supported_versions): origin_host, _ = parse_server_name(origin) yield self.check_server_matches_acl(origin_host, room_id) + + room_version = yield self.store.get_room_version(room_id) + if room_version not in supported_versions: + logger.warn("Room version %s not in %s", room_version, supported_versions) + raise IncompatibleRoomVersionError(room_version=room_version) + pdu = yield self.handler.on_make_join_request(room_id, user_id) time_now = self._clock.time_msec() - defer.returnValue({"event": pdu.get_pdu_json(time_now)}) + defer.returnValue({ + "event": pdu.get_pdu_json(time_now), + "room_version": room_version, + }) @defer.inlineCallbacks def on_invite_request(self, origin, content): diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 93bd899b865c..77969a4f3804 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -431,9 +431,31 @@ class FederationMakeJoinServlet(BaseFederationServlet): PATH = "/make_join/(?P<context>[^/]*)/(?P<user_id>[^/]*)" @defer.inlineCallbacks - def on_GET(self, origin, content, query, context, user_id): + def on_GET(self, origin, _content, query, context, user_id): + """ + Args: + origin (unicode): The authenticated server_name of the calling server + + _content (None): (GETs don't have bodies) + + query (dict[bytes, list[bytes]]): Query params from the request. + + **kwargs (dict[unicode, unicode]): the dict mapping keys to path + components as specified in the path match regexp. + + Returns: + Deferred[(int, object)|None]: either (response code, response object) to + return a JSON response, or None if the request has already been handled.
+ """ + versions = query.get(b'ver') + if versions is not None: + supported_versions = [v.decode("utf-8") for v in versions] + else: + supported_versions = ["1"] + content = yield self.handler.on_make_join_request( origin, context, user_id, + supported_versions=supported_versions, ) defer.returnValue((200, content)) From 3777fa26aaf36245168980054c4eebf96169dd13 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 1 Aug 2018 15:35:29 +0100 Subject: [PATCH 033/173] sanity check response from make_join --- synapse/federation/federation_client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 7550e11b6ec2..de4b813a1572 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -561,7 +561,9 @@ def send_request(destination): destination, room_id, user_id, membership ) - pdu_dict = ret["event"] + pdu_dict = ret.get("event", None) + if not isinstance(pdu_dict, dict): + raise InvalidResponseError("Bad 'event' field in response") logger.debug("Got response to make_%s: %s", membership, pdu_dict) From e10830e9765eb3897da5af6ffb4809badb8e3009 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 17:55:50 +0100 Subject: [PATCH 034/173] wip commit - tests failing --- synapse/api/auth.py | 6 +- synapse/storage/client_ips.py | 21 +------ synapse/storage/monthly_active_users.py | 66 ++++++++++++++++------ tests/storage/test_client_ips.py | 12 ++-- tests/storage/test_monthly_active_users.py | 14 ++--- 5 files changed, 66 insertions(+), 53 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 943a488339bb..d8ebbbc6e8de 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -775,7 +775,7 @@ def check_in_room_or_world_readable(self, room_id, user_id): ) @defer.inlineCallbacks - def check_auth_blocking(self, error): + def check_auth_blocking(self): """Checks if the user should be rejected for some external reason, such as monthly active user limiting or global disable flag Args: @@ -785,4 +785,6 @@ def check_auth_blocking(self, error): if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: - raise error + raise AuthError( + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + ) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 83d64d1563c2..2489527f2cc8 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -86,7 +86,7 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, last_seen = self.client_ip_last_seen.get(key) except KeyError: last_seen = None - yield self._populate_monthly_active_users(user_id) + yield self.populate_monthly_active_users(user_id) # Rate-limited inserts if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY: return @@ -95,25 +95,6 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, self._batch_row_update[key] = (user_agent, device_id, now) - @defer.inlineCallbacks - def _populate_monthly_active_users(self, user_id): - """Checks on the state of monthly active user limits and optionally - add the user to the monthly active tables - - Args: - user_id(str): the user_id to query - """ - - store = self.hs.get_datastore() - if self.hs.config.limit_usage_by_mau: - is_user_monthly_active = yield store.is_user_monthly_active(user_id) - if is_user_monthly_active: - yield 
store.upsert_monthly_active_user(user_id) - else: - count = yield store.get_monthly_active_count() - if count < self.hs.config.max_mau_value: - yield store.upsert_monthly_active_user(user_id) - def _update_client_ips_batch(self): def update(): to_update = self._batch_row_update diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index d06c90cbcc83..6def6830d002 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -19,6 +19,10 @@ from ._base import SQLBaseStore +# Number of msec of granularity to store the monthly_active_user timestamp +# This means it is not necessary to update the table on every request +LAST_SEEN_GRANULARITY = 60 * 60 * 1000 + class MonthlyActiveUsersStore(SQLBaseStore): def __init__(self, dbconn, hs): @@ -30,8 +34,9 @@ def reap_monthly_active_users(self): """ Cleans out monthly active user table to ensure that no stale entries exist. - Return: - Defered() + + Returns: + Deferred[] """ def _reap_users(txn): @@ -49,7 +54,7 @@ def _reap_users(txn): """ txn.execute(sql, (self.hs.config.max_mau_value,)) - res = self.runInteraction("reap_monthly_active_users", _reap_users) + res = yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports # 'Returning' which would allow me to invalidate only the # specific users, but sqlite has no way to do this and instead @@ -57,16 +62,16 @@ def _reap_users(txn): # is racy. # Have resolved to invalidate the whole cache for now and do # something about it if and when the perf becomes significant - self.is_user_monthly_active.invalidate_all() + self._user_last_seen_monthly_active.invalidate_all() self.get_monthly_active_count.invalidate_all() return res @cached(num_args=0) def get_monthly_active_count(self): - """ - Generates current count of monthly active users.abs - Return: - Defered(int): Number of current monthly active users + """Generates current count of monthly active users.abs + + Returns: + Defered[int]: Number of current monthly active users """ def _count_users(txn): @@ -82,10 +87,10 @@ def upsert_monthly_active_user(self, user_id): Updates or inserts monthly active user member Arguments: user_id (str): user to add/update - Deferred(bool): True if a new entry was created, False if an + Deferred[bool]: True if a new entry was created, False if an existing one was updated. 
""" - self._simple_upsert( + is_insert = self._simple_upsert( desc="upsert_monthly_active_user", table="monthly_active_users", keyvalues={ @@ -96,24 +101,49 @@ def upsert_monthly_active_user(self, user_id): }, lock=False, ) - self.is_user_monthly_active.invalidate((user_id,)) - self.get_monthly_active_count.invalidate(()) + if is_insert: + self._user_last_seen_monthly_active.invalidate((user_id,)) + self.get_monthly_active_count.invalidate(()) @cachedInlineCallbacks(num_args=1) - def is_user_monthly_active(self, user_id): + def _user_last_seen_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group Arguments: user_id (str): user to add/update Return: - bool : True if user part of group, False otherwise + int : timestamp since last seen, None if never seen + """ - user_present = yield self._simple_select_onecol( + result = yield self._simple_select_onecol( table="monthly_active_users", keyvalues={ "user_id": user_id, }, - retcol="user_id", - desc="is_user_monthly_active", + retcol="timestamp", + desc="_user_last_seen_monthly_active", ) - defer.returnValue(bool(user_present)) + timestamp = None + if len(result) > 0: + timestamp = result[0] + defer.returnValue(timestamp) + + @defer.inlineCallbacks + def populate_monthly_active_users(self, user_id): + """Checks on the state of monthly active user limits and optionally + add the user to the monthly active tables + + Args: + user_id(str): the user_id to query + """ + + if self.hs.config.limit_usage_by_mau: + last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) + now = self.hs.get_clock().time_msec() + + if last_seen_timestamp is None: + count = yield self.get_monthly_active_count() + if count < self.hs.config.max_mau_value: + yield self.upsert_monthly_active_user(user_id) + elif now - last_seen_timestamp > LAST_SEEN_GRANULARITY: + yield self.upsert_monthly_active_user(user_id) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index e1552510cc50..7a58c6eb24e5 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -64,7 +64,7 @@ def test_disabled_monthly_active_user(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -80,7 +80,7 @@ def test_adding_monthly_active_user_when_full(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -88,13 +88,13 @@ def test_adding_monthly_active_user_when_space(self): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) @defer.inlineCallbacks @@ -103,7 +103,7 @@ def test_updating_monthly_active_user_when_space(self): self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield 
self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( @@ -112,5 +112,5 @@ def test_updating_monthly_active_user_when_space(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store.is_user_monthly_active(user_id) + active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index c32109ecc5d9..0bfd24a7fb3d 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -40,18 +40,18 @@ def test_can_insert_and_count_mau(self): self.assertEqual(1, count) @defer.inlineCallbacks - def test_is_user_monthly_active(self): + def test__user_last_seen_monthly_active(self): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.store.is_user_monthly_active(user_id1) - self.assertFalse(result) + result = yield self.store._user_last_seen_monthly_active(user_id1) + self.assertFalse(result == 0) yield self.store.upsert_monthly_active_user(user_id1) yield self.store.upsert_monthly_active_user(user_id2) - result = yield self.store.is_user_monthly_active(user_id1) - self.assertTrue(result) - result = yield self.store.is_user_monthly_active(user_id3) - self.assertFalse(result) + result = yield self.store._user_last_seen_monthly_active(user_id1) + self.assertTrue(result > 0) + result = yield self.store._user_last_seen_monthly_active(user_id3) + self.assertFalse(result == 0) @defer.inlineCallbacks def test_reap_monthly_active_users(self): From 886be75ad1bc60e016611b453b9644e8db17a9f1 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 22:29:03 +0100 Subject: [PATCH 035/173] bug fixes --- synapse/handlers/auth.py | 15 ++------------- synapse/handlers/register.py | 8 ++++---- synapse/storage/monthly_active_users.py | 3 +-- tests/api/test_auth.py | 10 +++------- tests/handlers/test_register.py | 1 - 5 files changed, 10 insertions(+), 27 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 8f9cff92e84b..7ea8ce9f9479 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -520,7 +520,7 @@ def get_access_token_for_user_id(self, user_id, device_id=None): """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() # the device *should* have been registered before we got here; however, # it's possible we raced against a DELETE operation. The thing we @@ -734,7 +734,7 @@ def issue_access_token(self, user_id, device_id=None): @defer.inlineCallbacks def validate_short_term_login_token_and_get_user_id(self, login_token): - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() auth_api = self.hs.get_auth() user_id = None try: @@ -907,17 +907,6 @@ def _do_validate_hash(): else: return defer.succeed(False) - @defer.inlineCallbacks - def _check_mau_limits(self): - """ - Ensure that if mau blocking is enabled that invalid users cannot - log in. 
- """ - error = AuthError( - 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - yield self.auth.check_auth_blocking(error) - @attr.s class MacaroonGenerator(object): diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 706ed8c292db..8cf0a36a8f8e 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -540,7 +540,7 @@ def _check_mau_limits(self): Do not accept registrations if monthly active user limits exceeded and limiting is enabled """ - error = RegistrationError( - 403, "Monthly Active User limits exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - yield self.auth.check_auth_blocking(error) + try: + yield self.auth.check_auth_blocking() + except AuthError as e: + raise RegistrationError(e.code, e.message, e.errcode) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 6def6830d002..135837507a14 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -54,7 +54,7 @@ def _reap_users(txn): """ txn.execute(sql, (self.hs.config.max_mau_value,)) - res = yield self.runInteraction("reap_monthly_active_users", _reap_users) + yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports # 'Returning' which would allow me to invalidate only the # specific users, but sqlite has no way to do this and instead @@ -64,7 +64,6 @@ def _reap_users(txn): # something about it if and when the perf becomes significant self._user_last_seen_monthly_active.invalidate_all() self.get_monthly_active_count.invalidate_all() - return res @cached(num_args=0) def get_monthly_active_count(self): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 54bdf28663f9..e963963c730a 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -452,12 +452,8 @@ def test_blocking_mau(self): lots_of_users = 100 small_number_of_users = 1 - error = AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) - # Ensure no error thrown - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() self.hs.config.limit_usage_by_mau = True @@ -466,10 +462,10 @@ def test_blocking_mau(self): ) with self.assertRaises(AuthError): - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() # Ensure does not throw an error self.store.get_monthly_active_count = Mock( return_value=defer.succeed(small_number_of_users) ) - yield self.auth.check_auth_blocking(error) + yield self.auth.check_auth_blocking() diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 6b5b8b3772a4..4ea59a58de26 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -104,7 +104,6 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): yield self.handler.get_or_create_user("requester", 'b', "display_name") From d08296f9f278f39372bb5db99266d627614ecc96 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 23:00:16 +0100 Subject: [PATCH 036/173] remove unused import --- tests/api/test_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index e963963c730a..5dc33983003d 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ import synapse.handlers.auth from 
synapse.api.auth import Auth -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError from synapse.types import UserID from tests import unittest From e40a510fbff931b5e6b295351847b95fb8c69e71 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 3 Aug 2018 23:19:13 +0100 Subject: [PATCH 037/173] py3 fix --- synapse/handlers/register.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 8cf0a36a8f8e..0e16bbe0ee7a 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -543,4 +543,4 @@ def _check_mau_limits(self): try: yield self.auth.check_auth_blocking() except AuthError as e: - raise RegistrationError(e.code, e.message, e.errcode) + raise RegistrationError(e.code, str(e), e.errcode) From 42c6823827a12cdb4374bc6e64d32d4ce4f526ec Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Sat, 4 Aug 2018 22:07:04 +0100 Subject: [PATCH 038/173] disable HS from config --- synapse/api/auth.py | 4 ++++ synapse/api/errors.py | 1 + synapse/config/server.py | 4 ++++ tests/api/test_auth.py | 11 ++++++++++- tests/utils.py | 2 ++ 5 files changed, 21 insertions(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index d8ebbbc6e8de..089f166152d2 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -782,6 +782,10 @@ def check_auth_blocking(self): error (Error): The error that should be raised if user is to be blocked """ + if self.hs.config.hs_disabled: + raise AuthError( + 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED + ) if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b41d595059a3..466240248aeb 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -56,6 +56,7 @@ class Codes(object): CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" + HS_DISABLED = "M_HS_DISABLED" class CodeMessageException(RuntimeError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 8fd23197590c..2e1e2f5961c4 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -75,6 +75,10 @@ def read_config(self, config): "max_mau_value", 0, ) + # Options to disable HS + self.hs_disabled = config.get("hs_disabled", False) + self.hs_disabled_message = config.get("hs_disabled_message", "") + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 5dc33983003d..fbb96361a8bc 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError +from synapse.api.errors import AuthError, Codes from synapse.types import UserID from tests import unittest @@ -469,3 +469,12 @@ def test_blocking_mau(self): return_value=defer.succeed(small_number_of_users) ) yield self.auth.check_auth_blocking() + + @defer.inlineCallbacks + def test_hs_disabled(self): + self.hs.config.hs_disabled = True + self.hs.config.hs_disabled_message = "Reason for being disabled" + with self.assertRaises(AuthError) as e: + yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + 
self.assertEquals(e.exception.code, 403) diff --git a/tests/utils.py b/tests/utils.py index ec40428e7438..a0aa38d26488 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] config.limit_usage_by_mau = False + config.hs_disabled = False + config.hs_disabled_message = "" # disable user directory updates, because they get done in the # background, which upsets the test runner. From f900d508244b4277065d34dd9a05224fd60d5221 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 Aug 2018 13:45:37 +0100 Subject: [PATCH 039/173] include known room versions in outgoing make_joins --- synapse/federation/federation_client.py | 8 +++++--- synapse/federation/transport/client.py | 5 ++++- synapse/handlers/federation.py | 13 +++++++++++-- synapse/http/matrixfederationclient.py | 7 +++++-- 4 files changed, 25 insertions(+), 8 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index de4b813a1572..7ec1d7a88984 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -521,7 +521,7 @@ def _try_destination_list(self, description, destinations, callback): raise RuntimeError("Failed to %s via any server", description) def make_membership_event(self, destinations, room_id, user_id, membership, - content={},): + content, params): """ Creates an m.room.member event, with context, without participating in the room. @@ -537,8 +537,10 @@ def make_membership_event(self, destinations, room_id, user_id, membership, user_id (str): The user whose membership is being evented. membership (str): The "membership" property of the event. Must be one of "join" or "leave". - content (object): Any additional data to put into the content field + content (dict): Any additional data to put into the content field of the event. + params (dict[str, str|Iterable[str]]): Query parameters to include in the + request. Return: Deferred: resolves to a tuple of (origin (str), event (object)) where origin is the remote homeserver which generated the event. @@ -558,7 +560,7 @@ def make_membership_event(self, destinations, room_id, user_id, membership, @defer.inlineCallbacks def send_request(destination): ret = yield self.transport_layer.make_membership_event( - destination, room_id, user_id, membership + destination, room_id, user_id, membership, params, ) pdu_dict = ret.get("event", None) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 4529d454af46..b4fbe2c9d582 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -195,7 +195,7 @@ def make_query(self, destination, query_type, args, retry_on_dns_fail, @defer.inlineCallbacks @log_function - def make_membership_event(self, destination, room_id, user_id, membership): + def make_membership_event(self, destination, room_id, user_id, membership, params): """Asks a remote server to build and sign us a membership event Note that this does not append any events to any graphs. @@ -205,6 +205,8 @@ def make_membership_event(self, destination, room_id, user_id, membership): room_id (str): room to join/leave user_id (str): user to be joined/left membership (str): one of join/leave + params (dict[str, str|Iterable[str]]): Query parameters to include in the + request. Returns: Deferred: Succeeds when we get a 2xx HTTP response. 
The result @@ -241,6 +243,7 @@ def make_membership_event(self, destination, room_id, user_id, membership): content = yield self.client.get_json( destination=destination, path=path, + args=params, retry_on_dns_fail=retry_on_dns_fail, timeout=20000, ignore_backoff=ignore_backoff, diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 533b82c78361..0dffd44e22e0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -30,7 +30,12 @@ from twisted.internet import defer -from synapse.api.constants import EventTypes, Membership, RejectedReason +from synapse.api.constants import ( + KNOWN_ROOM_VERSIONS, + EventTypes, + Membership, + RejectedReason, +) from synapse.api.errors import ( AuthError, CodeMessageException, @@ -922,6 +927,9 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): joinee, "join", content, + params={ + "ver": KNOWN_ROOM_VERSIONS, + }, ) # This shouldn't happen, because the RoomMemberHandler has a @@ -1187,13 +1195,14 @@ def do_remotely_reject_invite(self, target_hosts, room_id, user_id): @defer.inlineCallbacks def _make_and_verify_event(self, target_hosts, room_id, user_id, membership, - content={},): + content={}, params=None): origin, pdu = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, membership, content, + params=params, ) logger.debug("Got response to make_%s: %s", membership, pdu) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index bf1aa2950271..b3f5415aa660 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -439,7 +439,7 @@ def body_callback(method, url_bytes, headers_dict): defer.returnValue(json.loads(body)) @defer.inlineCallbacks - def get_json(self, destination, path, args={}, retry_on_dns_fail=True, + def get_json(self, destination, path, args=None, retry_on_dns_fail=True, timeout=None, ignore_backoff=False): """ GETs some json from the given host homeserver and path @@ -447,7 +447,7 @@ def get_json(self, destination, path, args={}, retry_on_dns_fail=True, destination (str): The remote server to send the HTTP request to. path (str): The HTTP path. - args (dict): A dictionary used to create query strings, defaults to + args (dict|None): A dictionary used to create query strings, defaults to None. timeout (int): How long to try (in ms) the destination for before giving up. 
None indicates no timeout and that the request will @@ -702,6 +702,9 @@ def check_content_type_is_json(headers): def encode_query_args(args): + if args is None: + return b"" + encoded_args = {} for k, vs in args.items(): if isinstance(vs, string_types): From 051a99c4006a7deb1f9256df0a25c702dcdb451d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 6 Aug 2018 14:29:31 +0100 Subject: [PATCH 040/173] Fix isort --- synapse/replication/http/_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index a7d1b2dabe59..4de3825fdab5 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -21,10 +21,7 @@ from twisted.internet import defer -from synapse.api.errors import ( - CodeMessageException, - HttpResponseException, -) +from synapse.api.errors import CodeMessageException, HttpResponseException from synapse.util.caches.response_cache import ResponseCache from synapse.util.stringutils import random_string From e26dbd82ef5f1d755be9a62165556ebce041af10 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 25 Jul 2018 16:32:05 +0100 Subject: [PATCH 041/173] Add replication APIs for persisting federation events --- synapse/app/federation_reader.py | 8 + synapse/handlers/federation.py | 44 ++++- synapse/replication/http/__init__.py | 3 +- synapse/replication/http/federation.py | 245 +++++++++++++++++++++++++ 4 files changed, 290 insertions(+), 10 deletions(-) create mode 100644 synapse/replication/http/federation.py diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 7af00b8bcfaa..c512b4be877e 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -32,9 +32,13 @@ from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore +from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore from synapse.replication.slave.storage.directory import DirectoryStore from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.keys import SlavedKeyStore +from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore +from synapse.replication.slave.storage.pushers import SlavedPusherStore +from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.room import RoomStore from synapse.replication.slave.storage.transactions import TransactionStore from synapse.replication.tcp.client import ReplicationClientHandler @@ -49,6 +53,10 @@ class FederationReaderSlavedStore( + SlavedApplicationServiceStore, + SlavedPusherStore, + SlavedPushRuleStore, + SlavedReceiptsStore, SlavedEventStore, SlavedKeyStore, RoomStore, diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 533b82c78361..0524dec942d1 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,6 +44,8 @@ compute_event_signature, ) from synapse.events.validator import EventValidator +from synapse.replication.http.federation import send_federation_events_to_master +from synapse.replication.http.membership import notify_user_membership_change from synapse.state import resolve_events_with_factory from synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError @@ -86,6 +88,8 @@ def __init__(self, hs): self.spam_checker = 
hs.get_spam_checker() self.event_creation_handler = hs.get_event_creation_handler() self._server_notices_mxid = hs.config.server_notices_mxid + self.config = hs.config + self.http_client = hs.get_simple_http_client() # When joining a room we need to queue any events for that room up self.room_queues = {} @@ -2288,7 +2292,7 @@ def _check_key_revocation(self, public_key, url): for revocation. """ try: - response = yield self.hs.get_simple_http_client().get_json( + response = yield self.http_client.get_json( url, {"public_key": public_key} ) @@ -2313,14 +2317,25 @@ def _persist_events(self, event_and_contexts, backfilled=False): Returns: Deferred """ - max_stream_id = yield self.store.persist_events( - event_and_contexts, - backfilled=backfilled, - ) + if self.config.worker_app: + yield send_federation_events_to_master( + clock=self.hs.get_clock(), + store=self.store, + client=self.http_client, + host=self.config.worker_replication_host, + port=self.config.worker_replication_http_port, + event_and_contexts=event_and_contexts, + backfilled=backfilled + ) + else: + max_stream_id = yield self.store.persist_events( + event_and_contexts, + backfilled=backfilled, + ) - if not backfilled: # Never notify for backfilled events - for event, _ in event_and_contexts: - self._notify_persisted_event(event, max_stream_id) + if not backfilled: # Never notify for backfilled events + for event, _ in event_and_contexts: + self._notify_persisted_event(event, max_stream_id) def _notify_persisted_event(self, event, max_stream_id): """Checks to see if notifier/pushers should be notified about the @@ -2359,9 +2374,20 @@ def _notify_persisted_event(self, event, max_stream_id): ) def _clean_room_for_join(self, room_id): + # TODO move this out to master return self.store.clean_room_for_join(room_id) def user_joined_room(self, user, room_id): """Called when a new user has joined the room """ - return user_joined_room(self.distributor, user, room_id) + if self.config.worker_app: + return notify_user_membership_change( + client=self.http_client, + host=self.config.worker_replication_host, + port=self.config.worker_replication_http_port, + room_id=room_id, + user_id=user.to_string(), + change="joined", + ) + else: + return user_joined_room(self.distributor, user, room_id) diff --git a/synapse/replication/http/__init__.py b/synapse/replication/http/__init__.py index 589ee94c66a0..19f214281ef6 100644 --- a/synapse/replication/http/__init__.py +++ b/synapse/replication/http/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. from synapse.http.server import JsonResource -from synapse.replication.http import membership, send_event +from synapse.replication.http import federation, membership, send_event REPLICATION_PREFIX = "/_synapse/replication" @@ -27,3 +27,4 @@ def __init__(self, hs): def register_servlets(self, hs): send_event.register_servlets(hs, self) membership.register_servlets(hs, self) + federation.register_servlets(hs, self) diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py new file mode 100644 index 000000000000..f39aaa89be57 --- /dev/null +++ b/synapse/replication/http/federation.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from twisted.internet import defer + +from synapse.api.constants import EventTypes, Membership +from synapse.events import FrozenEvent +from synapse.events.snapshot import EventContext +from synapse.http.servlet import parse_json_object_from_request +from synapse.replication.http._base import ReplicationEndpoint +from synapse.types import UserID +from synapse.util.logcontext import run_in_background +from synapse.util.metrics import Measure + +logger = logging.getLogger(__name__) + + +class ReplicationFederationSendEventsRestServlet(ReplicationEndpoint): + """Handles events newly received from federation, including persisting and + notifying. + + The API looks like: + + POST /_synapse/replication/fed_send_events/:txn_id + + { + "events": [{ + "event": { .. serialized event .. }, + "internal_metadata": { .. serialized internal_metadata .. }, + "rejected_reason": .., // The event.rejected_reason field + "context": { .. serialized event context .. }, + }], + "backfilled": false + """ + + NAME = "fed_send_events" + PATH_ARGS = () + + def __init__(self, hs): + super(ReplicationFederationSendEventsRestServlet, self).__init__(hs) + + self.store = hs.get_datastore() + self.clock = hs.get_clock() + self.is_mine_id = hs.is_mine_id + self.notifier = hs.get_notifier() + self.pusher_pool = hs.get_pusherpool() + + @defer.inlineCallbacks + @staticmethod + def _serialize_payload(store, event_and_contexts, backfilled): + """ + Args: + store + event_and_contexts (list[tuple[FrozenEvent, EventContext]]) + backfilled (bool): Whether or not the events are the result of + backfilling + """ + event_payloads = [] + for event, context in event_and_contexts: + serialized_context = yield context.serialize(event, store) + + event_payloads.append({ + "event": event.get_pdu_json(), + "internal_metadata": event.internal_metadata.get_dict(), + "rejected_reason": event.rejected_reason, + "context": serialized_context, + }) + + payload = { + "events": event_payloads, + "backfilled": backfilled, + } + + defer.returnValue(payload) + + @defer.inlineCallbacks + def _handle_request(self, request): + with Measure(self.clock, "repl_fed_send_events_parse"): + content = parse_json_object_from_request(request) + + backfilled = content["backfilled"] + + event_payloads = content["events"] + + event_and_contexts = [] + for event_payload in event_payloads: + event_dict = event_payload["event"] + internal_metadata = event_payload["internal_metadata"] + rejected_reason = event_payload["rejected_reason"] + event = FrozenEvent(event_dict, internal_metadata, rejected_reason) + + context = yield EventContext.deserialize( + self.store, event_payload["context"], + ) + + event_and_contexts.append((event, context)) + + logger.info( + "Got %d events from federation", + len(event_and_contexts), + ) + + max_stream_id = yield self.store.persist_events( + event_and_contexts, + backfilled=backfilled + ) + + if not backfilled: + for event, _ in event_and_contexts: + self._notify_persisted_event(event, max_stream_id) + + defer.returnValue((200, {})) + + def _notify_persisted_event(self, event, max_stream_id): + extra_users = 
[] + if event.type == EventTypes.Member: + target_user_id = event.state_key + + # We notify for memberships if its an invite for one of our + # users + if event.internal_metadata.is_outlier(): + if event.membership != Membership.INVITE: + if not self.is_mine_id(target_user_id): + return + + target_user = UserID.from_string(target_user_id) + extra_users.append(target_user) + elif event.internal_metadata.is_outlier(): + return + + event_stream_id = event.internal_metadata.stream_ordering + self.notifier.on_new_room_event( + event, event_stream_id, max_stream_id, + extra_users=extra_users + ) + + run_in_background( + self.pusher_pool.on_new_notifications, + event_stream_id, max_stream_id, + ) + + +class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): + """Handles EDUs newly received from federation, including persisting and + notifying. + """ + + NAME = "fed_send_edu" + PATH_ARGS = ("edu_type",) + + def __init__(self, hs): + super(ReplicationFederationSendEduRestServlet, self).__init__(hs) + + self.store = hs.get_datastore() + self.clock = hs.get_clock() + self.registry = hs.get_federation_registry() + + @staticmethod + def _serialize_payload(edu_type, origin, content): + return { + "origin": origin, + "content": content, + } + + @defer.inlineCallbacks + def _handle_request(self, request, edu_type): + with Measure(self.clock, "repl_fed_send_edu_parse"): + content = parse_json_object_from_request(request) + + origin = content["origin"] + edu_content = content["content"] + + logger.info( + "Got %r edu from $s", + edu_type, origin, + ) + + result = yield self.registry.on_edu(edu_type, origin, edu_content) + + defer.returnValue((200, result)) + + +class ReplicationGetQueryRestServlet(ReplicationEndpoint): + """Handle responding to queries from federation. 
+ """ + + NAME = "fed_query" + PATH_ARGS = ("query_type",) + + # This is a query, so let's not bother caching + CACHE = False + + def __init__(self, hs): + super(ReplicationGetQueryRestServlet, self).__init__(hs) + + self.store = hs.get_datastore() + self.clock = hs.get_clock() + self.registry = hs.get_federation_registry() + + @staticmethod + def _serialize_payload(query_type, args): + """ + Args: + query_type (str) + args (dict): The arguments received for the given query type + """ + return { + "args": args, + } + + @defer.inlineCallbacks + def _handle_request(self, request, query_type): + with Measure(self.clock, "repl_fed_query_parse"): + content = parse_json_object_from_request(request) + + args = content["args"] + + logger.info( + "Got %r query", + query_type, + ) + + result = yield self.registry.on_query(query_type, args) + + defer.returnValue((200, result)) + + +def register_servlets(hs, http_server): + ReplicationFederationSendEventsRestServlet(hs).register(http_server) + ReplicationFederationSendEduRestServlet(hs).register(http_server) + ReplicationGetQueryRestServlet(hs).register(http_server) From a3f5bf79a0fc0ea6d59069945f53717a3e9c6581 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Jul 2018 11:44:22 +0100 Subject: [PATCH 042/173] Add EDU/query handling over replication --- synapse/federation/federation_server.py | 43 +++++++++++++++++++++++++ synapse/handlers/federation.py | 24 +++++++------- synapse/replication/http/federation.py | 2 +- synapse/server.py | 6 +++- 4 files changed, 62 insertions(+), 13 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index bf89d568afed..941e30a59632 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -33,6 +33,10 @@ from synapse.federation.persistence import TransactionActions from synapse.federation.units import Edu, Transaction from synapse.http.endpoint import parse_server_name +from synapse.replication.http.federation import ( + ReplicationFederationSendEduRestServlet, + ReplicationGetQueryRestServlet, +) from synapse.types import get_domain_from_id from synapse.util import async from synapse.util.caches.response_cache import ResponseCache @@ -745,6 +749,8 @@ def register_edu_handler(self, edu_type, handler): if edu_type in self.edu_handlers: raise KeyError("Already have an EDU handler for %s" % (edu_type,)) + logger.info("Registering federation EDU handler for %r", edu_type) + self.edu_handlers[edu_type] = handler def register_query_handler(self, query_type, handler): @@ -763,6 +769,8 @@ def register_query_handler(self, query_type, handler): "Already have a Query handler for %s" % (query_type,) ) + logger.info("Registering federation query handler for %r", query_type) + self.query_handlers[query_type] = handler @defer.inlineCallbacks @@ -785,3 +793,38 @@ def on_query(self, query_type, args): raise NotFoundError("No handler for Query type '%s'" % (query_type,)) return handler(args) + + +class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): + def __init__(self, hs): + self.config = hs.config + self.http_client = hs.get_simple_http_client() + self.clock = hs.get_clock() + + self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs) + self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs) + + super(ReplicationFederationHandlerRegistry, self).__init__() + + def on_edu(self, edu_type, origin, content): + handler = self.edu_handlers.get(edu_type) + if handler: + return 
super(ReplicationFederationHandlerRegistry, self).on_edu( + edu_type, origin, content, + ) + + return self._send_edu( + edu_type=edu_type, + origin=origin, + content=content, + ) + + def on_query(self, query_type, args): + handler = self.query_handlers.get(query_type) + if handler: + return handler(args) + + return self._get_query_client( + query_type=query_type, + args=args, + ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0524dec942d1..d2cbb12df3e2 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,8 +44,10 @@ compute_event_signature, ) from synapse.events.validator import EventValidator -from synapse.replication.http.federation import send_federation_events_to_master -from synapse.replication.http.membership import notify_user_membership_change +from synapse.replication.http.federation import ( + ReplicationFederationSendEventsRestServlet, +) +from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet from synapse.state import resolve_events_with_factory from synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError @@ -91,6 +93,13 @@ def __init__(self, hs): self.config = hs.config self.http_client = hs.get_simple_http_client() + self._send_events_to_master = ( + ReplicationFederationSendEventsRestServlet.make_client(hs) + ) + self._notify_user_membership_change = ( + ReplicationUserJoinedLeftRoomRestServlet.make_client(hs) + ) + # When joining a room we need to queue any events for that room up self.room_queues = {} self._room_pdu_linearizer = Linearizer("fed_room_pdu") @@ -2318,12 +2327,8 @@ def _persist_events(self, event_and_contexts, backfilled=False): Deferred """ if self.config.worker_app: - yield send_federation_events_to_master( - clock=self.hs.get_clock(), + yield self._send_events_to_master( store=self.store, - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, event_and_contexts=event_and_contexts, backfilled=backfilled ) @@ -2381,10 +2386,7 @@ def user_joined_room(self, user, room_id): """Called when a new user has joined the room """ if self.config.worker_app: - return notify_user_membership_change( - client=self.http_client, - host=self.config.worker_replication_host, - port=self.config.worker_replication_http_port, + return self._notify_user_membership_change( room_id=room_id, user_id=user.to_string(), change="joined", diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index f39aaa89be57..3fa7bd64c7b5 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -59,8 +59,8 @@ def __init__(self, hs): self.notifier = hs.get_notifier() self.pusher_pool = hs.get_pusherpool() - @defer.inlineCallbacks @staticmethod + @defer.inlineCallbacks def _serialize_payload(store, event_and_contexts, backfilled): """ Args: diff --git a/synapse/server.py b/synapse/server.py index 140be9ebe863..26228d8c72b4 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -36,6 +36,7 @@ from synapse.federation.federation_server import ( FederationHandlerRegistry, FederationServer, + ReplicationFederationHandlerRegistry, ) from synapse.federation.send_queue import FederationRemoteSendQueue from synapse.federation.transaction_queue import TransactionQueue @@ -423,7 +424,10 @@ def build_room_member_handler(self): return RoomMemberMasterHandler(self) def build_federation_registry(self): - return 
FederationHandlerRegistry() + if self.config.worker_app: + return ReplicationFederationHandlerRegistry(self) + else: + return FederationHandlerRegistry() def build_server_notices_manager(self): if self.config.worker_app: From 1e2bed9656a4826ed7fdc82bc096df775bc38baf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Jul 2018 11:45:12 +0100 Subject: [PATCH 043/173] Import all functions from TransactionStore --- synapse/replication/slave/storage/transactions.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py index 9c9a5eadd906..cd6416c47c8a 100644 --- a/synapse/replication/slave/storage/transactions.py +++ b/synapse/replication/slave/storage/transactions.py @@ -13,19 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage import DataStore from synapse.storage.transactions import TransactionStore from ._base import BaseSlavedStore -class TransactionStore(BaseSlavedStore): - get_destination_retry_timings = TransactionStore.__dict__[ - "get_destination_retry_timings" - ] - _get_destination_retry_timings = DataStore._get_destination_retry_timings.__func__ - set_destination_retry_timings = DataStore.set_destination_retry_timings.__func__ - _set_destination_retry_timings = DataStore._set_destination_retry_timings.__func__ - - prep_send_transaction = DataStore.prep_send_transaction.__func__ - delivered_txn = DataStore.delivered_txn.__func__ +class TransactionStore(TransactionStore, BaseSlavedStore): + pass From 62ace05c4521caeed023e5b9dadd993afe5c154f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Jul 2018 13:31:59 +0100 Subject: [PATCH 044/173] Move necessary storage functions to worker classes --- synapse/storage/events.py | 82 ------------------------------- synapse/storage/events_worker.py | 84 ++++++++++++++++++++++++++++++++ synapse/storage/room.py | 32 ++++++------ 3 files changed, 100 insertions(+), 98 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index e8e5a0fe443c..5374796fd730 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1430,88 +1430,6 @@ def _store_redaction(self, txn, event): (event.event_id, event.redacts) ) - @defer.inlineCallbacks - def have_events_in_timeline(self, event_ids): - """Given a list of event ids, check if we have already processed and - stored them as non outliers. - """ - rows = yield self._simple_select_many_batch( - table="events", - retcols=("event_id",), - column="event_id", - iterable=list(event_ids), - keyvalues={"outlier": False}, - desc="have_events_in_timeline", - ) - - defer.returnValue(set(r["event_id"] for r in rows)) - - @defer.inlineCallbacks - def have_seen_events(self, event_ids): - """Given a list of event ids, check if we have already processed them. - - Args: - event_ids (iterable[str]): - - Returns: - Deferred[set[str]]: The events we have already seen. - """ - results = set() - - def have_seen_events_txn(txn, chunk): - sql = ( - "SELECT event_id FROM events as e WHERE e.event_id IN (%s)" - % (",".join("?" 
* len(chunk)), ) - ) - txn.execute(sql, chunk) - for (event_id, ) in txn: - results.add(event_id) - - # break the input up into chunks of 100 - input_iterator = iter(event_ids) - for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)), - []): - yield self.runInteraction( - "have_seen_events", - have_seen_events_txn, - chunk, - ) - defer.returnValue(results) - - def get_seen_events_with_rejections(self, event_ids): - """Given a list of event ids, check if we rejected them. - - Args: - event_ids (list[str]) - - Returns: - Deferred[dict[str, str|None): - Has an entry for each event id we already have seen. Maps to - the rejected reason string if we rejected the event, else maps - to None. - """ - if not event_ids: - return defer.succeed({}) - - def f(txn): - sql = ( - "SELECT e.event_id, reason FROM events as e " - "LEFT JOIN rejections as r ON e.event_id = r.event_id " - "WHERE e.event_id = ?" - ) - - res = {} - for event_id in event_ids: - txn.execute(sql, (event_id,)) - row = txn.fetchone() - if row: - _, rejected = row - res[event_id] = rejected - - return res - - return self.runInteraction("get_rejection_reasons", f) - @defer.inlineCallbacks def count_daily_messages(self): """ diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index 9b4cfeb89922..e1dc2c62fd6f 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -12,7 +12,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import itertools import logging + from collections import namedtuple from canonicaljson import json @@ -442,3 +444,85 @@ def _get_event_from_row(self, internal_metadata, js, redacted, self._get_event_cache.prefill((original_ev.event_id,), cache_entry) defer.returnValue(cache_entry) + + @defer.inlineCallbacks + def have_events_in_timeline(self, event_ids): + """Given a list of event ids, check if we have already processed and + stored them as non outliers. + """ + rows = yield self._simple_select_many_batch( + table="events", + retcols=("event_id",), + column="event_id", + iterable=list(event_ids), + keyvalues={"outlier": False}, + desc="have_events_in_timeline", + ) + + defer.returnValue(set(r["event_id"] for r in rows)) + + @defer.inlineCallbacks + def have_seen_events(self, event_ids): + """Given a list of event ids, check if we have already processed them. + + Args: + event_ids (iterable[str]): + + Returns: + Deferred[set[str]]: The events we have already seen. + """ + results = set() + + def have_seen_events_txn(txn, chunk): + sql = ( + "SELECT event_id FROM events as e WHERE e.event_id IN (%s)" + % (",".join("?" * len(chunk)), ) + ) + txn.execute(sql, chunk) + for (event_id, ) in txn: + results.add(event_id) + + # break the input up into chunks of 100 + input_iterator = iter(event_ids) + for chunk in iter(lambda: list(itertools.islice(input_iterator, 100)), + []): + yield self.runInteraction( + "have_seen_events", + have_seen_events_txn, + chunk, + ) + defer.returnValue(results) + + def get_seen_events_with_rejections(self, event_ids): + """Given a list of event ids, check if we rejected them. + + Args: + event_ids (list[str]) + + Returns: + Deferred[dict[str, str|None): + Has an entry for each event id we already have seen. Maps to + the rejected reason string if we rejected the event, else maps + to None. 
+ """ + if not event_ids: + return defer.succeed({}) + + def f(txn): + sql = ( + "SELECT e.event_id, reason FROM events as e " + "LEFT JOIN rejections as r ON e.event_id = r.event_id " + "WHERE e.event_id = ?" + ) + + res = {} + for event_id in event_ids: + txn.execute(sql, (event_id,)) + row = txn.fetchone() + if row: + _, rejected = row + res[event_id] = rejected + + return res + + return self.runInteraction("get_rejection_reasons", f) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 3147fb682775..3378fc77d1c4 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -41,6 +41,22 @@ class RoomWorkerStore(SQLBaseStore): + def get_room(self, room_id): + """Retrieve a room. + + Args: + room_id (str): The ID of the room to retrieve. + Returns: + A namedtuple containing the room information, or an empty list. + """ + return self._simple_select_one( + table="rooms", + keyvalues={"room_id": room_id}, + retcols=("room_id", "is_public", "creator"), + desc="get_room", + allow_none=True, + ) + def get_public_room_ids(self): return self._simple_select_onecol( table="rooms", @@ -215,22 +231,6 @@ def store_room_txn(txn, next_id): logger.error("store_room with room_id=%s failed: %s", room_id, e) raise StoreError(500, "Problem creating room.") - def get_room(self, room_id): - """Retrieve a room. - - Args: - room_id (str): The ID of the room to retrieve. - Returns: - A namedtuple containing the room information, or an empty list. - """ - return self._simple_select_one( - table="rooms", - keyvalues={"room_id": room_id}, - retcols=("room_id", "is_public", "creator"), - desc="get_room", - allow_none=True, - ) - @defer.inlineCallbacks def set_room_is_public(self, room_id, is_public): def set_room_is_public_txn(txn, next_id): From 96a9a296455e2c08be4e0375cf784e4113fa51e1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 26 Jul 2018 13:35:36 +0100 Subject: [PATCH 045/173] Pull in necessary stores in federation_reader --- synapse/app/federation_reader.py | 2 ++ synapse/storage/events_worker.py | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index c512b4be877e..0c557a824227 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -36,6 +36,7 @@ from synapse.replication.slave.storage.directory import DirectoryStore from synapse.replication.slave.storage.events import SlavedEventStore from synapse.replication.slave.storage.keys import SlavedKeyStore +from synapse.replication.slave.storage.profile import SlavedProfileStore from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore @@ -53,6 +54,7 @@ class FederationReaderSlavedStore( + SlavedProfileStore, SlavedApplicationServiceStore, SlavedPusherStore, SlavedPushRuleStore, diff --git a/synapse/storage/events_worker.py b/synapse/storage/events_worker.py index e1dc2c62fd6f..59822178ff02 100644 --- a/synapse/storage/events_worker.py +++ b/synapse/storage/events_worker.py @@ -14,7 +14,6 @@ # limitations under the License. 
import itertools import logging - from collections import namedtuple from canonicaljson import json From 19a17068f1bc98a1556ff618b544b5fbf57eeba0 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 Aug 2018 15:15:19 +0100 Subject: [PATCH 046/173] Check m.room.create for sane room_versions --- synapse/event_auth.py | 10 +++++++++- synapse/federation/federation_client.py | 26 ++++++++++++++++++++++--- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index b32f64e72976..6baeccca38ae 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -20,7 +20,7 @@ from signedjson.sign import SignatureVerifyException, verify_signed_json from unpaddedbase64 import decode_base64 -from synapse.api.constants import EventTypes, JoinRules, Membership +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, EventSizeError, SynapseError from synapse.types import UserID, get_domain_from_id @@ -83,6 +83,14 @@ def check(event, auth_events, do_sig_check=True, do_size_check=True): 403, "Creation event's room_id domain does not match sender's" ) + + room_version = event.content.get("room_version", "1") + if room_version not in KNOWN_ROOM_VERSIONS: + raise AuthError( + 403, + "room appears to have unsupported version %s" % ( + room_version, + )) # FIXME logger.debug("Allowing! %s", event) return diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 7ec1d7a88984..c9f3c2d35262 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -25,7 +25,7 @@ from twisted.internet import defer -from synapse.api.constants import Membership +from synapse.api.constants import KNOWN_ROOM_VERSIONS, EventTypes, Membership from synapse.api.errors import ( CodeMessageException, FederationDeniedError, @@ -518,7 +518,7 @@ def _try_destination_list(self, description, destinations, callback): description, destination, exc_info=1, ) - raise RuntimeError("Failed to %s via any server", description) + raise RuntimeError("Failed to %s via any server" % (description, )) def make_membership_event(self, destinations, room_id, user_id, membership, content, params): @@ -609,6 +609,26 @@ def send_join(self, destinations, pdu): Fails with a ``RuntimeError`` if no servers were reachable. """ + def check_authchain_validity(signed_auth_chain): + for e in signed_auth_chain: + if e.type == EventTypes.Create: + create_event = e + break + else: + raise InvalidResponseError( + "no %s in auth chain" % (EventTypes.Create,), + ) + + # the room version should be sane. + room_version = create_event.content.get("room_version", "1") + if room_version not in KNOWN_ROOM_VERSIONS: + # This shouldn't be possible, because the remote server should have + # rejected the join attempt during make_join. 
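+                # (Editor's note) If it does happen anyway, failing the join
+                # here is safer than proceeding with a room whose events we
+                # may not be able to interpret.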
+ raise InvalidResponseError( + "room appears to have unsupported version %s" % ( + room_version, + )) + @defer.inlineCallbacks def send_request(destination): time_now = self._clock.time_msec() @@ -665,7 +685,7 @@ def send_request(destination): for s in signed_state: s.internal_metadata = copy.deepcopy(s.internal_metadata) - auth_chain.sort(key=lambda e: e.depth) + check_authchain_validity(signed_auth) defer.returnValue({ "state": signed_state, From 7f3f1085612a3c46a5e3c2a6a376b42024f6b6cb Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 6 Aug 2018 16:30:52 +0100 Subject: [PATCH 047/173] changelog --- changelog.d/3654.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3654.feature diff --git a/changelog.d/3654.feature b/changelog.d/3654.feature new file mode 100644 index 000000000000..35c95580bc45 --- /dev/null +++ b/changelog.d/3654.feature @@ -0,0 +1 @@ +Basic support for room versioning From 16d78be315fd71d8bdb39af53317b3f7dd1dbb32 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 17:51:15 +0100 Subject: [PATCH 048/173] make use of _simple_select_one_onecol, improved comments --- synapse/storage/monthly_active_users.py | 19 +++++++++++-------- .../schema/delta/51/monthly_active_users.sql | 4 ++++ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 135837507a14..c28c698c6c19 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -15,7 +15,7 @@ from twisted.internet import defer -from synapse.util.caches.descriptors import cached, cachedInlineCallbacks +from synapse.util.caches.descriptors import cached from ._base import SQLBaseStore @@ -104,7 +104,7 @@ def upsert_monthly_active_user(self, user_id): self._user_last_seen_monthly_active.invalidate((user_id,)) self.get_monthly_active_count.invalidate(()) - @cachedInlineCallbacks(num_args=1) + @cached(num_args=1) def _user_last_seen_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group @@ -114,18 +114,16 @@ def _user_last_seen_monthly_active(self, user_id): int : timestamp since last seen, None if never seen """ - result = yield self._simple_select_onecol( + + return(self._simple_select_one_onecol( table="monthly_active_users", keyvalues={ "user_id": user_id, }, retcol="timestamp", + allow_none=True, desc="_user_last_seen_monthly_active", - ) - timestamp = None - if len(result) > 0: - timestamp = result[0] - defer.returnValue(timestamp) + )) @defer.inlineCallbacks def populate_monthly_active_users(self, user_id): @@ -140,6 +138,11 @@ def populate_monthly_active_users(self, user_id): last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) now = self.hs.get_clock().time_msec() + # We want to reduce to the total number of db writes, and are happy + # to trade accuracy of timestamp in order to lighten load. 
This means + # We always insert new users (where MAU threshold has not been reached), + # but only update if we have not previously seen the user for + # LAST_SEEN_GRANULARITY ms if last_seen_timestamp is None: count = yield self.get_monthly_active_count() if count < self.hs.config.max_mau_value: diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index f2b6d3e31e4e..10aac90ce137 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -16,6 +16,10 @@ -- a table of monthly active users, for use where blocking based on mau limits CREATE TABLE monthly_active_users ( user_id TEXT NOT NULL, + -- Last time we saw the user. Not guaranteed to be accurate due to rate limiting + -- on updates, Granularity of updates governed by + -- syanpse.storage.monthly_active_users.LAST_SEEN_GRANULARITY + -- Measured in ms since epoch. timestamp BIGINT NOT NULL ); From 1911c037cbace498c0827eb5abe6ed3f64acb671 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 18:01:46 +0100 Subject: [PATCH 049/173] update comments to reflect new sig --- synapse/api/auth.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index d8ebbbc6e8de..91b23ff1d720 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -778,10 +778,7 @@ def check_in_room_or_world_readable(self, room_id, user_id): def check_auth_blocking(self): """Checks if the user should be rejected for some external reason, such as monthly active user limiting or global disable flag - Args: - error (Error): The error that should be raised if user is to be - blocked - """ + """ if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: From e54794f5b658992627eab5400b13199128eec0a1 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 21:55:54 +0100 Subject: [PATCH 050/173] Fix postgres compatibility bug --- synapse/storage/monthly_active_users.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index c28c698c6c19..abe1e6bb99ad 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -43,14 +43,25 @@ def _reap_users(txn): thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - + # Purge stale users sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + + # If MAU user count still exceeds the MAU threshold, then delete on + # a least recently active basis. + # Note it is not possible to write this query using OFFSET due to + # incompatibilities in how sqlite an postgres support the feature. + # sqlite requires 'LIMIT -1 OFFSET ?', the LIMIT must be present + # While Postgres does not require 'LIMIT', but also does not support + # negative LIMIT values. So there is no way to write it that both can + # support sql = """ DELETE FROM monthly_active_users - ORDER BY timestamp desc - LIMIT -1 OFFSET ? + WHERE user_id NOT IN ( + SELECT user_id FROM monthly_active_users + ORDER BY timestamp DESC + LIMIT ? 
+ ) """ txn.execute(sql, (self.hs.config.max_mau_value,)) From 7daa8a78c5183a46ae462c0718939ca52fa0130f Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 22:55:05 +0100 Subject: [PATCH 051/173] load mau limit threepids --- synapse/config/server.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/config/server.py b/synapse/config/server.py index 8fd23197590c..cacac253ab0f 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -74,6 +74,9 @@ def read_config(self, config): self.max_mau_value = config.get( "max_mau_value", 0, ) + self.mau_limits_reserved_threepids = config.get( + "mau_limit_reserved_threepid", [] + ) # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None From fbe255f9a4c46fe82a394b9efab6ad8811ae71e0 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 23:24:54 +0100 Subject: [PATCH 052/173] add default mau_limits_reserved_threepids --- tests/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/utils.py b/tests/utils.py index ec40428e7438..baeed5d60047 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] config.limit_usage_by_mau = False + config.mau_limits_reserved_threepids = [] # disable user directory updates, because they get done in the # background, which upsets the test runner. From a74b25faaaf47b4696cb30207ef2eae6a3a5d8c7 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 6 Aug 2018 23:25:25 +0100 Subject: [PATCH 053/173] WIP building out mau reserved users --- synapse/storage/monthly_active_users.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index abe1e6bb99ad..6a37d6fc22af 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -19,16 +19,30 @@ from ._base import SQLBaseStore + # Number of msec of granularity to store the monthly_active_user timestamp # This means it is not necessary to update the table on every request LAST_SEEN_GRANULARITY = 60 * 60 * 1000 class MonthlyActiveUsersStore(SQLBaseStore): + @defer.inlineCallbacks def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() self.hs = hs + threepids = self.hs.config.mau_limits_reserved_threepids + self.reserved_user_ids = set() + for tp in threepids: + user_id = yield hs.get_datastore().get_user_id_by_threepid( + tp["medium"], tp["address"] + ) + if user_id: + self.reserved_user_ids.add(user_id) + else: + logger.warning( + "mau limit reserved threepid %s not found in db" % tp + ) def reap_monthly_active_users(self): """ @@ -78,7 +92,7 @@ def _reap_users(txn): @cached(num_args=0) def get_monthly_active_count(self): - """Generates current count of monthly active users.abs + """Generates current count of monthly active users Returns: Defered[int]: Number of current monthly active users From ca9bc1f4feaaabce48595ee2e387529f5bd365e6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 10:00:03 +0100 Subject: [PATCH 054/173] Fix occasional glitches in the synapse_event_persisted_position metric Every so often this metric glitched to a negative number. I'm assuming it was due to backfilled events. 
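[Editor's note: a minimal sketch of the guard this patch adds, restating the
diff below in isolation. It assumes, as the commit message above does, that
backfilled events carry negative stream orderings, so taking the position
from a backfilled chunk would drag the gauge below zero:

    def update_persisted_position(gauge, chunk, backfilled):
        # chunk is a list of (event, context) pairs, newest last.
        if backfilled:
            # Backfilled events have negative stream orderings; setting the
            # gauge from them is what made it glitch to a negative number.
            return
        gauge.set(chunk[-1][0].internal_metadata.stream_ordering)
]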
--- synapse/storage/events.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index e8e5a0fe443c..ce32e8fefd23 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -485,9 +485,14 @@ def _persist_events(self, events_and_contexts, backfilled=False, new_forward_extremeties=new_forward_extremeties, ) persist_event_counter.inc(len(chunk)) - synapse.metrics.event_persisted_position.set( - chunk[-1][0].internal_metadata.stream_ordering, - ) + + if not backfilled: + # backfilled events have negative stream orderings, so we don't + # want to set the event_persisted_position to that. + synapse.metrics.event_persisted_position.set( + chunk[-1][0].internal_metadata.stream_ordering, + ) + for event, context in chunk: if context.app_service: origin_type = "local" From 865d07cd381214c38628cc4ad4200a5cdd45f5d3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 10:02:01 +0100 Subject: [PATCH 055/173] changelog --- changelog.d/3658.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3658.bugfix diff --git a/changelog.d/3658.bugfix b/changelog.d/3658.bugfix new file mode 100644 index 000000000000..556011a15060 --- /dev/null +++ b/changelog.d/3658.bugfix @@ -0,0 +1 @@ +Fix occasional glitches in the synapse_event_persisted_position metric From 9b92720d88f27c2961d4c0e078cfb2205f248dec Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 00:41:23 +0100 Subject: [PATCH 056/173] fix event lag graph --- contrib/grafana/synapse.json | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/contrib/grafana/synapse.json b/contrib/grafana/synapse.json index 94a1de58f4e3..c58612594ab8 100644 --- a/contrib/grafana/synapse.json +++ b/contrib/grafana/synapse.json @@ -54,7 +54,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1533026624326, + "iteration": 1533598785368, "links": [ { "asDropdown": true, @@ -4629,7 +4629,7 @@ "h": 9, "w": 12, "x": 0, - "y": 11 + "y": 29 }, "id": 67, "legend": { @@ -4655,11 +4655,11 @@ "steppedLine": false, "targets": [ { - "expr": " synapse_event_persisted_position{instance=\"$instance\"} - ignoring(index, job, name) group_right(instance) synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", + "expr": " synapse_event_persisted_position{instance=\"$instance\",job=\"synapse\"} - ignoring(index, job, name) group_right() synapse_event_processing_positions{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}", "format": "time_series", "interval": "", "intervalFactor": 1, - "legendFormat": "{{job}}-{{index}}", + "legendFormat": "{{job}}-{{index}} ", "refId": "A" } ], @@ -4697,7 +4697,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -4710,7 +4714,7 @@ "h": 9, "w": 12, "x": 12, - "y": 11 + "y": 29 }, "id": 71, "legend": { @@ -4778,7 +4782,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } } ], "title": "Event processing loop positions", @@ -4957,5 +4965,5 @@ "timezone": "", "title": "Synapse", "uid": "000000012", - "version": 125 + "version": 127 } \ No newline at end of file From 3523f5432a79659ac365dc2ff1212aa1a3707d8e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 12:51:57 +0100 Subject: [PATCH 057/173] Don't expose default_room_version as config opt --- synapse/api/constants.py | 3 +++ 
synapse/config/server.py | 14 -------------- synapse/handlers/room.py | 3 ++- 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index a27bf3b32d92..b0da506f6d64 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -97,6 +97,9 @@ class ThirdPartyEntityKind(object): LOCATION = "location" +# the version we will give rooms which are created on this server +DEFAULT_ROOM_VERSION = "1" + # vdh-test-version is a placeholder to get room versioning support working and tested # until we have a working v2. KNOWN_ROOM_VERSIONS = {"1", "vdh-test-version"} diff --git a/synapse/config/server.py b/synapse/config/server.py index 68ef2789d3c9..6a471a0a5ec7 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -16,7 +16,6 @@ import logging -from synapse.api.constants import KNOWN_ROOM_VERSIONS from synapse.http.endpoint import parse_and_validate_server_name from ._base import Config, ConfigError @@ -76,16 +75,6 @@ def read_config(self, config): ) else: self.max_mau_value = 0 - - # the version of rooms created by default on this server - self.default_room_version = str(config.get( - "default_room_version", "1", - )) - if self.default_room_version not in KNOWN_ROOM_VERSIONS: - raise ConfigError("Unrecognised value '%s' for default_room_version" % ( - self.default_room_version, - )) - # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( @@ -260,9 +249,6 @@ def default_config(self, server_name, **kwargs): # (except those sent by local server admins). The default is False. # block_non_admin_invites: True - # The room_version of rooms which are created by default by this server. - # default_room_version: 1 - # Restrict federation to the following whitelist of domains. # N.B. 
we recommend also firewalling your federation listener to limit # inbound federation traffic as early as possible, rather than relying diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a526b684e9dd..6a17c42238ec 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -26,6 +26,7 @@ from twisted.internet import defer from synapse.api.constants import ( + DEFAULT_ROOM_VERSION, KNOWN_ROOM_VERSIONS, EventTypes, JoinRules, @@ -106,7 +107,7 @@ def create_room(self, requester, config, ratelimit=True, if ratelimit: yield self.ratelimit(requester) - room_version = config.get("room_version", self.hs.config.default_room_version) + room_version = config.get("room_version", DEFAULT_ROOM_VERSION) if not isinstance(room_version, string_types): raise SynapseError( 400, From e8eba2b4e3a99d35f08c96205ebb18211adddcb9 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 7 Aug 2018 17:49:43 +0100 Subject: [PATCH 058/173] implement reserved users for mau limits --- synapse/app/homeserver.py | 6 +++ synapse/config/server.py | 2 +- synapse/storage/monthly_active_users.py | 45 +++++++++++++---- tests/storage/test_monthly_active_users.py | 59 +++++++++++++++++++++- 4 files changed, 99 insertions(+), 13 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 3a67db8b30db..a4a65e728660 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -518,6 +518,8 @@ def generate_user_daily_visit_stats(): # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000) + + # monthly active user limiting functionality clock.looping_call( hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) @@ -530,9 +532,13 @@ def generate_monthly_active_users(): current_mau_gauge.set(float(count)) max_mau_value_gauge.set(float(hs.config.max_mau_value)) + hs.get_datastore().initialise_reserved_users( + hs.config.mau_limits_reserved_threepids + ) generate_monthly_active_users() if hs.config.limit_usage_by_mau: clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) + # End of monthly active user settings if hs.config.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") diff --git a/synapse/config/server.py b/synapse/config/server.py index cacac253ab0f..114d7a981595 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -75,7 +75,7 @@ def read_config(self, config): "max_mau_value", 0, ) self.mau_limits_reserved_threepids = config.get( - "mau_limit_reserved_threepid", [] + "mau_limit_reserved_threepids", [] ) # FIXME: federation_domain_whitelist needs sytests diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 6a37d6fc22af..168f564ed5d1 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import logging from twisted.internet import defer @@ -19,6 +20,7 @@ from ._base import SQLBaseStore +logger = logging.getLogger(__name__) # Number of msec of granularity to store the monthly_active_user timestamp # This means it is not necessary to update the table on every request @@ -26,24 +28,31 @@ class MonthlyActiveUsersStore(SQLBaseStore): - @defer.inlineCallbacks def __init__(self, dbconn, hs): super(MonthlyActiveUsersStore, self).__init__(None, hs) self._clock = hs.get_clock() self.hs = hs - threepids = self.hs.config.mau_limits_reserved_threepids - self.reserved_user_ids = set() + self.reserved_users = () + + @defer.inlineCallbacks + def initialise_reserved_users(self, threepids): + # TODO Why can't I do this in init? + store = self.hs.get_datastore() + reserved_user_list = [] for tp in threepids: - user_id = yield hs.get_datastore().get_user_id_by_threepid( + user_id = yield store.get_user_id_by_threepid( tp["medium"], tp["address"] ) if user_id: - self.reserved_user_ids.add(user_id) + self.upsert_monthly_active_user(user_id) + reserved_user_list.append(user_id) else: logger.warning( "mau limit reserved threepid %s not found in db" % tp ) + self.reserved_users = tuple(reserved_user_list) + @defer.inlineCallbacks def reap_monthly_active_users(self): """ Cleans out monthly active user table to ensure that no stale @@ -58,8 +67,20 @@ def _reap_users(txn): int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) # Purge stale users - sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" - txn.execute(sql, (thirty_days_ago,)) + + # questionmarks is a hack to overcome sqlite not supporting + # tuples in 'WHERE IN %s' + questionmarks = '?' * len(self.reserved_users) + query_args = [thirty_days_ago] + query_args.extend(self.reserved_users) + + sql = """ + DELETE FROM monthly_active_users + WHERE timestamp < ? + AND user_id NOT IN ({}) + """.format(','.join(questionmarks)) + + txn.execute(sql, query_args) # If MAU user count still exceeds the MAU threshold, then delete on # a least recently active basis. @@ -69,6 +90,8 @@ def _reap_users(txn): # While Postgres does not require 'LIMIT', but also does not support # negative LIMIT values. So there is no way to write it that both can # support + query_args = [self.hs.config.max_mau_value] + query_args.extend(self.reserved_users) sql = """ DELETE FROM monthly_active_users WHERE user_id NOT IN ( @@ -76,8 +99,9 @@ def _reap_users(txn): ORDER BY timestamp DESC LIMIT ? 
) - """ - txn.execute(sql, (self.hs.config.max_mau_value,)) + AND user_id NOT IN ({}) + """.format(','.join(questionmarks)) + txn.execute(sql, query_args) yield self.runInteraction("reap_monthly_active_users", _reap_users) # It seems poor to invalidate the whole cache, Postgres supports @@ -136,7 +160,7 @@ def _user_last_seen_monthly_active(self, user_id): Arguments: user_id (str): user to add/update Return: - int : timestamp since last seen, None if never seen + Deferred[int] : timestamp since last seen, None if never seen """ @@ -158,7 +182,6 @@ def populate_monthly_active_users(self, user_id): Args: user_id(str): the user_id to query """ - if self.hs.config.limit_usage_by_mau: last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) now = self.hs.get_clock().time_msec() diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 0bfd24a7fb3d..cbd480cd4215 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -19,6 +19,8 @@ import tests.utils from tests.utils import setup_test_homeserver +FORTY_DAYS = 40 * 24 * 60 * 60 + class MonthlyActiveUsersTestCase(tests.unittest.TestCase): def __init__(self, *args, **kwargs): @@ -29,6 +31,56 @@ def setUp(self): self.hs = yield setup_test_homeserver() self.store = self.hs.get_datastore() + @defer.inlineCallbacks + def test_initialise_reserved_users(self): + + user1 = "@user1:server" + user1_email = "user1@matrix.org" + user2 = "@user2:server" + user2_email = "user2@matrix.org" + threepids = [ + {'medium': 'email', 'address': user1_email}, + {'medium': 'email', 'address': user2_email} + ] + user_num = len(threepids) + + yield self.store.register( + user_id=user1, + token="123", + password_hash=None) + + yield self.store.register( + user_id=user2, + token="456", + password_hash=None) + + now = int(self.hs.get_clock().time_msec()) + yield self.store.user_add_threepid(user1, "email", user1_email, now, now) + yield self.store.user_add_threepid(user2, "email", user2_email, now, now) + yield self.store.initialise_reserved_users(threepids) + + active_count = yield self.store.get_monthly_active_count() + + # Test total counts + self.assertEquals(active_count, user_num) + + # Test user is marked as active + + timestamp = yield self.store._user_last_seen_monthly_active(user1) + self.assertTrue(timestamp) + timestamp = yield self.store._user_last_seen_monthly_active(user2) + self.assertTrue(timestamp) + + # Test that users are never removed from the db. 
+ self.hs.config.max_mau_value = 0 + + self.hs.get_clock().advance_time(FORTY_DAYS) + + yield self.store.reap_monthly_active_users() + + active_count = yield self.store.get_monthly_active_count() + self.assertEquals(active_count, user_num) + @defer.inlineCallbacks def test_can_insert_and_count_mau(self): count = yield self.store.get_monthly_active_count() @@ -63,4 +115,9 @@ def test_reap_monthly_active_users(self): self.assertTrue(count, initial_users) yield self.store.reap_monthly_active_users() count = yield self.store.get_monthly_active_count() - self.assertTrue(count, initial_users - self.hs.config.max_mau_value) + self.assertEquals(count, initial_users - self.hs.config.max_mau_value) + + self.hs.get_clock().advance_time(FORTY_DAYS) + yield self.store.reap_monthly_active_users() + count = yield self.store.get_monthly_active_count() + self.assertEquals(count, 0) From ef3589063adddd1eee89f136e6a54250cce414a1 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 7 Aug 2018 17:59:27 +0100 Subject: [PATCH 059/173] prevent total number of reserved users being too large --- synapse/storage/monthly_active_users.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 168f564ed5d1..54de5a8686fa 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -39,7 +39,9 @@ def initialise_reserved_users(self, threepids): # TODO Why can't I do this in init? store = self.hs.get_datastore() reserved_user_list = [] - for tp in threepids: + + # Do not add more reserved users than the total allowable number + for tp in threepids[:self.hs.config.max_mau_value]: user_id = yield store.get_user_id_by_threepid( tp["medium"], tp["address"] ) From 53bca4690b5c94fb1506dbc628ce2ef0f770745f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 19:09:48 +0100 Subject: [PATCH 060/173] more metrics for the federation and appservice senders --- synapse/federation/transaction_queue.py | 10 +++++++++- synapse/handlers/appservice.py | 10 ++++++++++ synapse/metrics/__init__.py | 13 +++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 78f9d40a3ad1..f603c8a368aa 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -26,6 +26,8 @@ from synapse.handlers.presence import format_user_presence_state, get_interested_remotes from synapse.metrics import ( LaterGauge, + event_processing_loop_counter, + event_processing_loop_room_count, events_processed_counter, sent_edus_counter, sent_transactions_counter, @@ -253,7 +255,13 @@ def handle_room_events(events): synapse.metrics.event_processing_last_ts.labels( "federation_sender").set(ts) - events_processed_counter.inc(len(events)) + events_processed_counter.inc(len(events)) + + event_processing_loop_room_count.labels( + "federation_sender" + ).inc(len(events_by_room)) + + event_processing_loop_counter.labels("federation_sender").inc() synapse.metrics.event_processing_positions.labels( "federation_sender").set(next_token) diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index ee41aed69e46..f0f89af7dcda 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -23,6 +23,10 @@ import synapse from synapse.api.constants import EventTypes +from synapse.metrics import ( + event_processing_loop_counter, + 
event_processing_loop_room_count, +) from synapse.metrics.background_process_metrics import run_as_background_process from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.metrics import Measure @@ -136,6 +140,12 @@ def handle_room_events(events): events_processed_counter.inc(len(events)) + event_processing_loop_room_count.labels( + "appservice_sender" + ).inc(len(events_by_room)) + + event_processing_loop_counter.labels("appservice_sender").inc() + synapse.metrics.event_processing_lag.labels( "appservice_sender").set(now - ts) synapse.metrics.event_processing_last_ts.labels( diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index a9158fc0669a..b7cb3a730a77 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -174,6 +174,19 @@ def collect(self): events_processed_counter = Counter("synapse_federation_client_events_processed", "") +event_processing_loop_counter = Counter( + "synapse_event_processing_loop", + "Event processing loop iterations", + ["name"], +) + +event_processing_loop_room_count = Counter( + "synapse_event_processing_loop_room_count", + "Rooms seen per event processing loop iteration", + ["name"], +) + + # Used to track where various components have processed in the event stream, # e.g. federation sending, appservice sending, etc. event_processing_positions = Gauge("synapse_event_processing_positions", "", ["name"]) From bab94da79cb71cc483184c2732707e40f7a708ce Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 22:11:45 +0100 Subject: [PATCH 061/173] fix metric name --- synapse/metrics/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index b7cb3a730a77..550f8443f78a 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -175,7 +175,7 @@ def collect(self): events_processed_counter = Counter("synapse_federation_client_events_processed", "") event_processing_loop_counter = Counter( - "synapse_event_processing_loop", + "synapse_event_processing_loop_count", "Event processing loop iterations", ["name"], ) From e6d73b858231567abfe47b05d3e04376c9016b1a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 7 Aug 2018 22:14:35 +0100 Subject: [PATCH 062/173] changelog --- changelog.d/3664.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3664.feature diff --git a/changelog.d/3664.feature b/changelog.d/3664.feature new file mode 100644 index 000000000000..184dde99393d --- /dev/null +++ b/changelog.d/3664.feature @@ -0,0 +1 @@ +Add some metrics for the appservice and federation event sending loops From 501141763245647f41636621867ad1bacc47e6b5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Aug 2018 10:29:58 +0100 Subject: [PATCH 063/173] Fixup logging and docstrings --- synapse/replication/http/_base.py | 6 +++-- synapse/replication/http/membership.py | 36 ++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 4de3825fdab5..619ddab54081 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -151,7 +151,7 @@ def send_request(**kwargs): if e.code != 504 or not cls.RETRY_ON_TIMEOUT: raise - logger.warn("send_federation_events_to_master request timed out") + logger.warn("%s request timed out", cls.NAME) # If we timed out we probably don't need to worry about backing # off too much, but 
let's just wait a little anyway.
@@ -190,7 +190,9 @@ def register(self, http_server):
         http_server.register_paths(method, [pattern], handler)
 
     def _cached_handler(self, request, txn_id, **kwargs):
-        """Wraps `_handle_request` the responses should be cached.
+        """Called on new incoming requests when caching is enabled. Checks
+        if there is a cached response for the request and returns that,
+        otherwise calls `_handle_request` and caches its response.
         """
         # We just use the txn_id here, but we probably also want to use the
         # other PATH_ARGS as well.
diff --git a/synapse/replication/http/membership.py b/synapse/replication/http/membership.py
index 8ad83e8421c6..e58bebf12ad9 100644
--- a/synapse/replication/http/membership.py
+++ b/synapse/replication/http/membership.py
@@ -27,6 +27,16 @@
 
 class ReplicationRemoteJoinRestServlet(ReplicationEndpoint):
     """Does a remote join for the given user to the given room
+
+    Request format:
+
+        POST /_synapse/replication/remote_join/:room_id/:user_id
+
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+            "content": { ... }
+        }
     """
 
     NAME = "remote_join"
@@ -85,6 +95,15 @@ def _handle_request(self, request, room_id, user_id):
 
 class ReplicationRemoteRejectInviteRestServlet(ReplicationEndpoint):
     """Rejects the invite for the user and room.
+
+    Request format:
+
+        POST /_synapse/replication/remote_reject_invite/:room_id/:user_id
+
+        {
+            "requester": ...,
+            "remote_room_hosts": [...],
+        }
     """
 
     NAME = "remote_reject_invite"
@@ -153,6 +172,17 @@ def _handle_request(self, request, room_id, user_id):
 
 class ReplicationRegister3PIDGuestRestServlet(ReplicationEndpoint):
     """Gets/creates a guest account for given 3PID.
+
+    Request format:
+
+        POST /_synapse/replication/get_or_register_3pid_guest/
+
+        {
+            "requester": ...,
+            "medium": ...,
+            "address": ...,
+            "inviter_user_id": ...
+        }
     """
 
     NAME = "get_or_register_3pid_guest"
@@ -206,6 +236,12 @@ def _handle_request(self, request):
 
 class ReplicationUserJoinedLeftRoomRestServlet(ReplicationEndpoint):
     """Notifies that a user has joined or left the room
+
+    Request format:
+
+        POST /_synapse/replication/membership_change/:room_id/:user_id/:change
+
+        {}
     """
 
     NAME = "membership_change"

From bebe325e6cdd83f7bacd8ad71f6cdd23273c88db Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 8 Aug 2018 10:35:47 +0100
Subject: [PATCH 064/173] Rename POST param to METHOD

---
 synapse/replication/http/_base.py      | 34 +++++++++++++++++---------
 synapse/replication/http/send_event.py |  1 -
 2 files changed, 22 insertions(+), 13 deletions(-)

diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py
index 619ddab54081..53a0fd459a82 100644
--- a/synapse/replication/http/_base.py
+++ b/synapse/replication/http/_base.py
@@ -40,8 +40,8 @@ class ReplicationEndpoint(object):
 
         /_synapse/replication/send_event/:event_id/:txn_id
 
-    For POST requests the payload is serialized to json and sent as the body,
-    while for GET requests the payload is added as query parameters. See
+    For POST/PUT requests the payload is serialized to json and sent as the
+    body, while for GET requests the payload is added as query parameters. See
     `_serialize_payload` for details.
 
     Incoming requests are handled by overriding `_handle_request`. Servers
@@ -55,8 +55,9 @@ class ReplicationEndpoint(object):
         PATH_ARGS (tuple[str]): A list of parameters to be added to the path.
             Adding parameters to the path (rather than payload) can make it
            easier to follow along in the log files.
- POST (bool): True to use POST request with JSON body, or false to use - GET requests with query params. + METHOD (str): The method of the HTTP request, defaults to POST. Can be + one of POST, PUT or GET. If GET then the payload is sent as query + parameters rather than a JSON body. CACHE (bool): Whether server should cache the result of the request/ If true then transparently adds a txn_id to all requests, and `_handle_request` must return a Deferred. @@ -69,7 +70,7 @@ class ReplicationEndpoint(object): NAME = abc.abstractproperty() PATH_ARGS = abc.abstractproperty() - POST = True + METHOD = "POST" CACHE = True RETRY_ON_TIMEOUT = True @@ -80,6 +81,8 @@ def __init__(self, hs): timeout_ms=30 * 60 * 1000, ) + assert self.METHOD in ("PUT", "POST", "GET") + @abc.abstractmethod def _serialize_payload(**kwargs): """Static method that is called when creating a request. @@ -90,9 +93,9 @@ def _serialize_payload(**kwargs): argument list. Returns: - Deferred[dict]|dict: If POST request then dictionary must be JSON - serialisable, otherwise must be appropriate for adding as query - args. + Deferred[dict]|dict: If POST/PUT request then dictionary must be + JSON serialisable, otherwise must be appropriate for adding as + query args. """ return {} @@ -130,10 +133,18 @@ def send_request(**kwargs): txn_id = random_string(10) url_args.append(txn_id) - if cls.POST: + if cls.METHOD == "POST": request_func = client.post_json_get_json - else: + elif cls.METHOD == "PUT": + request_func = client.put_json + elif cls.METHOD == "GET": request_func = client.get_json + else: + # We have already asserted in the constructor that a + # compatible was picked, but lets be paranoid. + raise Exception( + "Unknown METHOD on %s replication endpoint" % (cls.NAME,) + ) uri = "http://%s:%s/_synapse/replication/%s/%s" % ( host, port, cls.NAME, "/".join(url_args) @@ -174,8 +185,7 @@ def register(self, http_server): url_args = list(self.PATH_ARGS) method = "GET" handler = self._handle_request - if self.POST: - method = "POST" + method = self.METHOD if self.CACHE: handler = self._cached_handler diff --git a/synapse/replication/http/send_event.py b/synapse/replication/http/send_event.py index 50810d94cb64..5b52c91650a8 100644 --- a/synapse/replication/http/send_event.py +++ b/synapse/replication/http/send_event.py @@ -47,7 +47,6 @@ class ReplicationSendEventRestServlet(ReplicationEndpoint): """ NAME = "send_event" PATH_ARGS = ("event_id",) - POST = True def __init__(self, hs): super(ReplicationSendEventRestServlet, self).__init__(hs) From 7f3d897e7af7c5227fc27355ead082055a8c0ecf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 11:46:23 +0100 Subject: [PATCH 065/173] mock config.max_mau_value --- tests/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/utils.py b/tests/utils.py index baeed5d60047..e7894819c025 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -74,6 +74,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None config.media_storage_providers = [] config.auto_join_rooms = [] config.limit_usage_by_mau = False + config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] # disable user directory updates, because they get done in the From 360ba89c50ea5cbf824e54f04d536b89b57f3304 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 8 Aug 2018 11:54:55 +0100 Subject: [PATCH 066/173] Don't fail requests to unbind 3pids for non supporting ID servers Older identity servers may not support the unbind 3pid request, so we shouldn't fail the requests if we 
received one of 400/404/501. The request still fails if we receive e.g. 500 responses, allowing clients to retry requests on transient identity server errors that otherwise do support the API. Fixes #3661 --- changelog.d/3661.bugfix | 1 + synapse/handlers/auth.py | 20 ++++++++++++++--- synapse/handlers/deactivate_account.py | 13 +++++++++-- synapse/handlers/identity.py | 30 +++++++++++++++++-------- synapse/rest/client/v1/admin.py | 11 +++++++-- synapse/rest/client/v2_alpha/account.py | 22 ++++++++++++++---- 6 files changed, 77 insertions(+), 20 deletions(-) create mode 100644 changelog.d/3661.bugfix diff --git a/changelog.d/3661.bugfix b/changelog.d/3661.bugfix new file mode 100644 index 000000000000..f2b4703d806b --- /dev/null +++ b/changelog.d/3661.bugfix @@ -0,0 +1 @@ +Fix bug on deleting 3pid when using identity servers that don't support unbind API diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 184eef09d03b..da17e73fdd05 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -828,12 +828,26 @@ def add_threepid(self, user_id, medium, address, validated_at): @defer.inlineCallbacks def delete_threepid(self, user_id, medium, address): + """Attempts to unbind the 3pid on the identity servers and deletes it + from the local database. + + Args: + user_id (str) + medium (str) + address (str) + + Returns: + Deferred[bool]: Returns True if successfully unbound the 3pid on + the identity server, False if identity server doesn't support the + unbind API. + """ + # 'Canonicalise' email addresses as per above if medium == 'email': address = address.lower() identity_handler = self.hs.get_handlers().identity_handler - yield identity_handler.unbind_threepid( + result = yield identity_handler.try_unbind_threepid( user_id, { 'medium': medium, @@ -841,10 +855,10 @@ def delete_threepid(self, user_id, medium, address): }, ) - ret = yield self.store.user_delete_threepid( + yield self.store.user_delete_threepid( user_id, medium, address, ) - defer.returnValue(ret) + defer.returnValue(result) def _save_session(self, session): # TODO: Persistent storage diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index b3c5a9ee642c..b078df4a762e 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -51,7 +51,8 @@ def deactivate_account(self, user_id, erase_data): erase_data (bool): whether to GDPR-erase the user's data Returns: - Deferred + Deferred[bool]: True if identity server supports removing + threepids, otherwise False. """ # FIXME: Theoretically there is a race here wherein user resets # password using threepid. @@ -60,16 +61,22 @@ def deactivate_account(self, user_id, erase_data): # leave the user still active so they can try again. # Ideally we would prevent password resets and then do this in the # background thread. + + # This will be set to false if the identity server doesn't support + # unbinding + identity_server_supports_unbinding = True + threepids = yield self.store.user_get_threepids(user_id) for threepid in threepids: try: - yield self._identity_handler.unbind_threepid( + result = yield self._identity_handler.try_unbind_threepid( user_id, { 'medium': threepid['medium'], 'address': threepid['address'], }, ) + identity_server_supports_unbinding &= result except Exception: # Do we want this to be a fatal error or should we carry on? 
logger.exception("Failed to remove threepid from ID server") @@ -103,6 +110,8 @@ def deactivate_account(self, user_id, erase_data): # parts users from rooms (if it isn't already running) self._start_user_parting() + defer.returnValue(identity_server_supports_unbinding) + def _start_user_parting(self): """ Start the process that goes through the table of users diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 1d36d967c3b5..a0f5fecc9602 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -137,15 +137,19 @@ def bind_threepid(self, creds, mxid): defer.returnValue(data) @defer.inlineCallbacks - def unbind_threepid(self, mxid, threepid): - """ - Removes a binding from an identity server + def try_unbind_threepid(self, mxid, threepid): + """Removes a binding from an identity server + Args: mxid (str): Matrix user ID of binding to be removed threepid (dict): Dict with medium & address of binding to be removed + Raises: + SynapseError: If we failed to contact the identity server + Returns: - Deferred[bool]: True on success, otherwise False + Deferred[bool]: True on success, otherwise False if the identity + server doesn't support unbinding """ logger.debug("unbinding threepid %r from %s", threepid, mxid) if not self.trusted_id_servers: @@ -175,11 +179,19 @@ def unbind_threepid(self, mxid, threepid): content=content, destination_is=id_server, ) - yield self.http_client.post_json_get_json( - url, - content, - headers, - ) + try: + yield self.http_client.post_json_get_json( + url, + content, + headers, + ) + except HttpResponseException as e: + if e.code in (400, 404, 501,): + # The remote server probably doesn't support unbinding (yet) + defer.returnValue(False) + else: + raise SynapseError(502, "Failed to contact identity server") + defer.returnValue(True) @defer.inlineCallbacks diff --git a/synapse/rest/client/v1/admin.py b/synapse/rest/client/v1/admin.py index 80d625eecce6..ad536ab570f5 100644 --- a/synapse/rest/client/v1/admin.py +++ b/synapse/rest/client/v1/admin.py @@ -391,10 +391,17 @@ def on_POST(self, request, target_user_id): if not is_admin: raise AuthError(403, "You are not a server admin") - yield self._deactivate_account_handler.deactivate_account( + result = yield self._deactivate_account_handler.deactivate_account( target_user_id, erase, ) - defer.returnValue((200, {})) + if result: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class ShutdownRoomRestServlet(ClientV1RestServlet): diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index eeae466d8291..372648cafdab 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -209,10 +209,17 @@ def on_POST(self, request): yield self.auth_handler.validate_user_via_ui_auth( requester, body, self.hs.get_ip_from_request(request), ) - yield self._deactivate_account_handler.deactivate_account( + result = yield self._deactivate_account_handler.deactivate_account( requester.user.to_string(), erase, ) - defer.returnValue((200, {})) + if result: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class EmailThreepidRequestTokenRestServlet(RestServlet): @@ -364,7 +371,7 @@ def on_POST(self, request): user_id = requester.user.to_string() try: - yield 
self.auth_handler.delete_threepid( + ret = yield self.auth_handler.delete_threepid( user_id, body['medium'], body['address'] ) except Exception: @@ -374,7 +381,14 @@ def on_POST(self, request): logger.exception("Failed to remove threepid") raise SynapseError(500, "Failed to remove threepid") - defer.returnValue((200, {})) + if ret: + id_server_unbind_result = "success" + else: + id_server_unbind_result = "no-support" + + defer.returnValue((200, { + "id_server_unbind_result": id_server_unbind_result, + })) class WhoamiRestServlet(RestServlet): From d92675a4861eab5f5eec0e8a39a7505d5bda850d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 12:06:42 +0100 Subject: [PATCH 067/173] Ability to whitelist specific threepids against monthly active user limiting --- changelog.d/3662.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3662.feature diff --git a/changelog.d/3662.feature b/changelog.d/3662.feature new file mode 100644 index 000000000000..daacef086d03 --- /dev/null +++ b/changelog.d/3662.feature @@ -0,0 +1 @@ +Ability to whitelist specific threepids against monthly active user limiting From 312ae747466f7c60c0148b75446f357263330148 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 13:33:16 +0100 Subject: [PATCH 068/173] typos --- synapse/storage/monthly_active_users.py | 4 ++-- synapse/storage/schema/delta/51/monthly_active_users.sql | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index abe1e6bb99ad..8b3beaf26a1f 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -50,7 +50,7 @@ def _reap_users(txn): # If MAU user count still exceeds the MAU threshold, then delete on # a least recently active basis. # Note it is not possible to write this query using OFFSET due to - # incompatibilities in how sqlite an postgres support the feature. + # incompatibilities in how sqlite and postgres support the feature. # sqlite requires 'LIMIT -1 OFFSET ?', the LIMIT must be present # While Postgres does not require 'LIMIT', but also does not support # negative LIMIT values. So there is no way to write it that both can @@ -78,7 +78,7 @@ def _reap_users(txn): @cached(num_args=0) def get_monthly_active_count(self): - """Generates current count of monthly active users.abs + """Generates current count of monthly active users Returns: Defered[int]: Number of current monthly active users diff --git a/synapse/storage/schema/delta/51/monthly_active_users.sql b/synapse/storage/schema/delta/51/monthly_active_users.sql index 10aac90ce137..c9d537d5a306 100644 --- a/synapse/storage/schema/delta/51/monthly_active_users.sql +++ b/synapse/storage/schema/delta/51/monthly_active_users.sql @@ -18,7 +18,7 @@ CREATE TABLE monthly_active_users ( user_id TEXT NOT NULL, -- Last time we saw the user. Not guaranteed to be accurate due to rate limiting -- on updates, Granularity of updates governed by - -- syanpse.storage.monthly_active_users.LAST_SEEN_GRANULARITY + -- synapse.storage.monthly_active_users.LAST_SEEN_GRANULARITY -- Measured in ms since epoch. 
timestamp BIGINT NOT NULL ); From 54685d294dd15bb8b9a9928b8d6f371ae4236e25 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 15:38:54 +0100 Subject: [PATCH 069/173] Ability to disable client/server Synapse via conf toggle --- changelog.d/3655.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3655.feature diff --git a/changelog.d/3655.feature b/changelog.d/3655.feature new file mode 100644 index 000000000000..1134e549e7b3 --- /dev/null +++ b/changelog.d/3655.feature @@ -0,0 +1 @@ +Ability to disable client/server Synapse via conf toggle From 839a317c9627ff0d61f91504a2cfca275f31d7b2 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 17:39:04 +0100 Subject: [PATCH 070/173] fix pep8 too many lines --- synapse/api/errors.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 69e6ffb5a320..dc3bed5fcbbd 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -62,7 +62,6 @@ class Codes(object): INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION" - class CodeMessageException(RuntimeError): """An exception with integer code and message string attributes. From e92fb00f32c63de6ea50ba1cbbadf74060ea143d Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 8 Aug 2018 17:54:49 +0100 Subject: [PATCH 071/173] sync auth blocking --- synapse/handlers/sync.py | 16 +++++++++----- tests/handlers/test_sync.py | 42 +++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 5 deletions(-) create mode 100644 tests/handlers/test_sync.py diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index dff1f67dcb45..f748d9afb099 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -191,6 +191,7 @@ def __init__(self, hs): self.clock = hs.get_clock() self.response_cache = ResponseCache(hs, "sync") self.state = hs.get_state_handler() + self.auth = hs.get_auth() # ExpiringCache((User, Device)) -> LruCache(state_key => event_id) self.lazy_loaded_members_cache = ExpiringCache( @@ -198,18 +199,23 @@ def __init__(self, hs): max_len=0, expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE, ) + @defer.inlineCallbacks def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0, full_state=False): """Get the sync for a client if we have new data for it now. Otherwise wait for new data to arrive on the server. If the timeout expires, then return an empty sync result. Returns: - A Deferred SyncResult. + Deferred[SyncResult] """ - return self.response_cache.wrap( - sync_config.request_key, - self._wait_for_sync_for_user, - sync_config, since_token, timeout, full_state, + yield self.auth.check_auth_blocking() + + defer.returnValue( + self.response_cache.wrap( + sync_config.request_key, + self._wait_for_sync_for_user, + sync_config, since_token, timeout, full_state, + ) ) @defer.inlineCallbacks diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py new file mode 100644 index 000000000000..3b1b4d492367 --- /dev/null +++ b/tests/handlers/test_sync.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from twisted.internet import defer + +import tests.unittest +import tests.utils +from tests.utils import setup_test_homeserver +from synapse.handlers.sync import SyncHandler, SyncConfig +from synapse.types import UserID + + +class SyncTestCase(tests.unittest.TestCase): + """ Tests Sync Handler. """ + + @defer.inlineCallbacks + def setUp(self): + self.hs = yield setup_test_homeserver() + self.sync_handler = SyncHandler(self.hs) + + @defer.inlineCallbacks + def test_wait_for_sync_for_user_auth_blocking(self): + sync_config = SyncConfig( + user=UserID("@user","server"), + filter_collection=None, + is_guest=False, + request_key="request_key", + device_id="device_id", + ) + res = yield self.sync_handler.wait_for_sync_for_user(sync_config) + print res From d5c0ce4cadb4848da174bf5a5563de0ebba4790f Mon Sep 17 00:00:00 2001 From: Jeroen Date: Wed, 8 Aug 2018 19:25:01 +0200 Subject: [PATCH 072/173] updated docstring for ServerContextFactory --- synapse/crypto/context_factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 569377e65363..29a75e187374 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -26,7 +26,7 @@ class ServerContextFactory(ContextFactory): """Factory for PyOpenSSL SSL contexts that are used to handle incoming - connections and to make connections to remote servers.""" + connections.""" def __init__(self, config): self._context = SSL.Context(SSL.SSLv23_METHOD) From 2511f3f8a082585e680dad200069dec77b066a6a Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 9 Aug 2018 12:22:01 +1000 Subject: [PATCH 073/173] Test fixes for Python 3 (#3647) --- changelog.d/3647.misc | 1 + tests/handlers/test_typing.py | 4 +++- tests/rest/client/test_transactions.py | 4 ++-- tests/rest/client/v1/test_admin.py | 10 +++++----- tests/rest/client/v1/test_profile.py | 8 ++++---- tests/rest/client/v1/utils.py | 10 +++++----- tests/rest/client/v2_alpha/test_filter.py | 22 ++++++++++----------- tests/rest/client/v2_alpha/test_register.py | 14 ++++++------- tests/rest/client/v2_alpha/test_sync.py | 4 ++-- tests/server.py | 22 +++++++++++++++++++-- tests/storage/test_event_federation.py | 2 +- tests/storage/test_state.py | 2 +- tests/test_server.py | 11 ++++------- tests/utils.py | 9 +++++---- 14 files changed, 71 insertions(+), 52 deletions(-) create mode 100644 changelog.d/3647.misc diff --git a/changelog.d/3647.misc b/changelog.d/3647.misc new file mode 100644 index 000000000000..dbc66dae6087 --- /dev/null +++ b/changelog.d/3647.misc @@ -0,0 +1 @@ +Tests now correctly execute on Python 3. 
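Most of the mechanical churn in this patch is one idiom applied over and over: values that cross the wire (HTTP methods, paths, request bodies) must be bytes on Python 3, while json.loads and string formatting want text. A minimal sketch of the coercion idiom the test changes below keep reaching for; the helper names here are illustrative only and not part of the patch:

    import json

    def to_wire(value, encoding='ascii'):
        # HTTP methods and paths must be bytes on the wire under Python 3;
        # on Python 2 str is already bytes, so this is a no-op there.
        if isinstance(value, bytes):
            return value
        return value.encode(encoding)

    def from_wire_json(raw):
        # json.loads() only accepts text on older Python 3 releases,
        # so decode the body before parsing.
        return json.loads(raw.decode('utf8'))

    # to_wire("GET") == b"GET"; from_wire_json(b'{"a": 1}') == {"a": 1}
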
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 2c263af1a3a0..f422cf3c5a5f 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -48,7 +48,9 @@ def _expect_edu(destination, edu_type, content, origin="test"): def _make_edu_json(origin, edu_type, content): - return json.dumps(_expect_edu("test", edu_type, content, origin=origin)) + return json.dumps( + _expect_edu("test", edu_type, content, origin=origin) + ).encode('utf8') class TypingNotificationsTestCase(unittest.TestCase): diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py index 34e68ae82fa4..d46c27e7e9f7 100644 --- a/tests/rest/client/test_transactions.py +++ b/tests/rest/client/test_transactions.py @@ -85,7 +85,7 @@ def cb(): try: yield self.cache.fetch_or_execute(self.mock_key, cb) except Exception as e: - self.assertEqual(e.message, "boo") + self.assertEqual(e.args[0], "boo") self.assertIs(LoggingContext.current_context(), test_context) res = yield self.cache.fetch_or_execute(self.mock_key, cb) @@ -111,7 +111,7 @@ def cb(): try: yield self.cache.fetch_or_execute(self.mock_key, cb) except Exception as e: - self.assertEqual(e.message, "boo") + self.assertEqual(e.args[0], "boo") self.assertIs(LoggingContext.current_context(), test_context) res = yield self.cache.fetch_or_execute(self.mock_key, cb) diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py index 8c90145601a6..fb28883d30c4 100644 --- a/tests/rest/client/v1/test_admin.py +++ b/tests/rest/client/v1/test_admin.py @@ -140,7 +140,7 @@ def test_register_incorrect_nonce(self): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -168,7 +168,7 @@ def test_register_correct_nonce(self): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -195,7 +195,7 @@ def test_nonce_reuse(self): "admin": True, "mac": want_mac, } - ).encode('utf8') + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -253,7 +253,7 @@ def nonce(): self.assertEqual('Invalid username', channel.json_body["error"]) # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": b"abcd\x00"}) + body = json.dumps({"nonce": nonce(), "username": u"abcd\u0000"}) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) @@ -289,7 +289,7 @@ def nonce(): self.assertEqual('Invalid password', channel.json_body["error"]) # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": "a", "password": b"abcd\x00"}) + body = json.dumps({"nonce": nonce(), "username": "a", "password": u"abcd\u0000"}) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index d71cc8e0db73..0516ce3cfbd2 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -80,7 +80,7 @@ def test_set_my_name(self): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/displayname" % (myid), - '{"displayname": "Frank Jr."}' + b'{"displayname": "Frank Jr."}' ) self.assertEquals(200, code) @@ -95,7 +95,7 @@ def test_set_my_name_noauth(self): (code, 
response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/displayname" % ("@4567:test"), - '{"displayname": "Frank Jr."}' + b'{"displayname": "Frank Jr."}' ) self.assertTrue( @@ -122,7 +122,7 @@ def test_set_other_name(self): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/displayname" % ("@opaque:elsewhere"), - '{"displayname":"bob"}' + b'{"displayname":"bob"}' ) self.assertTrue( @@ -151,7 +151,7 @@ def test_set_my_avatar(self): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/avatar_url" % (myid), - '{"avatar_url": "http://my.server/pic.gif"}' + b'{"avatar_url": "http://my.server/pic.gif"}' ) self.assertEquals(200, code) diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index 41de8e0762ec..e3bc5f378d22 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -105,7 +105,7 @@ def register(self, user_id): "password": "test", "type": "m.login.password" })) - self.assertEquals(200, code) + self.assertEquals(200, code, msg=response) defer.returnValue(response) @defer.inlineCallbacks @@ -149,14 +149,14 @@ class RestHelper(object): def create_room_as(self, room_creator, is_public=True, tok=None): temp_id = self.auth_user_id self.auth_user_id = room_creator - path = b"/_matrix/client/r0/createRoom" + path = "/_matrix/client/r0/createRoom" content = {} if not is_public: content["visibility"] = "private" if tok: - path = path + b"?access_token=%s" % tok.encode('ascii') + path = path + "?access_token=%s" % tok - request, channel = make_request(b"POST", path, json.dumps(content).encode('utf8')) + request, channel = make_request("POST", path, json.dumps(content).encode('utf8')) request.render(self.resource) wait_until_result(self.hs.get_reactor(), channel) @@ -205,7 +205,7 @@ def change_membership(self, room, src, targ, membership, tok=None, expect_code=2 data = {"membership": membership} request, channel = make_request( - b"PUT", path.encode('ascii'), json.dumps(data).encode('utf8') + "PUT", path, json.dumps(data).encode('utf8') ) request.render(self.resource) diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index e890f0feac46..de33b10a5f5e 100644 --- a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -33,7 +33,7 @@ class FilterTestCase(unittest.TestCase): - USER_ID = b"@apple:test" + USER_ID = "@apple:test" EXAMPLE_FILTER = {"room": {"timeline": {"types": ["m.room.message"]}}} EXAMPLE_FILTER_JSON = b'{"room": {"timeline": {"types": ["m.room.message"]}}}' TO_REGISTER = [filter] @@ -72,8 +72,8 @@ def get_user_by_req(request, allow_guest=False, rights="access"): def test_add_filter(self): request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID), + "POST", + "/_matrix/client/r0/user/%s/filter" % (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -87,8 +87,8 @@ def test_add_filter(self): def test_add_filter_for_other_user(self): request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (b"@watermelon:test"), + "POST", + "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -101,8 +101,8 @@ def test_add_filter_non_local_user(self): _is_mine = self.hs.is_mine self.hs.is_mine = lambda target_user: False request, channel = make_request( - b"POST", - b"/_matrix/client/r0/user/%s/filter" % (self.USER_ID), + "POST", + "/_matrix/client/r0/user/%s/filter" 
% (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) request.render(self.resource) @@ -119,7 +119,7 @@ def test_get_filter(self): self.clock.advance(1) filter_id = filter_id.result request, channel = make_request( - b"GET", b"/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id) + "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -129,7 +129,7 @@ def test_get_filter(self): def test_get_filter_non_existant(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -141,7 +141,7 @@ def test_get_filter_non_existant(self): # in errors.py def test_get_filter_invalid_id(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) @@ -151,7 +151,7 @@ def test_get_filter_invalid_id(self): # No ID also returns an invalid_id error def test_get_filter_no_id(self): request, channel = make_request( - b"GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID) + "GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID) ) request.render(self.resource) wait_until_result(self.clock, channel) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index e004d8fc73d9..f6293f11a871 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -81,7 +81,7 @@ def test_POST_appservice_registration_valid(self): "access_token": token, "home_server": self.hs.hostname, } - self.assertDictContainsSubset(det_data, json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) def test_POST_appservice_registration_invalid(self): self.appservice = None # no application service exists @@ -102,7 +102,7 @@ def test_POST_bad_password(self): self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Invalid password" + channel.json_body["error"], "Invalid password" ) def test_POST_bad_username(self): @@ -113,7 +113,7 @@ def test_POST_bad_username(self): self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Invalid username" + channel.json_body["error"], "Invalid username" ) def test_POST_user_valid(self): @@ -140,7 +140,7 @@ def test_POST_user_valid(self): "device_id": device_id, } self.assertEquals(channel.result["code"], b"200", channel.result) - self.assertDictContainsSubset(det_data, json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) self.auth_handler.get_login_tuple_for_user_id( user_id, device_id=device_id, initial_device_display_name=None ) @@ -158,7 +158,7 @@ def test_POST_disabled_registration(self): self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], + channel.json_body["error"], "Registration has been disabled", ) @@ -178,7 +178,7 @@ def test_POST_guest_registration(self): "device_id": "guest_device", } self.assertEquals(channel.result["code"], b"200", channel.result) - self.assertDictContainsSubset(det_data, 
json.loads(channel.result["body"])) + self.assertDictContainsSubset(det_data, channel.json_body) def test_POST_disabled_guest_registration(self): self.hs.config.allow_guest_access = False @@ -189,5 +189,5 @@ def test_POST_disabled_guest_registration(self): self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals( - json.loads(channel.result["body"])["error"], "Guest access is disabled" + channel.json_body["error"], "Guest access is disabled" ) diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 03ec3993b2df..bafc0d1df07d 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -32,7 +32,7 @@ class FilterTestCase(unittest.TestCase): - USER_ID = b"@apple:test" + USER_ID = "@apple:test" TO_REGISTER = [sync] def setUp(self): @@ -68,7 +68,7 @@ def get_user_by_req(request, allow_guest=False, rights="access"): r.register_servlets(self.hs, self.resource) def test_sync_argless(self): - request, channel = make_request(b"GET", b"/_matrix/client/r0/sync") + request, channel = make_request("GET", "/_matrix/client/r0/sync") request.render(self.resource) wait_until_result(self.clock, channel) diff --git a/tests/server.py b/tests/server.py index c611dd6059dd..e249668d2104 100644 --- a/tests/server.py +++ b/tests/server.py @@ -11,6 +11,7 @@ from twisted.test.proto_helpers import MemoryReactorClock from synapse.http.site import SynapseRequest +from synapse.util import Clock from tests.utils import setup_test_homeserver as _sth @@ -28,7 +29,13 @@ class FakeChannel(object): def json_body(self): if not self.result: raise Exception("No result yet.") - return json.loads(self.result["body"]) + return json.loads(self.result["body"].decode('utf8')) + + @property + def code(self): + if not self.result: + raise Exception("No result yet.") + return int(self.result["code"]) def writeHeaders(self, version, code, reason, headers): self.result["version"] = version @@ -79,11 +86,16 @@ def make_request(method, path, content=b""): Make a web request using the given method and path, feed it the content, and return the Request and the Channel underneath. 
""" + if not isinstance(method, bytes): + method = method.encode('ascii') + + if not isinstance(path, bytes): + path = path.encode('ascii') # Decorate it to be the full path if not path.startswith(b"/_matrix"): path = b"/_matrix/client/r0/" + path - path = path.replace("//", "/") + path = path.replace(b"//", b"/") if isinstance(content, text_type): content = content.encode('utf8') @@ -191,3 +203,9 @@ def _(res): clock.threadpool = ThreadPool() pool.threadpool = ThreadPool() return d + + +def get_clock(): + clock = ThreadedMemoryReactorClock() + hs_clock = Clock(clock) + return (clock, hs_clock) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 30683e7888ed..69412c5aadd8 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -49,7 +49,7 @@ def insert_event(txn, i): 'INSERT INTO event_reference_hashes ' '(event_id, algorithm, hash) ' "VALUES (?, 'sha256', ?)" - ), (event_id, 'ffff')) + ), (event_id, b'ffff')) for i in range(0, 11): yield self.store.runInteraction("insert", insert_event, i) diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 7a76d67b8c8b..f7871cd42611 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -176,7 +176,7 @@ def test_get_state_for_event(self): room_id = self.room.to_string() group_ids = yield self.store.get_state_groups_ids(room_id, [e5.event_id]) - group = group_ids.keys()[0] + group = list(group_ids.keys())[0] # test _get_some_state_from_cache correctly filters out members with types=[] (state_dict, is_all) = yield self.store._get_some_state_from_cache( diff --git a/tests/test_server.py b/tests/test_server.py index 7e063c029096..fc396226ea27 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -1,4 +1,3 @@ -import json import re from twisted.internet.defer import Deferred @@ -104,9 +103,8 @@ def _callback(request, **kwargs): request.render(res) self.assertEqual(channel.result["code"], b'403') - reply_body = json.loads(channel.result["body"]) - self.assertEqual(reply_body["error"], "Forbidden!!one!") - self.assertEqual(reply_body["errcode"], "M_FORBIDDEN") + self.assertEqual(channel.json_body["error"], "Forbidden!!one!") + self.assertEqual(channel.json_body["errcode"], "M_FORBIDDEN") def test_no_handler(self): """ @@ -126,6 +124,5 @@ def _callback(request, **kwargs): request.render(res) self.assertEqual(channel.result["code"], b'400') - reply_body = json.loads(channel.result["body"]) - self.assertEqual(reply_body["error"], "Unrecognized request") - self.assertEqual(reply_body["errcode"], "M_UNRECOGNIZED") + self.assertEqual(channel.json_body["error"], "Unrecognized request") + self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED") diff --git a/tests/utils.py b/tests/utils.py index 151db4b89078..5d49692c589d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -153,8 +153,9 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None # Need to let the HS build an auth handler and then mess with it # because AuthHandler's constructor requires the HS, so we can't make one # beforehand and pass it in to the HS's constructor (chicken / egg) - hs.get_auth_handler().hash = lambda p: hashlib.md5(p).hexdigest() - hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5(p).hexdigest() == h + hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest() + hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5( + p.encode('utf8')).hexdigest() == h fed = 
kargs.get("resource_for_federation", None) if fed: @@ -227,8 +228,8 @@ def trigger(self, http_method, path, content, mock_request, federation_auth=Fals mock_content.configure_mock(**config) mock_request.content = mock_content - mock_request.method = http_method - mock_request.uri = path + mock_request.method = http_method.encode('ascii') + mock_request.uri = path.encode('ascii') mock_request.getClientIP.return_value = "-" From 62564797f5f9c6b1295a98a9742ae226b87a135e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 09:56:10 +0100 Subject: [PATCH 074/173] Fixup wording and remove dead code --- changelog.d/3632.misc | 1 + synapse/replication/http/_base.py | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc index e69de29bb2d1..9d64bbe83b65 100644 --- a/changelog.d/3632.misc +++ b/changelog.d/3632.misc @@ -0,0 +1 @@ +Refactor HTTP replication endpoints to reduce code duplication diff --git a/synapse/replication/http/_base.py b/synapse/replication/http/_base.py index 53a0fd459a82..5e5376cf58fa 100644 --- a/synapse/replication/http/_base.py +++ b/synapse/replication/http/_base.py @@ -183,7 +183,6 @@ def register(self, http_server): """ url_args = list(self.PATH_ARGS) - method = "GET" handler = self._handle_request method = self.METHOD @@ -201,7 +200,7 @@ def register(self, http_server): def _cached_handler(self, request, txn_id, **kwargs): """Called on new incoming requests when caching is enabled. Checks - if their is a cached response for the request and returns that, + if there is a cached response for the request and returns that, otherwise calls `_handle_request` and caches its response. """ # We just use the txn_id here, but we probably also want to use the From bf7598f582cdfbd7db0fed55afce28bcc51a4801 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:09:56 +0100 Subject: [PATCH 075/173] Log when we 3pid/unbind request fails --- synapse/handlers/identity.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index a0f5fecc9602..5feb3f22a691 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -188,8 +188,10 @@ def try_unbind_threepid(self, mxid, threepid): except HttpResponseException as e: if e.code in (400, 404, 501,): # The remote server probably doesn't support unbinding (yet) + logger.warn("Received %d response while unbinding threepid", e.code) defer.returnValue(False) else: + logger.error("Failed to unbind threepid on identity server: %s", e) raise SynapseError(502, "Failed to contact identity server") defer.returnValue(True) From 72d1902bbeaa029e7003aecaeaf9bc5a41d17646 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:23:49 +0100 Subject: [PATCH 076/173] Fixup doc comments --- synapse/federation/federation_server.py | 11 +++++++++++ synapse/replication/http/federation.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index d23c1cf13b99..3bc26c4ab43f 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -811,6 +811,13 @@ def on_query(self, query_type, args): class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): + """A FederationHandlerRegistry for worker processes. 
+ + When receiving EDU or queries it will check if an appropriate handler has + been registered on the worker, if there isn't one then it calls off to the + master process. + """ + def __init__(self, hs): self.config = hs.config self.http_client = hs.get_simple_http_client() @@ -822,6 +829,8 @@ def __init__(self, hs): super(ReplicationFederationHandlerRegistry, self).__init__() def on_edu(self, edu_type, origin, content): + """Overrides FederationHandlerRegistry + """ handler = self.edu_handlers.get(edu_type) if handler: return super(ReplicationFederationHandlerRegistry, self).on_edu( @@ -835,6 +844,8 @@ def on_edu(self, edu_type, origin, content): ) def on_query(self, query_type, args): + """Overrides FederationHandlerRegistry + """ handler = self.query_handlers.get(query_type) if handler: return handler(args) diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 3fa7bd64c7b5..3e6cbbf5a19a 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -157,6 +157,15 @@ def _notify_persisted_event(self, event, max_stream_id): class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): """Handles EDUs newly received from federation, including persisting and notifying. + + Request format: + + POST /_synapse/replication/fed_send_edu/:edu_type/:txn_id + + { + "origin": ..., + "content: { ... } + } """ NAME = "fed_send_edu" @@ -196,6 +205,14 @@ def _handle_request(self, request, edu_type): class ReplicationGetQueryRestServlet(ReplicationEndpoint): """Handle responding to queries from federation. + + Request format: + + POST /_synapse/replication/fed_query/:query_type + + { + "args": { ... } + } """ NAME = "fed_query" From b179537f2a51f4de52e2625939cc32eeba75cd6b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:29:48 +0100 Subject: [PATCH 077/173] Move clean_room_for_join to master --- synapse/handlers/federation.py | 16 ++++++++++-- synapse/replication/http/federation.py | 35 ++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 37d2307d0abc..acabca1d258c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -50,6 +50,7 @@ ) from synapse.events.validator import EventValidator from synapse.replication.http.federation import ( + ReplicationCleanRoomRestServlet, ReplicationFederationSendEventsRestServlet, ) from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet @@ -104,6 +105,9 @@ def __init__(self, hs): self._notify_user_membership_change = ( ReplicationUserJoinedLeftRoomRestServlet.make_client(hs) ) + self._clean_room_for_join_client = ( + ReplicationCleanRoomRestServlet.make_client(hs) + ) # When joining a room we need to queue any events for that room up self.room_queues = {} @@ -2388,8 +2392,16 @@ def _notify_persisted_event(self, event, max_stream_id): ) def _clean_room_for_join(self, room_id): - # TODO move this out to master - return self.store.clean_room_for_join(room_id) + """Called to clean up any data in DB for a given room, ready for the + server to join the room. 
+ + Args: + room_id (str) + """ + if self.config.worker_app: + return self._clean_room_for_join_client(room_id) + else: + return self.store.clean_room_for_join(room_id) def user_joined_room(self, user, room_id): """Called when a new user has joined the room diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 3e6cbbf5a19a..7b0b1cd32ed1 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -256,7 +256,42 @@ def _handle_request(self, request, query_type): defer.returnValue((200, result)) +class ReplicationCleanRoomRestServlet(ReplicationEndpoint): + """Called to clean up any data in DB for a given room, ready for the + server to join the room. + + Request format: + + POST /_synapse/replication/fed_query/:fed_cleanup_room/:txn_id + + {} + """ + + NAME = "fed_cleanup_room" + PATH_ARGS = ("room_id",) + + def __init__(self, hs): + super(ReplicationCleanRoomRestServlet, self).__init__(hs) + + self.store = hs.get_datastore() + + @staticmethod + def _serialize_payload(room_id, args): + """ + Args: + room_id (str) + """ + return {} + + @defer.inlineCallbacks + def _handle_request(self, request, room_id): + yield self.store.clean_room_for_join(room_id) + + defer.returnValue((200, {})) + + def register_servlets(hs, http_server): ReplicationFederationSendEventsRestServlet(hs).register(http_server) ReplicationFederationSendEduRestServlet(hs).register(http_server) ReplicationGetQueryRestServlet(hs).register(http_server) + ReplicationCleanRoomRestServlet(hs).register(http_server) From 8876ce7f77ecdf543cc23fb8333e9020791e9376 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:30:50 +0100 Subject: [PATCH 078/173] Newsfile --- changelog.d/3653.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3653.feature diff --git a/changelog.d/3653.feature b/changelog.d/3653.feature new file mode 100644 index 000000000000..6c5422994f34 --- /dev/null +++ b/changelog.d/3653.feature @@ -0,0 +1 @@ +Support more federation endpoints on workers From 5c6226707d2588c9f2669512adabc00687295445 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 9 Aug 2018 10:33:55 +0100 Subject: [PATCH 079/173] Update docs/workers.rst --- docs/workers.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/workers.rst b/docs/workers.rst index c5b37c3ded73..ac9efb621f06 100644 --- a/docs/workers.rst +++ b/docs/workers.rst @@ -173,10 +173,23 @@ endpoints matching the following regular expressions:: ^/_matrix/federation/v1/backfill/ ^/_matrix/federation/v1/get_missing_events/ ^/_matrix/federation/v1/publicRooms + ^/_matrix/federation/v1/query/ + ^/_matrix/federation/v1/make_join/ + ^/_matrix/federation/v1/make_leave/ + ^/_matrix/federation/v1/send_join/ + ^/_matrix/federation/v1/send_leave/ + ^/_matrix/federation/v1/invite/ + ^/_matrix/federation/v1/query_auth/ + ^/_matrix/federation/v1/event_auth/ + ^/_matrix/federation/v1/exchange_third_party_invite/ + ^/_matrix/federation/v1/send/ The above endpoints should all be routed to the federation_reader worker by the reverse-proxy configuration. +The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single +instance. 
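Patches 074 through 077 all lean on the same ReplicationEndpoint pattern: the worker serializes its arguments, POSTs them to a /_synapse/replication/... URL (with a txn_id so that retries after a dropped connection hit the _cached_handler response cache and stay idempotent), and the master runs the handler and replies. A hedged sketch of the shape a new endpoint takes, using a deliberately hypothetical name rather than code from this series, and assuming the surrounding Synapse machinery:

    from twisted.internet import defer
    from synapse.replication.http._base import ReplicationEndpoint

    # Hypothetical endpoint for illustration only; the concrete instances
    # are ReplicationCleanRoomRestServlet and friends above.
    class ReplicationExampleRestServlet(ReplicationEndpoint):
        NAME = "example"          # POST /_synapse/replication/example/:room_id/:txn_id
        PATH_ARGS = ("room_id",)  # these travel in the URL; the rest in the JSON body

        @staticmethod
        def _serialize_payload(room_id):
            return {}

        @defer.inlineCallbacks
        def _handle_request(self, request, room_id):
            # Runs on the master, the only process allowed to write to the DB.
            yield do_master_side_work(room_id)  # hypothetical helper
            defer.returnValue((200, {}))

    # Worker side, inside an inlineCallbacks function:
    #     do_example = ReplicationExampleRestServlet.make_client(hs)
    #     yield do_example(room_id=room_id)
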
+ ``synapse.app.federation_sender`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 69ce057ea613f425d5ef6ace03d0019a8e4fdf49 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 12:26:27 +0100 Subject: [PATCH 080/173] block sync if auth checks fail --- synapse/handlers/sync.py | 12 +++++------- tests/handlers/test_sync.py | 19 +++++++++++++------ 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index f748d9afb099..776ddca638c8 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -209,14 +209,12 @@ def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0, Deferred[SyncResult] """ yield self.auth.check_auth_blocking() - - defer.returnValue( - self.response_cache.wrap( - sync_config.request_key, - self._wait_for_sync_for_user, - sync_config, since_token, timeout, full_state, - ) + res = yield self.response_cache.wrap( + sync_config.request_key, + self._wait_for_sync_for_user, + sync_config, since_token, timeout, full_state, ) + defer.returnValue(res) @defer.inlineCallbacks def _wait_for_sync_for_user(self, sync_config, since_token, timeout, diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 3b1b4d492367..497e4bd933a0 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -14,11 +14,14 @@ # limitations under the License. from twisted.internet import defer +from synapse.api.errors import AuthError +from synapse.api.filtering import DEFAULT_FILTER_COLLECTION +from synapse.handlers.sync import SyncConfig, SyncHandler +from synapse.types import UserID + import tests.unittest import tests.utils from tests.utils import setup_test_homeserver -from synapse.handlers.sync import SyncHandler, SyncConfig -from synapse.types import UserID class SyncTestCase(tests.unittest.TestCase): @@ -32,11 +35,15 @@ def setUp(self): @defer.inlineCallbacks def test_wait_for_sync_for_user_auth_blocking(self): sync_config = SyncConfig( - user=UserID("@user","server"), - filter_collection=None, + user=UserID("@user", "server"), + filter_collection=DEFAULT_FILTER_COLLECTION, is_guest=False, request_key="request_key", device_id="device_id", ) - res = yield self.sync_handler.wait_for_sync_for_user(sync_config) - print res + # Ensure that an exception is not thrown + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.hs.config.hs_disabled = True + + with self.assertRaises(AuthError): + yield self.sync_handler.wait_for_sync_for_user(sync_config) From d967653705854dac98cde06cb3de54113e704e60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 9 Aug 2018 13:21:30 +0200 Subject: [PATCH 081/173] update docker base-image to alpine 3.8 --- changelog.d/3669.misc | 1 + docker/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3669.misc diff --git a/changelog.d/3669.misc b/changelog.d/3669.misc new file mode 100644 index 000000000000..fc579ddc607d --- /dev/null +++ b/changelog.d/3669.misc @@ -0,0 +1 @@ +Update docker base image from alpine 3.7 to 3.8. 
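The substance of patch 080 is ordering: wait_for_sync_for_user now yields check_auth_blocking() before touching the ResponseCache, instead of returning the cache-wrapped deferred directly, so a blocked user cannot be handed a possibly cached sync response; per changelog.d/3670 below, locked-out users must lose the ability to read new messages. Condensed from the sync.py diffs of patches 071 and 080, the resulting flow is:

    @defer.inlineCallbacks
    def wait_for_sync_for_user(self, sync_config, since_token=None,
                               timeout=0, full_state=False):
        # Gate first: raises AuthError (403) if the server is disabled
        # or the monthly-active-user cap has been hit.
        yield self.auth.check_auth_blocking()
        # Only then fall through to the (possibly cached) sync machinery.
        res = yield self.response_cache.wrap(
            sync_config.request_key,
            self._wait_for_sync_for_user,
            sync_config, since_token, timeout, full_state,
        )
        defer.returnValue(res)
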
diff --git a/docker/Dockerfile b/docker/Dockerfile index 26fb3a6bfffd..777976217d4f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/python:2-alpine3.7 +FROM docker.io/python:2-alpine3.8 RUN apk add --no-cache --virtual .nacl_deps \ build-base \ From c6b28fb4791d88e064cd88686dd32e7ed7834525 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 12:45:58 +0100 Subject: [PATCH 082/173] Where server is disabled, block ability for locked out users to read new messages --- changelog.d/3670.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3670.feature diff --git a/changelog.d/3670.feature b/changelog.d/3670.feature new file mode 100644 index 000000000000..ba00f2d2ecb8 --- /dev/null +++ b/changelog.d/3670.feature @@ -0,0 +1 @@ +Where server is disabled, block ability for locked out users to read new messages From 09cf13089858902f3cdcb49b9f9bc3d214ba6337 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:39:12 +0100 Subject: [PATCH 083/173] only block on sync where user is not part of the mau cohort --- synapse/api/auth.py | 13 ++++++++++-- synapse/handlers/sync.py | 7 ++++++- tests/handlers/test_sync.py | 40 ++++++++++++++++++++++++++++--------- 3 files changed, 48 insertions(+), 12 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9c62ec43742d..170039fc8262 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -775,17 +775,26 @@ def check_in_room_or_world_readable(self, room_id, user_id): ) @defer.inlineCallbacks - def check_auth_blocking(self): + def check_auth_blocking(self, user_id=None): """Checks if the user should be rejected for some external reason, such as monthly active user limiting or global disable flag + + Args: + user_id(str): If present, checks for presence against existing MAU cohort """ if self.hs.config.hs_disabled: raise AuthError( 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED ) if self.hs.config.limit_usage_by_mau is True: + # If the user is already part of the MAU cohort + if user_id: + timestamp = yield self.store._user_last_seen_monthly_active(user_id) + if timestamp: + return + # Else if there is no room in the MAU bucket, bail current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + ) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 776ddca638c8..d3b26a4106b2 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -208,7 +208,12 @@ def wait_for_sync_for_user(self, sync_config, since_token=None, timeout=0, Returns: Deferred[SyncResult] """ - yield self.auth.check_auth_blocking() + # If the user is not part of the mau group, then check that limits have + # not been exceeded (if not part of the group by this point, almost certain + # auth_blocking will occur) + user_id = sync_config.user.to_string() + yield self.auth.check_auth_blocking(user_id) + res = yield self.response_cache.wrap( sync_config.request_key, self._wait_for_sync_for_user, diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 497e4bd933a0..b95a8743a71c 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from twisted.internet import defer +from synapse.api.errors import AuthError, Codes -from synapse.api.errors import AuthError from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.handlers.sync import SyncConfig, SyncHandler from synapse.types import UserID @@ -31,19 +31,41 @@ class SyncTestCase(tests.unittest.TestCase): def setUp(self): self.hs = yield setup_test_homeserver() self.sync_handler = SyncHandler(self.hs) + self.store = self.hs.get_datastore() @defer.inlineCallbacks def test_wait_for_sync_for_user_auth_blocking(self): - sync_config = SyncConfig( - user=UserID("@user", "server"), + + user_id1 = "@user1:server" + user_id2 = "@user2:server" + sync_config = self._generate_sync_config(user_id1) + + self.hs.config.limit_usage_by_mau = True + self.hs.config.max_mau_value = 1 + + # Check that the happy case does not throw errors + yield self.store.upsert_monthly_active_user(user_id1) + yield self.sync_handler.wait_for_sync_for_user(sync_config) + + # Test that global lock works + self.hs.config.hs_disabled = True + with self.assertRaises(AuthError) as e: + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + + self.hs.config.hs_disabled = False + + sync_config = self._generate_sync_config(user_id2) + + with self.assertRaises(AuthError) as e: + yield self.sync_handler.wait_for_sync_for_user(sync_config) + self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + + def _generate_sync_config(self, user_id): + return SyncConfig( + user=UserID(user_id.split(":")[0][1:], user_id.split(":")[1]), filter_collection=DEFAULT_FILTER_COLLECTION, is_guest=False, request_key="request_key", device_id="device_id", ) - # Ensure that an exception is not thrown - yield self.sync_handler.wait_for_sync_for_user(sync_config) - self.hs.config.hs_disabled = True - - with self.assertRaises(AuthError): - yield self.sync_handler.wait_for_sync_for_user(sync_config) From 04df7142598b531e8e400611e3f92b21afeabab6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:41:52 +0100 Subject: [PATCH 084/173] fix imports --- tests/handlers/test_sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index b95a8743a71c..cfd37f3138d3 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from twisted.internet import defer -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError, Codes from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.handlers.sync import SyncConfig, SyncHandler from synapse.types import UserID From c1f9dec92ac8abaa693cc591438087d8282c6844 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 17:43:26 +0100 Subject: [PATCH 085/173] fix errant parenthesis --- synapse/api/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 170039fc8262..df6022ff6957 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -797,4 +797,4 @@ def check_auth_blocking(self, user_id=None): if current_mau >= self.hs.config.max_mau_value: raise AuthError( 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED - ) + ) From 885ea9c602ca4002385a6d73c94c35e0958de809 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 9 Aug 2018 18:02:12 +0100 Subject: [PATCH 086/173] rename _user_last_seen_monthly_active --- synapse/api/auth.py | 2 +- synapse/storage/monthly_active_users.py | 10 +++++----- tests/storage/test_client_ips.py | 12 ++++++------ tests/storage/test_monthly_active_users.py | 13 +++++++------ 4 files changed, 19 insertions(+), 18 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index df6022ff6957..c31c6a6a0867 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -789,7 +789,7 @@ def check_auth_blocking(self, user_id=None): if self.hs.config.limit_usage_by_mau is True: # If the user is already part of the MAU cohort if user_id: - timestamp = yield self.store._user_last_seen_monthly_active(user_id) + timestamp = yield self.store.user_last_seen_monthly_active(user_id) if timestamp: return # Else if there is no room in the MAU bucket, bail diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index d47dcef3a0a5..07211432af79 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -113,7 +113,7 @@ def _reap_users(txn): # is racy. 
# Have resolved to invalidate the whole cache for now and do # something about it if and when the perf becomes significant - self._user_last_seen_monthly_active.invalidate_all() + self.user_last_seen_monthly_active.invalidate_all() self.get_monthly_active_count.invalidate_all() @cached(num_args=0) @@ -152,11 +152,11 @@ def upsert_monthly_active_user(self, user_id): lock=False, ) if is_insert: - self._user_last_seen_monthly_active.invalidate((user_id,)) + self.user_last_seen_monthly_active.invalidate((user_id,)) self.get_monthly_active_count.invalidate(()) @cached(num_args=1) - def _user_last_seen_monthly_active(self, user_id): + def user_last_seen_monthly_active(self, user_id): """ Checks if a given user is part of the monthly active user group Arguments: @@ -173,7 +173,7 @@ def _user_last_seen_monthly_active(self, user_id): }, retcol="timestamp", allow_none=True, - desc="_user_last_seen_monthly_active", + desc="user_last_seen_monthly_active", )) @defer.inlineCallbacks @@ -185,7 +185,7 @@ def populate_monthly_active_users(self, user_id): user_id(str): the user_id to query """ if self.hs.config.limit_usage_by_mau: - last_seen_timestamp = yield self._user_last_seen_monthly_active(user_id) + last_seen_timestamp = yield self.user_last_seen_monthly_active(user_id) now = self.hs.get_clock().time_msec() # We want to reduce to the total number of db writes, and are happy diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 7a58c6eb24e5..6d2bb3058ea2 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -64,7 +64,7 @@ def test_disabled_monthly_active_user(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -80,7 +80,7 @@ def test_adding_monthly_active_user_when_full(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) @defer.inlineCallbacks @@ -88,13 +88,13 @@ def test_adding_monthly_active_user_when_space(self): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertTrue(active) @defer.inlineCallbacks @@ -103,7 +103,7 @@ def test_updating_monthly_active_user_when_space(self): self.hs.config.max_mau_value = 50 user_id = "@user:server" - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) yield self.store.insert_client_ip( @@ -112,5 +112,5 @@ def test_updating_monthly_active_user_when_space(self): yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id", ) - active = yield self.store._user_last_seen_monthly_active(user_id) + active = yield self.store.user_last_seen_monthly_active(user_id) self.assertTrue(active) diff 
--git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index cbd480cd4215..be74c3021812 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -66,9 +66,9 @@ def test_initialise_reserved_users(self): # Test user is marked as active - timestamp = yield self.store._user_last_seen_monthly_active(user1) + timestamp = yield self.store.user_last_seen_monthly_active(user1) self.assertTrue(timestamp) - timestamp = yield self.store._user_last_seen_monthly_active(user2) + timestamp = yield self.store.user_last_seen_monthly_active(user2) self.assertTrue(timestamp) # Test that users are never removed from the db. @@ -92,17 +92,18 @@ def test_can_insert_and_count_mau(self): self.assertEqual(1, count) @defer.inlineCallbacks - def test__user_last_seen_monthly_active(self): + def test_user_last_seen_monthly_active(self): user_id1 = "@user1:server" user_id2 = "@user2:server" user_id3 = "@user3:server" - result = yield self.store._user_last_seen_monthly_active(user_id1) + + result = yield self.store.user_last_seen_monthly_active(user_id1) self.assertFalse(result == 0) yield self.store.upsert_monthly_active_user(user_id1) yield self.store.upsert_monthly_active_user(user_id2) - result = yield self.store._user_last_seen_monthly_active(user_id1) + result = yield self.store.user_last_seen_monthly_active(user_id1) self.assertTrue(result > 0) - result = yield self.store._user_last_seen_monthly_active(user_id3) + result = yield self.store.user_last_seen_monthly_active(user_id3) self.assertFalse(result == 0) @defer.inlineCallbacks From 64899341dc8988b40b86dd49034aae27ddcb8c44 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Thu, 9 Aug 2018 21:04:22 +0200 Subject: [PATCH 087/173] include private functions from twisted --- synapse/crypto/context_factory.py | 37 +++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 29a75e187374..08c41a92b672 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -16,10 +16,10 @@ from zope.interface import implementer from OpenSSL import SSL, crypto -from twisted.internet._idna import _idnaBytes -from twisted.internet._sslverify import _defaultCurveName, _tolerateErrors +from twisted.internet._sslverify import _defaultCurveName from twisted.internet.interfaces import IOpenSSLClientConnectionCreator from twisted.internet.ssl import CertificateOptions, ContextFactory +from twisted.python.failure import Failure logger = logging.getLogger(__name__) @@ -53,6 +53,39 @@ def getContext(self): return self._context +def _idnaBytes(text): + """ + Convert some text typed by a human into some ASCII bytes. This is a + copy of twisted.internet._idna._idnaBytes. For documentation, see the + twisted documentation. + """ + try: + import idna + except ImportError: + return text.encode("idna") + else: + return idna.encode(text) + + +def _tolerateErrors(wrapped): + """ + Wrap up an info_callback for pyOpenSSL so that if something goes wrong + the error is immediately logged and the connection is dropped if possible. + This is a copy of twisted.internet._sslverify._tolerateErrors. For + documentation, see the twisted documentation. 
+ """ + + def infoCallback(connection, where, ret): + try: + return wrapped(connection, where, ret) + except: # noqa: E722, taken from the twisted implementation + f = Failure() + logger.exception("Error during info_callback") + connection.get_app_data().failVerification(f) + + return infoCallback + + @implementer(IOpenSSLClientConnectionCreator) class ClientTLSOptions(object): """ From 2e9c73e8cab5af901957f5809f3be415d6fc0908 Mon Sep 17 00:00:00 2001 From: Jeroen Date: Thu, 9 Aug 2018 21:31:26 +0200 Subject: [PATCH 088/173] more generic conversion of str/bytes to unicode --- synapse/crypto/context_factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/crypto/context_factory.py b/synapse/crypto/context_factory.py index 08c41a92b672..1a391adec198 100644 --- a/synapse/crypto/context_factory.py +++ b/synapse/crypto/context_factory.py @@ -123,6 +123,6 @@ def __init__(self, config): def get_options(self, host): return ClientTLSOptions( - unicode(host), + host.decode('utf-8'), CertificateOptions(verify=False).getContext() ) From 638d35ef082cb7f24d43fcb585c8bae52645bd6b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Aug 2018 10:59:09 +0100 Subject: [PATCH 089/173] Fix linearizer cancellation on twisted < 18.7 Turns out that cancellation of inlineDeferreds didn't really work properly until Twisted 18.7. This commit refactors Linearizer.queue to avoid inlineCallbacks. --- synapse/util/async.py | 111 ++++++++++++++++++++++++++---------------- 1 file changed, 68 insertions(+), 43 deletions(-) diff --git a/synapse/util/async.py b/synapse/util/async.py index a7094e2fb463..9b3f2f4b96df 100644 --- a/synapse/util/async.py +++ b/synapse/util/async.py @@ -188,62 +188,30 @@ def __init__(self, name=None, max_count=1, clock=None): # things blocked from executing. self.key_to_defer = {} - @defer.inlineCallbacks def queue(self, key): + # we avoid doing defer.inlineCallbacks here, so that cancellation works correctly. + # (https://twistedmatrix.com/trac/ticket/4632 meant that cancellations were not + # propagated inside inlineCallbacks until Twisted 18.7) entry = self.key_to_defer.setdefault(key, [0, collections.OrderedDict()]) # If the number of things executing is greater than the maximum # then add a deferred to the list of blocked items - # When on of the things currently executing finishes it will callback + # When one of the things currently executing finishes it will callback # this item so that it can continue executing. if entry[0] >= self.max_count: - new_defer = defer.Deferred() - entry[1][new_defer] = 1 - - logger.info( - "Waiting to acquire linearizer lock %r for key %r", self.name, key, - ) - try: - yield make_deferred_yieldable(new_defer) - except Exception as e: - if isinstance(e, CancelledError): - logger.info( - "Cancelling wait for linearizer lock %r for key %r", - self.name, key, - ) - else: - logger.warn( - "Unexpected exception waiting for linearizer lock %r for key %r", - self.name, key, - ) - - # we just have to take ourselves back out of the queue. - del entry[1][new_defer] - raise - - logger.info("Acquired linearizer lock %r for key %r", self.name, key) - entry[0] += 1 - - # if the code holding the lock completes synchronously, then it - # will recursively run the next claimant on the list. That can - # relatively rapidly lead to stack exhaustion. This is essentially - # the same problem as http://twistedmatrix.com/trac/ticket/9304. 
- # - # In order to break the cycle, we add a cheeky sleep(0) here to - # ensure that we fall back to the reactor between each iteration. - # - # (This needs to happen while we hold the lock, and the context manager's exit - # code must be synchronous, so this is the only sensible place.) - yield self._clock.sleep(0) - + res = self._await_lock(key) else: logger.info( "Acquired uncontended linearizer lock %r for key %r", self.name, key, ) entry[0] += 1 + res = defer.succeed(None) + + # once we successfully get the lock, we need to return a context manager which + # will release the lock. @contextmanager - def _ctx_manager(): + def _ctx_manager(_): try: yield finally: @@ -264,7 +232,64 @@ def _ctx_manager(): # map. del self.key_to_defer[key] - defer.returnValue(_ctx_manager()) + res.addCallback(_ctx_manager) + return res + + def _await_lock(self, key): + """Helper for queue: adds a deferred to the queue + + Assumes that we've already checked that we've reached the limit of the number + of lock-holders we allow. Creates a new deferred which is added to the list, and + adds some management around cancellations. + + Returns the deferred, which will callback once we have secured the lock. + + """ + entry = self.key_to_defer[key] + + logger.info( + "Waiting to acquire linearizer lock %r for key %r", self.name, key, + ) + + new_defer = make_deferred_yieldable(defer.Deferred()) + entry[1][new_defer] = 1 + + def cb(_r): + logger.info("Acquired linearizer lock %r for key %r", self.name, key) + entry[0] += 1 + + # if the code holding the lock completes synchronously, then it + # will recursively run the next claimant on the list. That can + # relatively rapidly lead to stack exhaustion. This is essentially + # the same problem as http://twistedmatrix.com/trac/ticket/9304. + # + # In order to break the cycle, we add a cheeky sleep(0) here to + # ensure that we fall back to the reactor between each iteration. + # + # (This needs to happen while we hold the lock, and the context manager's exit + # code must be synchronous, so this is the only sensible place.) + return self._clock.sleep(0) + + def eb(e): + logger.info("defer %r got err %r", new_defer, e) + if isinstance(e, CancelledError): + logger.info( + "Cancelling wait for linearizer lock %r for key %r", + self.name, key, + ) + + else: + logger.warn( + "Unexpected exception waiting for linearizer lock %r for key %r", + self.name, key, + ) + + # we just have to take ourselves back out of the queue. 
+ del entry[1][new_defer] + return e + + new_defer.addCallbacks(cb, eb) + return new_defer class ReadWriteLock(object): From 178ab76ac0e804ad519800f270e121b241139246 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Aug 2018 11:05:23 +0100 Subject: [PATCH 090/173] changelog --- changelog.d/3676.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3676.bugfix diff --git a/changelog.d/3676.bugfix b/changelog.d/3676.bugfix new file mode 100644 index 000000000000..7b23a2773a55 --- /dev/null +++ b/changelog.d/3676.bugfix @@ -0,0 +1 @@ +Make the tests pass on Twisted < 18.7.0 From b37c4724191995eec4f5bc9d64f176b2a315f777 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 10 Aug 2018 23:50:21 +1000 Subject: [PATCH 091/173] Rename async to async_helpers because `async` is a keyword on Python 3.7 (#3678) --- changelog.d/3678.misc | 1 + synapse/app/federation_sender.py | 2 +- synapse/federation/federation_server.py | 8 ++++---- synapse/handlers/device.py | 2 +- synapse/handlers/federation.py | 2 +- synapse/handlers/initial_sync.py | 2 +- synapse/handlers/message.py | 2 +- synapse/handlers/pagination.py | 2 +- synapse/handlers/presence.py | 2 +- synapse/handlers/read_marker.py | 2 +- synapse/handlers/register.py | 2 +- synapse/handlers/room_list.py | 2 +- synapse/handlers/room_member.py | 2 +- synapse/handlers/sync.py | 2 +- synapse/http/client.py | 2 +- synapse/http/matrixfederationclient.py | 2 +- synapse/notifier.py | 2 +- synapse/push/bulk_push_rule_evaluator.py | 2 +- synapse/push/mailer.py | 2 +- synapse/rest/client/transactions.py | 2 +- synapse/rest/media/v1/media_repository.py | 2 +- synapse/rest/media/v1/preview_url_resource.py | 2 +- synapse/state.py | 2 +- synapse/storage/events.py | 2 +- synapse/storage/roommember.py | 2 +- synapse/util/{async.py => async_helpers.py} | 0 synapse/util/caches/descriptors.py | 2 +- synapse/util/caches/response_cache.py | 2 +- synapse/util/caches/snapshot_cache.py | 2 +- synapse/util/logcontext.py | 2 +- tests/storage/test__base.py | 2 +- tests/util/test_linearizer.py | 2 +- tests/util/test_rwlock.py | 2 +- 33 files changed, 35 insertions(+), 34 deletions(-) create mode 100644 changelog.d/3678.misc rename synapse/util/{async.py => async_helpers.py} (100%) diff --git a/changelog.d/3678.misc b/changelog.d/3678.misc new file mode 100644 index 000000000000..0d7c8da64aa7 --- /dev/null +++ b/changelog.d/3678.misc @@ -0,0 +1 @@ +Rename synapse.util.async to synapse.util.async_helpers to mitigate async becoming a keyword on Python 3.7. 
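For context on the Linearizer rewrite above: the commit notes that, per Twisted ticket 4632, cancelling the Deferred returned by an inlineCallbacks function was not propagated to whatever that function was yielding on until Twisted 18.7. A toy illustration of the failure mode, assuming the pre-18.7 semantics described in the commit message:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def waiter(inner):
        # A queued Linearizer claimant effectively waited like this.
        yield inner

    inner = defer.Deferred()
    d = waiter(inner)
    d.cancel()
    # On Twisted < 18.7 the cancellation does not reach `inner`, which is
    # why the old inlineCallbacks-based queue() could not handle
    # cancellation reliably; the explicit addCallbacks version above
    # sidesteps the propagation problem entirely.
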
diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index e082d45c94e9..7bbf0ad0826c 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -40,7 +40,7 @@ from synapse.replication.tcp.client import ReplicationClientHandler from synapse.server import HomeServer from synapse.storage.engines import create_engine -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.httpresourcetree import create_resource_tree from synapse.util.logcontext import LoggingContext, run_in_background from synapse.util.manhole import manhole diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 2b62f687b683..a23136784a39 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -40,7 +40,7 @@ from synapse.federation.units import Edu, Transaction from synapse.http.endpoint import parse_server_name from synapse.types import get_domain_from_id -from synapse.util import async +from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.caches.response_cache import ResponseCache from synapse.util.logutils import log_function @@ -67,8 +67,8 @@ def __init__(self, hs): self.auth = hs.get_auth() self.handler = hs.get_handlers().federation_handler - self._server_linearizer = async.Linearizer("fed_server") - self._transaction_linearizer = async.Linearizer("fed_txn_handler") + self._server_linearizer = Linearizer("fed_server") + self._transaction_linearizer = Linearizer("fed_txn_handler") self.transaction_actions = TransactionActions(self.store) @@ -200,7 +200,7 @@ def process_pdus_for_room(room_id): event_id, f.getTraceback().rstrip(), ) - yield async.concurrently_execute( + yield concurrently_execute( process_pdus_for_room, pdus_by_room.keys(), TRANSACTION_CONCURRENCY_LIMIT, ) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 2d44f15da306..9e017116a904 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -23,7 +23,7 @@ from synapse.api.errors import FederationDeniedError from synapse.types import RoomStreamToken, get_domain_from_id from synapse.util import stringutils -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.metrics import measure_func from synapse.util.retryutils import NotRetryingDestination diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0dffd44e22e0..2380d17f4e99 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -52,7 +52,7 @@ from synapse.state import resolve_events_with_factory from synapse.types import UserID, get_domain_from_id from synapse.util import logcontext, unwrapFirstError -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room from synapse.util.frozenutils import unfreeze from synapse.util.logutils import log_function diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 40e7580a61dd..1fb17fd9a5b1 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -25,7 +25,7 @@ from synapse.streams.config import PaginationConfig from synapse.types import StreamToken, UserID from synapse.util import unwrapFirstError -from synapse.util.async import concurrently_execute +from 
synapse.util.async_helpers import concurrently_execute from synapse.util.caches.snapshot_cache import SnapshotCache from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index bcb093ba3ea7..01a362360e92 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -32,7 +32,7 @@ from synapse.events.validator import EventValidator from synapse.replication.http.send_event import ReplicationSendEventRestServlet from synapse.types import RoomAlias, UserID -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.frozenutils import frozendict_json_encoder from synapse.util.logcontext import run_in_background from synapse.util.metrics import measure_func diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index b2849783ed1b..a97d43550f6c 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -22,7 +22,7 @@ from synapse.api.errors import SynapseError from synapse.events.utils import serialize_event from synapse.types import RoomStreamToken -from synapse.util.async import ReadWriteLock +from synapse.util.async_helpers import ReadWriteLock from synapse.util.logcontext import run_in_background from synapse.util.stringutils import random_string from synapse.visibility import filter_events_for_client diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 3732830194ee..20fc3b03230d 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -36,7 +36,7 @@ from synapse.metrics import LaterGauge from synapse.storage.presence import UserPresenceState from synapse.types import UserID, get_domain_from_id -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.util.logcontext import run_in_background from synapse.util.logutils import log_function diff --git a/synapse/handlers/read_marker.py b/synapse/handlers/read_marker.py index 995460f82a30..32108568c640 100644 --- a/synapse/handlers/read_marker.py +++ b/synapse/handlers/read_marker.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from ._base import BaseHandler diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 0e16bbe0ee7a..3526b20d5a57 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -28,7 +28,7 @@ ) from synapse.http.client import CaptchaServerHttpClient from synapse.types import RoomAlias, RoomID, UserID, create_requester -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.threepids import check_3pid_allowed from ._base import BaseHandler diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 828229f5c3e3..37e41afd61db 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -26,7 +26,7 @@ from synapse.api.constants import EventTypes, JoinRules from synapse.types import ThirdPartyInstanceID -from synapse.util.async import concurrently_execute +from synapse.util.async_helpers import concurrently_execute from synapse.util.caches.descriptors import cachedInlineCallbacks from synapse.util.caches.response_cache import ResponseCache diff --git 
a/synapse/handlers/room_member.py b/synapse/handlers/room_member.py index 0d4a3f4677e5..fb94b5d7d4a1 100644 --- a/synapse/handlers/room_member.py +++ b/synapse/handlers/room_member.py @@ -30,7 +30,7 @@ from synapse.api.constants import EventTypes, Membership from synapse.api.errors import AuthError, Codes, SynapseError from synapse.types import RoomID, UserID -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.distributor import user_joined_room, user_left_room logger = logging.getLogger(__name__) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index dff1f67dcb45..6393a9674b72 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -25,7 +25,7 @@ from synapse.api.constants import EventTypes, Membership from synapse.push.clientformat import format_push_rules_for_user from synapse.types import RoomStreamToken -from synapse.util.async import concurrently_execute +from synapse.util.async_helpers import concurrently_execute from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.caches.lrucache import LruCache from synapse.util.caches.response_cache import ResponseCache diff --git a/synapse/http/client.py b/synapse/http/client.py index 3771e0b3f627..ab4fbf59b28d 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -42,7 +42,7 @@ from synapse.api.errors import Codes, HttpResponseException, SynapseError from synapse.http import cancelled_to_request_timed_out_error, redact_uri from synapse.http.endpoint import SpiderEndpoint -from synapse.util.async import add_timeout_to_deferred +from synapse.util.async_helpers import add_timeout_to_deferred from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.logcontext import make_deferred_yieldable diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 762273f59b4b..44b61e70a49c 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -43,7 +43,7 @@ from synapse.http import cancelled_to_request_timed_out_error from synapse.http.endpoint import matrix_federation_endpoint from synapse.util import logcontext -from synapse.util.async import add_timeout_to_deferred +from synapse.util.async_helpers import add_timeout_to_deferred from synapse.util.logcontext import make_deferred_yieldable logger = logging.getLogger(__name__) diff --git a/synapse/notifier.py b/synapse/notifier.py index e650c3e4941a..82f391481c2c 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -25,7 +25,7 @@ from synapse.handlers.presence import format_user_presence_state from synapse.metrics import LaterGauge from synapse.types import StreamToken -from synapse.util.async import ( +from synapse.util.async_helpers import ( DeferredTimeoutError, ObservableDeferred, add_timeout_to_deferred, diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index 1d14d3639c1d..8f9a76147f97 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -26,7 +26,7 @@ from synapse.api.constants import EventTypes, Membership from synapse.event_auth import get_user_power_level from synapse.state import POWER_KEY -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.caches import register_cache from synapse.util.caches.descriptors import cached diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py index 9d601208fd62..bfa6df7b68b8 100644 
--- a/synapse/push/mailer.py +++ b/synapse/push/mailer.py @@ -35,7 +35,7 @@ name_from_member_event, ) from synapse.types import UserID -from synapse.util.async import concurrently_execute +from synapse.util.async_helpers import concurrently_execute from synapse.visibility import filter_events_for_client logger = logging.getLogger(__name__) diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 00b1b3066e5d..511e96ab00d9 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -17,7 +17,7 @@ to ensure idempotency when performing PUTs using the REST API.""" import logging -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.logcontext import make_deferred_yieldable, run_in_background logger = logging.getLogger(__name__) diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 8fb413d8253b..4c589e05e07d 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -36,7 +36,7 @@ ) from synapse.http.matrixfederationclient import MatrixFederationHttpClient from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.logcontext import make_deferred_yieldable from synapse.util.retryutils import NotRetryingDestination from synapse.util.stringutils import is_ascii, random_string diff --git a/synapse/rest/media/v1/preview_url_resource.py b/synapse/rest/media/v1/preview_url_resource.py index 27aa0def2f02..778ef9733741 100644 --- a/synapse/rest/media/v1/preview_url_resource.py +++ b/synapse/rest/media/v1/preview_url_resource.py @@ -42,7 +42,7 @@ ) from synapse.http.servlet import parse_integer, parse_string from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import is_ascii, random_string diff --git a/synapse/state.py b/synapse/state.py index e1092b97a9b4..8b92d4057a2a 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -28,7 +28,7 @@ from synapse.api.constants import EventTypes from synapse.api.errors import AuthError from synapse.events.snapshot import EventContext -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.caches import CACHE_SIZE_FACTOR from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.logutils import log_function diff --git a/synapse/storage/events.py b/synapse/storage/events.py index ce32e8fefd23..d4aa192a0a49 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -38,7 +38,7 @@ from synapse.storage.event_federation import EventFederationStore from synapse.storage.events_worker import EventsWorkerStore from synapse.types import RoomStreamToken, get_domain_from_id -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from synapse.util.frozenutils import frozendict_json_encoder from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable diff --git 
a/synapse/storage/roommember.py b/synapse/storage/roommember.py index 10dce21cea19..9b4e6d6aa820 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -26,7 +26,7 @@ from synapse.api.constants import EventTypes, Membership from synapse.storage.events_worker import EventsWorkerStore from synapse.types import get_domain_from_id -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from synapse.util.caches import intern_string from synapse.util.caches.descriptors import cached, cachedInlineCallbacks from synapse.util.stringutils import to_ascii diff --git a/synapse/util/async.py b/synapse/util/async_helpers.py similarity index 100% rename from synapse/util/async.py rename to synapse/util/async_helpers.py diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 861c24809c31..187510576a36 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -25,7 +25,7 @@ from twisted.internet import defer from synapse.util import logcontext, unwrapFirstError -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import get_cache_factor_for from synapse.util.caches.lrucache import LruCache from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index a8491b42d57b..afb03b2e1b7c 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -16,7 +16,7 @@ from twisted.internet import defer -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches import register_cache from synapse.util.logcontext import make_deferred_yieldable, run_in_background diff --git a/synapse/util/caches/snapshot_cache.py b/synapse/util/caches/snapshot_cache.py index d03678b8c886..8318db8d2c76 100644 --- a/synapse/util/caches/snapshot_cache.py +++ b/synapse/util/caches/snapshot_cache.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred class SnapshotCache(object): diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 8dcae50b3936..07e83fadda0c 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -526,7 +526,7 @@ def _set_context_cb(result, context): "synapse.util.logcontext", "synapse.http.server", "synapse.storage._base", - "synapse.util.async", + "synapse.util.async_helpers", ] diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 6d6f00c5c5cc..52eff2a10494 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -18,7 +18,7 @@ from twisted.internet import defer -from synapse.util.async import ObservableDeferred +from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.descriptors import Cache, cached from tests import unittest diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py index 4729bd5a0a15..7f48a72de88f 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -20,7 +20,7 @@ from twisted.internet.defer import CancelledError from synapse.util import Clock, logcontext -from synapse.util.async import Linearizer +from synapse.util.async_helpers import Linearizer from tests import unittest diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index 24194e3b2599..7cd470be6743 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -14,7 +14,7 @@ # limitations under the License. -from synapse.util.async import ReadWriteLock +from synapse.util.async_helpers import ReadWriteLock from tests import unittest From 8b3d9b6b199abb87246f982d5db356f1966db925 Mon Sep 17 00:00:00 2001 From: black Date: Fri, 10 Aug 2018 23:54:09 +1000 Subject: [PATCH 092/173] Run black. 
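
This commit is a purely mechanical reformat of the test suite with the black code formatter, so none of the hunks below change behaviour. The dominant pattern: black collapses any construct that fits within its default 88-column limit and explodes the rest, one element per line with a trailing comma. A minimal before/after sketch mirroring the first tests/api/test_auth.py hunk (values are illustrative; note that black's 2018 releases collapsed short literals even when they carried a trailing comma, whereas later releases treat such a "magic trailing comma" as a request to keep the exploded form):

    test_user = "@foo:bar"

    # Hand-wrapped input, as the test read before the reformat:
    user_info = {
        "name": test_user,
        "token_id": "ditto",
        "device_id": "device",
    }

    # What black 18.x produces: the literal fits within the default
    # 88-column limit, so it collapses to a single line.
    user_info = {"name": test_user, "token_id": "ditto", "device_id": "device"}

    assert user_info["token_id"] == "ditto"
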
--- tests/api/test_auth.py | 76 ++-- tests/api/test_filtering.py | 361 +++++------------- tests/api/test_ratelimiting.py | 7 +- tests/appservice/test_appservice.py | 86 ++--- tests/appservice/test_scheduler.py | 28 +- tests/config/test_generate.py | 38 +- tests/config/test_load.py | 38 +- tests/crypto/test_event_signing.py | 19 +- tests/crypto/test_keyring.py | 46 +-- tests/events/test_utils.py | 135 ++----- tests/federation/test_federation_server.py | 26 +- tests/handlers/test_appservice.py | 39 +- tests/handlers/test_auth.py | 16 +- tests/handlers/test_device.py | 98 ++--- tests/handlers/test_directory.py | 22 +- tests/handlers/test_e2e_keys.py | 99 ++--- tests/handlers/test_presence.py | 216 +++++------ tests/handlers/test_profile.py | 25 +- tests/handlers/test_register.py | 12 +- tests/handlers/test_typing.py | 231 ++++++----- tests/http/test_endpoint.py | 6 +- tests/replication/slave/storage/_base.py | 5 +- .../slave/storage/test_account_data.py | 14 +- .../replication/slave/storage/test_events.py | 105 ++--- .../slave/storage/test_receipts.py | 6 +- tests/rest/client/test_transactions.py | 30 +- tests/rest/client/v1/test_admin.py | 5 +- tests/rest/client/v1/test_events.py | 33 +- tests/rest/client/v1/test_profile.py | 42 +- tests/rest/client/v1/test_register.py | 1 + tests/rest/client/v1/test_typing.py | 47 ++- tests/rest/client/v1/utils.py | 70 ++-- tests/rest/client/v2_alpha/test_register.py | 17 +- tests/rest/media/v1/test_media_storage.py | 6 +- tests/server.py | 2 + tests/storage/test__base.py | 5 +- tests/storage/test_appservice.py | 178 +++------ tests/storage/test_background_update.py | 17 +- tests/storage/test_base.py | 46 +-- tests/storage/test_client_ips.py | 15 +- tests/storage/test_devices.py | 69 ++-- tests/storage/test_directory.py | 22 +- tests/storage/test_end_to_end_keys.py | 61 +-- tests/storage/test_event_federation.py | 39 +- tests/storage/test_event_push_actions.py | 78 ++-- tests/storage/test_keys.py | 11 +- tests/storage/test_monthly_active_users.py | 12 +- tests/storage/test_presence.py | 69 ++-- tests/storage/test_profile.py | 18 +- tests/storage/test_redaction.py | 70 ++-- tests/storage/test_registration.py | 38 +- tests/storage/test_room.py | 47 +-- tests/storage/test_roommember.py | 31 +- tests/storage/test_state.py | 249 ++++++------ tests/storage/test_user_directory.py | 40 +- tests/test_distributor.py | 10 +- tests/test_dns.py | 22 +- tests/test_event_auth.py | 98 +++-- tests/test_preview.py | 55 +-- tests/test_state.py | 245 +++++------- tests/test_test_utils.py | 5 +- tests/test_types.py | 5 +- tests/test_visibility.py | 145 +++---- tests/unittest.py | 7 +- tests/util/caches/test_descriptors.py | 46 +-- tests/util/test_dict_cache.py | 19 +- tests/util/test_expiring_cache.py | 1 - tests/util/test_file_consumer.py | 5 +- tests/util/test_linearizer.py | 7 +- tests/util/test_logcontext.py | 14 +- tests/util/test_lrucache.py | 2 - tests/util/test_rwlock.py | 9 +- tests/util/test_snapshot_cache.py | 1 - tests/util/test_stream_change_cache.py | 13 +- tests/utils.py | 78 ++-- 75 files changed, 1629 insertions(+), 2280 deletions(-) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index fbb96361a8bc..f8e28876bb52 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -34,7 +34,6 @@ def __init__(self, hs): class AuthTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): self.state_handler = Mock() @@ -53,11 +52,7 @@ def setUp(self): @defer.inlineCallbacks def test_get_user_by_req_user_valid_token(self): - user_info = { - 
"name": self.test_user, - "token_id": "ditto", - "device_id": "device", - } + user_info = {"name": self.test_user, "token_id": "ditto", "device_id": "device"} self.store.get_user_by_access_token = Mock(return_value=user_info) request = Mock(args={}) @@ -76,10 +71,7 @@ def test_get_user_by_req_user_bad_token(self): self.failureResultOf(d, AuthError) def test_get_user_by_req_user_missing_token(self): - user_info = { - "name": self.test_user, - "token_id": "ditto", - } + user_info = {"name": self.test_user, "token_id": "ditto"} self.store.get_user_by_access_token = Mock(return_value=user_info) request = Mock(args={}) @@ -90,8 +82,7 @@ def test_get_user_by_req_user_missing_token(self): @defer.inlineCallbacks def test_get_user_by_req_appservice_valid_token(self): app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=None, + token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None ) self.store.get_app_service_by_token = Mock(return_value=app_service) self.store.get_user_by_access_token = Mock(return_value=None) @@ -106,8 +97,11 @@ def test_get_user_by_req_appservice_valid_token(self): @defer.inlineCallbacks def test_get_user_by_req_appservice_valid_token_good_ip(self): from netaddr import IPSet + app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, + token="foobar", + url="a_url", + sender=self.test_user, ip_range_whitelist=IPSet(["192.168/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -122,8 +116,11 @@ def test_get_user_by_req_appservice_valid_token_good_ip(self): def test_get_user_by_req_appservice_valid_token_bad_ip(self): from netaddr import IPSet + app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, + token="foobar", + url="a_url", + sender=self.test_user, ip_range_whitelist=IPSet(["192.168/16"]), ) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -160,8 +157,7 @@ def test_get_user_by_req_appservice_missing_token(self): def test_get_user_by_req_appservice_valid_token_valid_user_id(self): masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=None, + token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None ) app_service.is_interested_in_user = Mock(return_value=True) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -174,15 +170,13 @@ def test_get_user_by_req_appservice_valid_token_valid_user_id(self): request.requestHeaders.getRawHeaders = mock_getRawHeaders() requester = yield self.auth.get_user_by_req(request) self.assertEquals( - requester.user.to_string(), - masquerading_user_id.decode('utf8') + requester.user.to_string(), masquerading_user_id.decode('utf8') ) def test_get_user_by_req_appservice_valid_token_bad_user_id(self): masquerading_user_id = b"@doppelganger:matrix.org" app_service = Mock( - token="foobar", url="a_url", sender=self.test_user, - ip_range_whitelist=None, + token="foobar", url="a_url", sender=self.test_user, ip_range_whitelist=None ) app_service.is_interested_in_user = Mock(return_value=False) self.store.get_app_service_by_token = Mock(return_value=app_service) @@ -201,17 +195,15 @@ def test_get_user_from_macaroon(self): # TODO(danielwh): Remove this mock when we remove the # get_user_by_access_token fallback. 
self.store.get_user_by_access_token = Mock( - return_value={ - "name": "@baldrick:matrix.org", - "device_id": "device", - } + return_value={"name": "@baldrick:matrix.org", "device_id": "device"} ) user_id = "@baldrick:matrix.org" macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) @@ -225,15 +217,14 @@ def test_get_user_from_macaroon(self): @defer.inlineCallbacks def test_get_guest_user_from_macaroon(self): - self.store.get_user_by_id = Mock(return_value={ - "is_guest": True, - }) + self.store.get_user_by_id = Mock(return_value={"is_guest": True}) user_id = "@baldrick:matrix.org" macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) @@ -257,7 +248,8 @@ def test_get_user_from_macaroon_user_db_mismatch(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user,)) @@ -277,7 +269,8 @@ def test_get_user_from_macaroon_missing_caveat(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") @@ -298,7 +291,8 @@ def test_get_user_from_macaroon_wrong_key(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key + "wrong") + key=self.hs.config.macaroon_secret_key + "wrong", + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user,)) @@ -320,7 +314,8 @@ def test_get_user_from_macaroon_unknown_caveat(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user,)) @@ -347,7 +342,8 @@ def test_get_user_from_macaroon_expired(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user,)) @@ -380,7 +376,8 @@ def test_get_user_from_macaroon_with_valid_duration(self): macaroon = pymacaroons.Macaroon( location=self.hs.config.server_name, identifier="key", - key=self.hs.config.macaroon_secret_key) + key=self.hs.config.macaroon_secret_key, + ) macaroon.add_first_party_caveat("gen = 1") macaroon.add_first_party_caveat("type = access") macaroon.add_first_party_caveat("user_id = %s" % (user_id,)) 
@@ -401,9 +398,7 @@ def test_cannot_use_regular_token_as_guest(self): token = yield self.hs.handlers.auth_handler.issue_access_token( USER_ID, "DEVICE" ) - self.store.add_access_token_to_user.assert_called_with( - USER_ID, token, "DEVICE" - ) + self.store.add_access_token_to_user.assert_called_with(USER_ID, token, "DEVICE") def get_user(tok): if token != tok: @@ -414,10 +409,9 @@ def get_user(tok): "token_id": 1234, "device_id": "DEVICE", } + self.store.get_user_by_access_token = get_user - self.store.get_user_by_id = Mock(return_value={ - "is_guest": False, - }) + self.store.get_user_by_id = Mock(return_value={"is_guest": False}) # check the token works request = Mock(args={}) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 836a23fb54bf..1c2d71052cd0 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -38,7 +38,6 @@ def MockEvent(**kwargs): class FilteringTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): self.mock_federation_resource = MockHttpResource() @@ -47,9 +46,7 @@ def setUp(self): self.mock_http_client.put_json = DeferredMockCallable() hs = yield setup_test_homeserver( - handlers=None, - http_client=self.mock_http_client, - keyring=Mock(), + handlers=None, http_client=self.mock_http_client, keyring=Mock() ) self.filtering = hs.get_filtering() @@ -64,7 +61,7 @@ def test_errors_on_invalid_filters(self): {"room": {"timeline": {"limit": 0}, "state": {"not_bars": ["*"]}}}, {"event_format": "other"}, {"room": {"not_rooms": ["#foo:pik-test"]}}, - {"presence": {"senders": ["@bar;pik.test.com"]}} + {"presence": {"senders": ["@bar;pik.test.com"]}}, ] for filter in invalid_filters: with self.assertRaises(SynapseError) as check_filter_error: @@ -81,34 +78,34 @@ def test_valid_filters(self): "include_leave": False, "rooms": ["!dee:pik-test"], "not_rooms": ["!gee:pik-test"], - "account_data": {"limit": 0, "types": ["*"]} + "account_data": {"limit": 0, "types": ["*"]}, } }, { "room": { "state": { "types": ["m.room.*"], - "not_rooms": ["!726s6s6q:example.com"] + "not_rooms": ["!726s6s6q:example.com"], }, "timeline": { "limit": 10, "types": ["m.room.message"], "not_rooms": ["!726s6s6q:example.com"], - "not_senders": ["@spam:example.com"] + "not_senders": ["@spam:example.com"], }, "ephemeral": { "types": ["m.receipt", "m.typing"], "not_rooms": ["!726s6s6q:example.com"], - "not_senders": ["@spam:example.com"] - } + "not_senders": ["@spam:example.com"], + }, }, "presence": { "types": ["m.presence"], - "not_senders": ["@alice:example.com"] + "not_senders": ["@alice:example.com"], }, "event_format": "client", - "event_fields": ["type", "content", "sender"] - } + "event_fields": ["type", "content", "sender"], + }, ] for filter in valid_filters: try: @@ -121,229 +118,131 @@ def test_limits_are_applied(self): pass def test_definition_types_works_with_literals(self): - definition = { - "types": ["m.room.message", "org.matrix.foo.bar"] - } - event = MockEvent( - sender="@foo:bar", - type="m.room.message", - room_id="!foo:bar" - ) + definition = {"types": ["m.room.message", "org.matrix.foo.bar"]} + event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar") - self.assertTrue( - Filter(definition).check(event) - ) + self.assertTrue(Filter(definition).check(event)) def test_definition_types_works_with_wildcards(self): - definition = { - "types": ["m.*", "org.matrix.foo.bar"] - } - event = MockEvent( - sender="@foo:bar", - type="m.room.message", - room_id="!foo:bar" - ) - self.assertTrue( - 
Filter(definition).check(event) - ) + definition = {"types": ["m.*", "org.matrix.foo.bar"]} + event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar") + self.assertTrue(Filter(definition).check(event)) def test_definition_types_works_with_unknowns(self): - definition = { - "types": ["m.room.message", "org.matrix.foo.bar"] - } + definition = {"types": ["m.room.message", "org.matrix.foo.bar"]} event = MockEvent( sender="@foo:bar", type="now.for.something.completely.different", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!foo:bar", ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_types_works_with_literals(self): - definition = { - "not_types": ["m.room.message", "org.matrix.foo.bar"] - } - event = MockEvent( - sender="@foo:bar", - type="m.room.message", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) - ) + definition = {"not_types": ["m.room.message", "org.matrix.foo.bar"]} + event = MockEvent(sender="@foo:bar", type="m.room.message", room_id="!foo:bar") + self.assertFalse(Filter(definition).check(event)) def test_definition_not_types_works_with_wildcards(self): - definition = { - "not_types": ["m.room.message", "org.matrix.*"] - } + definition = {"not_types": ["m.room.message", "org.matrix.*"]} event = MockEvent( - sender="@foo:bar", - type="org.matrix.custom.event", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) + sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar" ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_types_works_with_unknowns(self): - definition = { - "not_types": ["m.*", "org.*"] - } - event = MockEvent( - sender="@foo:bar", - type="com.nom.nom.nom", - room_id="!foo:bar" - ) - self.assertTrue( - Filter(definition).check(event) - ) + definition = {"not_types": ["m.*", "org.*"]} + event = MockEvent(sender="@foo:bar", type="com.nom.nom.nom", room_id="!foo:bar") + self.assertTrue(Filter(definition).check(event)) def test_definition_not_types_takes_priority_over_types(self): definition = { "not_types": ["m.*", "org.*"], - "types": ["m.room.message", "m.room.topic"] + "types": ["m.room.message", "m.room.topic"], } - event = MockEvent( - sender="@foo:bar", - type="m.room.topic", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) - ) + event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar") + self.assertFalse(Filter(definition).check(event)) def test_definition_senders_works_with_literals(self): - definition = { - "senders": ["@flibble:wibble"] - } + definition = {"senders": ["@flibble:wibble"]} event = MockEvent( - sender="@flibble:wibble", - type="com.nom.nom.nom", - room_id="!foo:bar" - ) - self.assertTrue( - Filter(definition).check(event) + sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar" ) + self.assertTrue(Filter(definition).check(event)) def test_definition_senders_works_with_unknowns(self): - definition = { - "senders": ["@flibble:wibble"] - } + definition = {"senders": ["@flibble:wibble"]} event = MockEvent( - sender="@challenger:appears", - type="com.nom.nom.nom", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) + sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar" ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_senders_works_with_literals(self): - definition = { - "not_senders": ["@flibble:wibble"] - } + definition = 
{"not_senders": ["@flibble:wibble"]} event = MockEvent( - sender="@flibble:wibble", - type="com.nom.nom.nom", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) + sender="@flibble:wibble", type="com.nom.nom.nom", room_id="!foo:bar" ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_senders_works_with_unknowns(self): - definition = { - "not_senders": ["@flibble:wibble"] - } + definition = {"not_senders": ["@flibble:wibble"]} event = MockEvent( - sender="@challenger:appears", - type="com.nom.nom.nom", - room_id="!foo:bar" - ) - self.assertTrue( - Filter(definition).check(event) + sender="@challenger:appears", type="com.nom.nom.nom", room_id="!foo:bar" ) + self.assertTrue(Filter(definition).check(event)) def test_definition_not_senders_takes_priority_over_senders(self): definition = { "not_senders": ["@misspiggy:muppets"], - "senders": ["@kermit:muppets", "@misspiggy:muppets"] + "senders": ["@kermit:muppets", "@misspiggy:muppets"], } event = MockEvent( - sender="@misspiggy:muppets", - type="m.room.topic", - room_id="!foo:bar" - ) - self.assertFalse( - Filter(definition).check(event) + sender="@misspiggy:muppets", type="m.room.topic", room_id="!foo:bar" ) + self.assertFalse(Filter(definition).check(event)) def test_definition_rooms_works_with_literals(self): - definition = { - "rooms": ["!secretbase:unknown"] - } + definition = {"rooms": ["!secretbase:unknown"]} event = MockEvent( - sender="@foo:bar", - type="m.room.message", - room_id="!secretbase:unknown" - ) - self.assertTrue( - Filter(definition).check(event) + sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown" ) + self.assertTrue(Filter(definition).check(event)) def test_definition_rooms_works_with_unknowns(self): - definition = { - "rooms": ["!secretbase:unknown"] - } + definition = {"rooms": ["!secretbase:unknown"]} event = MockEvent( sender="@foo:bar", type="m.room.message", - room_id="!anothersecretbase:unknown" - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!anothersecretbase:unknown", ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_rooms_works_with_literals(self): - definition = { - "not_rooms": ["!anothersecretbase:unknown"] - } + definition = {"not_rooms": ["!anothersecretbase:unknown"]} event = MockEvent( sender="@foo:bar", type="m.room.message", - room_id="!anothersecretbase:unknown" - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!anothersecretbase:unknown", ) + self.assertFalse(Filter(definition).check(event)) def test_definition_not_rooms_works_with_unknowns(self): - definition = { - "not_rooms": ["!secretbase:unknown"] - } + definition = {"not_rooms": ["!secretbase:unknown"]} event = MockEvent( sender="@foo:bar", type="m.room.message", - room_id="!anothersecretbase:unknown" - ) - self.assertTrue( - Filter(definition).check(event) + room_id="!anothersecretbase:unknown", ) + self.assertTrue(Filter(definition).check(event)) def test_definition_not_rooms_takes_priority_over_rooms(self): definition = { "not_rooms": ["!secretbase:unknown"], - "rooms": ["!secretbase:unknown"] + "rooms": ["!secretbase:unknown"], } event = MockEvent( - sender="@foo:bar", - type="m.room.message", - room_id="!secretbase:unknown" - ) - self.assertFalse( - Filter(definition).check(event) + sender="@foo:bar", type="m.room.message", room_id="!secretbase:unknown" ) + self.assertFalse(Filter(definition).check(event)) def test_definition_combined_event(self): definition = { @@ -352,16 +251,14 @@ def 
test_definition_combined_event(self): "rooms": ["!stage:unknown"], "not_rooms": ["!piggyshouse:muppets"], "types": ["m.room.message", "muppets.kermit.*"], - "not_types": ["muppets.misspiggy.*"] + "not_types": ["muppets.misspiggy.*"], } event = MockEvent( sender="@kermit:muppets", # yup type="m.room.message", # yup - room_id="!stage:unknown" # yup - ) - self.assertTrue( - Filter(definition).check(event) + room_id="!stage:unknown", # yup ) + self.assertTrue(Filter(definition).check(event)) def test_definition_combined_event_bad_sender(self): definition = { @@ -370,16 +267,14 @@ def test_definition_combined_event_bad_sender(self): "rooms": ["!stage:unknown"], "not_rooms": ["!piggyshouse:muppets"], "types": ["m.room.message", "muppets.kermit.*"], - "not_types": ["muppets.misspiggy.*"] + "not_types": ["muppets.misspiggy.*"], } event = MockEvent( sender="@misspiggy:muppets", # nope type="m.room.message", # yup - room_id="!stage:unknown" # yup - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!stage:unknown", # yup ) + self.assertFalse(Filter(definition).check(event)) def test_definition_combined_event_bad_room(self): definition = { @@ -388,16 +283,14 @@ def test_definition_combined_event_bad_room(self): "rooms": ["!stage:unknown"], "not_rooms": ["!piggyshouse:muppets"], "types": ["m.room.message", "muppets.kermit.*"], - "not_types": ["muppets.misspiggy.*"] + "not_types": ["muppets.misspiggy.*"], } event = MockEvent( sender="@kermit:muppets", # yup type="m.room.message", # yup - room_id="!piggyshouse:muppets" # nope - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!piggyshouse:muppets", # nope ) + self.assertFalse(Filter(definition).check(event)) def test_definition_combined_event_bad_type(self): definition = { @@ -406,37 +299,26 @@ def test_definition_combined_event_bad_type(self): "rooms": ["!stage:unknown"], "not_rooms": ["!piggyshouse:muppets"], "types": ["m.room.message", "muppets.kermit.*"], - "not_types": ["muppets.misspiggy.*"] + "not_types": ["muppets.misspiggy.*"], } event = MockEvent( sender="@kermit:muppets", # yup type="muppets.misspiggy.kisses", # nope - room_id="!stage:unknown" # yup - ) - self.assertFalse( - Filter(definition).check(event) + room_id="!stage:unknown", # yup ) + self.assertFalse(Filter(definition).check(event)) @defer.inlineCallbacks def test_filter_presence_match(self): - user_filter_json = { - "presence": { - "types": ["m.*"] - } - } + user_filter_json = {"presence": {"types": ["m.*"]}} filter_id = yield self.datastore.add_user_filter( - user_localpart=user_localpart, - user_filter=user_filter_json, - ) - event = MockEvent( - sender="@foo:bar", - type="m.profile", + user_localpart=user_localpart, user_filter=user_filter_json ) + event = MockEvent(sender="@foo:bar", type="m.profile") events = [event] user_filter = yield self.filtering.get_user_filter( - user_localpart=user_localpart, - filter_id=filter_id, + user_localpart=user_localpart, filter_id=filter_id ) results = user_filter.filter_presence(events=events) @@ -444,15 +326,10 @@ def test_filter_presence_match(self): @defer.inlineCallbacks def test_filter_presence_no_match(self): - user_filter_json = { - "presence": { - "types": ["m.*"] - } - } + user_filter_json = {"presence": {"types": ["m.*"]}} filter_id = yield self.datastore.add_user_filter( - user_localpart=user_localpart + "2", - user_filter=user_filter_json, + user_localpart=user_localpart + "2", user_filter=user_filter_json ) event = MockEvent( event_id="$asdasd:localhost", @@ -462,8 +339,7 @@ def 
test_filter_presence_no_match(self): events = [event] user_filter = yield self.filtering.get_user_filter( - user_localpart=user_localpart + "2", - filter_id=filter_id, + user_localpart=user_localpart + "2", filter_id=filter_id ) results = user_filter.filter_presence(events=events) @@ -471,27 +347,15 @@ def test_filter_presence_no_match(self): @defer.inlineCallbacks def test_filter_room_state_match(self): - user_filter_json = { - "room": { - "state": { - "types": ["m.*"] - } - } - } + user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = yield self.datastore.add_user_filter( - user_localpart=user_localpart, - user_filter=user_filter_json, - ) - event = MockEvent( - sender="@foo:bar", - type="m.room.topic", - room_id="!foo:bar" + user_localpart=user_localpart, user_filter=user_filter_json ) + event = MockEvent(sender="@foo:bar", type="m.room.topic", room_id="!foo:bar") events = [event] user_filter = yield self.filtering.get_user_filter( - user_localpart=user_localpart, - filter_id=filter_id, + user_localpart=user_localpart, filter_id=filter_id ) results = user_filter.filter_room_state(events=events) @@ -499,27 +363,17 @@ def test_filter_room_state_match(self): @defer.inlineCallbacks def test_filter_room_state_no_match(self): - user_filter_json = { - "room": { - "state": { - "types": ["m.*"] - } - } - } + user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = yield self.datastore.add_user_filter( - user_localpart=user_localpart, - user_filter=user_filter_json, + user_localpart=user_localpart, user_filter=user_filter_json ) event = MockEvent( - sender="@foo:bar", - type="org.matrix.custom.event", - room_id="!foo:bar" + sender="@foo:bar", type="org.matrix.custom.event", room_id="!foo:bar" ) events = [event] user_filter = yield self.filtering.get_user_filter( - user_localpart=user_localpart, - filter_id=filter_id, + user_localpart=user_localpart, filter_id=filter_id ) results = user_filter.filter_room_state(events) @@ -543,45 +397,32 @@ def test_filter_rooms(self): @defer.inlineCallbacks def test_add_filter(self): - user_filter_json = { - "room": { - "state": { - "types": ["m.*"] - } - } - } + user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = yield self.filtering.add_user_filter( - user_localpart=user_localpart, - user_filter=user_filter_json, + user_localpart=user_localpart, user_filter=user_filter_json ) self.assertEquals(filter_id, 0) - self.assertEquals(user_filter_json, ( - yield self.datastore.get_user_filter( - user_localpart=user_localpart, - filter_id=0, - ) - )) + self.assertEquals( + user_filter_json, + ( + yield self.datastore.get_user_filter( + user_localpart=user_localpart, filter_id=0 + ) + ), + ) @defer.inlineCallbacks def test_get_filter(self): - user_filter_json = { - "room": { - "state": { - "types": ["m.*"] - } - } - } + user_filter_json = {"room": {"state": {"types": ["m.*"]}}} filter_id = yield self.datastore.add_user_filter( - user_localpart=user_localpart, - user_filter=user_filter_json, + user_localpart=user_localpart, user_filter=user_filter_json ) filter = yield self.filtering.get_user_filter( - user_localpart=user_localpart, - filter_id=filter_id, + user_localpart=user_localpart, filter_id=filter_id ) self.assertEquals(filter.get_filter_json(), user_filter_json) diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index c45b59b36c37..8933fe3b721f 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -4,17 +4,16 @@ class TestRatelimiter(unittest.TestCase): - def 
test_allowed(self): limiter = Ratelimiter() allowed, time_allowed = limiter.send_message( - user_id="test_id", time_now_s=0, msg_rate_hz=0.1, burst_count=1, + user_id="test_id", time_now_s=0, msg_rate_hz=0.1, burst_count=1 ) self.assertTrue(allowed) self.assertEquals(10., time_allowed) allowed, time_allowed = limiter.send_message( - user_id="test_id", time_now_s=5, msg_rate_hz=0.1, burst_count=1, + user_id="test_id", time_now_s=5, msg_rate_hz=0.1, burst_count=1 ) self.assertFalse(allowed) self.assertEquals(10., time_allowed) @@ -28,7 +27,7 @@ def test_allowed(self): def test_pruning(self): limiter = Ratelimiter() allowed, time_allowed = limiter.send_message( - user_id="test_id_1", time_now_s=0, msg_rate_hz=0.1, burst_count=1, + user_id="test_id_1", time_now_s=0, msg_rate_hz=0.1, burst_count=1 ) self.assertIn("test_id_1", limiter.message_counts) diff --git a/tests/appservice/test_appservice.py b/tests/appservice/test_appservice.py index 891e0cc97301..4003869ed665 100644 --- a/tests/appservice/test_appservice.py +++ b/tests/appservice/test_appservice.py @@ -24,14 +24,10 @@ def _regex(regex, exclusive=True): - return { - "regex": re.compile(regex), - "exclusive": exclusive - } + return {"regex": re.compile(regex), "exclusive": exclusive} class ApplicationServiceTestCase(unittest.TestCase): - def setUp(self): self.service = ApplicationService( id="unique_identifier", @@ -41,8 +37,8 @@ def setUp(self): namespaces={ ApplicationService.NS_USERS: [], ApplicationService.NS_ROOMS: [], - ApplicationService.NS_ALIASES: [] - } + ApplicationService.NS_ALIASES: [], + }, ) self.event = Mock( type="m.something", room_id="!foo:bar", sender="@someone:somewhere" @@ -52,25 +48,19 @@ def setUp(self): @defer.inlineCallbacks def test_regex_user_id_prefix_match(self): - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.sender = "@irc_foobar:matrix.org" self.assertTrue((yield self.service.is_interested(self.event))) @defer.inlineCallbacks def test_regex_user_id_prefix_no_match(self): - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.sender = "@someone_else:matrix.org" self.assertFalse((yield self.service.is_interested(self.event))) @defer.inlineCallbacks def test_regex_room_member_is_checked(self): - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.sender = "@someone_else:matrix.org" self.event.type = "m.room.member" self.event.state_key = "@irc_foobar:matrix.org" @@ -98,60 +88,47 @@ def test_regex_alias_match(self): _regex("#irc_.*:matrix.org") ) self.store.get_aliases_for_room.return_value = [ - "#irc_foobar:matrix.org", "#athing:matrix.org" + "#irc_foobar:matrix.org", + "#athing:matrix.org", ] self.store.get_users_in_room.return_value = [] - self.assertTrue((yield self.service.is_interested( - self.event, self.store - ))) + self.assertTrue((yield self.service.is_interested(self.event, self.store))) def test_non_exclusive_alias(self): self.service.namespaces[ApplicationService.NS_ALIASES].append( _regex("#irc_.*:matrix.org", exclusive=False) ) - self.assertFalse(self.service.is_exclusive_alias( - "#irc_foobar:matrix.org" - )) + self.assertFalse(self.service.is_exclusive_alias("#irc_foobar:matrix.org")) def test_non_exclusive_room(self): 
self.service.namespaces[ApplicationService.NS_ROOMS].append( _regex("!irc_.*:matrix.org", exclusive=False) ) - self.assertFalse(self.service.is_exclusive_room( - "!irc_foobar:matrix.org" - )) + self.assertFalse(self.service.is_exclusive_room("!irc_foobar:matrix.org")) def test_non_exclusive_user(self): self.service.namespaces[ApplicationService.NS_USERS].append( _regex("@irc_.*:matrix.org", exclusive=False) ) - self.assertFalse(self.service.is_exclusive_user( - "@irc_foobar:matrix.org" - )) + self.assertFalse(self.service.is_exclusive_user("@irc_foobar:matrix.org")) def test_exclusive_alias(self): self.service.namespaces[ApplicationService.NS_ALIASES].append( _regex("#irc_.*:matrix.org", exclusive=True) ) - self.assertTrue(self.service.is_exclusive_alias( - "#irc_foobar:matrix.org" - )) + self.assertTrue(self.service.is_exclusive_alias("#irc_foobar:matrix.org")) def test_exclusive_user(self): self.service.namespaces[ApplicationService.NS_USERS].append( _regex("@irc_.*:matrix.org", exclusive=True) ) - self.assertTrue(self.service.is_exclusive_user( - "@irc_foobar:matrix.org" - )) + self.assertTrue(self.service.is_exclusive_user("@irc_foobar:matrix.org")) def test_exclusive_room(self): self.service.namespaces[ApplicationService.NS_ROOMS].append( _regex("!irc_.*:matrix.org", exclusive=True) ) - self.assertTrue(self.service.is_exclusive_room( - "!irc_foobar:matrix.org" - )) + self.assertTrue(self.service.is_exclusive_room("!irc_foobar:matrix.org")) @defer.inlineCallbacks def test_regex_alias_no_match(self): @@ -159,47 +136,36 @@ def test_regex_alias_no_match(self): _regex("#irc_.*:matrix.org") ) self.store.get_aliases_for_room.return_value = [ - "#xmpp_foobar:matrix.org", "#athing:matrix.org" + "#xmpp_foobar:matrix.org", + "#athing:matrix.org", ] self.store.get_users_in_room.return_value = [] - self.assertFalse((yield self.service.is_interested( - self.event, self.store - ))) + self.assertFalse((yield self.service.is_interested(self.event, self.store))) @defer.inlineCallbacks def test_regex_multiple_matches(self): self.service.namespaces[ApplicationService.NS_ALIASES].append( _regex("#irc_.*:matrix.org") ) - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.sender = "@irc_foobar:matrix.org" self.store.get_aliases_for_room.return_value = ["#irc_barfoo:matrix.org"] self.store.get_users_in_room.return_value = [] - self.assertTrue((yield self.service.is_interested( - self.event, self.store - ))) + self.assertTrue((yield self.service.is_interested(self.event, self.store))) @defer.inlineCallbacks def test_interested_in_self(self): # make sure invites get through self.service.sender = "@appservice:name" - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.event.type = "m.room.member" - self.event.content = { - "membership": "invite" - } + self.event.content = {"membership": "invite"} self.event.state_key = self.service.sender self.assertTrue((yield self.service.is_interested(self.event))) @defer.inlineCallbacks def test_member_list_match(self): - self.service.namespaces[ApplicationService.NS_USERS].append( - _regex("@irc_.*") - ) + self.service.namespaces[ApplicationService.NS_USERS].append(_regex("@irc_.*")) self.store.get_users_in_room.return_value = [ "@alice:here", "@irc_fo:here", # AS user @@ -208,6 +174,6 @@ def test_member_list_match(self): 
self.store.get_aliases_for_room.return_value = [] self.event.sender = "@xmpp_foobar:matrix.org" - self.assertTrue((yield self.service.is_interested( - event=self.event, store=self.store - ))) + self.assertTrue( + (yield self.service.is_interested(event=self.event, store=self.store)) + ) diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index b9f4863e9ace..db9f86bdacf0 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -30,7 +30,6 @@ class ApplicationServiceSchedulerTransactionCtrlTestCase(unittest.TestCase): - def setUp(self): self.clock = MockClock() self.store = Mock() @@ -38,8 +37,10 @@ def setUp(self): self.recoverer = Mock() self.recoverer_fn = Mock(return_value=self.recoverer) self.txnctrl = _TransactionController( - clock=self.clock, store=self.store, as_api=self.as_api, - recoverer_fn=self.recoverer_fn + clock=self.clock, + store=self.store, + as_api=self.as_api, + recoverer_fn=self.recoverer_fn, ) def test_single_service_up_txn_sent(self): @@ -54,9 +55,7 @@ def test_single_service_up_txn_sent(self): return_value=defer.succeed(ApplicationServiceState.UP) ) txn.send = Mock(return_value=defer.succeed(True)) - self.store.create_appservice_txn = Mock( - return_value=defer.succeed(txn) - ) + self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn)) # actual call self.txnctrl.send(service, events) @@ -77,9 +76,7 @@ def test_single_service_down(self): self.store.get_appservice_state = Mock( return_value=defer.succeed(ApplicationServiceState.DOWN) ) - self.store.create_appservice_txn = Mock( - return_value=defer.succeed(txn) - ) + self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn)) # actual call self.txnctrl.send(service, events) @@ -104,9 +101,7 @@ def test_single_service_up_txn_not_sent(self): ) self.store.set_appservice_state = Mock(return_value=defer.succeed(True)) txn.send = Mock(return_value=defer.succeed(False)) # fails to send - self.store.create_appservice_txn = Mock( - return_value=defer.succeed(txn) - ) + self.store.create_appservice_txn = Mock(return_value=defer.succeed(txn)) # actual call self.txnctrl.send(service, events) @@ -124,7 +119,6 @@ def test_single_service_up_txn_not_sent(self): class ApplicationServiceSchedulerRecovererTestCase(unittest.TestCase): - def setUp(self): self.clock = MockClock() self.as_api = Mock() @@ -146,6 +140,7 @@ def test_recover_single_txn(self): def take_txn(*args, **kwargs): return defer.succeed(txns.pop(0)) + self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn) self.recoverer.recover() @@ -171,6 +166,7 @@ def take_txn(*args, **kwargs): return defer.succeed(txns.pop(0)) else: return defer.succeed(txn) + self.store.get_oldest_unsent_txn = Mock(side_effect=take_txn) self.recoverer.recover() @@ -197,7 +193,6 @@ def take_txn(*args, **kwargs): class ApplicationServiceSchedulerQueuerTestCase(unittest.TestCase): - def setUp(self): self.txn_ctrl = Mock() self.queuer = _ServiceQueuer(self.txn_ctrl, MockClock()) @@ -211,9 +206,7 @@ def test_send_single_event_no_queue(self): def test_send_single_event_with_queue(self): d = defer.Deferred() - self.txn_ctrl.send = Mock( - side_effect=lambda x, y: make_deferred_yieldable(d), - ) + self.txn_ctrl.send = Mock(side_effect=lambda x, y: make_deferred_yieldable(d)) service = Mock(id=4) event = Mock(event_id="first") event2 = Mock(event_id="second") @@ -247,6 +240,7 @@ def test_multiple_service_queues(self): def do_send(x, y): return make_deferred_yieldable(send_return_list.pop(0)) + 
self.txn_ctrl.send = Mock(side_effect=do_send) # send events for different ASes and make sure they are sent diff --git a/tests/config/test_generate.py b/tests/config/test_generate.py index eb7f0ab12a89..f88d28a19dce 100644 --- a/tests/config/test_generate.py +++ b/tests/config/test_generate.py @@ -24,7 +24,6 @@ class ConfigGenerationTestCase(unittest.TestCase): - def setUp(self): self.dir = tempfile.mkdtemp() self.file = os.path.join(self.dir, "homeserver.yaml") @@ -33,23 +32,30 @@ def tearDown(self): shutil.rmtree(self.dir) def test_generate_config_generates_files(self): - HomeServerConfig.load_or_generate_config("", [ - "--generate-config", - "-c", self.file, - "--report-stats=yes", - "-H", "lemurs.win" - ]) + HomeServerConfig.load_or_generate_config( + "", + [ + "--generate-config", + "-c", + self.file, + "--report-stats=yes", + "-H", + "lemurs.win", + ], + ) self.assertSetEqual( - set([ - "homeserver.yaml", - "lemurs.win.log.config", - "lemurs.win.signing.key", - "lemurs.win.tls.crt", - "lemurs.win.tls.dh", - "lemurs.win.tls.key", - ]), - set(os.listdir(self.dir)) + set( + [ + "homeserver.yaml", + "lemurs.win.log.config", + "lemurs.win.signing.key", + "lemurs.win.tls.crt", + "lemurs.win.tls.dh", + "lemurs.win.tls.key", + ] + ), + set(os.listdir(self.dir)), ) self.assert_log_filename_is( diff --git a/tests/config/test_load.py b/tests/config/test_load.py index 5c422eff388a..d5f177709350 100644 --- a/tests/config/test_load.py +++ b/tests/config/test_load.py @@ -24,7 +24,6 @@ class ConfigLoadingTestCase(unittest.TestCase): - def setUp(self): self.dir = tempfile.mkdtemp() print(self.dir) @@ -43,15 +42,14 @@ def test_load_fails_if_server_name_missing(self): def test_generates_and_loads_macaroon_secret_key(self): self.generate_config() - with open(self.file, - "r") as f: + with open(self.file, "r") as f: raw = yaml.load(f) self.assertIn("macaroon_secret_key", raw) config = HomeServerConfig.load_config("", ["-c", self.file]) self.assertTrue( hasattr(config, "macaroon_secret_key"), - "Want config to have attr macaroon_secret_key" + "Want config to have attr macaroon_secret_key", ) if len(config.macaroon_secret_key) < 5: self.fail( @@ -62,7 +60,7 @@ def test_generates_and_loads_macaroon_secret_key(self): config = HomeServerConfig.load_or_generate_config("", ["-c", self.file]) self.assertTrue( hasattr(config, "macaroon_secret_key"), - "Want config to have attr macaroon_secret_key" + "Want config to have attr macaroon_secret_key", ) if len(config.macaroon_secret_key) < 5: self.fail( @@ -80,10 +78,9 @@ def test_load_succeeds_if_macaroon_secret_key_missing(self): def test_disable_registration(self): self.generate_config() - self.add_lines_to_config([ - "enable_registration: true", - "disable_registration: true", - ]) + self.add_lines_to_config( + ["enable_registration: true", "disable_registration: true"] + ) # Check that disable_registration clobbers enable_registration. config = HomeServerConfig.load_config("", ["-c", self.file]) self.assertFalse(config.enable_registration) @@ -92,18 +89,23 @@ def test_disable_registration(self): self.assertFalse(config.enable_registration) # Check that either config value is clobbered by the command line. 
- config = HomeServerConfig.load_or_generate_config("", [ - "-c", self.file, "--enable-registration" - ]) + config = HomeServerConfig.load_or_generate_config( + "", ["-c", self.file, "--enable-registration"] + ) self.assertTrue(config.enable_registration) def generate_config(self): - HomeServerConfig.load_or_generate_config("", [ - "--generate-config", - "-c", self.file, - "--report-stats=yes", - "-H", "lemurs.win" - ]) + HomeServerConfig.load_or_generate_config( + "", + [ + "--generate-config", + "-c", + self.file, + "--report-stats=yes", + "-H", + "lemurs.win", + ], + ) def generate_config_and_remove_lines_containing(self, needle): self.generate_config() diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index cd11871b8059..b2536c1e6914 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -24,9 +24,7 @@ # Perform these tests using given secret key so we get entirely deterministic # signatures output that we can test against. -SIGNING_KEY_SEED = decode_base64( - "YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1" -) +SIGNING_KEY_SEED = decode_base64("YJDBA9Xnr2sVqXD9Vj7XVUnmFZcZrlw8Md7kMW+3XA1") KEY_ALG = "ed25519" KEY_VER = 1 @@ -36,7 +34,6 @@ class EventSigningTestCase(unittest.TestCase): - def setUp(self): self.signing_key = nacl.signing.SigningKey(SIGNING_KEY_SEED) self.signing_key.alg = KEY_ALG @@ -51,7 +48,7 @@ def test_sign_minimal(self): 'signatures': {}, 'type': "X", 'unsigned': {'age_ts': 1000000}, - }, + } ) add_hashes_and_signatures(builder, HOSTNAME, self.signing_key) @@ -61,8 +58,7 @@ def test_sign_minimal(self): self.assertTrue(hasattr(event, 'hashes')) self.assertIn('sha256', event.hashes) self.assertEquals( - event.hashes['sha256'], - "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI", + event.hashes['sha256'], "6tJjLpXtggfke8UxFhAKg82QVkJzvKOVOOSjUDK4ZSI" ) self.assertTrue(hasattr(event, 'signatures')) @@ -77,9 +73,7 @@ def test_sign_minimal(self): def test_sign_message(self): builder = EventBuilder( { - 'content': { - 'body': "Here is the message content", - }, + 'content': {'body': "Here is the message content"}, 'event_id': "$0:domain", 'origin': "domain", 'origin_server_ts': 1000000, @@ -98,8 +92,7 @@ def test_sign_message(self): self.assertTrue(hasattr(event, 'hashes')) self.assertIn('sha256', event.hashes) self.assertEquals( - event.hashes['sha256'], - "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g", + event.hashes['sha256'], "onLKD1bGljeBWQhWZ1kaP9SorVmRQNdN5aM2JYU2n/g" ) self.assertTrue(hasattr(event, 'signatures')) @@ -108,5 +101,5 @@ def test_sign_message(self): self.assertEquals( event.signatures[HOSTNAME][KEY_NAME], "Wm+VzmOUOz08Ds+0NTWb1d4CZrVsJSikkeRxh6aCcUw" - "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA" + "u6pNC78FunoD7KNWzqFn241eYHYMGCA5McEiVPdhzBA", ) diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index a9d37fe0845f..e40681ed1e71 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -36,9 +36,7 @@ def __init__(self): def get_verify_keys(self): vk = signedjson.key.get_verify_key(self.key) - return { - "%s:%s" % (vk.alg, vk.version): vk, - } + return {"%s:%s" % (vk.alg, vk.version): vk} def get_signed_key(self, server_name, verify_key): key_id = "%s:%s" % (verify_key.alg, verify_key.version) @@ -47,10 +45,8 @@ def get_signed_key(self, server_name, verify_key): "old_verify_keys": {}, "valid_until_ts": time.time() * 1000 + 3600, "verify_keys": { - key_id: { - "key": signedjson.key.encode_verify_key_base64(verify_key) - } - } + key_id: {"key": 
signedjson.key.encode_verify_key_base64(verify_key)} + }, } signedjson.sign.sign_json(res, self.server_name, self.key) return res @@ -62,18 +58,15 @@ def setUp(self): self.mock_perspective_server = MockPerspectiveServer() self.http_client = Mock() self.hs = yield utils.setup_test_homeserver( - handlers=None, - http_client=self.http_client, + handlers=None, http_client=self.http_client ) self.hs.config.perspectives = { - self.mock_perspective_server.server_name: - self.mock_perspective_server.get_verify_keys() + self.mock_perspective_server.server_name: self.mock_perspective_server.get_verify_keys() } def check_context(self, _, expected): self.assertEquals( - getattr(LoggingContext.current_context(), "request", None), - expected + getattr(LoggingContext.current_context(), "request", None), expected ) @defer.inlineCallbacks @@ -89,8 +82,7 @@ def test_wait_for_previous_lookups(self): context_one.request = "one" wait_1_deferred = kr.wait_for_previous_lookups( - ["server1"], - {"server1": lookup_1_deferred}, + ["server1"], {"server1": lookup_1_deferred} ) # there were no previous lookups, so the deferred should be ready @@ -105,8 +97,7 @@ def test_wait_for_previous_lookups(self): # set off another wait. It should block because the first lookup # hasn't yet completed. wait_2_deferred = kr.wait_for_previous_lookups( - ["server1"], - {"server1": lookup_2_deferred}, + ["server1"], {"server1": lookup_2_deferred} ) self.assertFalse(wait_2_deferred.called) # ... so we should have reset the LoggingContext. @@ -132,21 +123,19 @@ def test_verify_json_objects_for_server_awaits_previous_requests(self): persp_resp = { "server_keys": [ self.mock_perspective_server.get_signed_key( - "server10", - signedjson.key.get_verify_key(key1) - ), + "server10", signedjson.key.get_verify_key(key1) + ) ] } persp_deferred = defer.Deferred() @defer.inlineCallbacks def get_perspectives(**kwargs): - self.assertEquals( - LoggingContext.current_context().request, "11", - ) + self.assertEquals(LoggingContext.current_context().request, "11") with logcontext.PreserveLoggingContext(): yield persp_deferred defer.returnValue(persp_resp) + self.http_client.post_json.side_effect = get_perspectives with LoggingContext("11") as context_11: @@ -154,9 +143,7 @@ def get_perspectives(**kwargs): # start off a first set of lookups res_deferreds = kr.verify_json_objects_for_server( - [("server10", json1), - ("server11", {}) - ] + [("server10", json1), ("server11", {})] ) # the unsigned json should be rejected pretty quickly @@ -172,7 +159,7 @@ def get_perspectives(**kwargs): # wait a tick for it to send the request to the perspectives server # (it first tries the datastore) - yield clock.sleep(1) # XXX find out why this takes so long! + yield clock.sleep(1) # XXX find out why this takes so long! 
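The logcontext assertions in this keyring test encode an invariant worth spelling out: code that waits on a Deferred it does not own should drop its `LoggingContext` for the duration of the wait, and find the same context restored when it resumes. A rough self-contained sketch of that invariant, assuming a Synapse checkout of this vintage is importable:

from twisted.internet import defer

from synapse.util.logcontext import LoggingContext, PreserveLoggingContext

@defer.inlineCallbacks
def check_context_restored():
    with LoggingContext("11") as ctx:
        ctx.request = "11"
        d = defer.succeed(None)  # stands in for a deferred owned elsewhere
        with PreserveLoggingContext():
            # the logging context is reset while we wait...
            yield d
        # ...and restored once we resume
        assert LoggingContext.current_context() is ctx

check_context_restored()
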
self.http_client.post_json.assert_called_once() self.assertIs(LoggingContext.current_context(), context_11) @@ -186,7 +173,7 @@ def get_perspectives(**kwargs): self.http_client.post_json.return_value = defer.Deferred() res_deferreds_2 = kr.verify_json_objects_for_server( - [("server10", json1)], + [("server10", json1)] ) yield clock.sleep(1) self.http_client.post_json.assert_not_called() @@ -207,8 +194,7 @@ def test_verify_json_for_server(self): key1 = signedjson.key.generate_signing_key(1) yield self.hs.datastore.store_server_verify_key( - "server9", "", time.time() * 1000, - signedjson.key.get_verify_key(key1), + "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1) ) json1 = {} signedjson.sign.sign_json(json1, "server9", key1) diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index f51d99419e5d..ff217ca8b989 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -31,25 +31,20 @@ def MockEvent(**kwargs): class PruneEventTestCase(unittest.TestCase): """ Asserts that a new event constructed with `evdict` will look like `matchdict` when it is redacted. """ + def run_test(self, evdict, matchdict): - self.assertEquals( - prune_event(FrozenEvent(evdict)).get_dict(), - matchdict - ) + self.assertEquals(prune_event(FrozenEvent(evdict)).get_dict(), matchdict) def test_minimal(self): self.run_test( - { - 'type': 'A', - 'event_id': '$test:domain', - }, + {'type': 'A', 'event_id': '$test:domain'}, { 'type': 'A', 'event_id': '$test:domain', 'content': {}, 'signatures': {}, 'unsigned': {}, - } + }, ) def test_basic_keys(self): @@ -70,23 +65,19 @@ def test_basic_keys(self): 'content': {}, 'signatures': {}, 'unsigned': {}, - } + }, ) def test_unsigned_age_ts(self): self.run_test( - { - 'type': 'B', - 'event_id': '$test:domain', - 'unsigned': {'age_ts': 20}, - }, + {'type': 'B', 'event_id': '$test:domain', 'unsigned': {'age_ts': 20}}, { 'type': 'B', 'event_id': '$test:domain', 'content': {}, 'signatures': {}, 'unsigned': {'age_ts': 20}, - } + }, ) self.run_test( @@ -101,23 +92,19 @@ def test_unsigned_age_ts(self): 'content': {}, 'signatures': {}, 'unsigned': {}, - } + }, ) def test_content(self): self.run_test( - { - 'type': 'C', - 'event_id': '$test:domain', - 'content': {'things': 'here'}, - }, + {'type': 'C', 'event_id': '$test:domain', 'content': {'things': 'here'}}, { 'type': 'C', 'event_id': '$test:domain', 'content': {}, 'signatures': {}, 'unsigned': {}, - } + }, ) self.run_test( @@ -132,27 +119,20 @@ def test_content(self): 'content': {'creator': '@2:domain'}, 'signatures': {}, 'unsigned': {}, - } + }, ) class SerializeEventTestCase(unittest.TestCase): - def serialize(self, ev, fields): return serialize_event(ev, 1479807801915, only_event_fields=fields) def test_event_fields_works_with_keys(self): self.assertEquals( self.serialize( - MockEvent( - sender="@alice:localhost", - room_id="!foo:bar" - ), - ["room_id"] + MockEvent(sender="@alice:localhost", room_id="!foo:bar"), ["room_id"] ), - { - "room_id": "!foo:bar", - } + {"room_id": "!foo:bar"}, ) def test_event_fields_works_with_nested_keys(self): @@ -161,17 +141,11 @@ def test_event_fields_works_with_nested_keys(self): MockEvent( sender="@alice:localhost", room_id="!foo:bar", - content={ - "body": "A message", - }, + content={"body": "A message"}, ), - ["content.body"] + ["content.body"], ), - { - "content": { - "body": "A message", - } - } + {"content": {"body": "A message"}}, ) def test_event_fields_works_with_dot_keys(self): @@ -180,17 +154,11 @@ def 
test_event_fields_works_with_dot_keys(self): MockEvent( sender="@alice:localhost", room_id="!foo:bar", - content={ - "key.with.dots": {}, - }, + content={"key.with.dots": {}}, ), - ["content.key\.with\.dots"] + ["content.key\.with\.dots"], ), - { - "content": { - "key.with.dots": {}, - } - } + {"content": {"key.with.dots": {}}}, ) def test_event_fields_works_with_nested_dot_keys(self): @@ -201,21 +169,12 @@ def test_event_fields_works_with_nested_dot_keys(self): room_id="!foo:bar", content={ "not_me": 1, - "nested.dot.key": { - "leaf.key": 42, - "not_me_either": 1, - }, + "nested.dot.key": {"leaf.key": 42, "not_me_either": 1}, }, ), - ["content.nested\.dot\.key.leaf\.key"] + ["content.nested\.dot\.key.leaf\.key"], ), - { - "content": { - "nested.dot.key": { - "leaf.key": 42, - }, - } - } + {"content": {"nested.dot.key": {"leaf.key": 42}}}, ) def test_event_fields_nops_with_unknown_keys(self): @@ -224,17 +183,11 @@ def test_event_fields_nops_with_unknown_keys(self): MockEvent( sender="@alice:localhost", room_id="!foo:bar", - content={ - "foo": "bar", - }, + content={"foo": "bar"}, ), - ["content.foo", "content.notexists"] + ["content.foo", "content.notexists"], ), - { - "content": { - "foo": "bar", - } - } + {"content": {"foo": "bar"}}, ) def test_event_fields_nops_with_non_dict_keys(self): @@ -243,13 +196,11 @@ def test_event_fields_nops_with_non_dict_keys(self): MockEvent( sender="@alice:localhost", room_id="!foo:bar", - content={ - "foo": ["I", "am", "an", "array"], - }, + content={"foo": ["I", "am", "an", "array"]}, ), - ["content.foo.am"] + ["content.foo.am"], ), - {} + {}, ) def test_event_fields_nops_with_array_keys(self): @@ -258,13 +209,11 @@ def test_event_fields_nops_with_array_keys(self): MockEvent( sender="@alice:localhost", room_id="!foo:bar", - content={ - "foo": ["I", "am", "an", "array"], - }, + content={"foo": ["I", "am", "an", "array"]}, ), - ["content.foo.1"] + ["content.foo.1"], ), - {} + {}, ) def test_event_fields_all_fields_if_empty(self): @@ -274,31 +223,21 @@ def test_event_fields_all_fields_if_empty(self): type="foo", event_id="test", room_id="!foo:bar", - content={ - "foo": "bar", - }, + content={"foo": "bar"}, ), - [] + [], ), { "type": "foo", "event_id": "test", "room_id": "!foo:bar", - "content": { - "foo": "bar", - }, - "unsigned": {} - } + "content": {"foo": "bar"}, + "unsigned": {}, + }, ) def test_event_fields_fail_if_fields_not_str(self): with self.assertRaises(TypeError): self.serialize( - MockEvent( - room_id="!foo:bar", - content={ - "foo": "bar", - }, - ), - ["room_id", 4] + MockEvent(room_id="!foo:bar", content={"foo": "bar"}), ["room_id", 4] ) diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index c91e25f54ff0..af15f4cc5a81 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -23,10 +23,7 @@ @unittest.DEBUG class ServerACLsTestCase(unittest.TestCase): def test_blacklisted_server(self): - e = _create_acl_event({ - "allow": ["*"], - "deny": ["evil.com"], - }) + e = _create_acl_event({"allow": ["*"], "deny": ["evil.com"]}) logging.info("ACL event: %s", e.content) self.assertFalse(server_matches_acl_event("evil.com", e)) @@ -36,10 +33,7 @@ def test_blacklisted_server(self): self.assertTrue(server_matches_acl_event("honestly.not.evil.com", e)) def test_block_ip_literals(self): - e = _create_acl_event({ - "allow_ip_literals": False, - "allow": ["*"], - }) + e = _create_acl_event({"allow_ip_literals": False, "allow": ["*"]}) logging.info("ACL event: 
%s", e.content) self.assertFalse(server_matches_acl_event("1.2.3.4", e)) @@ -49,10 +43,12 @@ def test_block_ip_literals(self): def _create_acl_event(content): - return FrozenEvent({ - "room_id": "!a:b", - "event_id": "$a:b", - "type": "m.room.server_acls", - "sender": "@a:b", - "content": content - }) + return FrozenEvent( + { + "room_id": "!a:b", + "event_id": "$a:b", + "type": "m.room.server_acls", + "sender": "@a:b", + "content": content, + } + ) diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index 57c0771cf33d..ba7148ec0181 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -45,20 +45,18 @@ def test_notify_interested_services(self): services = [ self._mkservice(is_interested=False), interested_service, - self._mkservice(is_interested=False) + self._mkservice(is_interested=False), ] self.mock_store.get_app_services = Mock(return_value=services) self.mock_store.get_user_by_id = Mock(return_value=[]) event = Mock( - sender="@someone:anywhere", - type="m.room.message", - room_id="!foo:bar" + sender="@someone:anywhere", type="m.room.message", room_id="!foo:bar" ) self.mock_store.get_new_events_for_appservice.side_effect = [ (0, [event]), - (0, []) + (0, []), ] self.mock_as_api.push = Mock() yield self.handler.notify_interested_services(0) @@ -74,21 +72,15 @@ def test_query_user_exists_unknown_user(self): self.mock_store.get_app_services = Mock(return_value=services) self.mock_store.get_user_by_id = Mock(return_value=None) - event = Mock( - sender=user_id, - type="m.room.message", - room_id="!foo:bar" - ) + event = Mock(sender=user_id, type="m.room.message", room_id="!foo:bar") self.mock_as_api.push = Mock() self.mock_as_api.query_user = Mock() self.mock_store.get_new_events_for_appservice.side_effect = [ (0, [event]), - (0, []) + (0, []), ] yield self.handler.notify_interested_services(0) - self.mock_as_api.query_user.assert_called_once_with( - services[0], user_id - ) + self.mock_as_api.query_user.assert_called_once_with(services[0], user_id) @defer.inlineCallbacks def test_query_user_exists_known_user(self): @@ -96,25 +88,19 @@ def test_query_user_exists_known_user(self): services = [self._mkservice(is_interested=True)] services[0].is_interested_in_user = Mock(return_value=True) self.mock_store.get_app_services = Mock(return_value=services) - self.mock_store.get_user_by_id = Mock(return_value={ - "name": user_id - }) + self.mock_store.get_user_by_id = Mock(return_value={"name": user_id}) - event = Mock( - sender=user_id, - type="m.room.message", - room_id="!foo:bar" - ) + event = Mock(sender=user_id, type="m.room.message", room_id="!foo:bar") self.mock_as_api.push = Mock() self.mock_as_api.query_user = Mock() self.mock_store.get_new_events_for_appservice.side_effect = [ (0, [event]), - (0, []) + (0, []), ] yield self.handler.notify_interested_services(0) self.assertFalse( self.mock_as_api.query_user.called, - "query_user called when it shouldn't have been." 
+ "query_user called when it shouldn't have been.", ) @defer.inlineCallbacks @@ -129,7 +115,7 @@ def test_query_room_alias_exists(self): services = [ self._mkservice_alias(is_interested_in_alias=False), interested_service, - self._mkservice_alias(is_interested_in_alias=False) + self._mkservice_alias(is_interested_in_alias=False), ] self.mock_store.get_app_services = Mock(return_value=services) @@ -140,8 +126,7 @@ def test_query_room_alias_exists(self): result = yield self.handler.query_room_alias_exists(room_alias) self.mock_as_api.query_alias.assert_called_once_with( - interested_service, - room_alias_str + interested_service, room_alias_str ) self.assertEquals(result.room_id, room_id) self.assertEquals(result.servers, servers) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 8a9bf2d5fd02..ede01f8099a6 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -81,9 +81,7 @@ def verify_nonce(caveat): def test_short_term_login_token_gives_user_id(self): self.hs.clock.now = 1000 - token = self.macaroon_generator.generate_short_term_login_token( - "a_user", 5000 - ) + token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000) user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id( token ) @@ -98,17 +96,13 @@ def test_short_term_login_token_gives_user_id(self): @defer.inlineCallbacks def test_short_term_login_token_cannot_replace_user_id(self): - token = self.macaroon_generator.generate_short_term_login_token( - "a_user", 5000 - ) + token = self.macaroon_generator.generate_short_term_login_token("a_user", 5000) macaroon = pymacaroons.Macaroon.deserialize(token) user_id = yield self.auth_handler.validate_short_term_login_token_and_get_user_id( macaroon.serialize() ) - self.assertEqual( - "a_user", user_id - ) + self.assertEqual("a_user", user_id) # add another "user_id" caveat, which might allow us to override the # user_id. 
@@ -165,7 +159,5 @@ def test_mau_limits_not_exceeded(self): ) def _get_macaroon(self): - token = self.macaroon_generator.generate_short_term_login_token( - "user_a", 5000 - ) + token = self.macaroon_generator.generate_short_term_login_token("user_a", 5000) return pymacaroons.Macaroon.deserialize(token) diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 633a0b7f361f..d70d64550403 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -28,9 +28,9 @@ class DeviceTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): super(DeviceTestCase, self).__init__(*args, **kwargs) - self.store = None # type: synapse.storage.DataStore + self.store = None # type: synapse.storage.DataStore self.handler = None # type: synapse.handlers.device.DeviceHandler - self.clock = None # type: utils.MockClock + self.clock = None # type: utils.MockClock @defer.inlineCallbacks def setUp(self): @@ -44,7 +44,7 @@ def test_device_is_created_if_doesnt_exist(self): res = yield self.handler.check_device_registered( user_id="@boris:foo", device_id="fco", - initial_device_display_name="display name" + initial_device_display_name="display name", ) self.assertEqual(res, "fco") @@ -56,14 +56,14 @@ def test_device_is_preserved_if_exists(self): res1 = yield self.handler.check_device_registered( user_id="@boris:foo", device_id="fco", - initial_device_display_name="display name" + initial_device_display_name="display name", ) self.assertEqual(res1, "fco") res2 = yield self.handler.check_device_registered( user_id="@boris:foo", device_id="fco", - initial_device_display_name="new display name" + initial_device_display_name="new display name", ) self.assertEqual(res2, "fco") @@ -75,7 +75,7 @@ def test_device_id_is_made_up_if_unspecified(self): device_id = yield self.handler.check_device_registered( user_id="@theresa:foo", device_id=None, - initial_device_display_name="display" + initial_device_display_name="display", ) dev = yield self.handler.store.get_device("@theresa:foo", device_id) @@ -87,43 +87,53 @@ def test_get_devices_by_user(self): res = yield self.handler.get_devices_by_user(user1) self.assertEqual(3, len(res)) - device_map = { - d["device_id"]: d for d in res - } - self.assertDictContainsSubset({ - "user_id": user1, - "device_id": "xyz", - "display_name": "display 0", - "last_seen_ip": None, - "last_seen_ts": None, - }, device_map["xyz"]) - self.assertDictContainsSubset({ - "user_id": user1, - "device_id": "fco", - "display_name": "display 1", - "last_seen_ip": "ip1", - "last_seen_ts": 1000000, - }, device_map["fco"]) - self.assertDictContainsSubset({ - "user_id": user1, - "device_id": "abc", - "display_name": "display 2", - "last_seen_ip": "ip3", - "last_seen_ts": 3000000, - }, device_map["abc"]) + device_map = {d["device_id"]: d for d in res} + self.assertDictContainsSubset( + { + "user_id": user1, + "device_id": "xyz", + "display_name": "display 0", + "last_seen_ip": None, + "last_seen_ts": None, + }, + device_map["xyz"], + ) + self.assertDictContainsSubset( + { + "user_id": user1, + "device_id": "fco", + "display_name": "display 1", + "last_seen_ip": "ip1", + "last_seen_ts": 1000000, + }, + device_map["fco"], + ) + self.assertDictContainsSubset( + { + "user_id": user1, + "device_id": "abc", + "display_name": "display 2", + "last_seen_ip": "ip3", + "last_seen_ts": 3000000, + }, + device_map["abc"], + ) @defer.inlineCallbacks def test_get_device(self): yield self._record_users() res = yield self.handler.get_device(user1, "abc") - 
self.assertDictContainsSubset({ - "user_id": user1, - "device_id": "abc", - "display_name": "display 2", - "last_seen_ip": "ip3", - "last_seen_ts": 3000000, - }, res) + self.assertDictContainsSubset( + { + "user_id": user1, + "device_id": "abc", + "display_name": "display 2", + "last_seen_ip": "ip3", + "last_seen_ts": 3000000, + }, + res, + ) @defer.inlineCallbacks def test_delete_device(self): @@ -153,8 +163,7 @@ def test_update_device(self): def test_update_unknown_device(self): update = {"display_name": "new_display"} with self.assertRaises(synapse.api.errors.NotFoundError): - yield self.handler.update_device("user_id", "unknown_device_id", - update) + yield self.handler.update_device("user_id", "unknown_device_id", update) @defer.inlineCallbacks def _record_users(self): @@ -168,16 +177,17 @@ def _record_users(self): yield self._record_user(user2, "def", "dispkay", "token4", "ip4") @defer.inlineCallbacks - def _record_user(self, user_id, device_id, display_name, - access_token=None, ip=None): + def _record_user( + self, user_id, device_id, display_name, access_token=None, ip=None + ): device_id = yield self.handler.check_device_registered( user_id=user_id, device_id=device_id, - initial_device_display_name=display_name + initial_device_display_name=display_name, ) if ip is not None: yield self.store.insert_client_ip( - user_id, - access_token, ip, "user_agent", device_id) + user_id, access_token, ip, "user_agent", device_id + ) self.clock.advance_time(1000) diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index a353070316a0..06de9f5eca25 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -42,6 +42,7 @@ def setUp(self): def register_query_handler(query_type, handler): self.query_handlers[query_type] = handler + self.mock_registry.register_query_handler = register_query_handler hs = yield setup_test_homeserver( @@ -68,10 +69,7 @@ def test_get_local_association(self): result = yield self.handler.get_association(self.my_room) - self.assertEquals({ - "room_id": "!8765qwer:test", - "servers": ["test"], - }, result) + self.assertEquals({"room_id": "!8765qwer:test", "servers": ["test"]}, result) @defer.inlineCallbacks def test_get_remote_association(self): @@ -81,16 +79,13 @@ def test_get_remote_association(self): result = yield self.handler.get_association(self.remote_room) - self.assertEquals({ - "room_id": "!8765qwer:test", - "servers": ["test", "remote"], - }, result) + self.assertEquals( + {"room_id": "!8765qwer:test", "servers": ["test", "remote"]}, result + ) self.mock_federation.make_query.assert_called_with( destination="remote", query_type="directory", - args={ - "room_alias": "#another:remote", - }, + args={"room_alias": "#another:remote"}, retry_on_dns_fail=False, ignore_backoff=True, ) @@ -105,7 +100,4 @@ def test_incoming_fed_query(self): {"room_alias": "#your-room:test"} ) - self.assertEquals({ - "room_id": "!8765asdf:test", - "servers": ["test"], - }, response) + self.assertEquals({"room_id": "!8765asdf:test", "servers": ["test"]}, response) diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index ca1542236d5b..57ab228455e4 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -28,14 +28,13 @@ class E2eKeysHandlerTestCase(unittest.TestCase): def __init__(self, *args, **kwargs): super(E2eKeysHandlerTestCase, self).__init__(*args, **kwargs) - self.hs = None # type: synapse.server.HomeServer + self.hs = None # type: synapse.server.HomeServer self.handler 
= None # type: synapse.handlers.e2e_keys.E2eKeysHandler @defer.inlineCallbacks def setUp(self): self.hs = yield utils.setup_test_homeserver( - handlers=None, - federation_client=mock.Mock(), + handlers=None, federation_client=mock.Mock() ) self.handler = synapse.handlers.e2e_keys.E2eKeysHandler(self.hs) @@ -54,30 +53,21 @@ def test_reupload_one_time_keys(self): device_id = "xyz" keys = { "alg1:k1": "key1", - "alg2:k2": { - "key": "key2", - "signatures": {"k1": "sig1"} - }, - "alg2:k3": { - "key": "key3", - }, + "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}}, + "alg2:k3": {"key": "key3"}, } res = yield self.handler.upload_keys_for_user( - local_user, device_id, {"one_time_keys": keys}, + local_user, device_id, {"one_time_keys": keys} ) - self.assertDictEqual(res, { - "one_time_key_counts": {"alg1": 1, "alg2": 2} - }) + self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}}) # we should be able to change the signature without a problem keys["alg2:k2"]["signatures"]["k1"] = "sig2" res = yield self.handler.upload_keys_for_user( - local_user, device_id, {"one_time_keys": keys}, + local_user, device_id, {"one_time_keys": keys} ) - self.assertDictEqual(res, { - "one_time_key_counts": {"alg1": 1, "alg2": 2} - }) + self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}}) @defer.inlineCallbacks def test_change_one_time_keys(self): @@ -87,25 +77,18 @@ def test_change_one_time_keys(self): device_id = "xyz" keys = { "alg1:k1": "key1", - "alg2:k2": { - "key": "key2", - "signatures": {"k1": "sig1"} - }, - "alg2:k3": { - "key": "key3", - }, + "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}}, + "alg2:k3": {"key": "key3"}, } res = yield self.handler.upload_keys_for_user( - local_user, device_id, {"one_time_keys": keys}, + local_user, device_id, {"one_time_keys": keys} ) - self.assertDictEqual(res, { - "one_time_key_counts": {"alg1": 1, "alg2": 2} - }) + self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1, "alg2": 2}}) try: yield self.handler.upload_keys_for_user( - local_user, device_id, {"one_time_keys": {"alg1:k1": "key2"}}, + local_user, device_id, {"one_time_keys": {"alg1:k1": "key2"}} ) self.fail("No error when changing string key") except errors.SynapseError: @@ -113,7 +96,7 @@ def test_change_one_time_keys(self): try: yield self.handler.upload_keys_for_user( - local_user, device_id, {"one_time_keys": {"alg2:k3": "key2"}}, + local_user, device_id, {"one_time_keys": {"alg2:k3": "key2"}} ) self.fail("No error when replacing dict key with string") except errors.SynapseError: @@ -121,9 +104,7 @@ def test_change_one_time_keys(self): try: yield self.handler.upload_keys_for_user( - local_user, device_id, { - "one_time_keys": {"alg1:k1": {"key": "key"}} - }, + local_user, device_id, {"one_time_keys": {"alg1:k1": {"key": "key"}}} ) self.fail("No error when replacing string key with dict") except errors.SynapseError: @@ -131,13 +112,12 @@ def test_change_one_time_keys(self): try: yield self.handler.upload_keys_for_user( - local_user, device_id, { + local_user, + device_id, + { "one_time_keys": { - "alg2:k2": { - "key": "key3", - "signatures": {"k1": "sig1"}, - } - }, + "alg2:k2": {"key": "key3", "signatures": {"k1": "sig1"}} + } }, ) self.fail("No error when replacing dict key") @@ -148,31 +128,20 @@ def test_change_one_time_keys(self): def test_claim_one_time_key(self): local_user = "@boris:" + self.hs.hostname device_id = "xyz" - keys = { - "alg1:k1": "key1", - } + keys = {"alg1:k1": "key1"} res = yield self.handler.upload_keys_for_user( - 
local_user, device_id, {"one_time_keys": keys}, + local_user, device_id, {"one_time_keys": keys} + ) + self.assertDictEqual(res, {"one_time_key_counts": {"alg1": 1}}) + + res2 = yield self.handler.claim_one_time_keys( + {"one_time_keys": {local_user: {device_id: "alg1"}}}, timeout=None + ) + self.assertEqual( + res2, + { + "failures": {}, + "one_time_keys": {local_user: {device_id: {"alg1:k1": "key1"}}}, + }, ) - self.assertDictEqual(res, { - "one_time_key_counts": {"alg1": 1} - }) - - res2 = yield self.handler.claim_one_time_keys({ - "one_time_keys": { - local_user: { - device_id: "alg1" - } - } - }, timeout=None) - self.assertEqual(res2, { - "failures": {}, - "one_time_keys": { - local_user: { - device_id: { - "alg1:k1": "key1" - } - } - } - }) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 121ce78634ec..fc2b646ba25a 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -39,8 +39,7 @@ def test_offline_to_online(self): prev_state = UserPresenceState.default(user_id) new_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, + state=PresenceState.ONLINE, last_active_ts=now ) state, persist_and_notify, federation_ping = handle_update( @@ -54,23 +53,22 @@ def test_offline_to_online(self): self.assertEquals(state.last_federation_update_ts, now) self.assertEquals(wheel_timer.insert.call_count, 3) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + IDLE_TIMER - ), - call( - now=now, - obj=user_id, - then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT - ), - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY - ), - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER), + call( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT, + ), + call( + now=now, + obj=user_id, + then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY, + ), + ], + any_order=True, + ) def test_online_to_online(self): wheel_timer = Mock() @@ -79,14 +77,11 @@ def test_online_to_online(self): prev_state = UserPresenceState.default(user_id) prev_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, - currently_active=True, + state=PresenceState.ONLINE, last_active_ts=now, currently_active=True ) new_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, + state=PresenceState.ONLINE, last_active_ts=now ) state, persist_and_notify, federation_ping = handle_update( @@ -101,23 +96,22 @@ def test_online_to_online(self): self.assertEquals(state.last_federation_update_ts, now) self.assertEquals(wheel_timer.insert.call_count, 3) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + IDLE_TIMER - ), - call( - now=now, - obj=user_id, - then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT - ), - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY - ), - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER), + call( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT, + ), + call( + now=now, + obj=user_id, + then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY, + ), + ], + any_order=True, + ) def test_online_to_online_last_active_noop(self): wheel_timer = 
Mock() @@ -132,8 +126,7 @@ def test_online_to_online_last_active_noop(self): ) new_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, + state=PresenceState.ONLINE, last_active_ts=now ) state, persist_and_notify, federation_ping = handle_update( @@ -148,23 +141,22 @@ def test_online_to_online_last_active_noop(self): self.assertEquals(state.last_federation_update_ts, now) self.assertEquals(wheel_timer.insert.call_count, 3) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + IDLE_TIMER - ), - call( - now=now, - obj=user_id, - then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT - ), - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY - ), - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER), + call( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT, + ), + call( + now=now, + obj=user_id, + then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY, + ), + ], + any_order=True, + ) def test_online_to_online_last_active(self): wheel_timer = Mock() @@ -178,9 +170,7 @@ def test_online_to_online_last_active(self): currently_active=True, ) - new_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - ) + new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE) state, persist_and_notify, federation_ping = handle_update( prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now @@ -193,18 +183,17 @@ def test_online_to_online_last_active(self): self.assertEquals(state.last_federation_update_ts, now) self.assertEquals(wheel_timer.insert.call_count, 2) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_active_ts + IDLE_TIMER - ), - call( - now=now, - obj=user_id, - then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT - ) - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call(now=now, obj=user_id, then=new_state.last_active_ts + IDLE_TIMER), + call( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT, + ), + ], + any_order=True, + ) def test_remote_ping_timer(self): wheel_timer = Mock() @@ -213,13 +202,10 @@ def test_remote_ping_timer(self): prev_state = UserPresenceState.default(user_id) prev_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, + state=PresenceState.ONLINE, last_active_ts=now ) - new_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - ) + new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE) state, persist_and_notify, federation_ping = handle_update( prev_state, new_state, is_mine=False, wheel_timer=wheel_timer, now=now @@ -232,13 +218,16 @@ def test_remote_ping_timer(self): self.assertEquals(new_state.status_msg, state.status_msg) self.assertEquals(wheel_timer.insert.call_count, 1) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT - ), - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call( + now=now, + obj=user_id, + then=new_state.last_federation_update_ts + FEDERATION_TIMEOUT, + ) + ], + any_order=True, + ) def test_online_to_offline(self): wheel_timer = Mock() @@ -247,14 +236,10 @@ def test_online_to_offline(self): prev_state = UserPresenceState.default(user_id) prev_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - 
last_active_ts=now, - currently_active=True, + state=PresenceState.ONLINE, last_active_ts=now, currently_active=True ) - new_state = prev_state.copy_and_replace( - state=PresenceState.OFFLINE, - ) + new_state = prev_state.copy_and_replace(state=PresenceState.OFFLINE) state, persist_and_notify, federation_ping = handle_update( prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now @@ -273,14 +258,10 @@ def test_online_to_idle(self): prev_state = UserPresenceState.default(user_id) prev_state = prev_state.copy_and_replace( - state=PresenceState.ONLINE, - last_active_ts=now, - currently_active=True, + state=PresenceState.ONLINE, last_active_ts=now, currently_active=True ) - new_state = prev_state.copy_and_replace( - state=PresenceState.UNAVAILABLE, - ) + new_state = prev_state.copy_and_replace(state=PresenceState.UNAVAILABLE) state, persist_and_notify, federation_ping = handle_update( prev_state, new_state, is_mine=True, wheel_timer=wheel_timer, now=now @@ -293,13 +274,16 @@ def test_online_to_idle(self): self.assertEquals(new_state.status_msg, state.status_msg) self.assertEquals(wheel_timer.insert.call_count, 1) - wheel_timer.insert.assert_has_calls([ - call( - now=now, - obj=user_id, - then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT - ) - ], any_order=True) + wheel_timer.insert.assert_has_calls( + [ + call( + now=now, + obj=user_id, + then=new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT, + ) + ], + any_order=True, + ) class PresenceTimeoutTestCase(unittest.TestCase): @@ -314,9 +298,7 @@ def test_idle_timer(self): last_user_sync_ts=now, ) - new_state = handle_timeout( - state, is_mine=True, syncing_user_ids=set(), now=now - ) + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) self.assertIsNotNone(new_state) self.assertEquals(new_state.state, PresenceState.UNAVAILABLE) @@ -332,9 +314,7 @@ def test_sync_timeout(self): last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1, ) - new_state = handle_timeout( - state, is_mine=True, syncing_user_ids=set(), now=now - ) + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) self.assertIsNotNone(new_state) self.assertEquals(new_state.state, PresenceState.OFFLINE) @@ -369,9 +349,7 @@ def test_federation_ping(self): last_federation_update_ts=now - FEDERATION_PING_INTERVAL - 1, ) - new_state = handle_timeout( - state, is_mine=True, syncing_user_ids=set(), now=now - ) + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) self.assertIsNotNone(new_state) self.assertEquals(new_state, new_state) @@ -388,9 +366,7 @@ def test_no_timeout(self): last_federation_update_ts=now, ) - new_state = handle_timeout( - state, is_mine=True, syncing_user_ids=set(), now=now - ) + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) self.assertIsNone(new_state) @@ -425,9 +401,7 @@ def test_last_active(self): last_federation_update_ts=now, ) - new_state = handle_timeout( - state, is_mine=True, syncing_user_ids=set(), now=now - ) + new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now) self.assertIsNotNone(new_state) self.assertEquals(state, new_state) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index dc17918a3d6b..9268a6fe2b89 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -54,9 +54,7 @@ def register_query_handler(query_type, handler): federation_client=self.mock_federation, federation_server=Mock(), federation_registry=self.mock_registry, - 
ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]) + ratelimiter=NonCallableMock(spec_set=["send_message"]), ) self.ratelimiter = hs.get_ratelimiter() @@ -74,9 +72,7 @@ def register_query_handler(query_type, handler): @defer.inlineCallbacks def test_get_my_name(self): - yield self.store.set_profile_displayname( - self.frank.localpart, "Frank" - ) + yield self.store.set_profile_displayname(self.frank.localpart, "Frank") displayname = yield self.handler.get_displayname(self.frank) @@ -85,22 +81,18 @@ def test_get_my_name(self): @defer.inlineCallbacks def test_set_my_name(self): yield self.handler.set_displayname( - self.frank, - synapse.types.create_requester(self.frank), - "Frank Jr." + self.frank, synapse.types.create_requester(self.frank), "Frank Jr." ) self.assertEquals( (yield self.store.get_profile_displayname(self.frank.localpart)), - "Frank Jr." + "Frank Jr.", ) @defer.inlineCallbacks def test_set_my_name_noauth(self): d = self.handler.set_displayname( - self.frank, - synapse.types.create_requester(self.bob), - "Frank Jr." + self.frank, synapse.types.create_requester(self.bob), "Frank Jr." ) yield self.assertFailure(d, AuthError) @@ -145,11 +137,12 @@ def test_get_my_avatar(self): @defer.inlineCallbacks def test_set_my_avatar(self): yield self.handler.set_avatar_url( - self.frank, synapse.types.create_requester(self.frank), - "http://my.server/pic.gif" + self.frank, + synapse.types.create_requester(self.frank), + "http://my.server/pic.gif", ) self.assertEquals( (yield self.store.get_profile_avatar_url(self.frank.localpart)), - "http://my.server/pic.gif" + "http://my.server/pic.gif", ) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 4ea59a58de26..dbec81076f13 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -46,7 +46,8 @@ def setUp(self): profile_handler=Mock(), ) self.macaroon_generator = Mock( - generate_access_token=Mock(return_value='secret')) + generate_access_token=Mock(return_value='secret') + ) self.hs.get_macaroon_generator = Mock(return_value=self.macaroon_generator) self.hs.handlers = RegistrationHandlers(self.hs) self.handler = self.hs.get_handlers().registration_handler @@ -62,7 +63,8 @@ def test_user_is_created_and_logged_in_if_doesnt_exist(self): user_id = "@someone:test" requester = create_requester("@as:test") result_user_id, result_token = yield self.handler.get_or_create_user( - requester, local_part, display_name) + requester, local_part, display_name + ) self.assertEquals(result_user_id, user_id) self.assertEquals(result_token, 'secret') @@ -73,13 +75,15 @@ def test_if_user_exists(self): yield store.register( user_id=frank.to_string(), token="jkv;g498752-43gj['eamb!-5", - password_hash=None) + password_hash=None, + ) local_part = "frank" display_name = "Frank" user_id = "@frank:test" requester = create_requester("@as:test") result_user_id, result_token = yield self.handler.get_or_create_user( - requester, local_part, display_name) + requester, local_part, display_name + ) self.assertEquals(result_user_id, user_id) self.assertEquals(result_token, 'secret') diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index f422cf3c5a5f..becfa77bfa9f 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -38,23 +38,19 @@ def _expect_edu(destination, edu_type, content, origin="test"): "origin": origin, "origin_server_ts": 1000000, "pdus": [], - "edus": [ - { - "edu_type": edu_type, - "content": content, - } - ], + "edus": [{"edu_type": 
edu_type, "content": content}], } def _make_edu_json(origin, edu_type, content): - return json.dumps( - _expect_edu("test", edu_type, content, origin=origin) - ).encode('utf8') + return json.dumps(_expect_edu("test", edu_type, content, origin=origin)).encode( + 'utf8' + ) class TypingNotificationsTestCase(unittest.TestCase): """Tests typing notifications to rooms.""" + @defer.inlineCallbacks def setUp(self): self.clock = MockClock() @@ -74,18 +70,20 @@ def setUp(self): "test", auth=self.auth, clock=self.clock, - datastore=Mock(spec=[ - # Bits that Federation needs - "prep_send_transaction", - "delivered_txn", - "get_received_txn_response", - "set_received_txn_response", - "get_destination_retry_timings", - "get_devices_by_remote", - # Bits that user_directory needs - "get_user_directory_stream_pos", - "get_current_state_deltas", - ]), + datastore=Mock( + spec=[ + # Bits that Federation needs + "prep_send_transaction", + "delivered_txn", + "get_received_txn_response", + "set_received_txn_response", + "get_destination_retry_timings", + "get_devices_by_remote", + # Bits that user_directory needs + "get_user_directory_stream_pos", + "get_current_state_deltas", + ] + ), state_handler=self.state_handler, handlers=Mock(), notifier=mock_notifier, @@ -100,19 +98,16 @@ def setUp(self): self.event_source = hs.get_event_sources().sources["typing"] self.datastore = hs.get_datastore() - retry_timings_res = { - "destination": "", - "retry_last_ts": 0, - "retry_interval": 0, - } - self.datastore.get_destination_retry_timings.return_value = ( - defer.succeed(retry_timings_res) + retry_timings_res = {"destination": "", "retry_last_ts": 0, "retry_interval": 0} + self.datastore.get_destination_retry_timings.return_value = defer.succeed( + retry_timings_res ) self.datastore.get_devices_by_remote.return_value = (0, []) def get_received_txn_response(*args): return defer.succeed(None) + self.datastore.get_received_txn_response = get_received_txn_response self.room_id = "a-room" @@ -125,10 +120,12 @@ def check_joined_room(room_id, user_id): def get_joined_hosts_for_room(room_id): return set(member.domain for member in self.room_members) + self.datastore.get_joined_hosts_for_room = get_joined_hosts_for_room def get_current_user_in_room(room_id): return set(str(u) for u in self.room_members) + self.state_handler.get_current_user_in_room = get_current_user_in_room self.datastore.get_user_directory_stream_pos.return_value = ( @@ -136,19 +133,13 @@ def get_current_user_in_room(room_id): defer.succeed(1) ) - self.datastore.get_current_state_deltas.return_value = ( - None - ) + self.datastore.get_current_state_deltas.return_value = None self.auth.check_joined_room = check_joined_room self.datastore.get_to_device_stream_token = lambda: 0 - self.datastore.get_new_device_msgs_for_remote = ( - lambda *args, **kargs: ([], 0) - ) - self.datastore.delete_device_msgs_for_remote = ( - lambda *args, **kargs: None - ) + self.datastore.get_new_device_msgs_for_remote = lambda *args, **kargs: ([], 0) + self.datastore.delete_device_msgs_for_remote = lambda *args, **kargs: None # Some local users to test with self.u_apple = UserID.from_string("@apple:test") @@ -170,24 +161,23 @@ def test_started_typing_local(self): timeout=20000, ) - self.on_new_event.assert_has_calls([ - call('typing_key', 1, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 1, rooms=[self.room_id])] + ) self.assertEquals(self.event_source.get_current_key(), 1) events = yield self.event_source.get_new_events( - 
room_ids=[self.room_id], - from_key=0, + room_ids=[self.room_id], from_key=0 ) self.assertEquals( events[0], [ - {"type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [self.u_apple.to_string()], - }}, - ] + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": [self.u_apple.to_string()]}, + } + ], ) @defer.inlineCallbacks @@ -206,13 +196,13 @@ def test_started_typing_remote_send(self): "room_id": self.room_id, "user_id": self.u_apple.to_string(), "typing": True, - } + }, ), json_data_callback=ANY, long_retries=True, backoff_on_404=True, ), - defer.succeed((200, "OK")) + defer.succeed((200, "OK")), ) yield self.handler.started_typing( @@ -240,27 +230,29 @@ def test_started_typing_remote_recv(self): "room_id": self.room_id, "user_id": self.u_onion.to_string(), "typing": True, - } + }, ), federation_auth=True, ) - self.on_new_event.assert_has_calls([ - call('typing_key', 1, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 1, rooms=[self.room_id])] + ) self.assertEquals(self.event_source.get_current_key(), 1) events = yield self.event_source.get_new_events( - room_ids=[self.room_id], - from_key=0 + room_ids=[self.room_id], from_key=0 + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": [self.u_onion.to_string()]}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [self.u_onion.to_string()], - }, - }]) @defer.inlineCallbacks def test_stopped_typing(self): @@ -278,17 +270,18 @@ def test_stopped_typing(self): "room_id": self.room_id, "user_id": self.u_apple.to_string(), "typing": False, - } + }, ), json_data_callback=ANY, long_retries=True, backoff_on_404=True, ), - defer.succeed((200, "OK")) + defer.succeed((200, "OK")), ) # Gut-wrenching from synapse.handlers.typing import RoomMember + member = RoomMember(self.room_id, self.u_apple.to_string()) self.handler._member_typing_until[member] = 1002000 self.handler._room_typing[self.room_id] = set([self.u_apple.to_string()]) @@ -296,29 +289,29 @@ def test_stopped_typing(self): self.assertEquals(self.event_source.get_current_key(), 0) yield self.handler.stopped_typing( - target_user=self.u_apple, - auth_user=self.u_apple, - room_id=self.room_id, + target_user=self.u_apple, auth_user=self.u_apple, room_id=self.room_id ) - self.on_new_event.assert_has_calls([ - call('typing_key', 1, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 1, rooms=[self.room_id])] + ) yield put_json.await_calls() self.assertEquals(self.event_source.get_current_key(), 1) events = yield self.event_source.get_new_events( - room_ids=[self.room_id], - from_key=0, + room_ids=[self.room_id], from_key=0 + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": []}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [], - }, - }]) @defer.inlineCallbacks def test_typing_timeout(self): @@ -333,42 +326,46 @@ def test_typing_timeout(self): timeout=10000, ) - self.on_new_event.assert_has_calls([ - call('typing_key', 1, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 1, rooms=[self.room_id])] + ) self.on_new_event.reset_mock() self.assertEquals(self.event_source.get_current_key(), 1) events = yield self.event_source.get_new_events( - room_ids=[self.room_id], - 
from_key=0, + room_ids=[self.room_id], from_key=0 + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": [self.u_apple.to_string()]}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [self.u_apple.to_string()], - }, - }]) self.clock.advance_time(16) - self.on_new_event.assert_has_calls([ - call('typing_key', 2, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 2, rooms=[self.room_id])] + ) self.assertEquals(self.event_source.get_current_key(), 2) events = yield self.event_source.get_new_events( - room_ids=[self.room_id], - from_key=1, + room_ids=[self.room_id], from_key=1 + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": []}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [], - }, - }]) # SYN-230 - see if we can still set after timeout @@ -379,20 +376,22 @@ def test_typing_timeout(self): timeout=10000, ) - self.on_new_event.assert_has_calls([ - call('typing_key', 3, rooms=[self.room_id]), - ]) + self.on_new_event.assert_has_calls( + [call('typing_key', 3, rooms=[self.room_id])] + ) self.on_new_event.reset_mock() self.assertEquals(self.event_source.get_current_key(), 3) events = yield self.event_source.get_new_events( - room_ids=[self.room_id], - from_key=0, + room_ids=[self.room_id], from_key=0 + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": [self.u_apple.to_string()]}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [self.u_apple.to_string()], - }, - }]) diff --git a/tests/http/test_endpoint.py b/tests/http/test_endpoint.py index 60e6a7595352..3b0155ed03cc 100644 --- a/tests/http/test_endpoint.py +++ b/tests/http/test_endpoint.py @@ -39,15 +39,13 @@ def test_validate_bad_server_names(self): "[1234", "underscore_.com", "percent%65.com", - "1234:5678:80", # too many colons + "1234:5678:80", # too many colons ] for i in test_data: try: parse_and_validate_server_name(i) self.fail( - "Expected parse_and_validate_server_name('%s') to throw" % ( - i, - ), + "Expected parse_and_validate_server_name('%s') to throw" % (i,) ) except ValueError: pass diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py index a103e7be802e..c23b6e2cfd7a 100644 --- a/tests/replication/slave/storage/_base.py +++ b/tests/replication/slave/storage/_base.py @@ -31,6 +31,7 @@ class TestReplicationClientHandler(ReplicationClientHandler): """Overrides on_rdata so that we can wait for it to happen""" + def __init__(self, store): super(TestReplicationClientHandler, self).__init__(store) self._rdata_awaiters = [] @@ -56,9 +57,7 @@ def setUp(self): "blue", http_client=None, federation_client=Mock(), - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), + ratelimiter=NonCallableMock(spec_set=["send_message"]), ) self.hs.get_ratelimiter().send_message.return_value = (True, 0) diff --git a/tests/replication/slave/storage/test_account_data.py b/tests/replication/slave/storage/test_account_data.py index adf226404e39..87cc2b2fba5f 100644 --- a/tests/replication/slave/storage/test_account_data.py +++ b/tests/replication/slave/storage/test_account_data.py @@ -29,20 +29,14 @@ class 
SlavedAccountDataStoreTestCase(BaseSlavedStoreTestCase): @defer.inlineCallbacks def test_user_account_data(self): - yield self.master_store.add_account_data_for_user( - USER_ID, TYPE, {"a": 1} - ) + yield self.master_store.add_account_data_for_user(USER_ID, TYPE, {"a": 1}) yield self.replicate() yield self.check( - "get_global_account_data_by_type_for_user", - [TYPE, USER_ID], {"a": 1} + "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 1} ) - yield self.master_store.add_account_data_for_user( - USER_ID, TYPE, {"a": 2} - ) + yield self.master_store.add_account_data_for_user(USER_ID, TYPE, {"a": 2}) yield self.replicate() yield self.check( - "get_global_account_data_by_type_for_user", - [TYPE, USER_ID], {"a": 2} + "get_global_account_data_by_type_for_user", [TYPE, USER_ID], {"a": 2} ) diff --git a/tests/replication/slave/storage/test_events.py b/tests/replication/slave/storage/test_events.py index f5b47f5ec0c2..622be2eef8fd 100644 --- a/tests/replication/slave/storage/test_events.py +++ b/tests/replication/slave/storage/test_events.py @@ -38,6 +38,7 @@ def patch__eq__(cls): def unpatch(): if eq is not None: cls.__eq__ = eq + return unpatch @@ -48,10 +49,7 @@ class SlavedEventStoreTestCase(BaseSlavedStoreTestCase): def setUp(self): # Patch up the equality operator for events so that we can check # whether lists of events match using assertEquals - self.unpatches = [ - patch__eq__(_EventInternalMetadata), - patch__eq__(FrozenEvent), - ] + self.unpatches = [patch__eq__(_EventInternalMetadata), patch__eq__(FrozenEvent)] return super(SlavedEventStoreTestCase, self).setUp() def tearDown(self): @@ -61,33 +59,27 @@ def tearDown(self): def test_get_latest_event_ids_in_room(self): create = yield self.persist(type="m.room.create", key="", creator=USER_ID) yield self.replicate() - yield self.check( - "get_latest_event_ids_in_room", (ROOM_ID,), [create.event_id] - ) + yield self.check("get_latest_event_ids_in_room", (ROOM_ID,), [create.event_id]) join = yield self.persist( - type="m.room.member", key=USER_ID, membership="join", + type="m.room.member", + key=USER_ID, + membership="join", prev_events=[(create.event_id, {})], ) yield self.replicate() - yield self.check( - "get_latest_event_ids_in_room", (ROOM_ID,), [join.event_id] - ) + yield self.check("get_latest_event_ids_in_room", (ROOM_ID,), [join.event_id]) @defer.inlineCallbacks def test_redactions(self): yield self.persist(type="m.room.create", key="", creator=USER_ID) yield self.persist(type="m.room.member", key=USER_ID, membership="join") - msg = yield self.persist( - type="m.room.message", msgtype="m.text", body="Hello" - ) + msg = yield self.persist(type="m.room.message", msgtype="m.text", body="Hello") yield self.replicate() yield self.check("get_event", [msg.event_id], msg) - redaction = yield self.persist( - type="m.room.redaction", redacts=msg.event_id - ) + redaction = yield self.persist(type="m.room.redaction", redacts=msg.event_id) yield self.replicate() msg_dict = msg.get_dict() @@ -102,9 +94,7 @@ def test_backfilled_redactions(self): yield self.persist(type="m.room.create", key="", creator=USER_ID) yield self.persist(type="m.room.member", key=USER_ID, membership="join") - msg = yield self.persist( - type="m.room.message", msgtype="m.text", body="Hello" - ) + msg = yield self.persist(type="m.room.message", msgtype="m.text", body="Hello") yield self.replicate() yield self.check("get_event", [msg.event_id], msg) @@ -127,10 +117,19 @@ def test_invites(self): type="m.room.member", key=USER_ID_2, membership="invite" ) yield 
self.replicate() - yield self.check("get_invited_rooms_for_user", [USER_ID_2], [RoomsForUser( - ROOM_ID, USER_ID, "invite", event.event_id, - event.internal_metadata.stream_ordering - )]) + yield self.check( + "get_invited_rooms_for_user", + [USER_ID_2], + [ + RoomsForUser( + ROOM_ID, + USER_ID, + "invite", + event.event_id, + event.internal_metadata.stream_ordering, + ) + ], + ) @defer.inlineCallbacks def test_push_actions_for_user(self): @@ -146,40 +145,55 @@ def test_push_actions_for_user(self): yield self.check( "get_unread_event_push_actions_by_room_for_user", [ROOM_ID, USER_ID_2, event1.event_id], - {"highlight_count": 0, "notify_count": 0} + {"highlight_count": 0, "notify_count": 0}, ) yield self.persist( - type="m.room.message", msgtype="m.text", body="world", + type="m.room.message", + msgtype="m.text", + body="world", push_actions=[(USER_ID_2, ["notify"])], ) yield self.replicate() yield self.check( "get_unread_event_push_actions_by_room_for_user", [ROOM_ID, USER_ID_2, event1.event_id], - {"highlight_count": 0, "notify_count": 1} + {"highlight_count": 0, "notify_count": 1}, ) yield self.persist( - type="m.room.message", msgtype="m.text", body="world", - push_actions=[(USER_ID_2, [ - "notify", {"set_tweak": "highlight", "value": True} - ])], + type="m.room.message", + msgtype="m.text", + body="world", + push_actions=[ + (USER_ID_2, ["notify", {"set_tweak": "highlight", "value": True}]) + ], ) yield self.replicate() yield self.check( "get_unread_event_push_actions_by_room_for_user", [ROOM_ID, USER_ID_2, event1.event_id], - {"highlight_count": 1, "notify_count": 2} + {"highlight_count": 1, "notify_count": 2}, ) event_id = 0 @defer.inlineCallbacks def persist( - self, sender=USER_ID, room_id=ROOM_ID, type={}, key=None, internal={}, - state=None, reset_state=False, backfill=False, - depth=None, prev_events=[], auth_events=[], prev_state=[], redacts=None, + self, + sender=USER_ID, + room_id=ROOM_ID, + type={}, + key=None, + internal={}, + state=None, + reset_state=False, + backfill=False, + depth=None, + prev_events=[], + auth_events=[], + prev_state=[], + redacts=None, push_actions=[], **content ): @@ -219,34 +233,23 @@ def persist( self.event_id += 1 if state is not None: - state_ids = { - key: e.event_id for key, e in state.items() - } + state_ids = {key: e.event_id for key, e in state.items()} context = EventContext.with_state( - state_group=None, - current_state_ids=state_ids, - prev_state_ids=state_ids + state_group=None, current_state_ids=state_ids, prev_state_ids=state_ids ) else: state_handler = self.hs.get_state_handler() context = yield state_handler.compute_event_context(event) yield self.master_store.add_push_actions_to_staging( - event.event_id, { - user_id: actions - for user_id, actions in push_actions - }, + event.event_id, {user_id: actions for user_id, actions in push_actions} ) ordering = None if backfill: - yield self.master_store.persist_events( - [(event, context)], backfilled=True - ) + yield self.master_store.persist_events([(event, context)], backfilled=True) else: - ordering, _ = yield self.master_store.persist_event( - event, context, - ) + ordering, _ = yield self.master_store.persist_event(event, context) if ordering: event.internal_metadata.stream_ordering = ordering diff --git a/tests/replication/slave/storage/test_receipts.py b/tests/replication/slave/storage/test_receipts.py index e6d670cc1f27..ae1adeded103 100644 --- a/tests/replication/slave/storage/test_receipts.py +++ b/tests/replication/slave/storage/test_receipts.py @@ -34,6 +34,6 @@ def 
test_receipt(self):
             ROOM_ID, "m.read", USER_ID, [EVENT_ID], {}
         )
         yield self.replicate()
-        yield self.check("get_receipts_for_user", [USER_ID, "m.read"], {
-            ROOM_ID: EVENT_ID
-        })
+        yield self.check(
+            "get_receipts_for_user", [USER_ID, "m.read"], {ROOM_ID: EVENT_ID}
+        )
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index d46c27e7e9f7..708dc26e6147 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -11,7 +11,6 @@
 class HttpTransactionCacheTestCase(unittest.TestCase):
-
     def setUp(self):
         self.clock = MockClock()
         self.hs = Mock()
@@ -24,9 +23,7 @@ def setUp(self):
     @defer.inlineCallbacks
     def test_executes_given_function(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
         res = yield self.cache.fetch_or_execute(
             self.mock_key, cb, "some_arg", keyword="arg"
         )
@@ -35,9 +32,7 @@ def test_deduplicates_based_on_key(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
         for i in range(3):  # invoke multiple times
             res = yield self.cache.fetch_or_execute(
                 self.mock_key, cb, "some_arg", keyword="arg", changing_args=i
             )
@@ -120,29 +115,18 @@ def cb():
     @defer.inlineCallbacks
     def test_cleans_up(self):
-        cb = Mock(
-            return_value=defer.succeed(self.mock_http_response)
-        )
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        cb = Mock(return_value=defer.succeed(self.mock_http_response))
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # should NOT have cleaned up yet
         self.clock.advance_time_msec(CLEANUP_PERIOD_MS / 2)
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # still using cache
         cb.assert_called_once_with("an arg")
         self.clock.advance_time_msec(CLEANUP_PERIOD_MS)
-        yield self.cache.fetch_or_execute(
-            self.mock_key, cb, "an arg"
-        )
+        yield self.cache.fetch_or_execute(self.mock_key, cb, "an arg")
         # no longer using cache
         self.assertEqual(cb.call_count, 2)
-        self.assertEqual(
-            cb.call_args_list,
-            [call("an arg",), call("an arg",)]
-        )
+        self.assertEqual(cb.call_args_list, [call("an arg"), call("an arg")])
diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py
index fb28883d30c4..67d9ab94e2c8 100644
--- a/tests/rest/client/v1/test_admin.py
+++ b/tests/rest/client/v1/test_admin.py
@@ -215,6 +215,7 @@ def test_missing_parts(self):
         mac. Admin is optional. Additional checks are done for length and
         type.
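         Each check below grabs a fresh value from the nonce() helper, since
         a nonce is only good for a single registration attempt.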
""" + def nonce(): request, channel = make_request("GET", self.url) render(request, self.resource, self.clock) @@ -289,7 +290,9 @@ def nonce(): self.assertEqual('Invalid password', channel.json_body["error"]) # Must not have null bytes - body = json.dumps({"nonce": nonce(), "username": "a", "password": u"abcd\u0000"}) + body = json.dumps( + {"nonce": nonce(), "username": "a", "password": u"abcd\u0000"} + ) request, channel = make_request("POST", self.url, body.encode('utf8')) render(request, self.resource, self.clock) diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index 50418153fa91..0316b74fa111 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -43,9 +43,7 @@ def setUp(self): hs = yield setup_test_homeserver( http_client=None, federation_client=Mock(), - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), + ratelimiter=NonCallableMock(spec_set=["send_message"]), ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -83,7 +81,7 @@ def test_stream_basic_permissions(self): # behaviour is used instead to be consistent with the r0 spec. # see issue #2602 (code, response) = yield self.mock_resource.trigger_get( - "/events?access_token=%s" % ("invalid" + self.token, ) + "/events?access_token=%s" % ("invalid" + self.token,) ) self.assertEquals(401, code, msg=str(response)) @@ -98,18 +96,12 @@ def test_stream_basic_permissions(self): @defer.inlineCallbacks def test_stream_room_permissions(self): - room_id = yield self.create_room_as( - self.other_user, - tok=self.other_token - ) + room_id = yield self.create_room_as(self.other_user, tok=self.other_token) yield self.send(room_id, tok=self.other_token) # invited to room (expect no content for room) yield self.invite( - room_id, - src=self.other_user, - targ=self.user_id, - tok=self.other_token + room_id, src=self.other_user, targ=self.user_id, tok=self.other_token ) (code, response) = yield self.mock_resource.trigger_get( @@ -120,13 +112,16 @@ def test_stream_room_permissions(self): # We may get a presence event for ourselves down self.assertEquals( 0, - len([ - c for c in response["chunk"] - if not ( - c.get("type") == "m.presence" - and c["content"].get("user_id") == self.user_id - ) - ]) + len( + [ + c + for c in response["chunk"] + if not ( + c.get("type") == "m.presence" + and c["content"].get("user_id") == self.user_id + ) + ] + ), ) # joined room (expect all content for room) diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 0516ce3cfbd2..9ba0ffc19f8a 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -36,12 +36,14 @@ class ProfileTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.mock_resource = MockHttpResource(prefix=PATH_PREFIX) - self.mock_handler = Mock(spec=[ - "get_displayname", - "set_displayname", - "get_avatar_url", - "set_avatar_url", - ]) + self.mock_handler = Mock( + spec=[ + "get_displayname", + "set_displayname", + "get_avatar_url", + "set_avatar_url", + ] + ) hs = yield setup_test_homeserver( "test", @@ -49,7 +51,7 @@ def setUp(self): resource_for_client=self.mock_resource, federation=Mock(), federation_client=Mock(), - profile_handler=self.mock_handler + profile_handler=self.mock_handler, ) def _get_user_by_req(request=None, allow_guest=False): @@ -78,9 +80,7 @@ def test_set_my_name(self): mocked_set.return_value = defer.succeed(()) (code, response) = yield 
self.mock_resource.trigger( - "PUT", - "/profile/%s/displayname" % (myid), - b'{"displayname": "Frank Jr."}' + "PUT", "/profile/%s/displayname" % (myid), b'{"displayname": "Frank Jr."}' ) self.assertEquals(200, code) @@ -94,14 +94,12 @@ def test_set_my_name_noauth(self): mocked_set.side_effect = AuthError(400, "message") (code, response) = yield self.mock_resource.trigger( - "PUT", "/profile/%s/displayname" % ("@4567:test"), - b'{"displayname": "Frank Jr."}' + "PUT", + "/profile/%s/displayname" % ("@4567:test"), + b'{"displayname": "Frank Jr."}', ) - self.assertTrue( - 400 <= code < 499, - msg="code %d is in the 4xx range" % (code) - ) + self.assertTrue(400 <= code < 499, msg="code %d is in the 4xx range" % (code)) @defer.inlineCallbacks def test_get_other_name(self): @@ -121,14 +119,12 @@ def test_set_other_name(self): mocked_set.side_effect = SynapseError(400, "message") (code, response) = yield self.mock_resource.trigger( - "PUT", "/profile/%s/displayname" % ("@opaque:elsewhere"), - b'{"displayname":"bob"}' + "PUT", + "/profile/%s/displayname" % ("@opaque:elsewhere"), + b'{"displayname":"bob"}', ) - self.assertTrue( - 400 <= code <= 499, - msg="code %d is in the 4xx range" % (code) - ) + self.assertTrue(400 <= code <= 499, msg="code %d is in the 4xx range" % (code)) @defer.inlineCallbacks def test_get_my_avatar(self): @@ -151,7 +147,7 @@ def test_set_my_avatar(self): (code, response) = yield self.mock_resource.trigger( "PUT", "/profile/%s/avatar_url" % (myid), - b'{"avatar_url": "http://my.server/pic.gif"}' + b'{"avatar_url": "http://my.server/pic.gif"}', ) self.assertEquals(200, code) diff --git a/tests/rest/client/v1/test_register.py b/tests/rest/client/v1/test_register.py index 83a23cd8fe3c..6f15d69ecd54 100644 --- a/tests/rest/client/v1/test_register.py +++ b/tests/rest/client/v1/test_register.py @@ -32,6 +32,7 @@ class CreateUserServletTestCase(unittest.TestCase): """ Tests for CreateUserRestServlet. """ + if PY3: skip = "Not ported to Python 3." diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py index bddb3302e481..7f1a435e7ba3 100644 --- a/tests/rest/client/v1/test_typing.py +++ b/tests/rest/client/v1/test_typing.py @@ -31,6 +31,7 @@ class RoomTypingTestCase(RestTestCase): """ Tests /rooms/$room_id/typing/$user_id REST API. 
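    Typing notifications are ephemeral, so these tests assert on the typing
    event source's stream position and payloads rather than on any persisted
    events.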
""" + user_id = "@sid:red" user = UserID.from_string(user_id) @@ -47,9 +48,7 @@ def setUp(self): clock=self.clock, http_client=None, federation_client=Mock(), - ratelimiter=NonCallableMock(spec_set=[ - "send_message", - ]), + ratelimiter=NonCallableMock(spec_set=["send_message"]), ) self.hs = hs @@ -71,6 +70,7 @@ def get_user_by_access_token(token=None, allow_guest=False): def _insert_client_ip(*args, **kwargs): return defer.succeed(None) + hs.get_datastore().insert_client_ip = _insert_client_ip def get_room_members(room_id): @@ -94,6 +94,7 @@ def fetch_room_distributions_into( else: if remotedomains is not None: remotedomains.add(member.domain) + hs.get_room_member_handler().fetch_room_distributions_into = ( fetch_room_distributions_into ) @@ -107,37 +108,42 @@ def fetch_room_distributions_into( @defer.inlineCallbacks def test_set_typing(self): (code, _) = yield self.mock_resource.trigger( - "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}' + "PUT", + "/rooms/%s/typing/%s" % (self.room_id, self.user_id), + '{"typing": true, "timeout": 30000}', ) self.assertEquals(200, code) self.assertEquals(self.event_source.get_current_key(), 1) events = yield self.event_source.get_new_events( - from_key=0, - room_ids=[self.room_id], + from_key=0, room_ids=[self.room_id] + ) + self.assertEquals( + events[0], + [ + { + "type": "m.typing", + "room_id": self.room_id, + "content": {"user_ids": [self.user_id]}, + } + ], ) - self.assertEquals(events[0], [{ - "type": "m.typing", - "room_id": self.room_id, - "content": { - "user_ids": [self.user_id], - } - }]) @defer.inlineCallbacks def test_set_not_typing(self): (code, _) = yield self.mock_resource.trigger( - "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": false}' + "PUT", + "/rooms/%s/typing/%s" % (self.room_id, self.user_id), + '{"typing": false}', ) self.assertEquals(200, code) @defer.inlineCallbacks def test_typing_timeout(self): (code, _) = yield self.mock_resource.trigger( - "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}' + "PUT", + "/rooms/%s/typing/%s" % (self.room_id, self.user_id), + '{"typing": true, "timeout": 30000}', ) self.assertEquals(200, code) @@ -148,8 +154,9 @@ def test_typing_timeout(self): self.assertEquals(self.event_source.get_current_key(), 2) (code, _) = yield self.mock_resource.trigger( - "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}' + "PUT", + "/rooms/%s/typing/%s" % (self.room_id, self.user_id), + '{"typing": true, "timeout": 30000}', ) self.assertEquals(200, code) diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index e3bc5f378d22..9f862f9dfa02 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -55,25 +55,39 @@ def create_room_as(self, room_creator, is_public=True, tok=None): @defer.inlineCallbacks def invite(self, room=None, src=None, targ=None, expect_code=200, tok=None): - yield self.change_membership(room=room, src=src, targ=targ, tok=tok, - membership=Membership.INVITE, - expect_code=expect_code) + yield self.change_membership( + room=room, + src=src, + targ=targ, + tok=tok, + membership=Membership.INVITE, + expect_code=expect_code, + ) @defer.inlineCallbacks def join(self, room=None, user=None, expect_code=200, tok=None): - yield self.change_membership(room=room, src=user, targ=user, tok=tok, - membership=Membership.JOIN, - expect_code=expect_code) + yield self.change_membership( + 
room=room, + src=user, + targ=user, + tok=tok, + membership=Membership.JOIN, + expect_code=expect_code, + ) @defer.inlineCallbacks def leave(self, room=None, user=None, expect_code=200, tok=None): - yield self.change_membership(room=room, src=user, targ=user, tok=tok, - membership=Membership.LEAVE, - expect_code=expect_code) + yield self.change_membership( + room=room, + src=user, + targ=user, + tok=tok, + membership=Membership.LEAVE, + expect_code=expect_code, + ) @defer.inlineCallbacks - def change_membership(self, room, src, targ, membership, tok=None, - expect_code=200): + def change_membership(self, room, src, targ, membership, tok=None, expect_code=200): temp_id = self.auth_user_id self.auth_user_id = src @@ -81,16 +95,15 @@ def change_membership(self, room, src, targ, membership, tok=None, if tok: path = path + "?access_token=%s" % tok - data = { - "membership": membership - } + data = {"membership": membership} (code, response) = yield self.mock_resource.trigger( "PUT", path, json.dumps(data) ) self.assertEquals( - expect_code, code, - msg="Expected: %d, got: %d, resp: %r" % (expect_code, code, response) + expect_code, + code, + msg="Expected: %d, got: %d, resp: %r" % (expect_code, code, response), ) self.auth_user_id = temp_id @@ -100,17 +113,15 @@ def register(self, user_id): (code, response) = yield self.mock_resource.trigger( "POST", "/register", - json.dumps({ - "user": user_id, - "password": "test", - "type": "m.login.password" - })) + json.dumps( + {"user": user_id, "password": "test", "type": "m.login.password"} + ), + ) self.assertEquals(200, code, msg=response) defer.returnValue(response) @defer.inlineCallbacks - def send(self, room_id, body=None, txn_id=None, tok=None, - expect_code=200): + def send(self, room_id, body=None, txn_id=None, tok=None, expect_code=200): if txn_id is None: txn_id = "m%s" % (str(time.time())) if body is None: @@ -132,8 +143,9 @@ def assert_dict(self, required, actual): actual (dict): The test result. Extra keys will not be checked. """ for key in required: - self.assertEquals(required[key], actual[key], - msg="%s mismatch. %s" % (key, actual)) + self.assertEquals( + required[key], actual[key], msg="%s mismatch. 
%s" % (key, actual) + ) @attr.s @@ -156,7 +168,9 @@ def create_room_as(self, room_creator, is_public=True, tok=None): if tok: path = path + "?access_token=%s" % tok - request, channel = make_request("POST", path, json.dumps(content).encode('utf8')) + request, channel = make_request( + "POST", path, json.dumps(content).encode('utf8') + ) request.render(self.resource) wait_until_result(self.hs.get_reactor(), channel) @@ -204,9 +218,7 @@ def change_membership(self, room, src, targ, membership, tok=None, expect_code=2 data = {"membership": membership} - request, channel = make_request( - "PUT", path, json.dumps(data).encode('utf8') - ) + request, channel = make_request("PUT", path, json.dumps(data).encode('utf8')) request.render(self.resource) wait_until_result(self.hs.get_reactor(), channel) diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index f6293f11a871..9487babac343 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -101,9 +101,7 @@ def test_POST_bad_password(self): wait_until_result(self.clock, channel) self.assertEquals(channel.result["code"], b"400", channel.result) - self.assertEquals( - channel.json_body["error"], "Invalid password" - ) + self.assertEquals(channel.json_body["error"], "Invalid password") def test_POST_bad_username(self): request_data = json.dumps({"username": 777, "password": "monkey"}) @@ -112,9 +110,7 @@ def test_POST_bad_username(self): wait_until_result(self.clock, channel) self.assertEquals(channel.result["code"], b"400", channel.result) - self.assertEquals( - channel.json_body["error"], "Invalid username" - ) + self.assertEquals(channel.json_body["error"], "Invalid username") def test_POST_user_valid(self): user_id = "@kermit:muppet" @@ -157,10 +153,7 @@ def test_POST_disabled_registration(self): wait_until_result(self.clock, channel) self.assertEquals(channel.result["code"], b"403", channel.result) - self.assertEquals( - channel.json_body["error"], - "Registration has been disabled", - ) + self.assertEquals(channel.json_body["error"], "Registration has been disabled") def test_POST_guest_registration(self): user_id = "a@b" @@ -188,6 +181,4 @@ def test_POST_disabled_guest_registration(self): wait_until_result(self.clock, channel) self.assertEquals(channel.result["code"], b"403", channel.result) - self.assertEquals( - channel.json_body["error"], "Guest access is disabled" - ) + self.assertEquals(channel.json_body["error"], "Guest access is disabled") diff --git a/tests/rest/media/v1/test_media_storage.py b/tests/rest/media/v1/test_media_storage.py index bf254a260d45..a86901c2d833 100644 --- a/tests/rest/media/v1/test_media_storage.py +++ b/tests/rest/media/v1/test_media_storage.py @@ -41,13 +41,11 @@ def setUp(self): hs.get_reactor = Mock(return_value=reactor) hs.config.media_store_path = self.primary_base_path - storage_providers = [FileStorageProviderBackend( - hs, self.secondary_base_path - )] + storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)] self.filepaths = MediaFilePaths(self.primary_base_path) self.media_storage = MediaStorage( - hs, self.primary_base_path, self.filepaths, storage_providers, + hs, self.primary_base_path, self.filepaths, storage_providers ) def tearDown(self): diff --git a/tests/server.py b/tests/server.py index e249668d2104..05708be8b917 100644 --- a/tests/server.py +++ b/tests/server.py @@ -136,6 +136,7 @@ class ThreadedMemoryReactorClock(MemoryReactorClock): """ A MemoryReactorClock that 
supports callFromThread. """ + def callFromThread(self, callback, *args, **kwargs): """ Make the callback fire in the next reactor iteration. @@ -184,6 +185,7 @@ class ThreadPool: """ Threadless thread pool. """ + def start(self): pass diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 52eff2a10494..52eb05bfbf90 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -25,7 +25,6 @@ class CacheTestCase(unittest.TestCase): - def setUp(self): self.cache = Cache("test") @@ -97,7 +96,6 @@ def test_eviction_lru(self): class CacheDecoratorTestCase(unittest.TestCase): - @defer.inlineCallbacks def test_passthrough(self): class A(object): @@ -180,8 +178,7 @@ def func(self, key): yield a.func(k) self.assertTrue( - callcount[0] >= 14, - msg="Expected callcount >= 14, got %d" % (callcount[0]) + callcount[0] >= 14, msg="Expected callcount >= 14, got %d" % (callcount[0]) ) def test_prefill(self): diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 099861b27cf1..fbb25a88441a 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -34,7 +34,6 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): self.as_yaml_files = [] @@ -44,20 +43,14 @@ def setUp(self): password_providers=[], ) hs = yield setup_test_homeserver( - config=config, - federation_sender=Mock(), - federation_client=Mock(), + config=config, federation_sender=Mock(), federation_client=Mock() ) self.as_token = "token1" self.as_url = "some_url" self.as_id = "as1" self._add_appservice( - self.as_token, - self.as_id, - self.as_url, - "some_hs_token", - "bob" + self.as_token, self.as_id, self.as_url, "some_hs_token", "bob" ) self._add_appservice("token2", "as2", "some_url", "some_hs_token", "bob") self._add_appservice("token3", "as3", "some_url", "some_hs_token", "bob") @@ -73,8 +66,14 @@ def tearDown(self): pass def _add_appservice(self, as_token, id, url, hs_token, sender): - as_yaml = dict(url=url, as_token=as_token, hs_token=hs_token, - id=id, sender_localpart=sender, namespaces={}) + as_yaml = dict( + url=url, + as_token=as_token, + hs_token=hs_token, + id=id, + sender_localpart=sender, + namespaces={}, + ) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -85,24 +84,13 @@ def test_retrieve_unknown_service_token(self): self.assertEquals(service, None) def test_retrieval_of_service(self): - stored_service = self.store.get_app_service_by_token( - self.as_token - ) + stored_service = self.store.get_app_service_by_token(self.as_token) self.assertEquals(stored_service.token, self.as_token) self.assertEquals(stored_service.id, self.as_id) self.assertEquals(stored_service.url, self.as_url) - self.assertEquals( - stored_service.namespaces[ApplicationService.NS_ALIASES], - [] - ) - self.assertEquals( - stored_service.namespaces[ApplicationService.NS_ROOMS], - [] - ) - self.assertEquals( - stored_service.namespaces[ApplicationService.NS_USERS], - [] - ) + self.assertEquals(stored_service.namespaces[ApplicationService.NS_ALIASES], []) + self.assertEquals(stored_service.namespaces[ApplicationService.NS_ROOMS], []) + self.assertEquals(stored_service.namespaces[ApplicationService.NS_USERS], []) def test_retrieval_of_all_services(self): services = self.store.get_app_services() @@ -110,7 +98,6 @@ def test_retrieval_of_all_services(self): class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): 
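        """Write four stub application service configs to disk and build a
        TestTransactionStore that loads them via a test homeserver."""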
self.as_yaml_files = [] @@ -121,33 +108,15 @@ def setUp(self): password_providers=[], ) hs = yield setup_test_homeserver( - config=config, - federation_sender=Mock(), - federation_client=Mock(), + config=config, federation_sender=Mock(), federation_client=Mock() ) self.db_pool = hs.get_db_pool() self.as_list = [ - { - "token": "token1", - "url": "https://matrix-as.org", - "id": "id_1" - }, - { - "token": "alpha_tok", - "url": "https://alpha.com", - "id": "id_alpha" - }, - { - "token": "beta_tok", - "url": "https://beta.com", - "id": "id_beta" - }, - { - "token": "gamma_tok", - "url": "https://gamma.com", - "id": "id_gamma" - }, + {"token": "token1", "url": "https://matrix-as.org", "id": "id_1"}, + {"token": "alpha_tok", "url": "https://alpha.com", "id": "id_alpha"}, + {"token": "beta_tok", "url": "https://beta.com", "id": "id_beta"}, + {"token": "gamma_tok", "url": "https://gamma.com", "id": "id_gamma"}, ] for s in self.as_list: yield self._add_service(s["url"], s["token"], s["id"]) @@ -157,8 +126,14 @@ def setUp(self): self.store = TestTransactionStore(None, hs) def _add_service(self, url, as_token, id): - as_yaml = dict(url=url, as_token=as_token, hs_token="something", - id=id, sender_localpart="a_sender", namespaces={}) + as_yaml = dict( + url=url, + as_token=as_token, + hs_token="something", + id=id, + sender_localpart="a_sender", + namespaces={}, + ) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -168,21 +143,21 @@ def _set_state(self, id, state, txn=None): return self.db_pool.runQuery( "INSERT INTO application_services_state(as_id, state, last_txn) " "VALUES(?,?,?)", - (id, state, txn) + (id, state, txn), ) def _insert_txn(self, as_id, txn_id, events): return self.db_pool.runQuery( "INSERT INTO application_services_txns(as_id, txn_id, event_ids) " "VALUES(?,?,?)", - (as_id, txn_id, json.dumps([e.event_id for e in events])) + (as_id, txn_id, json.dumps([e.event_id for e in events])), ) def _set_last_txn(self, as_id, txn_id): return self.db_pool.runQuery( "INSERT INTO application_services_state(as_id, last_txn, state) " "VALUES(?,?,?)", - (as_id, txn_id, ApplicationServiceState.UP) + (as_id, txn_id, ApplicationServiceState.UP), ) @defer.inlineCallbacks @@ -193,24 +168,16 @@ def test_get_appservice_state_none(self): @defer.inlineCallbacks def test_get_appservice_state_up(self): - yield self._set_state( - self.as_list[0]["id"], ApplicationServiceState.UP - ) + yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.UP) service = Mock(id=self.as_list[0]["id"]) state = yield self.store.get_appservice_state(service) self.assertEquals(ApplicationServiceState.UP, state) @defer.inlineCallbacks def test_get_appservice_state_down(self): - yield self._set_state( - self.as_list[0]["id"], ApplicationServiceState.UP - ) - yield self._set_state( - self.as_list[1]["id"], ApplicationServiceState.DOWN - ) - yield self._set_state( - self.as_list[2]["id"], ApplicationServiceState.DOWN - ) + yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.UP) + yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.DOWN) + yield self._set_state(self.as_list[2]["id"], ApplicationServiceState.DOWN) service = Mock(id=self.as_list[1]["id"]) state = yield self.store.get_appservice_state(service) self.assertEquals(ApplicationServiceState.DOWN, state) @@ -225,34 +192,22 @@ def test_get_appservices_by_state_none(self): @defer.inlineCallbacks def test_set_appservices_state_down(self): service = Mock(id=self.as_list[1]["id"]) - 
yield self.store.set_appservice_state( - service, - ApplicationServiceState.DOWN - ) + yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN) rows = yield self.db_pool.runQuery( "SELECT as_id FROM application_services_state WHERE state=?", - (ApplicationServiceState.DOWN,) + (ApplicationServiceState.DOWN,), ) self.assertEquals(service.id, rows[0][0]) @defer.inlineCallbacks def test_set_appservices_state_multiple_up(self): service = Mock(id=self.as_list[1]["id"]) - yield self.store.set_appservice_state( - service, - ApplicationServiceState.UP - ) - yield self.store.set_appservice_state( - service, - ApplicationServiceState.DOWN - ) - yield self.store.set_appservice_state( - service, - ApplicationServiceState.UP - ) + yield self.store.set_appservice_state(service, ApplicationServiceState.UP) + yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN) + yield self.store.set_appservice_state(service, ApplicationServiceState.UP) rows = yield self.db_pool.runQuery( "SELECT as_id FROM application_services_state WHERE state=?", - (ApplicationServiceState.UP,) + (ApplicationServiceState.UP,), ) self.assertEquals(service.id, rows[0][0]) @@ -319,14 +274,13 @@ def test_complete_appservice_txn_first_txn(self): res = yield self.db_pool.runQuery( "SELECT last_txn FROM application_services_state WHERE as_id=?", - (service.id,) + (service.id,), ) self.assertEquals(1, len(res)) self.assertEquals(txn_id, res[0][0]) res = yield self.db_pool.runQuery( - "SELECT * FROM application_services_txns WHERE txn_id=?", - (txn_id,) + "SELECT * FROM application_services_txns WHERE txn_id=?", (txn_id,) ) self.assertEquals(0, len(res)) @@ -340,17 +294,15 @@ def test_complete_appservice_txn_existing_in_state_table(self): yield self.store.complete_appservice_txn(txn_id=txn_id, service=service) res = yield self.db_pool.runQuery( - "SELECT last_txn, state FROM application_services_state WHERE " - "as_id=?", - (service.id,) + "SELECT last_txn, state FROM application_services_state WHERE " "as_id=?", + (service.id,), ) self.assertEquals(1, len(res)) self.assertEquals(txn_id, res[0][0]) self.assertEquals(ApplicationServiceState.UP, res[0][1]) res = yield self.db_pool.runQuery( - "SELECT * FROM application_services_txns WHERE txn_id=?", - (txn_id,) + "SELECT * FROM application_services_txns WHERE txn_id=?", (txn_id,) ) self.assertEquals(0, len(res)) @@ -382,12 +334,8 @@ def test_get_oldest_unsent_txn(self): @defer.inlineCallbacks def test_get_appservices_by_state_single(self): - yield self._set_state( - self.as_list[0]["id"], ApplicationServiceState.DOWN - ) - yield self._set_state( - self.as_list[1]["id"], ApplicationServiceState.UP - ) + yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.DOWN) + yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.UP) services = yield self.store.get_appservices_by_state( ApplicationServiceState.DOWN @@ -397,18 +345,10 @@ def test_get_appservices_by_state_single(self): @defer.inlineCallbacks def test_get_appservices_by_state_multiple(self): - yield self._set_state( - self.as_list[0]["id"], ApplicationServiceState.DOWN - ) - yield self._set_state( - self.as_list[1]["id"], ApplicationServiceState.UP - ) - yield self._set_state( - self.as_list[2]["id"], ApplicationServiceState.DOWN - ) - yield self._set_state( - self.as_list[3]["id"], ApplicationServiceState.UP - ) + yield self._set_state(self.as_list[0]["id"], ApplicationServiceState.DOWN) + yield self._set_state(self.as_list[1]["id"], ApplicationServiceState.UP) + yield 
self._set_state(self.as_list[2]["id"], ApplicationServiceState.DOWN) + yield self._set_state(self.as_list[3]["id"], ApplicationServiceState.UP) services = yield self.store.get_appservices_by_state( ApplicationServiceState.DOWN @@ -416,20 +356,17 @@ def test_get_appservices_by_state_multiple(self): self.assertEquals(2, len(services)) self.assertEquals( set([self.as_list[2]["id"], self.as_list[0]["id"]]), - set([services[0].id, services[1].id]) + set([services[0].id, services[1].id]), ) # required for ApplicationServiceTransactionStoreTestCase tests -class TestTransactionStore(ApplicationServiceTransactionStore, - ApplicationServiceStore): - +class TestTransactionStore(ApplicationServiceTransactionStore, ApplicationServiceStore): def __init__(self, db_conn, hs): super(TestTransactionStore, self).__init__(db_conn, hs) class ApplicationServiceStoreConfigTestCase(unittest.TestCase): - def _write_config(self, suffix, **kwargs): vals = { "id": "id" + suffix, @@ -452,8 +389,7 @@ def test_unique_works(self): f2 = self._write_config(suffix="2") config = Mock( - app_service_config_files=[f1, f2], event_cache_size=1, - password_providers=[] + app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( config=config, @@ -470,8 +406,7 @@ def test_duplicate_ids(self): f2 = self._write_config(id="id", suffix="2") config = Mock( - app_service_config_files=[f1, f2], event_cache_size=1, - password_providers=[] + app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( config=config, @@ -494,8 +429,7 @@ def test_duplicate_as_tokens(self): f2 = self._write_config(as_token="as_token", suffix="2") config = Mock( - app_service_config_files=[f1, f2], event_cache_size=1, - password_providers=[] + app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( config=config, diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index ab1f31057224..b4f6baf4418a 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -7,7 +7,6 @@ class BackgroundUpdateTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() # type: synapse.server.HomeServer @@ -51,9 +50,7 @@ def update(progress, count): yield self.store.start_background_update("test_update", {"my_key": 1}) self.update_handler.reset_mock() - result = yield self.store.do_next_background_update( - duration_ms * desired_count - ) + result = yield self.store.do_next_background_update(duration_ms * desired_count) self.assertIsNotNone(result) self.update_handler.assert_called_once_with( {"my_key": 1}, self.store.DEFAULT_BACKGROUND_BATCH_SIZE @@ -67,18 +64,12 @@ def update(progress, count): self.update_handler.side_effect = update self.update_handler.reset_mock() - result = yield self.store.do_next_background_update( - duration_ms * desired_count - ) + result = yield self.store.do_next_background_update(duration_ms * desired_count) self.assertIsNotNone(result) - self.update_handler.assert_called_once_with( - {"my_key": 2}, desired_count - ) + self.update_handler.assert_called_once_with({"my_key": 2}, desired_count) # third step: we don't expect to be called any more self.update_handler.reset_mock() - result = yield self.store.do_next_background_update( - duration_ms * desired_count - ) + result = yield self.store.do_next_background_update(duration_ms * desired_count) 
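        # do_next_background_update returns a non-None result while there is
        # still an update to run, and None once every background update has
        # completed; a caller draining the queue might loop, as a rough sketch:
        #     while (yield self.store.do_next_background_update(100)) is not None:
        #         pass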
self.assertIsNone(result) self.assertFalse(self.update_handler.called) diff --git a/tests/storage/test_base.py b/tests/storage/test_base.py index 1d1234ee391e..7cb5f0e4cfcb 100644 --- a/tests/storage/test_base.py +++ b/tests/storage/test_base.py @@ -40,10 +40,12 @@ def setUp(self): def runInteraction(func, *args, **kwargs): return defer.succeed(func(self.mock_txn, *args, **kwargs)) + self.db_pool.runInteraction = runInteraction def runWithConnection(func, *args, **kwargs): return defer.succeed(func(self.mock_conn, *args, **kwargs)) + self.db_pool.runWithConnection = runWithConnection config = Mock() @@ -63,8 +65,7 @@ def test_insert_1col(self): self.mock_txn.rowcount = 1 yield self.datastore._simple_insert( - table="tablename", - values={"columname": "Value"} + table="tablename", values={"columname": "Value"} ) self.mock_txn.execute.assert_called_with( @@ -78,12 +79,11 @@ def test_insert_3cols(self): yield self.datastore._simple_insert( table="tablename", # Use OrderedDict() so we can assert on the SQL generated - values=OrderedDict([("colA", 1), ("colB", 2), ("colC", 3)]) + values=OrderedDict([("colA", 1), ("colB", 2), ("colC", 3)]), ) self.mock_txn.execute.assert_called_with( - "INSERT INTO tablename (colA, colB, colC) VALUES(?, ?, ?)", - (1, 2, 3,) + "INSERT INTO tablename (colA, colB, colC) VALUES(?, ?, ?)", (1, 2, 3) ) @defer.inlineCallbacks @@ -92,9 +92,7 @@ def test_select_one_1col(self): self.mock_txn.__iter__ = Mock(return_value=iter([("Value",)])) value = yield self.datastore._simple_select_one_onecol( - table="tablename", - keyvalues={"keycol": "TheKey"}, - retcol="retcol" + table="tablename", keyvalues={"keycol": "TheKey"}, retcol="retcol" ) self.assertEquals("Value", value) @@ -110,13 +108,12 @@ def test_select_one_3col(self): ret = yield self.datastore._simple_select_one( table="tablename", keyvalues={"keycol": "TheKey"}, - retcols=["colA", "colB", "colC"] + retcols=["colA", "colB", "colC"], ) self.assertEquals({"colA": 1, "colB": 2, "colC": 3}, ret) self.mock_txn.execute.assert_called_with( - "SELECT colA, colB, colC FROM tablename WHERE keycol = ?", - ["TheKey"] + "SELECT colA, colB, colC FROM tablename WHERE keycol = ?", ["TheKey"] ) @defer.inlineCallbacks @@ -128,7 +125,7 @@ def test_select_one_missing(self): table="tablename", keyvalues={"keycol": "Not here"}, retcols=["colA"], - allow_none=True + allow_none=True, ) self.assertFalse(ret) @@ -137,20 +134,15 @@ def test_select_one_missing(self): def test_select_list(self): self.mock_txn.rowcount = 3 self.mock_txn.__iter__ = Mock(return_value=iter([(1,), (2,), (3,)])) - self.mock_txn.description = ( - ("colA", None, None, None, None, None, None), - ) + self.mock_txn.description = (("colA", None, None, None, None, None, None),) ret = yield self.datastore._simple_select_list( - table="tablename", - keyvalues={"keycol": "A set"}, - retcols=["colA"], + table="tablename", keyvalues={"keycol": "A set"}, retcols=["colA"] ) self.assertEquals([{"colA": 1}, {"colA": 2}, {"colA": 3}], ret) self.mock_txn.execute.assert_called_with( - "SELECT colA FROM tablename WHERE keycol = ?", - ["A set"] + "SELECT colA FROM tablename WHERE keycol = ?", ["A set"] ) @defer.inlineCallbacks @@ -160,12 +152,12 @@ def test_update_one_1col(self): yield self.datastore._simple_update_one( table="tablename", keyvalues={"keycol": "TheKey"}, - updatevalues={"columnname": "New Value"} + updatevalues={"columnname": "New Value"}, ) self.mock_txn.execute.assert_called_with( "UPDATE tablename SET columnname = ? 
WHERE keycol = ?", - ["New Value", "TheKey"] + ["New Value", "TheKey"], ) @defer.inlineCallbacks @@ -175,13 +167,12 @@ def test_update_one_4cols(self): yield self.datastore._simple_update_one( table="tablename", keyvalues=OrderedDict([("colA", 1), ("colB", 2)]), - updatevalues=OrderedDict([("colC", 3), ("colD", 4)]) + updatevalues=OrderedDict([("colC", 3), ("colD", 4)]), ) self.mock_txn.execute.assert_called_with( - "UPDATE tablename SET colC = ?, colD = ? WHERE" - " colA = ? AND colB = ?", - [3, 4, 1, 2] + "UPDATE tablename SET colC = ?, colD = ? WHERE" " colA = ? AND colB = ?", + [3, 4, 1, 2], ) @defer.inlineCallbacks @@ -189,8 +180,7 @@ def test_delete_one(self): self.mock_txn.rowcount = 1 yield self.datastore._simple_delete_one( - table="tablename", - keyvalues={"keycol": "Go away"}, + table="tablename", keyvalues={"keycol": "Go away"} ) self.mock_txn.execute.assert_called_with( diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index 7a58c6eb24e5..ea00bbe84cd4 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -37,8 +37,7 @@ def test_insert_new_client_ip(self): self.clock.now = 12345678 user_id = "@user:id" yield self.store.insert_client_ip( - user_id, - "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) result = yield self.store.get_last_client_ip_by_device(user_id, "device_id") @@ -53,7 +52,7 @@ def test_insert_new_client_ip(self): "user_agent": "user_agent", "last_seen": 12345678000, }, - r + r, ) @defer.inlineCallbacks @@ -62,7 +61,7 @@ def test_disabled_monthly_active_user(self): self.hs.config.max_mau_value = 50 user_id = "@user:server" yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @@ -78,7 +77,7 @@ def test_adding_monthly_active_user_when_full(self): return_value=defer.succeed(lots_of_users) ) yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) active = yield self.store._user_last_seen_monthly_active(user_id) self.assertFalse(active) @@ -92,7 +91,7 @@ def test_adding_monthly_active_user_when_space(self): self.assertFalse(active) yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) @@ -107,10 +106,10 @@ def test_updating_monthly_active_user_when_space(self): self.assertFalse(active) yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id", + user_id, "access_token", "ip", "user_agent", "device_id" ) active = yield self.store._user_last_seen_monthly_active(user_id) self.assertTrue(active) diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index a54cc6bc32b2..63bc42d9e096 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -34,62 +34,58 @@ def setUp(self): @defer.inlineCallbacks def test_store_new_device(self): - yield self.store.store_device( - "user_id", "device_id", "display_name" - ) + yield 
self.store.store_device("user_id", "device_id", "display_name") res = yield self.store.get_device("user_id", "device_id") - self.assertDictContainsSubset({ - "user_id": "user_id", - "device_id": "device_id", - "display_name": "display_name", - }, res) + self.assertDictContainsSubset( + { + "user_id": "user_id", + "device_id": "device_id", + "display_name": "display_name", + }, + res, + ) @defer.inlineCallbacks def test_get_devices_by_user(self): - yield self.store.store_device( - "user_id", "device1", "display_name 1" - ) - yield self.store.store_device( - "user_id", "device2", "display_name 2" - ) - yield self.store.store_device( - "user_id2", "device3", "display_name 3" - ) + yield self.store.store_device("user_id", "device1", "display_name 1") + yield self.store.store_device("user_id", "device2", "display_name 2") + yield self.store.store_device("user_id2", "device3", "display_name 3") res = yield self.store.get_devices_by_user("user_id") self.assertEqual(2, len(res.keys())) - self.assertDictContainsSubset({ - "user_id": "user_id", - "device_id": "device1", - "display_name": "display_name 1", - }, res["device1"]) - self.assertDictContainsSubset({ - "user_id": "user_id", - "device_id": "device2", - "display_name": "display_name 2", - }, res["device2"]) + self.assertDictContainsSubset( + { + "user_id": "user_id", + "device_id": "device1", + "display_name": "display_name 1", + }, + res["device1"], + ) + self.assertDictContainsSubset( + { + "user_id": "user_id", + "device_id": "device2", + "display_name": "display_name 2", + }, + res["device2"], + ) @defer.inlineCallbacks def test_update_device(self): - yield self.store.store_device( - "user_id", "device_id", "display_name 1" - ) + yield self.store.store_device("user_id", "device_id", "display_name 1") res = yield self.store.get_device("user_id", "device_id") self.assertEqual("display_name 1", res["display_name"]) # do a no-op first - yield self.store.update_device( - "user_id", "device_id", - ) + yield self.store.update_device("user_id", "device_id") res = yield self.store.get_device("user_id", "device_id") self.assertEqual("display_name 1", res["display_name"]) # do the update yield self.store.update_device( - "user_id", "device_id", - new_display_name="display_name 2", + "user_id", "device_id", new_display_name="display_name 2" ) # check it worked @@ -100,7 +96,6 @@ def test_update_device(self): def test_update_unknown_device(self): with self.assertRaises(synapse.api.errors.StoreError) as cm: yield self.store.update_device( - "user_id", "unknown_device_id", - new_display_name="display_name 2", + "user_id", "unknown_device_id", new_display_name="display_name 2" ) self.assertEqual(404, cm.exception.code) diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py index 129ebaf34389..9a8ba2fcfe29 100644 --- a/tests/storage/test_directory.py +++ b/tests/storage/test_directory.py @@ -24,7 +24,6 @@ class DirectoryStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() @@ -37,38 +36,29 @@ def setUp(self): @defer.inlineCallbacks def test_room_to_alias(self): yield self.store.create_room_alias_association( - room_alias=self.alias, - room_id=self.room.to_string(), - servers=["test"], + room_alias=self.alias, room_id=self.room.to_string(), servers=["test"] ) self.assertEquals( ["#my-room:test"], - (yield self.store.get_aliases_for_room(self.room.to_string())) + (yield self.store.get_aliases_for_room(self.room.to_string())), ) @defer.inlineCallbacks def 
test_alias_to_room(self): yield self.store.create_room_alias_association( - room_alias=self.alias, - room_id=self.room.to_string(), - servers=["test"], + room_alias=self.alias, room_id=self.room.to_string(), servers=["test"] ) self.assertObjectHasAttributes( - { - "room_id": self.room.to_string(), - "servers": ["test"], - }, - (yield self.store.get_association_from_room_alias(self.alias)) + {"room_id": self.room.to_string(), "servers": ["test"]}, + (yield self.store.get_association_from_room_alias(self.alias)), ) @defer.inlineCallbacks def test_delete_alias(self): yield self.store.create_room_alias_association( - room_alias=self.alias, - room_id=self.room.to_string(), - servers=["test"], + room_alias=self.alias, room_id=self.room.to_string(), servers=["test"] ) room_id = yield self.store.delete_room_alias(self.alias) diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py index 84ce492a2cf5..d45c775c2d66 100644 --- a/tests/storage/test_end_to_end_keys.py +++ b/tests/storage/test_end_to_end_keys.py @@ -35,70 +35,49 @@ def test_key_without_device_name(self): now = 1470174257070 json = {"key": "value"} - yield self.store.store_device( - "user", "device", None - ) + yield self.store.store_device("user", "device", None) - yield self.store.set_e2e_device_keys( - "user", "device", now, json) + yield self.store.set_e2e_device_keys("user", "device", now, json) res = yield self.store.get_e2e_device_keys((("user", "device"),)) self.assertIn("user", res) self.assertIn("device", res["user"]) dev = res["user"]["device"] - self.assertDictContainsSubset({ - "keys": json, - "device_display_name": None, - }, dev) + self.assertDictContainsSubset({"keys": json, "device_display_name": None}, dev) @defer.inlineCallbacks def test_get_key_with_device_name(self): now = 1470174257070 json = {"key": "value"} - yield self.store.set_e2e_device_keys( - "user", "device", now, json) - yield self.store.store_device( - "user", "device", "display_name" - ) + yield self.store.set_e2e_device_keys("user", "device", now, json) + yield self.store.store_device("user", "device", "display_name") res = yield self.store.get_e2e_device_keys((("user", "device"),)) self.assertIn("user", res) self.assertIn("device", res["user"]) dev = res["user"]["device"] - self.assertDictContainsSubset({ - "keys": json, - "device_display_name": "display_name", - }, dev) + self.assertDictContainsSubset( + {"keys": json, "device_display_name": "display_name"}, dev + ) @defer.inlineCallbacks def test_multiple_devices(self): now = 1470174257070 - yield self.store.store_device( - "user1", "device1", None - ) - yield self.store.store_device( - "user1", "device2", None - ) - yield self.store.store_device( - "user2", "device1", None - ) - yield self.store.store_device( - "user2", "device2", None - ) + yield self.store.store_device("user1", "device1", None) + yield self.store.store_device("user1", "device2", None) + yield self.store.store_device("user2", "device1", None) + yield self.store.store_device("user2", "device2", None) - yield self.store.set_e2e_device_keys( - "user1", "device1", now, 'json11') - yield self.store.set_e2e_device_keys( - "user1", "device2", now, 'json12') - yield self.store.set_e2e_device_keys( - "user2", "device1", now, 'json21') - yield self.store.set_e2e_device_keys( - "user2", "device2", now, 'json22') - - res = yield self.store.get_e2e_device_keys((("user1", "device1"), - ("user2", "device2"))) + yield self.store.set_e2e_device_keys("user1", "device1", now, 'json11') + yield 
self.store.set_e2e_device_keys("user1", "device2", now, 'json12') + yield self.store.set_e2e_device_keys("user2", "device1", now, 'json21') + yield self.store.set_e2e_device_keys("user2", "device2", now, 'json22') + + res = yield self.store.get_e2e_device_keys( + (("user1", "device1"), ("user2", "device2")) + ) self.assertIn("user1", res) self.assertIn("device1", res["user1"]) self.assertNotIn("device2", res["user1"]) diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 69412c5aadd8..66eb11958107 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -33,23 +33,32 @@ def test_get_prev_events_for_room(self): def insert_event(txn, i): event_id = '$event_%i:local' % i - txn.execute(( - "INSERT INTO events (" - " room_id, event_id, type, depth, topological_ordering," - " content, processed, outlier) " - "VALUES (?, ?, 'm.test', ?, ?, 'test', ?, ?)" - ), (room_id, event_id, i, i, True, False)) + txn.execute( + ( + "INSERT INTO events (" + " room_id, event_id, type, depth, topological_ordering," + " content, processed, outlier) " + "VALUES (?, ?, 'm.test', ?, ?, 'test', ?, ?)" + ), + (room_id, event_id, i, i, True, False), + ) - txn.execute(( - 'INSERT INTO event_forward_extremities (room_id, event_id) ' - 'VALUES (?, ?)' - ), (room_id, event_id)) + txn.execute( + ( + 'INSERT INTO event_forward_extremities (room_id, event_id) ' + 'VALUES (?, ?)' + ), + (room_id, event_id), + ) - txn.execute(( - 'INSERT INTO event_reference_hashes ' - '(event_id, algorithm, hash) ' - "VALUES (?, 'sha256', ?)" - ), (event_id, b'ffff')) + txn.execute( + ( + 'INSERT INTO event_reference_hashes ' + '(event_id, algorithm, hash) ' + "VALUES (?, 'sha256', ?)" + ), + (event_id, b'ffff'), + ) for i in range(0, 11): yield self.store.runInteraction("insert", insert_event, i) diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 8430fc7ba6a9..5e87b4530df8 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -24,12 +24,13 @@ PlAIN_NOTIF = ["notify", {"set_tweak": "highlight", "value": False}] HIGHLIGHT = [ - "notify", {"set_tweak": "sound", "value": "default"}, {"set_tweak": "highlight"} + "notify", + {"set_tweak": "sound", "value": "default"}, + {"set_tweak": "highlight"}, ] class EventPushActionsStoreTestCase(tests.unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield tests.utils.setup_test_homeserver() @@ -55,12 +56,11 @@ def test_count_aggregation(self): @defer.inlineCallbacks def _assert_counts(noitf_count, highlight_count): counts = yield self.store.runInteraction( - "", self.store._get_unread_counts_by_pos_txn, - room_id, user_id, 0 + "", self.store._get_unread_counts_by_pos_txn, room_id, user_id, 0 ) self.assertEquals( counts, - {"notify_count": noitf_count, "highlight_count": highlight_count} + {"notify_count": noitf_count, "highlight_count": highlight_count}, ) @defer.inlineCallbacks @@ -72,11 +72,13 @@ def _inject_actions(stream, action): event.depth = stream yield self.store.add_push_actions_to_staging( - event.event_id, {user_id: action}, + event.event_id, {user_id: action} ) yield self.store.runInteraction( - "", self.store._set_push_actions_for_event_and_users_txn, - [(event, None)], [(event, None)], + "", + self.store._set_push_actions_for_event_and_users_txn, + [(event, None)], + [(event, None)], ) def _rotate(stream): @@ -86,8 +88,11 @@ def _rotate(stream): def _mark_read(stream, depth): return 
self.store.runInteraction( - "", self.store._remove_old_push_actions_before_txn, - room_id, user_id, stream + "", + self.store._remove_old_push_actions_before_txn, + room_id, + user_id, + stream, ) yield _assert_counts(0, 0) @@ -112,9 +117,7 @@ def _mark_read(stream, depth): yield _rotate(7) yield self.store._simple_delete( - table="event_push_actions", - keyvalues={"1": 1}, - desc="", + table="event_push_actions", keyvalues={"1": 1}, desc="" ) yield _assert_counts(1, 0) @@ -132,18 +135,21 @@ def _mark_read(stream, depth): @defer.inlineCallbacks def test_find_first_stream_ordering_after_ts(self): def add_event(so, ts): - return self.store._simple_insert("events", { - "stream_ordering": so, - "received_ts": ts, - "event_id": "event%i" % so, - "type": "", - "room_id": "", - "content": "", - "processed": True, - "outlier": False, - "topological_ordering": 0, - "depth": 0, - }) + return self.store._simple_insert( + "events", + { + "stream_ordering": so, + "received_ts": ts, + "event_id": "event%i" % so, + "type": "", + "room_id": "", + "content": "", + "processed": True, + "outlier": False, + "topological_ordering": 0, + "depth": 0, + }, + ) # start with the base case where there are no events in the table r = yield self.store.find_first_stream_ordering_after_ts(11) @@ -160,31 +166,27 @@ def add_event(so, ts): # add a bunch of dummy events to the events table for (stream_ordering, ts) in ( - (3, 110), - (4, 120), - (5, 120), - (10, 130), - (20, 140), + (3, 110), + (4, 120), + (5, 120), + (10, 130), + (20, 140), ): yield add_event(stream_ordering, ts) r = yield self.store.find_first_stream_ordering_after_ts(110) - self.assertEqual(r, 3, - "First event after 110ms should be 3, was %i" % r) + self.assertEqual(r, 3, "First event after 110ms should be 3, was %i" % r) # 4 and 5 are both after 120: we want 4 rather than 5 r = yield self.store.find_first_stream_ordering_after_ts(120) - self.assertEqual(r, 4, - "First event after 120ms should be 4, was %i" % r) + self.assertEqual(r, 4, "First event after 120ms should be 4, was %i" % r) r = yield self.store.find_first_stream_ordering_after_ts(129) - self.assertEqual(r, 10, - "First event after 129ms should be 10, was %i" % r) + self.assertEqual(r, 10, "First event after 129ms should be 10, was %i" % r) # check we can get the last event r = yield self.store.find_first_stream_ordering_after_ts(140) - self.assertEqual(r, 20, - "First event after 14ms should be 20, was %i" % r) + self.assertEqual(r, 20, "First event after 14ms should be 20, was %i" % r) # off the end r = yield self.store.find_first_stream_ordering_after_ts(160) diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index 3a3d00278232..ad0a55b324b0 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -39,15 +39,12 @@ def test_get_server_verify_keys(self): key2 = signedjson.key.decode_verify_key_base64( "ed25519", "key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" ) - yield self.store.store_server_verify_key( - "server1", "from_server", 0, key1 - ) - yield self.store.store_server_verify_key( - "server1", "from_server", 0, key2 - ) + yield self.store.store_server_verify_key("server1", "from_server", 0, key1) + yield self.store.store_server_verify_key("server1", "from_server", 0, key2) res = yield self.store.get_server_verify_keys( - "server1", ["ed25519:key1", "ed25519:key2", "ed25519:key3"]) + "server1", ["ed25519:key1", "ed25519:key2", "ed25519:key3"] + ) self.assertEqual(len(res.keys()), 2) self.assertEqual(res["ed25519:key1"].version, "key1") diff 
--git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index cbd480cd4215..22b1072d9f57 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -40,19 +40,13 @@ def test_initialise_reserved_users(self): user2_email = "user2@matrix.org" threepids = [ {'medium': 'email', 'address': user1_email}, - {'medium': 'email', 'address': user2_email} + {'medium': 'email', 'address': user2_email}, ] user_num = len(threepids) - yield self.store.register( - user_id=user1, - token="123", - password_hash=None) + yield self.store.register(user_id=user1, token="123", password_hash=None) - yield self.store.register( - user_id=user2, - token="456", - password_hash=None) + yield self.store.register(user_id=user2, token="456", password_hash=None) now = int(self.hs.get_clock().time_msec()) yield self.store.user_add_threepid(user1, "email", user1_email, now, now) diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py index 3276b3950469..12c540dfab40 100644 --- a/tests/storage/test_presence.py +++ b/tests/storage/test_presence.py @@ -24,7 +24,6 @@ class PresenceStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver(clock=MockClock()) @@ -38,16 +37,19 @@ def setUp(self): def test_presence_list(self): self.assertEquals( [], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart + ) + ), ) self.assertEquals( [], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - accepted=True, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, accepted=True + ) + ), ) yield self.store.add_presence_list_pending( @@ -57,16 +59,19 @@ def test_presence_list(self): self.assertEquals( [{"observed_user_id": "@banana:test", "accepted": 0}], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart + ) + ), ) self.assertEquals( [], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - accepted=True, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, accepted=True + ) + ), ) yield self.store.set_presence_list_accepted( @@ -76,16 +81,19 @@ def test_presence_list(self): self.assertEquals( [{"observed_user_id": "@banana:test", "accepted": 1}], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart + ) + ), ) self.assertEquals( [{"observed_user_id": "@banana:test", "accepted": 1}], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - accepted=True, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart, accepted=True + ) + ), ) yield self.store.del_presence_list( @@ -95,14 +103,17 @@ def test_presence_list(self): self.assertEquals( [], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - )) + ( + yield self.store.get_presence_list( + observer_localpart=self.u_apple.localpart + ) + ), ) self.assertEquals( [], - (yield self.store.get_presence_list( - observer_localpart=self.u_apple.localpart, - accepted=True, - )) + ( + yield self.store.get_presence_list( + 
observer_localpart=self.u_apple.localpart, accepted=True + ) + ), ) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index 2c95e5e95a81..5acbc8be0c57 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -24,7 +24,6 @@ class ProfileStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() @@ -35,24 +34,17 @@ def setUp(self): @defer.inlineCallbacks def test_displayname(self): - yield self.store.create_profile( - self.u_frank.localpart - ) + yield self.store.create_profile(self.u_frank.localpart) - yield self.store.set_profile_displayname( - self.u_frank.localpart, "Frank" - ) + yield self.store.set_profile_displayname(self.u_frank.localpart, "Frank") self.assertEquals( - "Frank", - (yield self.store.get_profile_displayname(self.u_frank.localpart)) + "Frank", (yield self.store.get_profile_displayname(self.u_frank.localpart)) ) @defer.inlineCallbacks def test_avatar_url(self): - yield self.store.create_profile( - self.u_frank.localpart - ) + yield self.store.create_profile(self.u_frank.localpart) yield self.store.set_profile_avatar_url( self.u_frank.localpart, "http://my.site/here" @@ -60,5 +52,5 @@ def test_avatar_url(self): self.assertEquals( "http://my.site/here", - (yield self.store.get_profile_avatar_url(self.u_frank.localpart)) + (yield self.store.get_profile_avatar_url(self.u_frank.localpart)), ) diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 475ec900c4a2..85ce61e841cf 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -26,12 +26,10 @@ class RedactionTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver( - resource_for_federation=Mock(), - http_client=None, + resource_for_federation=Mock(), http_client=None ) self.store = hs.get_datastore() @@ -46,17 +44,20 @@ def setUp(self): self.depth = 1 @defer.inlineCallbacks - def inject_room_member(self, room, user, membership, replaces_state=None, - extra_content={}): + def inject_room_member( + self, room, user, membership, replaces_state=None, extra_content={} + ): content = {"membership": membership} content.update(extra_content) - builder = self.event_builder_factory.new({ - "type": EventTypes.Member, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": content, - }) + builder = self.event_builder_factory.new( + { + "type": EventTypes.Member, + "sender": user.to_string(), + "state_key": user.to_string(), + "room_id": room.to_string(), + "content": content, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -70,13 +71,15 @@ def inject_room_member(self, room, user, membership, replaces_state=None, def inject_message(self, room, user, body): self.depth += 1 - builder = self.event_builder_factory.new({ - "type": EventTypes.Message, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": {"body": body, "msgtype": u"message"}, - }) + builder = self.event_builder_factory.new( + { + "type": EventTypes.Message, + "sender": user.to_string(), + "state_key": user.to_string(), + "room_id": room.to_string(), + "content": {"body": body, "msgtype": u"message"}, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -88,14 +91,16 @@ def inject_message(self, room, user, body): @defer.inlineCallbacks def inject_redaction(self, room, event_id, user, 
reason): - builder = self.event_builder_factory.new({ - "type": EventTypes.Redaction, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": {"reason": reason}, - "redacts": event_id, - }) + builder = self.event_builder_factory.new( + { + "type": EventTypes.Redaction, + "sender": user.to_string(), + "state_key": user.to_string(), + "room_id": room.to_string(), + "content": {"reason": reason}, + "redacts": event_id, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -105,9 +110,7 @@ def inject_redaction(self, room, event_id, user, reason): @defer.inlineCallbacks def test_redact(self): - yield self.inject_room_member( - self.room1, self.u_alice, Membership.JOIN - ) + yield self.inject_room_member(self.room1, self.u_alice, Membership.JOIN) msg_event = yield self.inject_message(self.room1, self.u_alice, u"t") @@ -157,13 +160,10 @@ def test_redact(self): @defer.inlineCallbacks def test_redact_join(self): - yield self.inject_room_member( - self.room1, self.u_alice, Membership.JOIN - ) + yield self.inject_room_member(self.room1, self.u_alice, Membership.JOIN) msg_event = yield self.inject_room_member( - self.room1, self.u_bob, Membership.JOIN, - extra_content={"blue": "red"}, + self.room1, self.u_bob, Membership.JOIN, extra_content={"blue": "red"} ) event = yield self.store.get_event(msg_event.event_id) diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index 7821ea3fa306..bd96896bb35e 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -21,7 +21,6 @@ class RegistrationStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() @@ -30,10 +29,7 @@ def setUp(self): self.store = hs.get_datastore() self.user_id = "@my-user:test" - self.tokens = [ - "AbCdEfGhIjKlMnOpQrStUvWxYz", - "BcDeFgHiJkLmNoPqRsTuVwXyZa" - ] + self.tokens = ["AbCdEfGhIjKlMnOpQrStUvWxYz", "BcDeFgHiJkLmNoPqRsTuVwXyZa"] self.pwhash = "{xx1}123456789" self.device_id = "akgjhdjklgshg" @@ -51,34 +47,26 @@ def test_register(self): "consent_server_notice_sent": None, "appservice_id": None, }, - (yield self.store.get_user_by_id(self.user_id)) + (yield self.store.get_user_by_id(self.user_id)), ) result = yield self.store.get_user_by_access_token(self.tokens[0]) - self.assertDictContainsSubset( - { - "name": self.user_id, - }, - result - ) + self.assertDictContainsSubset({"name": self.user_id}, result) self.assertTrue("token_id" in result) @defer.inlineCallbacks def test_add_tokens(self): yield self.store.register(self.user_id, self.tokens[0], self.pwhash) - yield self.store.add_access_token_to_user(self.user_id, self.tokens[1], - self.device_id) + yield self.store.add_access_token_to_user( + self.user_id, self.tokens[1], self.device_id + ) result = yield self.store.get_user_by_access_token(self.tokens[1]) self.assertDictContainsSubset( - { - "name": self.user_id, - "device_id": self.device_id, - }, - result + {"name": self.user_id, "device_id": self.device_id}, result ) self.assertTrue("token_id" in result) @@ -87,12 +75,13 @@ def test_add_tokens(self): def test_user_delete_access_tokens(self): # add some tokens yield self.store.register(self.user_id, self.tokens[0], self.pwhash) - yield self.store.add_access_token_to_user(self.user_id, self.tokens[1], - self.device_id) + yield self.store.add_access_token_to_user( + self.user_id, self.tokens[1], self.device_id + ) # now delete some yield 
self.store.user_delete_access_tokens( - self.user_id, device_id=self.device_id, + self.user_id, device_id=self.device_id ) # check they were deleted @@ -107,8 +96,7 @@ def test_user_delete_access_tokens(self): yield self.store.user_delete_access_tokens(self.user_id) user = yield self.store.get_user_by_access_token(self.tokens[0]) - self.assertIsNone(user, - "access token was not deleted without device_id") + self.assertIsNone(user, "access token was not deleted without device_id") class TokenGenerator: @@ -117,4 +105,4 @@ def __init__(self): def generate(self, user_id): self._last_issued_token += 1 - return u"%s-%d" % (user_id, self._last_issued_token,) + return u"%s-%d" % (user_id, self._last_issued_token) diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index ae8ae94b6d63..84d49b55c1cb 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -24,7 +24,6 @@ class RoomStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver() @@ -40,7 +39,7 @@ def setUp(self): yield self.store.store_room( self.room.to_string(), room_creator_user_id=self.u_creator.to_string(), - is_public=True + is_public=True, ) @defer.inlineCallbacks @@ -49,14 +48,13 @@ def test_get_room(self): { "room_id": self.room.to_string(), "creator": self.u_creator.to_string(), - "is_public": True + "is_public": True, }, - (yield self.store.get_room(self.room.to_string())) + (yield self.store.get_room(self.room.to_string())), ) class RoomEventsStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = setup_test_homeserver() @@ -69,18 +67,13 @@ def setUp(self): self.room = RoomID.from_string("!abcde:test") yield self.store.store_room( - self.room.to_string(), - room_creator_user_id="@creator:text", - is_public=True + self.room.to_string(), room_creator_user_id="@creator:text", is_public=True ) @defer.inlineCallbacks def inject_room_event(self, **kwargs): yield self.store.persist_event( - self.event_factory.create_event( - room_id=self.room.to_string(), - **kwargs - ) + self.event_factory.create_event(room_id=self.room.to_string(), **kwargs) ) @defer.inlineCallbacks @@ -88,22 +81,15 @@ def STALE_test_room_name(self): name = u"A-Room-Name" yield self.inject_room_event( - etype=EventTypes.Name, - name=name, - content={"name": name}, - depth=1, + etype=EventTypes.Name, name=name, content={"name": name}, depth=1 ) - state = yield self.store.get_current_state( - room_id=self.room.to_string() - ) + state = yield self.store.get_current_state(room_id=self.room.to_string()) self.assertEquals(1, len(state)) self.assertObjectHasAttributes( - {"type": "m.room.name", - "room_id": self.room.to_string(), - "name": name}, - state[0] + {"type": "m.room.name", "room_id": self.room.to_string(), "name": name}, + state[0], ) @defer.inlineCallbacks @@ -111,22 +97,15 @@ def STALE_test_room_topic(self): topic = u"A place for things" yield self.inject_room_event( - etype=EventTypes.Topic, - topic=topic, - content={"topic": topic}, - depth=1, + etype=EventTypes.Topic, topic=topic, content={"topic": topic}, depth=1 ) - state = yield self.store.get_current_state( - room_id=self.room.to_string() - ) + state = yield self.store.get_current_state(room_id=self.room.to_string()) self.assertEquals(1, len(state)) self.assertObjectHasAttributes( - {"type": "m.room.topic", - "room_id": self.room.to_string(), - "topic": topic}, - state[0] + {"type": "m.room.topic", "room_id": self.room.to_string(), "topic": topic}, + state[0], ) # Not testing the various 
'level' methods for now because there's lots diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index c5fd54f67e88..0d9908926a94 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -26,12 +26,10 @@ class RoomMemberStoreTestCase(unittest.TestCase): - @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver( - resource_for_federation=Mock(), - http_client=None, + resource_for_federation=Mock(), http_client=None ) # We can't test the RoomMemberStore on its own without the other event # storage logic @@ -49,13 +47,15 @@ def setUp(self): @defer.inlineCallbacks def inject_room_member(self, room, user, membership, replaces_state=None): - builder = self.event_builder_factory.new({ - "type": EventTypes.Member, - "sender": user.to_string(), - "state_key": user.to_string(), - "room_id": room.to_string(), - "content": {"membership": membership}, - }) + builder = self.event_builder_factory.new( + { + "type": EventTypes.Member, + "sender": user.to_string(), + "state_key": user.to_string(), + "room_id": room.to_string(), + "content": {"membership": membership}, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -71,9 +71,12 @@ def test_one_member(self): self.assertEquals( [self.room.to_string()], - [m.room_id for m in ( - yield self.store.get_rooms_for_user_where_membership_is( - self.u_alice.to_string(), [Membership.JOIN] + [ + m.room_id + for m in ( + yield self.store.get_rooms_for_user_where_membership_is( + self.u_alice.to_string(), [Membership.JOIN] + ) ) - )] + ], ) diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index f7871cd42611..ed5b41644a5c 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -45,20 +45,20 @@ def setUp(self): self.room = RoomID.from_string("!abc123:test") yield self.store.store_room( - self.room.to_string(), - room_creator_user_id="@creator:text", - is_public=True + self.room.to_string(), room_creator_user_id="@creator:text", is_public=True ) @defer.inlineCallbacks def inject_state_event(self, room, sender, typ, state_key, content): - builder = self.event_builder_factory.new({ - "type": typ, - "sender": sender.to_string(), - "state_key": state_key, - "room_id": room.to_string(), - "content": content, - }) + builder = self.event_builder_factory.new( + { + "type": typ, + "sender": sender.to_string(), + "state_key": state_key, + "room_id": room.to_string(), + "content": content, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -80,27 +80,31 @@ def test_get_state_for_event(self): # this defaults to a linear DAG as each new injection defaults to whatever # forward extremities are currently in the DB for this room. 
e1 = yield self.inject_state_event( - self.room, self.u_alice, EventTypes.Create, '', {}, + self.room, self.u_alice, EventTypes.Create, '', {} ) e2 = yield self.inject_state_event( - self.room, self.u_alice, EventTypes.Name, '', { - "name": "test room" - }, + self.room, self.u_alice, EventTypes.Name, '', {"name": "test room"} ) e3 = yield self.inject_state_event( - self.room, self.u_alice, EventTypes.Member, self.u_alice.to_string(), { - "membership": Membership.JOIN - }, + self.room, + self.u_alice, + EventTypes.Member, + self.u_alice.to_string(), + {"membership": Membership.JOIN}, ) e4 = yield self.inject_state_event( - self.room, self.u_bob, EventTypes.Member, self.u_bob.to_string(), { - "membership": Membership.JOIN - }, + self.room, + self.u_bob, + EventTypes.Member, + self.u_bob.to_string(), + {"membership": Membership.JOIN}, ) e5 = yield self.inject_state_event( - self.room, self.u_bob, EventTypes.Member, self.u_bob.to_string(), { - "membership": Membership.LEAVE - }, + self.room, + self.u_bob, + EventTypes.Member, + self.u_bob.to_string(), + {"membership": Membership.LEAVE}, ) # check we get the full state as of the final event @@ -110,65 +114,66 @@ def test_get_state_for_event(self): self.assertIsNotNone(e4) - self.assertStateMapEqual({ - (e1.type, e1.state_key): e1, - (e2.type, e2.state_key): e2, - (e3.type, e3.state_key): e3, - # e4 is overwritten by e5 - (e5.type, e5.state_key): e5, - }, state) + self.assertStateMapEqual( + { + (e1.type, e1.state_key): e1, + (e2.type, e2.state_key): e2, + (e3.type, e3.state_key): e3, + # e4 is overwritten by e5 + (e5.type, e5.state_key): e5, + }, + state, + ) # check we can filter to the m.room.name event (with a '' state key) state = yield self.store.get_state_for_event( e5.event_id, [(EventTypes.Name, '')], filtered_types=None ) - self.assertStateMapEqual({ - (e2.type, e2.state_key): e2, - }, state) + self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state) # check we can filter to the m.room.name event (with a wildcard None state key) state = yield self.store.get_state_for_event( e5.event_id, [(EventTypes.Name, None)], filtered_types=None ) - self.assertStateMapEqual({ - (e2.type, e2.state_key): e2, - }, state) + self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state) # check we can grab the m.room.member events (with a wildcard None state key) state = yield self.store.get_state_for_event( e5.event_id, [(EventTypes.Member, None)], filtered_types=None ) - self.assertStateMapEqual({ - (e3.type, e3.state_key): e3, - (e5.type, e5.state_key): e5, - }, state) + self.assertStateMapEqual( + {(e3.type, e3.state_key): e3, (e5.type, e5.state_key): e5}, state + ) # check we can use filter_types to grab a specific room member # without filtering out the other event types state = yield self.store.get_state_for_event( - e5.event_id, [(EventTypes.Member, self.u_alice.to_string())], + e5.event_id, + [(EventTypes.Member, self.u_alice.to_string())], filtered_types=[EventTypes.Member], ) - self.assertStateMapEqual({ - (e1.type, e1.state_key): e1, - (e2.type, e2.state_key): e2, - (e3.type, e3.state_key): e3, - }, state) + self.assertStateMapEqual( + { + (e1.type, e1.state_key): e1, + (e2.type, e2.state_key): e2, + (e3.type, e3.state_key): e3, + }, + state, + ) # check that types=[], filtered_types=[EventTypes.Member] # doesn't return all members state = yield self.store.get_state_for_event( - e5.event_id, [], filtered_types=[EventTypes.Member], + e5.event_id, [], filtered_types=[EventTypes.Member] ) - self.assertStateMapEqual({ - (e1.type, 
e1.state_key): e1, - (e2.type, e2.state_key): e2, - }, state) + self.assertStateMapEqual( + {(e1.type, e1.state_key): e1, (e2.type, e2.state_key): e2}, state + ) ####################################################### # _get_some_state_from_cache tests against a full cache @@ -184,10 +189,13 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, True) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - (e2.type, e2.state_key): e2.event_id, - }, state_dict) + self.assertDictEqual( + { + (e1.type, e1.state_key): e1.event_id, + (e2.type, e2.state_key): e2.event_id, + }, + state_dict, + ) # test _get_some_state_from_cache correctly filters in members with wildcard types (state_dict, is_all) = yield self.store._get_some_state_from_cache( @@ -195,25 +203,33 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, True) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - (e2.type, e2.state_key): e2.event_id, - (e3.type, e3.state_key): e3.event_id, - # e4 is overwritten by e5 - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual( + { + (e1.type, e1.state_key): e1.event_id, + (e2.type, e2.state_key): e2.event_id, + (e3.type, e3.state_key): e3.event_id, + # e4 is overwritten by e5 + (e5.type, e5.state_key): e5.event_id, + }, + state_dict, + ) # test _get_some_state_from_cache correctly filters in members with specific types (state_dict, is_all) = yield self.store._get_some_state_from_cache( - group, [(EventTypes.Member, e5.state_key)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, e5.state_key)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - (e2.type, e2.state_key): e2.event_id, - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual( + { + (e1.type, e1.state_key): e1.event_id, + (e2.type, e2.state_key): e2.event_id, + (e5.type, e5.state_key): e5.event_id, + }, + state_dict, + ) # test _get_some_state_from_cache correctly filters in members with specific types # and no filtered_types @@ -222,24 +238,27 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, True) - self.assertDictEqual({ - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict) ####################################################### # deliberately remove e2 (room name) from the _state_group_cache - (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(group) + (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get( + group + ) self.assertEqual(is_all, True) self.assertEqual(known_absent, set()) - self.assertDictEqual(state_dict_ids, { - (e1.type, e1.state_key): e1.event_id, - (e2.type, e2.state_key): e2.event_id, - (e3.type, e3.state_key): e3.event_id, - # e4 is overwritten by e5 - (e5.type, e5.state_key): e5.event_id, - }) + self.assertDictEqual( + state_dict_ids, + { + (e1.type, e1.state_key): e1.event_id, + (e2.type, e2.state_key): e2.event_id, + (e3.type, e3.state_key): e3.event_id, + # e4 is overwritten by e5 + (e5.type, e5.state_key): e5.event_id, + }, + ) state_dict_ids.pop((e2.type, e2.state_key)) self.store._state_group_cache.invalidate(group) @@ -252,22 +271,32 @@ def test_get_state_for_event(self): (e1.type, e1.state_key), (e3.type, e3.state_key), (e5.type, e5.state_key), - ) + ), ) - (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get(group) + (is_all, known_absent, 
state_dict_ids) = self.store._state_group_cache.get( + group + ) self.assertEqual(is_all, False) - self.assertEqual(known_absent, set([ - (e1.type, e1.state_key), - (e3.type, e3.state_key), - (e5.type, e5.state_key), - ])) - self.assertDictEqual(state_dict_ids, { - (e1.type, e1.state_key): e1.event_id, - (e3.type, e3.state_key): e3.event_id, - (e5.type, e5.state_key): e5.event_id, - }) + self.assertEqual( + known_absent, + set( + [ + (e1.type, e1.state_key), + (e3.type, e3.state_key), + (e5.type, e5.state_key), + ] + ), + ) + self.assertDictEqual( + state_dict_ids, + { + (e1.type, e1.state_key): e1.event_id, + (e3.type, e3.state_key): e3.event_id, + (e5.type, e5.state_key): e5.event_id, + }, + ) ############################################ # test that things work with a partial cache @@ -279,9 +308,7 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, False) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - }, state_dict) + self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict) # test _get_some_state_from_cache correctly filters in members wildcard types (state_dict, is_all) = yield self.store._get_some_state_from_cache( @@ -289,23 +316,31 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, False) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - (e3.type, e3.state_key): e3.event_id, - # e4 is overwritten by e5 - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual( + { + (e1.type, e1.state_key): e1.event_id, + (e3.type, e3.state_key): e3.event_id, + # e4 is overwritten by e5 + (e5.type, e5.state_key): e5.event_id, + }, + state_dict, + ) # test _get_some_state_from_cache correctly filters in members with specific types (state_dict, is_all) = yield self.store._get_some_state_from_cache( - group, [(EventTypes.Member, e5.state_key)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, e5.state_key)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, False) - self.assertDictEqual({ - (e1.type, e1.state_key): e1.event_id, - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual( + { + (e1.type, e1.state_key): e1.event_id, + (e5.type, e5.state_key): e5.event_id, + }, + state_dict, + ) # test _get_some_state_from_cache correctly filters in members with specific types # and no filtered_types @@ -314,6 +349,4 @@ def test_get_state_for_event(self): ) self.assertEqual(is_all, True) - self.assertDictEqual({ - (e5.type, e5.state_key): e5.event_id, - }, state_dict) + self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict) diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 23fad12bca62..7a273eab4880 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -39,20 +39,12 @@ def setUp(self): { ALICE: ProfileInfo(None, "alice"), BOB: ProfileInfo(None, "bob"), - BOBBY: ProfileInfo(None, "bobby") + BOBBY: ProfileInfo(None, "bobby"), }, ) - yield self.store.add_users_to_public_room( - "!room:id", - [ALICE, BOB], - ) + yield self.store.add_users_to_public_room("!room:id", [ALICE, BOB]) yield self.store.add_users_who_share_room( - "!room:id", - False, - ( - (ALICE, BOB), - (BOB, ALICE), - ), + "!room:id", False, ((ALICE, BOB), (BOB, ALICE)) ) @defer.inlineCallbacks @@ -62,11 +54,9 @@ def test_search_user_dir(self): r = yield self.store.search_user_dir(ALICE, "bob", 10) self.assertFalse(r["limited"]) self.assertEqual(1, len(r["results"])) - 
self.assertDictEqual(r["results"][0], { - "user_id": BOB, - "display_name": "bob", - "avatar_url": None, - }) + self.assertDictEqual( + r["results"][0], {"user_id": BOB, "display_name": "bob", "avatar_url": None} + ) @defer.inlineCallbacks def test_search_user_dir_all_users(self): @@ -75,15 +65,13 @@ def test_search_user_dir_all_users(self): r = yield self.store.search_user_dir(ALICE, "bob", 10) self.assertFalse(r["limited"]) self.assertEqual(2, len(r["results"])) - self.assertDictEqual(r["results"][0], { - "user_id": BOB, - "display_name": "bob", - "avatar_url": None, - }) - self.assertDictEqual(r["results"][1], { - "user_id": BOBBY, - "display_name": "bobby", - "avatar_url": None, - }) + self.assertDictEqual( + r["results"][0], + {"user_id": BOB, "display_name": "bob", "avatar_url": None}, + ) + self.assertDictEqual( + r["results"][1], + {"user_id": BOBBY, "display_name": "bobby", "avatar_url": None}, + ) finally: self.hs.config.user_directory_search_all_users = False diff --git a/tests/test_distributor.py b/tests/test_distributor.py index 71d11cda77c9..b57f36e6ac26 100644 --- a/tests/test_distributor.py +++ b/tests/test_distributor.py @@ -22,7 +22,6 @@ class DistributorTestCase(unittest.TestCase): - def setUp(self): self.dist = Distributor() @@ -44,18 +43,14 @@ def test_signal_catch(self): observers[0].side_effect = Exception("Awoogah!") - with patch( - "synapse.util.distributor.logger", spec=["warning"] - ) as mock_logger: + with patch("synapse.util.distributor.logger", spec=["warning"]) as mock_logger: self.dist.fire("alarm", "Go") observers[0].assert_called_once_with("Go") observers[1].assert_called_once_with("Go") self.assertEquals(mock_logger.warning.call_count, 1) - self.assertIsInstance( - mock_logger.warning.call_args[0][0], str - ) + self.assertIsInstance(mock_logger.warning.call_args[0][0], str) def test_signal_prereg(self): observer = Mock() @@ -69,4 +64,5 @@ def test_signal_prereg(self): def test_signal_undeclared(self): def code(): self.dist.fire("notification") + self.assertRaises(KeyError, code) diff --git a/tests/test_dns.py b/tests/test_dns.py index b647d92697f4..90bd34be349d 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -27,7 +27,6 @@ @unittest.DEBUG class DnsTestCase(unittest.TestCase): - @defer.inlineCallbacks def test_resolve(self): dns_client_mock = Mock() @@ -36,14 +35,11 @@ def test_resolve(self): host_name = "example.com" answer_srv = dns.RRHeader( - type=dns.SRV, - payload=dns.Record_SRV( - target=host_name, - ) + type=dns.SRV, payload=dns.Record_SRV(target=host_name) ) dns_client_mock.lookupService.return_value = defer.succeed( - ([answer_srv], None, None), + ([answer_srv], None, None) ) cache = {} @@ -68,9 +64,7 @@ def test_from_cache_expired_and_dns_fail(self): entry = Mock(spec_set=["expires"]) entry.expires = 0 - cache = { - service_name: [entry] - } + cache = {service_name: [entry]} servers = yield resolve_service( service_name, dns_client=dns_client_mock, cache=cache @@ -93,12 +87,10 @@ def test_from_cache(self): entry = Mock(spec_set=["expires"]) entry.expires = 999999999 - cache = { - service_name: [entry] - } + cache = {service_name: [entry]} servers = yield resolve_service( - service_name, dns_client=dns_client_mock, cache=cache, clock=clock, + service_name, dns_client=dns_client_mock, cache=cache, clock=clock ) self.assertFalse(dns_client_mock.lookupService.called) @@ -117,9 +109,7 @@ def test_empty_cache(self): cache = {} with self.assertRaises(error.DNSServerError): - yield resolve_service( - service_name, dns_client=dns_client_mock, 
cache=cache - ) + yield resolve_service(service_name, dns_client=dns_client_mock, cache=cache) @defer.inlineCallbacks def test_name_error(self): diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index 06112430e555..411b4a9f86b9 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -35,10 +35,7 @@ def test_random_users_cannot_send_state_before_first_pl(self): } # creator should be able to send state - event_auth.check( - _random_state_event(creator), auth_events, - do_sig_check=False, - ) + event_auth.check(_random_state_event(creator), auth_events, do_sig_check=False) # joiner should not be able to send state self.assertRaises( @@ -61,13 +58,9 @@ def test_state_default_level(self): auth_events = { ("m.room.create", ""): _create_event(creator), ("m.room.member", creator): _join_event(creator), - ("m.room.power_levels", ""): _power_levels_event(creator, { - "state_default": "30", - "users": { - pleb: "29", - king: "30", - }, - }), + ("m.room.power_levels", ""): _power_levels_event( + creator, {"state_default": "30", "users": {pleb: "29", king: "30"}} + ), ("m.room.member", pleb): _join_event(pleb), ("m.room.member", king): _join_event(king), } @@ -82,10 +75,7 @@ def test_state_default_level(self): ), # king should be able to send state - event_auth.check( - _random_state_event(king), auth_events, - do_sig_check=False, - ) + event_auth.check(_random_state_event(king), auth_events, do_sig_check=False) # helpers for making events @@ -94,52 +84,54 @@ def test_state_default_level(self): def _create_event(user_id): - return FrozenEvent({ - "room_id": TEST_ROOM_ID, - "event_id": _get_event_id(), - "type": "m.room.create", - "sender": user_id, - "content": { - "creator": user_id, - }, - }) + return FrozenEvent( + { + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.create", + "sender": user_id, + "content": {"creator": user_id}, + } + ) def _join_event(user_id): - return FrozenEvent({ - "room_id": TEST_ROOM_ID, - "event_id": _get_event_id(), - "type": "m.room.member", - "sender": user_id, - "state_key": user_id, - "content": { - "membership": "join", - }, - }) + return FrozenEvent( + { + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.member", + "sender": user_id, + "state_key": user_id, + "content": {"membership": "join"}, + } + ) def _power_levels_event(sender, content): - return FrozenEvent({ - "room_id": TEST_ROOM_ID, - "event_id": _get_event_id(), - "type": "m.room.power_levels", - "sender": sender, - "state_key": "", - "content": content, - }) + return FrozenEvent( + { + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "m.room.power_levels", + "sender": sender, + "state_key": "", + "content": content, + } + ) def _random_state_event(sender): - return FrozenEvent({ - "room_id": TEST_ROOM_ID, - "event_id": _get_event_id(), - "type": "test.state", - "sender": sender, - "state_key": "", - "content": { - "membership": "join", - }, - }) + return FrozenEvent( + { + "room_id": TEST_ROOM_ID, + "event_id": _get_event_id(), + "type": "test.state", + "sender": sender, + "state_key": "", + "content": {"membership": "join"}, + } + ) event_count = 0 @@ -149,4 +141,4 @@ def _get_event_id(): global event_count c = event_count event_count += 1 - return "!%i:example.com" % (c, ) + return "!%i:example.com" % (c,) diff --git a/tests/test_preview.py b/tests/test_preview.py index 446843367ef5..84ef5e5ba4b6 100644 --- a/tests/test_preview.py +++ b/tests/test_preview.py @@ -22,7 +22,6 @@ class 
PreviewTestCase(unittest.TestCase): - def test_long_summarize(self): example_paras = [ u"""Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami: @@ -32,7 +31,6 @@ def test_long_summarize(self): alternative spellings of the city.Tromsø is considered the northernmost city in the world with a population above 50,000. The most populous town north of it is Alta, Norway, with a population of 14,272 (2013).""", - u"""Tromsø lies in Northern Norway. The municipality has a population of (2015) 72,066, but with an annual influx of students it has over 75,000 most of the year. It is the largest urban area in Northern Norway and the @@ -46,7 +44,6 @@ def test_long_summarize(self): Sandnessund Bridge. Tromsø Airport connects the city to many destinations in Europe. The city is warmer than most other places located on the same latitude, due to the warming effect of the Gulf Stream.""", - u"""The city centre of Tromsø contains the highest number of old wooden houses in Northern Norway, the oldest house dating from 1789. The Arctic Cathedral, a modern church from 1965, is probably the most famous landmark @@ -67,7 +64,7 @@ def test_long_summarize(self): u" the city of Tromsø. Outside of Norway, Tromso and Tromsö are" u" alternative spellings of the city.Tromsø is considered the northernmost" u" city in the world with a population above 50,000. The most populous town" - u" north of it is Alta, Norway, with a population of 14,272 (2013)." + u" north of it is Alta, Norway, with a population of 14,272 (2013).", ) desc = summarize_paragraphs(example_paras[1:], min_size=200, max_size=500) @@ -80,7 +77,7 @@ def test_long_summarize(self): u" third largest north of the Arctic Circle (following Murmansk and Norilsk)." u" Most of Tromsø, including the city centre, is located on the island of" u" Tromsøya, 350 kilometres (217 mi) north of the Arctic Circle. In 2012," - u" Tromsøya had a population of 36,088. Substantial parts of the urban…" + u" Tromsøya had a population of 36,088. Substantial parts of the urban…", ) def test_short_summarize(self): @@ -88,11 +85,9 @@ def test_short_summarize(self): u"Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" u" Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" u" Troms county, Norway.", - u"Tromsø lies in Northern Norway. The municipality has a population of" u" (2015) 72,066, but with an annual influx of students it has over 75,000" u" most of the year.", - u"The city centre of Tromsø contains the highest number of old wooden" u" houses in Northern Norway, the oldest house dating from 1789. The Arctic" u" Cathedral, a modern church from 1965, is probably the most famous landmark" @@ -109,7 +104,7 @@ def test_short_summarize(self): u"\n" u"Tromsø lies in Northern Norway. The municipality has a population of" u" (2015) 72,066, but with an annual influx of students it has over 75,000" - u" most of the year." + u" most of the year.", ) def test_small_then_large_summarize(self): @@ -117,7 +112,6 @@ def test_small_then_large_summarize(self): u"Tromsø (Norwegian pronunciation: [ˈtrʊmsœ] ( listen); Northern Sami:" u" Romsa; Finnish: Tromssa[2] Kven: Tromssa) is a city and municipality in" u" Troms county, Norway.", - u"Tromsø lies in Northern Norway. The municipality has a population of" u" (2015) 72,066, but with an annual influx of students it has over 75,000" u" most of the year." 
@@ -138,7 +132,7 @@ def test_small_then_large_summarize(self): u" (2015) 72,066, but with an annual influx of students it has over 75,000" u" most of the year. The city centre of Tromsø contains the highest number" u" of old wooden houses in Northern Norway, the oldest house dating from" - u" 1789. The Arctic Cathedral, a modern church from…" + u" 1789. The Arctic Cathedral, a modern church from…", ) @@ -155,10 +149,7 @@ def test_simple(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": u"Foo", - u"og:description": u"Some text." - }) + self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."}) def test_comment(self): html = u""" @@ -173,10 +164,7 @@ def test_comment(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": u"Foo", - u"og:description": u"Some text." - }) + self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."}) def test_comment2(self): html = u""" @@ -194,10 +182,13 @@ def test_comment2(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": u"Foo", - u"og:description": u"Some text.\n\nSome more text.\n\nText\n\nMore text" - }) + self.assertEquals( + og, + { + u"og:title": u"Foo", + u"og:description": u"Some text.\n\nSome more text.\n\nText\n\nMore text", + }, + ) def test_script(self): html = u""" @@ -212,10 +203,7 @@ def test_script(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": u"Foo", - u"og:description": u"Some text." - }) + self.assertEquals(og, {u"og:title": u"Foo", u"og:description": u"Some text."}) def test_missing_title(self): html = u""" @@ -228,10 +216,7 @@ def test_missing_title(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": None, - u"og:description": u"Some text." - }) + self.assertEquals(og, {u"og:title": None, u"og:description": u"Some text."}) def test_h1_as_title(self): html = u""" @@ -245,10 +230,7 @@ def test_h1_as_title(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": u"Title", - u"og:description": u"Some text." - }) + self.assertEquals(og, {u"og:title": u"Title", u"og:description": u"Some text."}) def test_missing_title_and_broken_h1(self): html = u""" @@ -262,7 +244,4 @@ def test_missing_title_and_broken_h1(self): og = decode_and_calc_og(html, "http://example.com/test.html") - self.assertEquals(og, { - u"og:title": None, - u"og:description": u"Some text." 
- }) + self.assertEquals(og, {u"og:title": None, u"og:description": u"Some text."}) diff --git a/tests/test_state.py b/tests/test_state.py index 429a18cbf75e..96fdb8636caa 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -29,8 +29,15 @@ _next_event_id = 1000 -def create_event(name=None, type=None, state_key=None, depth=2, event_id=None, - prev_events=[], **kwargs): +def create_event( + name=None, + type=None, + state_key=None, + depth=2, + event_id=None, + prev_events=[], + **kwargs +): global _next_event_id if not event_id: @@ -39,9 +46,9 @@ def create_event(name=None, type=None, state_key=None, depth=2, event_id=None, if not name: if state_key is not None: - name = "<%s-%s, %s>" % (type, state_key, event_id,) + name = "<%s-%s, %s>" % (type, state_key, event_id) else: - name = "<%s, %s>" % (type, event_id,) + name = "<%s, %s>" % (type, event_id) d = { "event_id": event_id, @@ -80,8 +87,9 @@ def get_state_groups_ids(self, room_id, event_ids): return defer.succeed(groups) - def store_state_group(self, event_id, room_id, prev_group, delta_ids, - current_state_ids): + def store_state_group( + self, event_id, room_id, prev_group, delta_ids, current_state_ids + ): state_group = self._next_group self._next_group += 1 @@ -91,7 +99,8 @@ def store_state_group(self, event_id, room_id, prev_group, delta_ids, def get_events(self, event_ids, **kwargs): return { - e_id: self._event_id_to_event[e_id] for e_id in event_ids + e_id: self._event_id_to_event[e_id] + for e_id in event_ids if e_id in self._event_id_to_event } @@ -129,9 +138,7 @@ def __init__(self, nodes, edges): prev_events = [] events[event_id] = create_event( - event_id=event_id, - prev_events=prev_events, - **fields + event_id=event_id, prev_events=prev_events, **fields ) self._leaves = clobbered @@ -147,10 +154,15 @@ def get_leaves(self): class StateTestCase(unittest.TestCase): def setUp(self): self.store = StateGroupStore() - hs = Mock(spec_set=[ - "get_datastore", "get_auth", "get_state_handler", "get_clock", - "get_state_resolution_handler", - ]) + hs = Mock( + spec_set=[ + "get_datastore", + "get_auth", + "get_state_handler", + "get_clock", + "get_state_resolution_handler", + ] + ) hs.get_datastore.return_value = self.store hs.get_state_handler.return_value = None hs.get_clock.return_value = MockClock() @@ -164,35 +176,13 @@ def setUp(self): def test_branch_no_conflict(self): graph = Graph( nodes={ - "START": DictObj( - type=EventTypes.Create, - state_key="", - depth=1, - ), - "A": DictObj( - type=EventTypes.Message, - depth=2, - ), - "B": DictObj( - type=EventTypes.Message, - depth=3, - ), - "C": DictObj( - type=EventTypes.Name, - state_key="", - depth=3, - ), - "D": DictObj( - type=EventTypes.Message, - depth=4, - ), + "START": DictObj(type=EventTypes.Create, state_key="", depth=1), + "A": DictObj(type=EventTypes.Message, depth=2), + "B": DictObj(type=EventTypes.Message, depth=3), + "C": DictObj(type=EventTypes.Name, state_key="", depth=3), + "D": DictObj(type=EventTypes.Message, depth=4), }, - edges={ - "A": ["START"], - "B": ["A"], - "C": ["A"], - "D": ["B", "C"] - } + edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]}, ) self.store.register_events(graph.walk()) @@ -224,27 +214,11 @@ def test_branch_basic_conflict(self): membership=Membership.JOIN, depth=2, ), - "B": DictObj( - type=EventTypes.Name, - state_key="", - depth=3, - ), - "C": DictObj( - type=EventTypes.Name, - state_key="", - depth=4, - ), - "D": DictObj( - type=EventTypes.Message, - depth=5, - ), + "B": DictObj(type=EventTypes.Name, 
state_key="", depth=3), + "C": DictObj(type=EventTypes.Name, state_key="", depth=4), + "D": DictObj(type=EventTypes.Message, depth=5), }, - edges={ - "A": ["START"], - "B": ["A"], - "C": ["A"], - "D": ["B", "C"] - } + edges={"A": ["START"], "B": ["A"], "C": ["A"], "D": ["B", "C"]}, ) self.store.register_events(graph.walk()) @@ -259,8 +233,7 @@ def test_branch_basic_conflict(self): prev_state_ids = yield context_store["D"].get_prev_state_ids(self.store) self.assertSetEqual( - {"START", "A", "C"}, - {e_id for e_id in prev_state_ids.values()} + {"START", "A", "C"}, {e_id for e_id in prev_state_ids.values()} ) @defer.inlineCallbacks @@ -280,11 +253,7 @@ def test_branch_have_banned_conflict(self): membership=Membership.JOIN, depth=2, ), - "B": DictObj( - type=EventTypes.Name, - state_key="", - depth=3, - ), + "B": DictObj(type=EventTypes.Name, state_key="", depth=3), "C": DictObj( type=EventTypes.Member, state_key="@user_id_2:example.com", @@ -298,18 +267,9 @@ def test_branch_have_banned_conflict(self): depth=4, sender="@user_id_2:example.com", ), - "E": DictObj( - type=EventTypes.Message, - depth=5, - ), + "E": DictObj(type=EventTypes.Message, depth=5), }, - edges={ - "A": ["START"], - "B": ["A"], - "C": ["B"], - "D": ["B"], - "E": ["C", "D"] - } + edges={"A": ["START"], "B": ["A"], "C": ["B"], "D": ["B"], "E": ["C", "D"]}, ) self.store.register_events(graph.walk()) @@ -324,8 +284,7 @@ def test_branch_have_banned_conflict(self): prev_state_ids = yield context_store["E"].get_prev_state_ids(self.store) self.assertSetEqual( - {"START", "A", "B", "C"}, - {e for e in prev_state_ids.values()} + {"START", "A", "B", "C"}, {e for e in prev_state_ids.values()} ) @defer.inlineCallbacks @@ -357,30 +316,17 @@ def test_branch_have_perms_conflict(self): state_key="", content={ "events": {"m.room.name": 50}, - "users": {userid1: 100, - userid2: 60}, + "users": {userid1: 100, userid2: 60}, }, ), - "A5": DictObj( - type=EventTypes.Name, - state_key="", - ), + "A5": DictObj(type=EventTypes.Name, state_key=""), "B": DictObj( type=EventTypes.PowerLevels, state_key="", - content={ - "events": {"m.room.name": 50}, - "users": {userid2: 30}, - }, - ), - "C": DictObj( - type=EventTypes.Name, - state_key="", - sender=userid2, - ), - "D": DictObj( - type=EventTypes.Message, + content={"events": {"m.room.name": 50}, "users": {userid2: 30}}, ), + "C": DictObj(type=EventTypes.Name, state_key="", sender=userid2), + "D": DictObj(type=EventTypes.Message), } edges = { "A2": ["A1"], @@ -389,7 +335,7 @@ def test_branch_have_perms_conflict(self): "A5": ["A4"], "B": ["A5"], "C": ["A5"], - "D": ["B", "C"] + "D": ["B", "C"], } self._add_depths(nodes, edges) graph = Graph(nodes, edges) @@ -406,8 +352,7 @@ def test_branch_have_perms_conflict(self): prev_state_ids = yield context_store["D"].get_prev_state_ids(self.store) self.assertSetEqual( - {"A1", "A2", "A3", "A5", "B"}, - {e for e in prev_state_ids.values()} + {"A1", "A2", "A3", "A5", "B"}, {e for e in prev_state_ids.values()} ) def _add_depths(self, nodes, edges): @@ -432,9 +377,7 @@ def test_annotate_with_old_message(self): create_event(type="test2", state_key=""), ] - context = yield self.state.compute_event_context( - event, old_state=old_state - ) + context = yield self.state.compute_event_context(event, old_state=old_state) current_state_ids = yield context.get_current_state_ids(self.store) @@ -454,9 +397,7 @@ def test_annotate_with_old_state(self): create_event(type="test2", state_key=""), ] - context = yield self.state.compute_event_context( - event, old_state=old_state - ) 
+ context = yield self.state.compute_event_context(event, old_state=old_state) prev_state_ids = yield context.get_prev_state_ids(self.store) @@ -468,8 +409,7 @@ def test_annotate_with_old_state(self): def test_trivial_annotate_message(self): prev_event_id = "prev_event_id" event = create_event( - type="test_message", name="event2", - prev_events=[(prev_event_id, {})], + type="test_message", name="event2", prev_events=[(prev_event_id, {})] ) old_state = [ @@ -479,7 +419,10 @@ def test_trivial_annotate_message(self): ] group_name = self.store.store_state_group( - prev_event_id, event.room_id, None, None, + prev_event_id, + event.room_id, + None, + None, {(e.type, e.state_key): e.event_id for e in old_state}, ) self.store.register_event_id_state_group(prev_event_id, group_name) @@ -489,8 +432,7 @@ def test_trivial_annotate_message(self): current_state_ids = yield context.get_current_state_ids(self.store) self.assertEqual( - set([e.event_id for e in old_state]), - set(current_state_ids.values()) + set([e.event_id for e in old_state]), set(current_state_ids.values()) ) self.assertEqual(group_name, context.state_group) @@ -499,8 +441,7 @@ def test_trivial_annotate_message(self): def test_trivial_annotate_state(self): prev_event_id = "prev_event_id" event = create_event( - type="state", state_key="", name="event2", - prev_events=[(prev_event_id, {})], + type="state", state_key="", name="event2", prev_events=[(prev_event_id, {})] ) old_state = [ @@ -510,7 +451,10 @@ def test_trivial_annotate_state(self): ] group_name = self.store.store_state_group( - prev_event_id, event.room_id, None, None, + prev_event_id, + event.room_id, + None, + None, {(e.type, e.state_key): e.event_id for e in old_state}, ) self.store.register_event_id_state_group(prev_event_id, group_name) @@ -520,8 +464,7 @@ def test_trivial_annotate_state(self): prev_state_ids = yield context.get_prev_state_ids(self.store) self.assertEqual( - set([e.event_id for e in old_state]), - set(prev_state_ids.values()) + set([e.event_id for e in old_state]), set(prev_state_ids.values()) ) self.assertIsNotNone(context.state_group) @@ -531,13 +474,12 @@ def test_resolve_message_conflict(self): prev_event_id1 = "event_id1" prev_event_id2 = "event_id2" event = create_event( - type="test_message", name="event3", + type="test_message", + name="event3", prev_events=[(prev_event_id1, {}), (prev_event_id2, {})], ) - creation = create_event( - type=EventTypes.Create, state_key="" - ) + creation = create_event(type=EventTypes.Create, state_key="") old_state_1 = [ creation, @@ -557,7 +499,7 @@ def test_resolve_message_conflict(self): self.store.register_events(old_state_2) context = yield self._get_context( - event, prev_event_id1, old_state_1, prev_event_id2, old_state_2, + event, prev_event_id1, old_state_1, prev_event_id2, old_state_2 ) current_state_ids = yield context.get_current_state_ids(self.store) @@ -571,13 +513,13 @@ def test_resolve_state_conflict(self): prev_event_id1 = "event_id1" prev_event_id2 = "event_id2" event = create_event( - type="test4", state_key="", name="event", + type="test4", + state_key="", + name="event", prev_events=[(prev_event_id1, {}), (prev_event_id2, {})], ) - creation = create_event( - type=EventTypes.Create, state_key="" - ) + creation = create_event(type=EventTypes.Create, state_key="") old_state_1 = [ creation, @@ -599,7 +541,7 @@ def test_resolve_state_conflict(self): self.store.get_events = store.get_events context = yield self._get_context( - event, prev_event_id1, old_state_1, prev_event_id2, old_state_2, + event, 
prev_event_id1, old_state_1, prev_event_id2, old_state_2 ) current_state_ids = yield context.get_current_state_ids(self.store) @@ -613,29 +555,25 @@ def test_standard_depth_conflict(self): prev_event_id1 = "event_id1" prev_event_id2 = "event_id2" event = create_event( - type="test4", name="event", + type="test4", + name="event", prev_events=[(prev_event_id1, {}), (prev_event_id2, {})], ) member_event = create_event( type=EventTypes.Member, state_key="@user_id:example.com", - content={ - "membership": Membership.JOIN, - } + content={"membership": Membership.JOIN}, ) power_levels = create_event( - type=EventTypes.PowerLevels, state_key="", - content={"users": { - "@foo:bar": "100", - "@user_id:example.com": "100", - }} + type=EventTypes.PowerLevels, + state_key="", + content={"users": {"@foo:bar": "100", "@user_id:example.com": "100"}}, ) creation = create_event( - type=EventTypes.Create, state_key="", - content={"creator": "@foo:bar"} + type=EventTypes.Create, state_key="", content={"creator": "@foo:bar"} ) old_state_1 = [ @@ -658,14 +596,12 @@ def test_standard_depth_conflict(self): self.store.get_events = store.get_events context = yield self._get_context( - event, prev_event_id1, old_state_1, prev_event_id2, old_state_2, + event, prev_event_id1, old_state_1, prev_event_id2, old_state_2 ) current_state_ids = yield context.get_current_state_ids(self.store) - self.assertEqual( - old_state_2[3].event_id, current_state_ids[("test1", "1")] - ) + self.assertEqual(old_state_2[3].event_id, current_state_ids[("test1", "1")]) # Reverse the depth to make sure we are actually using the depths # during state resolution. @@ -688,25 +624,30 @@ def test_standard_depth_conflict(self): store.register_events(old_state_2) context = yield self._get_context( - event, prev_event_id1, old_state_1, prev_event_id2, old_state_2, + event, prev_event_id1, old_state_1, prev_event_id2, old_state_2 ) current_state_ids = yield context.get_current_state_ids(self.store) - self.assertEqual( - old_state_1[3].event_id, current_state_ids[("test1", "1")] - ) + self.assertEqual(old_state_1[3].event_id, current_state_ids[("test1", "1")]) - def _get_context(self, event, prev_event_id_1, old_state_1, prev_event_id_2, - old_state_2): + def _get_context( + self, event, prev_event_id_1, old_state_1, prev_event_id_2, old_state_2 + ): sg1 = self.store.store_state_group( - prev_event_id_1, event.room_id, None, None, + prev_event_id_1, + event.room_id, + None, + None, {(e.type, e.state_key): e.event_id for e in old_state_1}, ) self.store.register_event_id_state_group(prev_event_id_1, sg1) sg2 = self.store.store_state_group( - prev_event_id_2, event.room_id, None, None, + prev_event_id_2, + event.room_id, + None, + None, {(e.type, e.state_key): e.event_id for e in old_state_2}, ) self.store.register_event_id_state_group(prev_event_id_2, sg2) diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index bc97c122458a..b921ac52c07a 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -18,7 +18,6 @@ class MockClockTestCase(unittest.TestCase): - def setUp(self): self.clock = MockClock() @@ -34,10 +33,12 @@ def test_later(self): def _cb0(): invoked[0] = 1 + self.clock.call_later(10, _cb0) def _cb1(): invoked[1] = 1 + self.clock.call_later(20, _cb1) self.assertFalse(invoked[0]) @@ -56,10 +57,12 @@ def test_cancel_later(self): def _cb0(): invoked[0] = 1 + t0 = self.clock.call_later(10, _cb0) def _cb1(): invoked[1] = 1 + self.clock.call_later(20, _cb1) self.clock.cancel_call_later(t0) diff --git a/tests/test_types.py 
b/tests/test_types.py index 729bd676c144..be072d402b85 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -69,10 +69,7 @@ def test_parse(self): self.assertEqual("my.domain", group_id.domain) def test_validate(self): - bad_ids = [ - "$badsigil:domain", - "+:empty", - ] + [ + bad_ids = ["$badsigil:domain", "+:empty"] + [ "+group" + c + ":domain" for c in "A%?æ£" ] for id_string in bad_ids: diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 0dc1a924d331..8643d6312570 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -54,14 +54,12 @@ def test_filtering(self): events_to_filter = [] for i in range(0, 10): - user = "@user%i:%s" % ( - i, "test_server" if i == 5 else "other_server" - ) + user = "@user%i:%s" % (i, "test_server" if i == 5 else "other_server") evt = yield self.inject_room_member(user, extra_content={"a": "b"}) events_to_filter.append(evt) filtered = yield filter_events_for_server( - self.store, "test_server", events_to_filter, + self.store, "test_server", events_to_filter ) # the result should be 5 redacted events, and 5 unredacted events. @@ -100,19 +98,21 @@ def test_erased_user(self): # ... and the filtering happens. filtered = yield filter_events_for_server( - self.store, "test_server", events_to_filter, + self.store, "test_server", events_to_filter ) for i in range(0, len(events_to_filter)): self.assertEqual( - events_to_filter[i].event_id, filtered[i].event_id, - "Unexpected event at result position %i" % (i, ) + events_to_filter[i].event_id, + filtered[i].event_id, + "Unexpected event at result position %i" % (i,), ) for i in (0, 3): self.assertEqual( - events_to_filter[i].content["body"], filtered[i].content["body"], - "Unexpected event content at result position %i" % (i,) + events_to_filter[i].content["body"], + filtered[i].content["body"], + "Unexpected event content at result position %i" % (i,), ) for i in (1, 4): @@ -121,13 +121,15 @@ def test_erased_user(self): @defer.inlineCallbacks def inject_visibility(self, user_id, visibility): content = {"history_visibility": visibility} - builder = self.event_builder_factory.new({ - "type": "m.room.history_visibility", - "sender": user_id, - "state_key": "", - "room_id": TEST_ROOM_ID, - "content": content, - }) + builder = self.event_builder_factory.new( + { + "type": "m.room.history_visibility", + "sender": user_id, + "state_key": "", + "room_id": TEST_ROOM_ID, + "content": content, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -139,13 +141,15 @@ def inject_visibility(self, user_id, visibility): def inject_room_member(self, user_id, membership="join", extra_content={}): content = {"membership": membership} content.update(extra_content) - builder = self.event_builder_factory.new({ - "type": "m.room.member", - "sender": user_id, - "state_key": user_id, - "room_id": TEST_ROOM_ID, - "content": content, - }) + builder = self.event_builder_factory.new( + { + "type": "m.room.member", + "sender": user_id, + "state_key": user_id, + "room_id": TEST_ROOM_ID, + "content": content, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -158,12 +162,14 @@ def inject_room_member(self, user_id, membership="join", extra_content={}): def inject_message(self, user_id, content=None): if content is None: content = {"body": "testytest"} - builder = self.event_builder_factory.new({ - "type": "m.room.message", - "sender": user_id, - "room_id": TEST_ROOM_ID, - "content": content, - }) + builder = 
self.event_builder_factory.new( + { + "type": "m.room.message", + "sender": user_id, + "room_id": TEST_ROOM_ID, + "content": content, + } + ) event, context = yield self.event_creation_handler.create_new_client_event( builder @@ -192,56 +198,54 @@ def test_large_room(self): # history_visibility event. room_state = [] - history_visibility_evt = FrozenEvent({ - "event_id": "$history_vis", - "type": "m.room.history_visibility", - "sender": "@resident_user_0:test.com", - "state_key": "", - "room_id": TEST_ROOM_ID, - "content": {"history_visibility": "joined"}, - }) + history_visibility_evt = FrozenEvent( + { + "event_id": "$history_vis", + "type": "m.room.history_visibility", + "sender": "@resident_user_0:test.com", + "state_key": "", + "room_id": TEST_ROOM_ID, + "content": {"history_visibility": "joined"}, + } + ) room_state.append(history_visibility_evt) test_store.add_event(history_visibility_evt) for i in range(0, 100000): - user = "@resident_user_%i:test.com" % (i, ) - evt = FrozenEvent({ - "event_id": "$res_event_%i" % (i, ), - "type": "m.room.member", - "state_key": user, - "sender": user, - "room_id": TEST_ROOM_ID, - "content": { - "membership": "join", - "extra": "zzz," - }, - }) + user = "@resident_user_%i:test.com" % (i,) + evt = FrozenEvent( + { + "event_id": "$res_event_%i" % (i,), + "type": "m.room.member", + "state_key": user, + "sender": user, + "room_id": TEST_ROOM_ID, + "content": {"membership": "join", "extra": "zzz,"}, + } + ) room_state.append(evt) test_store.add_event(evt) events_to_filter = [] for i in range(0, 10): - user = "@user%i:%s" % ( - i, "test_server" if i == 5 else "other_server" + user = "@user%i:%s" % (i, "test_server" if i == 5 else "other_server") + evt = FrozenEvent( + { + "event_id": "$evt%i" % (i,), + "type": "m.room.member", + "state_key": user, + "sender": user, + "room_id": TEST_ROOM_ID, + "content": {"membership": "join", "extra": "zzz"}, + } ) - evt = FrozenEvent({ - "event_id": "$evt%i" % (i, ), - "type": "m.room.member", - "state_key": user, - "sender": user, - "room_id": TEST_ROOM_ID, - "content": { - "membership": "join", - "extra": "zzz", - }, - }) events_to_filter.append(evt) room_state.append(evt) test_store.add_event(evt) - test_store.set_state_ids_for_event(evt, { - (e.type, e.state_key): e.event_id for e in room_state - }) + test_store.set_state_ids_for_event( + evt, {(e.type, e.state_key): e.event_id for e in room_state} + ) pr = cProfile.Profile() pr.enable() @@ -249,7 +253,7 @@ def test_large_room(self): logger.info("Starting filtering") start = time.time() filtered = yield filter_events_for_server( - test_store, "test_server", events_to_filter, + test_store, "test_server", events_to_filter ) logger.info("Filtering took %f seconds", time.time() - start) @@ -275,6 +279,7 @@ class _TestStore(object): filter_events_for_server """ + def __init__(self): # data for get_events: a map from event_id to event self.events = {} @@ -298,8 +303,8 @@ def get_state_ids_for_events(self, events, types): continue if type != "m.room.member" or state_key is not None: raise RuntimeError( - "Unimplemented: get_state_ids with type (%s, %s)" % - (type, state_key), + "Unimplemented: get_state_ids with type (%s, %s)" + % (type, state_key) ) include_memberships = True @@ -316,9 +321,7 @@ def get_state_ids_for_events(self, events, types): return succeed(res) def get_events(self, events): - return succeed({ - event_id: self.events[event_id] for event_id in events - }) + return succeed({event_id: self.events[event_id] for event_id in events}) def 
are_users_erased(self, users): return succeed({u: False for u in users}) diff --git a/tests/unittest.py b/tests/unittest.py index b15b06726b30..f448a6dfbd40 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -56,6 +56,7 @@ def around(target): def method_name(orig, *args, **kwargs): return orig(*args, **kwargs) """ + def _around(code): name = code.__name__ orig = getattr(target, name) @@ -89,6 +90,7 @@ def setUp(orig): old_level = logging.getLogger().level if old_level != level: + @around(self) def tearDown(orig): ret = orig() @@ -117,8 +119,9 @@ def assert_dict(self, required, actual): actual (dict): The test result. Extra keys will not be checked. """ for key in required: - self.assertEquals(required[key], actual[key], - msg="%s mismatch. %s" % (key, actual)) + self.assertEquals( + required[key], actual[key], msg="%s mismatch. %s" % (key, actual) + ) def DEBUG(target): diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py index ca8a7c907fbb..463a737efaf5 100644 --- a/tests/util/caches/test_descriptors.py +++ b/tests/util/caches/test_descriptors.py @@ -67,12 +67,8 @@ def record_callback(idx): self.assertIsNone(cache.get("key2", None)) # both callbacks should have been callbacked - self.assertTrue( - callback_record[0], "Invalidation callback for key1 not called", - ) - self.assertTrue( - callback_record[1], "Invalidation callback for key2 not called", - ) + self.assertTrue(callback_record[0], "Invalidation callback for key1 not called") + self.assertTrue(callback_record[1], "Invalidation callback for key2 not called") # letting the other lookup complete should do nothing d1.callback("result1") @@ -168,8 +164,7 @@ def do_lookup(): with logcontext.LoggingContext() as c1: c1.name = "c1" r = yield obj.fn(1) - self.assertEqual(logcontext.LoggingContext.current_context(), - c1) + self.assertEqual(logcontext.LoggingContext.current_context(), c1) defer.returnValue(r) def check_result(r): @@ -179,14 +174,18 @@ def check_result(r): # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual(logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel) + self.assertEqual( + logcontext.LoggingContext.current_context(), + logcontext.LoggingContext.sentinel, + ) d1.addCallback(check_result) # and another d2 = do_lookup() - self.assertEqual(logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel) + self.assertEqual( + logcontext.LoggingContext.current_context(), + logcontext.LoggingContext.sentinel, + ) d2.addCallback(check_result) # let the lookup complete @@ -224,15 +223,16 @@ def do_lookup(): except SynapseError: pass - self.assertEqual(logcontext.LoggingContext.current_context(), - c1) + self.assertEqual(logcontext.LoggingContext.current_context(), c1) obj = Cls() # set off a deferred which will do a cache lookup d1 = do_lookup() - self.assertEqual(logcontext.LoggingContext.current_context(), - logcontext.LoggingContext.sentinel) + self.assertEqual( + logcontext.LoggingContext.current_context(), + logcontext.LoggingContext.sentinel, + ) return d1 @@ -288,14 +288,10 @@ def fn(self, arg1, arg2): @descriptors.cachedList("fn", "args1", inlineCallbacks=True) def list_fn(self, args1, arg2): - assert ( - logcontext.LoggingContext.current_context().request == "c1" - ) + assert logcontext.LoggingContext.current_context().request == "c1" # we want this to behave like an asynchronous function yield run_on_reactor() - assert ( - logcontext.LoggingContext.current_context().request == "c1" - ) + 
assert logcontext.LoggingContext.current_context().request == "c1" defer.returnValue(self.mock(args1, arg2)) with logcontext.LoggingContext() as c1: @@ -308,10 +304,7 @@ def list_fn(self, args1, arg2): logcontext.LoggingContext.sentinel, ) r = yield d1 - self.assertEqual( - logcontext.LoggingContext.current_context(), - c1 - ) + self.assertEqual(logcontext.LoggingContext.current_context(), c1) obj.mock.assert_called_once_with([10, 20], 2) self.assertEqual(r, {10: 'fish', 20: 'chips'}) obj.mock.reset_mock() @@ -337,6 +330,7 @@ def list_fn(self, args1, arg2): @defer.inlineCallbacks def test_invalidate(self): """Make sure that invalidation callbacks are called.""" + class Cls(object): def __init__(self): self.mock = mock.Mock() diff --git a/tests/util/test_dict_cache.py b/tests/util/test_dict_cache.py index 26f2fa580094..34fdc9a43af8 100644 --- a/tests/util/test_dict_cache.py +++ b/tests/util/test_dict_cache.py @@ -20,7 +20,6 @@ class DictCacheTestCase(unittest.TestCase): - def setUp(self): self.cache = DictionaryCache("foobar") @@ -41,9 +40,7 @@ def test_simple_cache_hit_partial(self): key = "test_simple_cache_hit_partial" seq = self.cache.sequence - test_value = { - "test": "test_simple_cache_hit_partial" - } + test_value = {"test": "test_simple_cache_hit_partial"} self.cache.update(seq, key, test_value) c = self.cache.get(key, ["test"]) @@ -53,9 +50,7 @@ def test_simple_cache_miss_partial(self): key = "test_simple_cache_miss_partial" seq = self.cache.sequence - test_value = { - "test": "test_simple_cache_miss_partial" - } + test_value = {"test": "test_simple_cache_miss_partial"} self.cache.update(seq, key, test_value) c = self.cache.get(key, ["test2"]) @@ -79,15 +74,11 @@ def test_multi_insert(self): key = "test_simple_cache_hit_miss_partial" seq = self.cache.sequence - test_value_1 = { - "test": "test_simple_cache_hit_miss_partial", - } + test_value_1 = {"test": "test_simple_cache_hit_miss_partial"} self.cache.update(seq, key, test_value_1, fetched_keys=set("test")) seq = self.cache.sequence - test_value_2 = { - "test2": "test_simple_cache_hit_miss_partial2", - } + test_value_2 = {"test2": "test_simple_cache_hit_miss_partial2"} self.cache.update(seq, key, test_value_2, fetched_keys=set("test2")) c = self.cache.get(key) @@ -96,5 +87,5 @@ def test_multi_insert(self): "test": "test_simple_cache_hit_miss_partial", "test2": "test_simple_cache_hit_miss_partial2", }, - c.value + c.value, ) diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index d12b5e838b1e..5cbada4edaff 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -22,7 +22,6 @@ class ExpiringCacheTestCase(unittest.TestCase): - def test_get_set(self): clock = MockClock() cache = ExpiringCache("test", clock, max_len=1) diff --git a/tests/util/test_file_consumer.py b/tests/util/test_file_consumer.py index 7ce5f8c258f4..e90e08d1c042 100644 --- a/tests/util/test_file_consumer.py +++ b/tests/util/test_file_consumer.py @@ -27,7 +27,6 @@ class FileConsumerTests(unittest.TestCase): - @defer.inlineCallbacks def test_pull_consumer(self): string_file = StringIO() @@ -87,7 +86,9 @@ def test_push_producer_feedback(self): producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"]) resume_deferred = defer.Deferred() - producer.resumeProducing.side_effect = lambda: resume_deferred.callback(None) + producer.resumeProducing.side_effect = lambda: resume_deferred.callback( + None + ) consumer.registerProducer(producer, True) diff --git a/tests/util/test_linearizer.py 
b/tests/util/test_linearizer.py index 7f48a72de88f..61a55b461b80 100644 --- a/tests/util/test_linearizer.py +++ b/tests/util/test_linearizer.py @@ -26,7 +26,6 @@ class LinearizerTestCase(unittest.TestCase): - @defer.inlineCallbacks def test_linearizer(self): linearizer = Linearizer() @@ -54,13 +53,11 @@ def test_lots_of_queued_things(self): def func(i, sleep=False): with logcontext.LoggingContext("func(%s)" % i) as lc: with (yield linearizer.queue("")): - self.assertEqual( - logcontext.LoggingContext.current_context(), lc) + self.assertEqual(logcontext.LoggingContext.current_context(), lc) if sleep: yield Clock(reactor).sleep(0) - self.assertEqual( - logcontext.LoggingContext.current_context(), lc) + self.assertEqual(logcontext.LoggingContext.current_context(), lc) func(0, sleep=True) for i in range(1, 100): diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index c54001f7a424..4633db77b3fa 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -8,11 +8,8 @@ class LoggingContextTestCase(unittest.TestCase): - def _check_test_key(self, value): - self.assertEquals( - LoggingContext.current_context().request, value - ) + self.assertEquals(LoggingContext.current_context().request, value) def test_with_context(self): with LoggingContext() as context_one: @@ -50,6 +47,7 @@ def cb(res): self._check_test_key("one") callback_completed[0] = True return res + d.addCallback(cb) return d @@ -74,8 +72,7 @@ def check_logcontext(): # make sure that the context was reset before it got thrown back # into the reactor try: - self.assertIs(LoggingContext.current_context(), - sentinel_context) + self.assertIs(LoggingContext.current_context(), sentinel_context) d2.callback(None) except BaseException: d2.errback(twisted.python.failure.Failure()) @@ -104,9 +101,7 @@ def test_run_in_background_with_chained_deferred(self): # a function which returns a deferred which looks like it has been # called, but is actually paused def testfunc(): - return logcontext.make_deferred_yieldable( - _chained_deferred_function() - ) + return logcontext.make_deferred_yieldable(_chained_deferred_function()) return self._test_run_in_background(testfunc) @@ -175,5 +170,6 @@ def cb(res): d2 = defer.Deferred() reactor.callLater(0, d2.callback, res) return d2 + d.addCallback(cb) return d diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py index 9b36ef4482b1..786947375df2 100644 --- a/tests/util/test_lrucache.py +++ b/tests/util/test_lrucache.py @@ -23,7 +23,6 @@ class LruCacheTestCase(unittest.TestCase): - def test_get_set(self): cache = LruCache(1) cache["key"] = "value" @@ -235,7 +234,6 @@ def test_eviction(self): class LruCacheSizedTestCase(unittest.TestCase): - def test_evict(self): cache = LruCache(5, size_callback=len) cache["key1"] = [0] diff --git a/tests/util/test_rwlock.py b/tests/util/test_rwlock.py index 7cd470be6743..bd32e2cee7d1 100644 --- a/tests/util/test_rwlock.py +++ b/tests/util/test_rwlock.py @@ -20,7 +20,6 @@ class ReadWriteLockTestCase(unittest.TestCase): - def _assert_called_before_not_after(self, lst, first_false): for i, d in enumerate(lst[:first_false]): self.assertTrue(d.called, msg="%d was unexpectedly false" % i) @@ -36,12 +35,12 @@ def test_rwlock(self): key = object() ds = [ - rwlock.read(key), # 0 - rwlock.read(key), # 1 + rwlock.read(key), # 0 + rwlock.read(key), # 1 rwlock.write(key), # 2 rwlock.write(key), # 3 - rwlock.read(key), # 4 - rwlock.read(key), # 5 + rwlock.read(key), # 4 + rwlock.read(key), # 5 rwlock.write(key), # 6 ] diff 
--git a/tests/util/test_snapshot_cache.py b/tests/util/test_snapshot_cache.py index 0f5b32fcc065..1a44f7242551 100644 --- a/tests/util/test_snapshot_cache.py +++ b/tests/util/test_snapshot_cache.py @@ -22,7 +22,6 @@ class SnapshotCacheTestCase(unittest.TestCase): - def setUp(self): self.cache = SnapshotCache() self.cache.DURATION_MS = 1 diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py index 65b0f2e6fbc8..f2be63706bba 100644 --- a/tests/util/test_stream_change_cache.py +++ b/tests/util/test_stream_change_cache.py @@ -181,17 +181,8 @@ def test_get_entities_changed(self): # Query a subset of the entries mid-way through the stream. We should # only get back the subset. self.assertEqual( - cache.get_entities_changed( - [ - "bar@baz.net", - ], - stream_pos=2, - ), - set( - [ - "bar@baz.net", - ] - ), + cache.get_entities_changed(["bar@baz.net"], stream_pos=2), + set(["bar@baz.net"]), ) def test_max_pos(self): diff --git a/tests/utils.py b/tests/utils.py index 8d73797971fa..8668b5478fd8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -38,8 +38,9 @@ @defer.inlineCallbacks -def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None, - **kargs): +def setup_test_homeserver( + name="test", datastore=None, config=None, reactor=None, **kargs +): """Setup a homeserver suitable for running tests against. Keyword arguments are passed to the Homeserver constructor. If no datastore is supplied a datastore backed by an in-memory sqlite db will be given to the HS. @@ -96,20 +97,12 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None if USE_POSTGRES_FOR_TESTS: config.database_config = { "name": "psycopg2", - "args": { - "database": "synapse_test", - "cp_min": 1, - "cp_max": 5, - }, + "args": {"database": "synapse_test", "cp_min": 1, "cp_max": 5}, } else: config.database_config = { "name": "sqlite3", - "args": { - "database": ":memory:", - "cp_min": 1, - "cp_max": 1, - }, + "args": {"database": ":memory:", "cp_min": 1, "cp_max": 1}, } db_engine = create_engine(config.database_config) @@ -121,7 +114,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None if datastore is None: hs = HomeServer( - name, config=config, + name, + config=config, db_config=config.database_config, version_string="Synapse/tests", database_engine=db_engine, @@ -143,7 +137,10 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None hs.setup() else: hs = HomeServer( - name, db_pool=None, datastore=datastore, config=config, + name, + db_pool=None, + datastore=datastore, + config=config, version_string="Synapse/tests", database_engine=db_engine, room_list_handler=object(), @@ -158,8 +155,9 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None # because AuthHandler's constructor requires the HS, so we can't make one # beforehand and pass it in to the HS's constructor (chicken / egg) hs.get_auth_handler().hash = lambda p: hashlib.md5(p.encode('utf8')).hexdigest() - hs.get_auth_handler().validate_hash = lambda p, h: hashlib.md5( - p.encode('utf8')).hexdigest() == h + hs.get_auth_handler().validate_hash = ( + lambda p, h: hashlib.md5(p.encode('utf8')).hexdigest() == h + ) fed = kargs.get("resource_for_federation", None) if fed: @@ -173,7 +171,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, reactor=None sleep_limit=hs.config.federation_rc_sleep_limit, sleep_msec=hs.config.federation_rc_sleep_delay, 
reject_limit=hs.config.federation_rc_reject_limit, - concurrent_requests=hs.config.federation_rc_concurrent + concurrent_requests=hs.config.federation_rc_concurrent, ), ) @@ -199,7 +197,6 @@ def getRawHeaders(name, default=None): # This is a mock /resource/ not an entire server class MockHttpResource(HttpServer): - def __init__(self, prefix=""): self.callbacks = [] # 3-tuple of method/pattern/function self.prefix = prefix @@ -263,15 +260,9 @@ def trigger(self, http_method, path, content, mock_request, federation_auth=Fals matcher = pattern.match(path) if matcher: try: - args = [ - urlparse.unquote(u) - for u in matcher.groups() - ] - - (code, response) = yield func( - mock_request, - *args - ) + args = [urlparse.unquote(u) for u in matcher.groups()] + + (code, response) = yield func(mock_request, *args) defer.returnValue((code, response)) except CodeMessageException as e: defer.returnValue((e.code, cs_error(e.msg, code=e.errcode))) @@ -372,8 +363,7 @@ def time_bound_deferred(self, d, *args, **kwargs): def _format_call(args, kwargs): return ", ".join( - ["%r" % (a) for a in args] + - ["%s=%r" % (k, v) for k, v in kwargs.items()] + ["%r" % (a) for a in args] + ["%s=%r" % (k, v) for k, v in kwargs.items()] ) @@ -391,8 +381,9 @@ def __call__(self, *args, **kwargs): self.calls.append((args, kwargs)) if not self.expectations: - raise ValueError("%r has no pending calls to handle call(%s)" % ( - self, _format_call(args, kwargs)) + raise ValueError( + "%r has no pending calls to handle call(%s)" + % (self, _format_call(args, kwargs)) ) for (call, result, d) in self.expectations: @@ -400,9 +391,9 @@ def __call__(self, *args, **kwargs): d.callback(None) return result - failure = AssertionError("Was not expecting call(%s)" % ( - _format_call(args, kwargs) - )) + failure = AssertionError( + "Was not expecting call(%s)" % (_format_call(args, kwargs)) + ) for _, _, d in self.expectations: try: @@ -418,17 +409,19 @@ def expect_call_and_return(self, call, result): @defer.inlineCallbacks def await_calls(self, timeout=1000): deferred = defer.DeferredList( - [d for _, _, d in self.expectations], - fireOnOneErrback=True + [d for _, _, d in self.expectations], fireOnOneErrback=True ) timer = reactor.callLater( timeout / 1000, deferred.errback, - AssertionError("%d pending calls left: %s" % ( - len([e for e in self.expectations if not e[2].called]), - [e for e in self.expectations if not e[2].called] - )) + AssertionError( + "%d pending calls left: %s" + % ( + len([e for e in self.expectations if not e[2].called]), + [e for e in self.expectations if not e[2].called], + ) + ), ) yield deferred @@ -443,7 +436,6 @@ def assert_had_no_calls(self): self.calls = [] raise AssertionError( - "Expected not to received any calls, got:\n" + "\n".join([ - "call(%s)" % _format_call(c[0], c[1]) for c in calls - ]) + "Expected not to received any calls, got:\n" + + "\n".join(["call(%s)" % _format_call(c[0], c[1]) for c in calls]) ) From dd16e7dfcc31b9bfa835d184510c9630de0b9734 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 10 Aug 2018 23:54:46 +1000 Subject: [PATCH 093/173] changelog --- changelog.d/3679.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3679.misc diff --git a/changelog.d/3679.misc b/changelog.d/3679.misc new file mode 100644 index 000000000000..1de0a0f2b41c --- /dev/null +++ b/changelog.d/3679.misc @@ -0,0 +1 @@ +Synapse's tests are now formatted with the black autoformatter. 
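For context on the two patches above: black rewrites only the layout of the code, collapsing or expanding calls, dicts and argument lists to fit its default 88-character line length while leaving behaviour unchanged. Below is a minimal sketch of that transformation driven through black's Python API. The format_str and FileMode names are black's public API in current releases (older releases exposed a slightly different signature), and the sample source is an illustrative fragment modelled on the diff above rather than copied from it.

    import black

    # An input fragment in the wrapped, pre-black style seen in the diff above.
    src = (
        'self.assertEqual(\n'
        '    old_state_2[3].event_id, current_state_ids[("test1", "1")]\n'
        ')\n'
    )

    # format_str parses the fragment and re-emits it in black's canonical style.
    formatted = black.format_str(src, mode=black.FileMode())
    print(formatted, end="")

    # With the default 88-character limit the call fits on one line, matching
    # the corresponding change in the patch:
    # self.assertEqual(old_state_2[3].event_id, current_state_ids[("test1", "1")])
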
From c08f9d95b2051c6998a234e4e28e621c174b9a0e Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 10 Aug 2018 14:56:48 +0100 Subject: [PATCH 094/173] log *after* reloading log config ... because logging *before* reloading means the log message gets lost in the old MemoryLogger --- synapse/config/logger.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index a87b11a1dff0..cfc20dcccf31 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -193,9 +193,8 @@ def load_log_config(): def sighup(signum, stack): # it might be better to use a file watcher or something for this. - logging.info("Reloading log config from %s due to SIGHUP", - log_config) load_log_config() + logging.info("Reloaded log config from %s due to SIGHUP", log_config) load_log_config() From 807449d8f212ff111acb44d8f418dcac7d69f713 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 11 Aug 2018 00:01:43 +1000 Subject: [PATCH 095/173] fix up a forced long line --- tests/crypto/test_keyring.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index e40681ed1e71..0c6f510d113e 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -60,8 +60,9 @@ def setUp(self): self.hs = yield utils.setup_test_homeserver( handlers=None, http_client=self.http_client ) + keys = self.mock_perspective_server.get_verify_keys() self.hs.config.perspectives = { - self.mock_perspective_server.server_name: self.mock_perspective_server.get_verify_keys() + self.mock_perspective_server.server_name: keys } def check_context(self, _, expected): From 31fa743567c3f70c66c7af2f1332560b04b78f7a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Sat, 11 Aug 2018 22:38:34 +0100 Subject: [PATCH 096/173] fix sqlite/postgres incompatibility in reap_monthly_active_users --- synapse/storage/monthly_active_users.py | 44 ++++++++++++++++--------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index d47dcef3a0a5..9a6e699386a1 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -64,23 +64,27 @@ def reap_monthly_active_users(self): Deferred[] """ def _reap_users(txn): + # Purge stale users thirty_days_ago = ( int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) ) - # Purge stale users - - # questionmarks is a hack to overcome sqlite not supporting - # tuples in 'WHERE IN %s' - questionmarks = '?' * len(self.reserved_users) query_args = [thirty_days_ago] - query_args.extend(self.reserved_users) - - sql = """ - DELETE FROM monthly_active_users - WHERE timestamp < ? - AND user_id NOT IN ({}) - """.format(','.join(questionmarks)) + base_sql = "DELETE FROM monthly_active_users WHERE timestamp < ?" + + # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres + # when len(reserved_users) == 0. Works fine on sqlite. + if len(self.reserved_users) > 0: + # questionmarks is a hack to overcome sqlite not supporting + # tuples in 'WHERE IN %s' + questionmarks = '?' * len(self.reserved_users) + + query_args.extend(self.reserved_users) + sql = base_sql + """ AND user_id NOT IN ({})""".format( + ','.join(questionmarks) + ) + else: + sql = base_sql txn.execute(sql, query_args) @@ -93,16 +97,24 @@ def _reap_users(txn): # negative LIMIT values. 
So there is no way to write it that both can # support query_args = [self.hs.config.max_mau_value] - query_args.extend(self.reserved_users) - sql = """ + + base_sql = """ DELETE FROM monthly_active_users WHERE user_id NOT IN ( SELECT user_id FROM monthly_active_users ORDER BY timestamp DESC LIMIT ? ) - AND user_id NOT IN ({}) - """.format(','.join(questionmarks)) + """ + # Need if/else since 'AND user_id NOT IN ({})' fails on Postgres + # when len(reserved_users) == 0. Works fine on sqlite. + if len(self.reserved_users) > 0: + query_args.extend(self.reserved_users) + sql = base_sql + """ AND user_id NOT IN ({})""".format( + ','.join(questionmarks) + ) + else: + sql = base_sql txn.execute(sql, query_args) yield self.runInteraction("reap_monthly_active_users", _reap_users) From ac205a54b22478726cbab4427a3370a58737d71f Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Sat, 11 Aug 2018 22:50:09 +0100 Subject: [PATCH 097/173] Fixes test_reap_monthly_active_users so it passes under postgres --- changelog.d/3681.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3681.bugfix diff --git a/changelog.d/3681.bugfix b/changelog.d/3681.bugfix new file mode 100644 index 000000000000..d18a69cd0c58 --- /dev/null +++ b/changelog.d/3681.bugfix @@ -0,0 +1 @@ +Fixes test_reap_monthly_active_users so it passes under postgres From 99dd975dae7baaaef2a3b0a92fa51965b121ae34 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 13 Aug 2018 16:47:46 +1000 Subject: [PATCH 098/173] Run tests under PostgreSQL (#3423) --- .travis.yml | 10 ++ changelog.d/3423.misc | 1 + synapse/handlers/presence.py | 5 + synapse/storage/client_ips.py | 5 + tests/__init__.py | 3 + tests/api/test_auth.py | 2 +- tests/api/test_filtering.py | 5 +- tests/crypto/test_keyring.py | 6 +- tests/handlers/test_auth.py | 2 +- tests/handlers/test_device.py | 2 +- tests/handlers/test_directory.py | 1 + tests/handlers/test_e2e_keys.py | 2 +- tests/handlers/test_profile.py | 1 + tests/handlers/test_register.py | 1 + tests/handlers/test_typing.py | 1 + tests/replication/slave/storage/_base.py | 1 + tests/rest/client/v1/test_admin.py | 2 +- tests/rest/client/v1/test_events.py | 1 + tests/rest/client/v1/test_profile.py | 1 + tests/rest/client/v1/test_register.py | 2 +- tests/rest/client/v1/test_rooms.py | 1 + tests/rest/client/v1/test_typing.py | 1 + tests/rest/client/v2_alpha/test_filter.py | 2 +- tests/rest/client/v2_alpha/test_register.py | 2 +- tests/rest/client/v2_alpha/test_sync.py | 2 +- tests/server.py | 10 +- tests/storage/test_appservice.py | 13 +- tests/storage/test_background_update.py | 4 +- tests/storage/test_client_ips.py | 2 +- tests/storage/test_devices.py | 2 +- tests/storage/test_directory.py | 2 +- tests/storage/test_end_to_end_keys.py | 3 +- tests/storage/test_event_federation.py | 2 +- tests/storage/test_event_push_actions.py | 2 +- tests/storage/test_keys.py | 2 +- tests/storage/test_monthly_active_users.py | 2 +- tests/storage/test_presence.py | 2 +- tests/storage/test_profile.py | 2 +- tests/storage/test_redaction.py | 2 +- tests/storage/test_registration.py | 2 +- tests/storage/test_room.py | 4 +- tests/storage/test_roommember.py | 2 +- tests/storage/test_state.py | 2 +- tests/storage/test_user_directory.py | 2 +- tests/test_federation.py | 5 +- tests/test_server.py | 2 +- tests/test_visibility.py | 2 +- tests/utils.py | 133 +++++++++++++++++--- tox.ini | 20 ++- 49 files changed, 227 insertions(+), 59 deletions(-) create mode 100644 changelog.d/3423.misc diff --git a/.travis.yml b/.travis.yml index 
b34b17af758f..318701c9f89f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,9 @@ before_script: - git remote set-branches --add origin develop - git fetch origin develop +services: + - postgresql + matrix: fast_finish: true include: @@ -20,6 +23,9 @@ matrix: - python: 2.7 env: TOX_ENV=py27 + - python: 2.7 + env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4" + - python: 3.6 env: TOX_ENV=py36 @@ -29,6 +35,10 @@ matrix: - python: 3.6 env: TOX_ENV=check-newsfragment + allow_failures: + - python: 2.7 + env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4" + install: - pip install tox diff --git a/changelog.d/3423.misc b/changelog.d/3423.misc new file mode 100644 index 000000000000..51768c6d14ef --- /dev/null +++ b/changelog.d/3423.misc @@ -0,0 +1 @@ +The test suite now can run under PostgreSQL. diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 20fc3b03230d..3671d24f60d0 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -95,6 +95,7 @@ def __init__(self, hs): Args: hs (synapse.server.HomeServer): """ + self.hs = hs self.is_mine = hs.is_mine self.is_mine_id = hs.is_mine_id self.clock = hs.get_clock() @@ -230,6 +231,10 @@ def _on_shutdown(self): earlier than they should when synapse is restarted. This affect of this is some spurious presence changes that will self-correct. """ + # If the DB pool has already terminated, don't try updating + if not self.hs.get_db_pool().running: + return + logger.info( "Performing _on_shutdown. Persisting %d unpersisted changes", len(self.user_to_current_state) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index 2489527f2cc8..8fc678fa67be 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -96,6 +96,11 @@ def insert_client_ip(self, user_id, access_token, ip, user_agent, device_id, self._batch_row_update[key] = (user_agent, device_id, now) def _update_client_ips_batch(self): + + # If the DB pool has already terminated, don't try updating + if not self.hs.get_db_pool().running: + return + def update(): to_update = self._batch_row_update self._batch_row_update = {} diff --git a/tests/__init__.py b/tests/__init__.py index 24006c949e48..9d9ca2282928 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -15,4 +15,7 @@ from twisted.trial import util +from tests import utils + util.DEFAULT_TIMEOUT_DURATION = 10 +utils.setupdb() diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index f8e28876bb52..a65689ba896e 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -39,7 +39,7 @@ def setUp(self): self.state_handler = Mock() self.store = Mock() - self.hs = yield setup_test_homeserver(handlers=None) + self.hs = yield setup_test_homeserver(self.addCleanup, handlers=None) self.hs.get_datastore = Mock(return_value=self.store) self.hs.handlers = TestHandlers(self.hs) self.auth = Auth(self.hs) diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 1c2d71052cd0..48b2d3d663bb 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -46,7 +46,10 @@ def setUp(self): self.mock_http_client.put_json = DeferredMockCallable() hs = yield setup_test_homeserver( - handlers=None, http_client=self.mock_http_client, keyring=Mock() + self.addCleanup, + handlers=None, + http_client=self.mock_http_client, + keyring=Mock(), ) self.filtering = hs.get_filtering() diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 0c6f510d113e..8299dc72c87a 100644 --- a/tests/crypto/test_keyring.py +++ 
b/tests/crypto/test_keyring.py @@ -58,12 +58,10 @@ def setUp(self): self.mock_perspective_server = MockPerspectiveServer() self.http_client = Mock() self.hs = yield utils.setup_test_homeserver( - handlers=None, http_client=self.http_client + self.addCleanup, handlers=None, http_client=self.http_client ) keys = self.mock_perspective_server.get_verify_keys() - self.hs.config.perspectives = { - self.mock_perspective_server.server_name: keys - } + self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys} def check_context(self, _, expected): self.assertEquals( diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index ede01f8099a6..56c0f87fb772 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -35,7 +35,7 @@ def __init__(self, hs): class AuthTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - self.hs = yield setup_test_homeserver(handlers=None) + self.hs = yield setup_test_homeserver(self.addCleanup, handlers=None) self.hs.handlers = AuthHandlers(self.hs) self.auth_handler = self.hs.handlers.auth_handler self.macaroon_generator = self.hs.get_macaroon_generator() diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index d70d64550403..56e7acd37cfb 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield utils.setup_test_homeserver() + hs = yield utils.setup_test_homeserver(self.addCleanup) self.handler = hs.get_device_handler() self.store = hs.get_datastore() self.clock = hs.get_clock() diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 06de9f5eca25..ec7355688b25 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -46,6 +46,7 @@ def register_query_handler(query_type, handler): self.mock_registry.register_query_handler = register_query_handler hs = yield setup_test_homeserver( + self.addCleanup, http_client=None, resource_for_federation=Mock(), federation_client=self.mock_federation, diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 57ab228455e4..8dccc6826eb2 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): self.hs = yield utils.setup_test_homeserver( - handlers=None, federation_client=mock.Mock() + self.addCleanup, handlers=None, federation_client=mock.Mock() ) self.handler = synapse.handlers.e2e_keys.E2eKeysHandler(self.hs) diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 9268a6fe2b89..62dc69003c85 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -48,6 +48,7 @@ def register_query_handler(query_type, handler): self.mock_registry.register_query_handler = register_query_handler hs = yield setup_test_homeserver( + self.addCleanup, http_client=None, handlers=None, resource_for_federation=Mock(), diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index dbec81076f13..d48d40c8dd97 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -40,6 +40,7 @@ def setUp(self): self.mock_distributor.declare("registered_user") self.mock_captcha_client = Mock() self.hs = yield setup_test_homeserver( + self.addCleanup, handlers=None, http_client=None, expire_access_token=True, diff --git a/tests/handlers/test_typing.py 
b/tests/handlers/test_typing.py index becfa77bfa9f..ad58073a140c 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -67,6 +67,7 @@ def setUp(self): self.state_handler = Mock() hs = yield setup_test_homeserver( + self.addCleanup, "test", auth=self.auth, clock=self.clock, diff --git a/tests/replication/slave/storage/_base.py b/tests/replication/slave/storage/_base.py index c23b6e2cfd7a..65df116efc75 100644 --- a/tests/replication/slave/storage/_base.py +++ b/tests/replication/slave/storage/_base.py @@ -54,6 +54,7 @@ class BaseSlavedStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): self.hs = yield setup_test_homeserver( + self.addCleanup, "blue", http_client=None, federation_client=Mock(), diff --git a/tests/rest/client/v1/test_admin.py b/tests/rest/client/v1/test_admin.py index 67d9ab94e2c8..1a553fa3f928 100644 --- a/tests/rest/client/v1/test_admin.py +++ b/tests/rest/client/v1/test_admin.py @@ -51,7 +51,7 @@ def setUp(self): self.secrets = Mock() self.hs = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.clock + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock ) self.hs.config.registration_shared_secret = u"shared" diff --git a/tests/rest/client/v1/test_events.py b/tests/rest/client/v1/test_events.py index 0316b74fa111..956f7fc4c4d3 100644 --- a/tests/rest/client/v1/test_events.py +++ b/tests/rest/client/v1/test_events.py @@ -41,6 +41,7 @@ def setUp(self): self.mock_resource = MockHttpResource(prefix=PATH_PREFIX) hs = yield setup_test_homeserver( + self.addCleanup, http_client=None, federation_client=Mock(), ratelimiter=NonCallableMock(spec_set=["send_message"]), diff --git a/tests/rest/client/v1/test_profile.py b/tests/rest/client/v1/test_profile.py index 9ba0ffc19f8a..1eab9c3bdba2 100644 --- a/tests/rest/client/v1/test_profile.py +++ b/tests/rest/client/v1/test_profile.py @@ -46,6 +46,7 @@ def setUp(self): ) hs = yield setup_test_homeserver( + self.addCleanup, "test", http_client=None, resource_for_client=self.mock_resource, diff --git a/tests/rest/client/v1/test_register.py b/tests/rest/client/v1/test_register.py index 6f15d69ecd54..4be88b8a39fe 100644 --- a/tests/rest/client/v1/test_register.py +++ b/tests/rest/client/v1/test_register.py @@ -49,7 +49,7 @@ def setUp(self): self.hs_clock = Clock(self.clock) self.hs = self.hs = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.clock + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock ) self.hs.get_datastore = Mock(return_value=self.datastore) self.hs.get_handlers = Mock(return_value=handlers) diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 00fc796787b0..9fe0760496de 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -50,6 +50,7 @@ def setUp(self): self.hs_clock = Clock(self.clock) self.hs = setup_test_homeserver( + self.addCleanup, "red", http_client=None, clock=self.hs_clock, diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py index 7f1a435e7ba3..677265edf602 100644 --- a/tests/rest/client/v1/test_typing.py +++ b/tests/rest/client/v1/test_typing.py @@ -44,6 +44,7 @@ def setUp(self): self.auth_user_id = self.user_id hs = yield setup_test_homeserver( + self.addCleanup, "red", clock=self.clock, http_client=None, diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index de33b10a5f5e..8260c130f8e5 100644 --- 
a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -43,7 +43,7 @@ def setUp(self): self.hs_clock = Clock(self.clock) self.hs = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.clock + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock ) self.auth = self.hs.get_auth() diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index 9487babac343..b72bd0fb7fab 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -47,7 +47,7 @@ def setUp(self): login_handler=self.login_handler, ) self.hs = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.clock + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock ) self.hs.get_auth = Mock(return_value=self.auth) self.hs.get_handlers = Mock(return_value=self.handlers) diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index bafc0d1df07d..2e1d06c50982 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -40,7 +40,7 @@ def setUp(self): self.hs_clock = Clock(self.clock) self.hs = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.clock + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock ) self.auth = self.hs.get_auth() diff --git a/tests/server.py b/tests/server.py index 05708be8b917..beb24cf0320a 100644 --- a/tests/server.py +++ b/tests/server.py @@ -147,12 +147,15 @@ def callFromThread(self, callback, *args, **kwargs): return d -def setup_test_homeserver(*args, **kwargs): +def setup_test_homeserver(cleanup_func, *args, **kwargs): """ Set up a synchronous test server, driven by the reactor used by the homeserver. """ - d = _sth(*args, **kwargs).result + d = _sth(cleanup_func, *args, **kwargs).result + + if isinstance(d, Failure): + d.raiseException() # Make the thread pool synchronous. 
clock = d.get_clock() @@ -189,6 +192,9 @@ class ThreadPool: def start(self): pass + def stop(self): + pass + def callInThreadWithCallback(self, onResult, function, *args, **kwargs): def _(res): if isinstance(res, Failure): diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index fbb25a88441a..c893990454bb 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -43,7 +43,10 @@ def setUp(self): password_providers=[], ) hs = yield setup_test_homeserver( - config=config, federation_sender=Mock(), federation_client=Mock() + self.addCleanup, + config=config, + federation_sender=Mock(), + federation_client=Mock(), ) self.as_token = "token1" @@ -108,7 +111,10 @@ def setUp(self): password_providers=[], ) hs = yield setup_test_homeserver( - config=config, federation_sender=Mock(), federation_client=Mock() + self.addCleanup, + config=config, + federation_sender=Mock(), + federation_client=Mock(), ) self.db_pool = hs.get_db_pool() @@ -392,6 +398,7 @@ def test_unique_works(self): app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( + self.addCleanup, config=config, datastore=Mock(), federation_sender=Mock(), @@ -409,6 +416,7 @@ def test_duplicate_ids(self): app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( + self.addCleanup, config=config, datastore=Mock(), federation_sender=Mock(), @@ -432,6 +440,7 @@ def test_duplicate_as_tokens(self): app_service_config_files=[f1, f2], event_cache_size=1, password_providers=[] ) hs = yield setup_test_homeserver( + self.addCleanup, config=config, datastore=Mock(), federation_sender=Mock(), diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py index b4f6baf4418a..81403727c52b 100644 --- a/tests/storage/test_background_update.py +++ b/tests/storage/test_background_update.py @@ -9,7 +9,9 @@ class BackgroundUpdateTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() # type: synapse.server.HomeServer + hs = yield setup_test_homeserver( + self.addCleanup + ) # type: synapse.server.HomeServer self.store = hs.get_datastore() self.clock = hs.get_clock() diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index ea00bbe84cd4..fa60d949baf8 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - self.hs = yield tests.utils.setup_test_homeserver() + self.hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = self.hs.get_datastore() self.clock = self.hs.get_clock() diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py index 63bc42d9e096..aef4dfaf57a0 100644 --- a/tests/storage/test_devices.py +++ b/tests/storage/test_devices.py @@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py index 9a8ba2fcfe29..b4510c1c8d2b 100644 --- a/tests/storage/test_directory.py +++ b/tests/storage/test_directory.py @@ -26,7 +26,7 @@ class DirectoryStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() + hs = yield 
setup_test_homeserver(self.addCleanup) self.store = DirectoryStore(None, hs) diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py index d45c775c2d66..8f0aaece4061 100644 --- a/tests/storage/test_end_to_end_keys.py +++ b/tests/storage/test_end_to_end_keys.py @@ -26,8 +26,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() - + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() @defer.inlineCallbacks diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py index 66eb11958107..2fdf34fdf63a 100644 --- a/tests/storage/test_event_federation.py +++ b/tests/storage/test_event_federation.py @@ -22,7 +22,7 @@ class EventFederationWorkerStoreTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() @defer.inlineCallbacks diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py index 5e87b4530df8..b114c6fb1d4e 100644 --- a/tests/storage/test_event_push_actions.py +++ b/tests/storage/test_event_push_actions.py @@ -33,7 +33,7 @@ class EventPushActionsStoreTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() @defer.inlineCallbacks diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index ad0a55b324b0..47f4a8ceac99 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() @defer.inlineCallbacks diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 22b1072d9f57..0a2c859f26ca 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - self.hs = yield setup_test_homeserver() + self.hs = yield setup_test_homeserver(self.addCleanup) self.store = self.hs.get_datastore() @defer.inlineCallbacks diff --git a/tests/storage/test_presence.py b/tests/storage/test_presence.py index 12c540dfab40..b5b58ff660a3 100644 --- a/tests/storage/test_presence.py +++ b/tests/storage/test_presence.py @@ -26,7 +26,7 @@ class PresenceStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver(clock=MockClock()) + hs = yield setup_test_homeserver(self.addCleanup, clock=MockClock()) self.store = PresenceStore(None, hs) diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index 5acbc8be0c57..a1f6618bf94b 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -26,7 +26,7 @@ class ProfileStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() + hs = yield setup_test_homeserver(self.addCleanup) self.store = ProfileStore(None, hs) diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index 85ce61e841cf..c4e9fb72bfff 100644 --- 
a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -29,7 +29,7 @@ class RedactionTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver( - resource_for_federation=Mock(), http_client=None + self.addCleanup, resource_for_federation=Mock(), http_client=None ) self.store = hs.get_datastore() diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index bd96896bb35e..4eda122edc39 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -23,7 +23,7 @@ class RegistrationStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() + hs = yield setup_test_homeserver(self.addCleanup) self.db_pool = hs.get_db_pool() self.store = hs.get_datastore() diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index 84d49b55c1cb..a1ea23b06896 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -26,7 +26,7 @@ class RoomStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = yield setup_test_homeserver() + hs = yield setup_test_homeserver(self.addCleanup) # We can't test RoomStore on its own without the DirectoryStore, for # management of the 'room_aliases' table @@ -57,7 +57,7 @@ def test_get_room(self): class RoomEventsStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - hs = setup_test_homeserver() + hs = setup_test_homeserver(self.addCleanup) # Room events need the full datastore, for persist_event() and # get_room_state() diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index 0d9908926a94..c83ef600625f 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -29,7 +29,7 @@ class RoomMemberStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): hs = yield setup_test_homeserver( - resource_for_federation=Mock(), http_client=None + self.addCleanup, resource_for_federation=Mock(), http_client=None ) # We can't test the RoomMemberStore on its own without the other event # storage logic diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index ed5b41644a5c..6168c46248d0 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -33,7 +33,7 @@ def __init__(self, *args, **kwargs): @defer.inlineCallbacks def setUp(self): - hs = yield tests.utils.setup_test_homeserver() + hs = yield tests.utils.setup_test_homeserver(self.addCleanup) self.store = hs.get_datastore() self.event_builder_factory = hs.get_event_builder_factory() diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 7a273eab4880..b46e0ea7e279 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -29,7 +29,7 @@ class UserDirectoryStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - self.hs = yield setup_test_homeserver() + self.hs = yield setup_test_homeserver(self.addCleanup) self.store = UserDirectoryStore(None, self.hs) # alice and bob are both in !room_id. 
bobby is not but shares diff --git a/tests/test_federation.py b/tests/test_federation.py index f40ff29b52af..2540604fcc91 100644 --- a/tests/test_federation.py +++ b/tests/test_federation.py @@ -18,7 +18,10 @@ def setUp(self): self.reactor = ThreadedMemoryReactorClock() self.hs_clock = Clock(self.reactor) self.homeserver = setup_test_homeserver( - http_client=self.http_client, clock=self.hs_clock, reactor=self.reactor + self.addCleanup, + http_client=self.http_client, + clock=self.hs_clock, + reactor=self.reactor, ) user_id = UserID("us", "test") diff --git a/tests/test_server.py b/tests/test_server.py index fc396226ea27..895e49040688 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -16,7 +16,7 @@ def setUp(self): self.reactor = MemoryReactorClock() self.hs_clock = Clock(self.reactor) self.homeserver = setup_test_homeserver( - http_client=None, clock=self.hs_clock, reactor=self.reactor + self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.reactor ) def test_handler_for_request(self): diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 8643d6312570..45a78338d6aa 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -31,7 +31,7 @@ class FilterEventsForServerTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): - self.hs = yield setup_test_homeserver() + self.hs = yield setup_test_homeserver(self.addCleanup) self.event_creation_handler = self.hs.get_event_creation_handler() self.event_builder_factory = self.hs.get_event_builder_factory() self.store = self.hs.get_datastore() diff --git a/tests/utils.py b/tests/utils.py index 8668b5478fd8..90378326f898 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -13,7 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +import atexit import hashlib +import os +import uuid from inspect import getcallargs from mock import Mock, patch @@ -27,23 +30,80 @@ from synapse.server import HomeServer from synapse.storage import PostgresEngine from synapse.storage.engines import create_engine -from synapse.storage.prepare_database import prepare_database +from synapse.storage.prepare_database import ( + _get_or_create_schema_state, + _setup_new_database, + prepare_database, +) from synapse.util.logcontext import LoggingContext from synapse.util.ratelimitutils import FederationRateLimiter # set this to True to run the tests against postgres instead of sqlite. 
-# It requires you to have a local postgres database called synapse_test, within -# which ALL TABLES WILL BE DROPPED -USE_POSTGRES_FOR_TESTS = False +USE_POSTGRES_FOR_TESTS = os.environ.get("SYNAPSE_POSTGRES", False) +POSTGRES_USER = os.environ.get("SYNAPSE_POSTGRES_USER", "postgres") +POSTGRES_BASE_DB = "_synapse_unit_tests_base_%s" % (os.getpid(),) + + +def setupdb(): + + # If we're using PostgreSQL, set up the db once + if USE_POSTGRES_FOR_TESTS: + pgconfig = { + "name": "psycopg2", + "args": { + "database": POSTGRES_BASE_DB, + "user": POSTGRES_USER, + "cp_min": 1, + "cp_max": 5, + }, + } + config = Mock() + config.password_providers = [] + config.database_config = pgconfig + db_engine = create_engine(pgconfig) + db_conn = db_engine.module.connect(user=POSTGRES_USER) + db_conn.autocommit = True + cur = db_conn.cursor() + cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,)) + cur.execute("CREATE DATABASE %s;" % (POSTGRES_BASE_DB,)) + cur.close() + db_conn.close() + + # Set up in the db + db_conn = db_engine.module.connect( + database=POSTGRES_BASE_DB, user=POSTGRES_USER + ) + cur = db_conn.cursor() + _get_or_create_schema_state(cur, db_engine) + _setup_new_database(cur, db_engine) + db_conn.commit() + cur.close() + db_conn.close() + + def _cleanup(): + db_conn = db_engine.module.connect(user=POSTGRES_USER) + db_conn.autocommit = True + cur = db_conn.cursor() + cur.execute("DROP DATABASE IF EXISTS %s;" % (POSTGRES_BASE_DB,)) + cur.close() + db_conn.close() + + atexit.register(_cleanup) @defer.inlineCallbacks def setup_test_homeserver( - name="test", datastore=None, config=None, reactor=None, **kargs + cleanup_func, name="test", datastore=None, config=None, reactor=None, **kargs ): - """Setup a homeserver suitable for running tests against. Keyword arguments - are passed to the Homeserver constructor. If no datastore is supplied a - datastore backed by an in-memory sqlite db will be given to the HS. + """ + Setup a homeserver suitable for running tests against. Keyword arguments + are passed to the Homeserver constructor. + + If no datastore is supplied, one is created and given to the homeserver. + + Args: + cleanup_func : The function used to register a cleanup routine for + after the test. """ if reactor is None: from twisted.internet import reactor @@ -95,9 +155,11 @@ def setup_test_homeserver( kargs["clock"] = MockClock() if USE_POSTGRES_FOR_TESTS: + test_db = "synapse_test_%s" % uuid.uuid4().hex + config.database_config = { "name": "psycopg2", - "args": {"database": "synapse_test", "cp_min": 1, "cp_max": 5}, + "args": {"database": test_db, "cp_min": 1, "cp_max": 5}, } else: config.database_config = { @@ -107,6 +169,21 @@ def setup_test_homeserver( db_engine = create_engine(config.database_config) + # Create the database before we actually try and connect to it, based off + # the template database we generate in setupdb() + if datastore is None and isinstance(db_engine, PostgresEngine): + db_conn = db_engine.module.connect( + database=POSTGRES_BASE_DB, user=POSTGRES_USER + ) + db_conn.autocommit = True + cur = db_conn.cursor() + cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,)) + cur.execute( + "CREATE DATABASE %s WITH TEMPLATE %s;" % (test_db, POSTGRES_BASE_DB) + ) + cur.close() + db_conn.close() + # we need to configure the connection pool to run the on_new_connection # function, so that we can test code that uses custom sqlite functions # (like rank). 
@@ -125,15 +202,35 @@ def setup_test_homeserver( reactor=reactor, **kargs ) - db_conn = hs.get_db_conn() - # make sure that the database is empty - if isinstance(db_engine, PostgresEngine): - cur = db_conn.cursor() - cur.execute("SELECT tablename FROM pg_tables where schemaname='public'") - rows = cur.fetchall() - for r in rows: - cur.execute("DROP TABLE %s CASCADE" % r[0]) - yield prepare_database(db_conn, db_engine, config) + + # Prepare the DB on SQLite -- PostgreSQL is a copy of an already up to + # date db + if not isinstance(db_engine, PostgresEngine): + db_conn = hs.get_db_conn() + yield prepare_database(db_conn, db_engine, config) + db_conn.commit() + db_conn.close() + + else: + # We need to do cleanup on PostgreSQL + def cleanup(): + # Close all the db pools + hs.get_db_pool().close() + + # Drop the test database + db_conn = db_engine.module.connect( + database=POSTGRES_BASE_DB, user=POSTGRES_USER + ) + db_conn.autocommit = True + cur = db_conn.cursor() + cur.execute("DROP DATABASE IF EXISTS %s;" % (test_db,)) + db_conn.commit() + cur.close() + db_conn.close() + + # Register the cleanup hook + cleanup_func(cleanup) + hs.setup() else: hs = HomeServer( diff --git a/tox.ini b/tox.ini index ed26644bd922..085f43898908 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,7 @@ [tox] envlist = packaging, py27, py36, pep8, check_isort -[testenv] +[base] deps = coverage Twisted>=15.1 @@ -15,6 +15,15 @@ deps = setenv = PYTHONDONTWRITEBYTECODE = no_byte_code +[testenv] +deps = + {[base]deps} + +setenv = + {[base]setenv} + +passenv = * + commands = /usr/bin/find "{toxinidir}" -name '*.pyc' -delete coverage run {env:COVERAGE_OPTS:} --source="{toxinidir}/synapse" \ @@ -46,6 +55,15 @@ commands = # ) usedevelop=true +[testenv:py27-postgres] +usedevelop=true +deps = + {[base]deps} + psycopg2 +setenv = + {[base]setenv} + SYNAPSE_POSTGRES = 1 + [testenv:py36] usedevelop=true commands = From 0d43f991a19840a224d3dac78d79f13d78212ee6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 13 Aug 2018 18:00:23 +0100 Subject: [PATCH 099/173] support admin_email config and pass through into blocking errors, return AuthError in all cases --- synapse/api/auth.py | 8 ++++++-- synapse/api/errors.py | 13 +++++++++++-- synapse/config/server.py | 4 ++++ synapse/handlers/register.py | 27 ++++++++++++++------------- tests/api/test_auth.py | 6 +++++- tests/handlers/test_register.py | 8 ++++---- tests/utils.py | 1 + 7 files changed, 45 insertions(+), 22 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9c62ec43742d..4f028078fa27 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -781,11 +781,15 @@ def check_auth_blocking(self): """ if self.hs.config.hs_disabled: raise AuthError( - 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED + 403, self.hs.config.hs_disabled_message, + errcode=Codes.HS_DISABLED, + admin_email=self.hs.config.admin_email, ) if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + 403, "MAU Limit Exceeded", + admin_email=self.hs.config.admin_email, + errcode=Codes.MAU_LIMIT_EXCEEDED ) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index dc3bed5fcbbd..d74848159e44 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -225,11 +225,20 @@ def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND): class AuthError(SynapseError): """An error raised when there was 
a problem authorising an event.""" - def __init__(self, *args, **kwargs): if "errcode" not in kwargs: kwargs["errcode"] = Codes.FORBIDDEN - super(AuthError, self).__init__(*args, **kwargs) + self.admin_email = kwargs.get('admin_email') + self.msg = kwargs.get('msg') + self.errcode = kwargs.get('errcode') + super(AuthError, self).__init__(*args, errcode=kwargs["errcode"]) + + def error_dict(self): + return cs_error( + self.msg, + self.errcode, + admin_email=self.admin_email, + ) class EventSizeError(SynapseError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 3b078d72cabd..64a5121a452f 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -82,6 +82,10 @@ def read_config(self, config): self.hs_disabled = config.get("hs_disabled", False) self.hs_disabled_message = config.get("hs_disabled_message", "") + # Admin email to direct users at should their instance become blocked + # due to resource constraints + self.admin_email = config.get("admin_email", None) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 3526b20d5a57..ef7222d7b860 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -144,7 +144,8 @@ def register( Raises: RegistrationError if there was a problem registering. """ - yield self._check_mau_limits() + + yield self.auth.check_auth_blocking() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -289,7 +290,7 @@ def register_saml2(self, localpart): 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", ) - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -439,7 +440,7 @@ def get_or_create_user(self, requester, localpart, displayname, """ if localpart is None: raise SynapseError(400, "Request must include user id") - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() need_register = True try: @@ -534,13 +535,13 @@ def _join_user_to_room(self, requester, room_identifier): action="join", ) - @defer.inlineCallbacks - def _check_mau_limits(self): - """ - Do not accept registrations if monthly active user limits exceeded - and limiting is enabled - """ - try: - yield self.auth.check_auth_blocking() - except AuthError as e: - raise RegistrationError(e.code, str(e), e.errcode) + # @defer.inlineCallbacks + # def _s(self): + # """ + # Do not accept registrations if monthly active user limits exceeded + # and limiting is enabled + # """ + # try: + # yield self.auth.check_auth_blocking() + # except AuthError as e: + # raise RegistrationError(e.code, str(e), e.errcode) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a65689ba896e..e8a1894e651c 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -455,8 +455,11 @@ def test_blocking_mau(self): return_value=defer.succeed(lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) + self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + self.assertEquals(e.exception.code, 403) # Ensure does not throw an error self.store.get_monthly_active_count = Mock( @@ -470,5 +473,6 @@ def test_hs_disabled(self): self.hs.config.hs_disabled_message = "Reason for being disabled" with 
self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) self.assertEquals(e.exception.code, 403) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index d48d40c8dd97..35d1bcab3e9f 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import RegistrationError +from synapse.api.errors import AuthError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,7 +109,7 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -118,7 +118,7 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -127,5 +127,5 @@ def test_register_saml2_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register_saml2(localpart="local_part") diff --git a/tests/utils.py b/tests/utils.py index 90378326f898..4af81624eb39 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -139,6 +139,7 @@ def setup_test_homeserver( config.hs_disabled_message = "" config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] + config.admin_email = None # we need a sane default_room_version, otherwise attempts to create rooms will # fail. From ce7de9ae6b74e8e5e89ff442bc29f8cd73328042 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 13 Aug 2018 18:06:18 +0100 Subject: [PATCH 100/173] Revert "support admin_email config and pass through into blocking errors, return AuthError in all cases" This reverts commit 0d43f991a19840a224d3dac78d79f13d78212ee6. 
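The change reverted here (and re-applied as PATCH 103 later in the series) works by hanging extra data off the exception and overriding `error_dict()`, relying on `cs_error()` folding extra keyword arguments into the JSON error body. A sketch of that mechanism with an invented class name, not the patch's exact code:

```python
from synapse.api.errors import Codes, SynapseError, cs_error


class AdminContactError(SynapseError):
    """Illustrative subclass carrying an admin contact into the error body."""

    def __init__(self, code, msg, errcode=Codes.FORBIDDEN, admin_contact=None):
        self.admin_contact = admin_contact
        super(AdminContactError, self).__init__(code, msg, errcode)

    def error_dict(self):
        # cs_error() builds {"error": msg, "errcode": errcode} and merges
        # any extra keyword arguments alongside them.
        return cs_error(
            self.msg,
            self.errcode,
            admin_contact=self.admin_contact,
        )
```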
--- synapse/api/auth.py | 8 ++------ synapse/api/errors.py | 13 ++----------- synapse/config/server.py | 4 ---- synapse/handlers/register.py | 27 +++++++++++++-------------- tests/api/test_auth.py | 6 +----- tests/handlers/test_register.py | 8 ++++---- tests/utils.py | 1 - 7 files changed, 22 insertions(+), 45 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 4f028078fa27..9c62ec43742d 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -781,15 +781,11 @@ def check_auth_blocking(self): """ if self.hs.config.hs_disabled: raise AuthError( - 403, self.hs.config.hs_disabled_message, - errcode=Codes.HS_DISABLED, - admin_email=self.hs.config.admin_email, + 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED ) if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "MAU Limit Exceeded", - admin_email=self.hs.config.admin_email, - errcode=Codes.MAU_LIMIT_EXCEEDED + 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED ) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index d74848159e44..dc3bed5fcbbd 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -225,20 +225,11 @@ def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND): class AuthError(SynapseError): """An error raised when there was a problem authorising an event.""" + def __init__(self, *args, **kwargs): if "errcode" not in kwargs: kwargs["errcode"] = Codes.FORBIDDEN - self.admin_email = kwargs.get('admin_email') - self.msg = kwargs.get('msg') - self.errcode = kwargs.get('errcode') - super(AuthError, self).__init__(*args, errcode=kwargs["errcode"]) - - def error_dict(self): - return cs_error( - self.msg, - self.errcode, - admin_email=self.admin_email, - ) + super(AuthError, self).__init__(*args, **kwargs) class EventSizeError(SynapseError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 64a5121a452f..3b078d72cabd 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -82,10 +82,6 @@ def read_config(self, config): self.hs_disabled = config.get("hs_disabled", False) self.hs_disabled_message = config.get("hs_disabled_message", "") - # Admin email to direct users at should their instance become blocked - # due to resource constraints - self.admin_email = config.get("admin_email", None) - # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index ef7222d7b860..3526b20d5a57 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -144,8 +144,7 @@ def register( Raises: RegistrationError if there was a problem registering. 
""" - - yield self.auth.check_auth_blocking() + yield self._check_mau_limits() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -290,7 +289,7 @@ def register_saml2(self, localpart): 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", ) - yield self.auth.check_auth_blocking() + yield self._check_mau_limits() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -440,7 +439,7 @@ def get_or_create_user(self, requester, localpart, displayname, """ if localpart is None: raise SynapseError(400, "Request must include user id") - yield self.auth.check_auth_blocking() + yield self._check_mau_limits() need_register = True try: @@ -535,13 +534,13 @@ def _join_user_to_room(self, requester, room_identifier): action="join", ) - # @defer.inlineCallbacks - # def _s(self): - # """ - # Do not accept registrations if monthly active user limits exceeded - # and limiting is enabled - # """ - # try: - # yield self.auth.check_auth_blocking() - # except AuthError as e: - # raise RegistrationError(e.code, str(e), e.errcode) + @defer.inlineCallbacks + def _check_mau_limits(self): + """ + Do not accept registrations if monthly active user limits exceeded + and limiting is enabled + """ + try: + yield self.auth.check_auth_blocking() + except AuthError as e: + raise RegistrationError(e.code, str(e), e.errcode) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index e8a1894e651c..a65689ba896e 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -455,11 +455,8 @@ def test_blocking_mau(self): return_value=defer.succeed(lots_of_users) ) - with self.assertRaises(AuthError) as e: + with self.assertRaises(AuthError): yield self.auth.check_auth_blocking() - self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) - self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) - self.assertEquals(e.exception.code, 403) # Ensure does not throw an error self.store.get_monthly_active_count = Mock( @@ -473,6 +470,5 @@ def test_hs_disabled(self): self.hs.config.hs_disabled_message = "Reason for being disabled" with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() - self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) self.assertEquals(e.exception.code, 403) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 35d1bcab3e9f..d48d40c8dd97 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import AuthError +from synapse.api.errors import RegistrationError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,7 +109,7 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -118,7 +118,7 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -127,5 +127,5 @@ def test_register_saml2_mau_blocked(self): 
self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): yield self.handler.register_saml2(localpart="local_part") diff --git a/tests/utils.py b/tests/utils.py index 4af81624eb39..90378326f898 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -139,7 +139,6 @@ def setup_test_homeserver( config.hs_disabled_message = "" config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] - config.admin_email = None # we need a sane default_room_version, otherwise attempts to create rooms will # fail. From 8ba2dac6ea1c6400e1194323b27ad1eb46eb5f79 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 13 Aug 2018 18:54:01 +0100 Subject: [PATCH 101/173] Name changelog after PR, not bug --- changelog.d/{1932.bugfix => 3677.bugfix} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename changelog.d/{1932.bugfix => 3677.bugfix} (100%) diff --git a/changelog.d/1932.bugfix b/changelog.d/3677.bugfix similarity index 100% rename from changelog.d/1932.bugfix rename to changelog.d/3677.bugfix From b6d55588e1055ad99d0640b4311551e959de462f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 13 Aug 2018 18:56:55 +0100 Subject: [PATCH 102/173] Changelog should link to PRs The changelog makes much more sense if it consistently links to PRs, rather than mostly PRs with the occasional issue #. --- CONTRIBUTING.rst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index aa2738eea39d..3d75853aa7e2 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -30,11 +30,11 @@ use github's pull request workflow to review the contribution, and either ask you to make any refinements needed or merge it and make them ourselves. The changes will then land on master when we next do a release. -We use `Jenkins `_ and +We use `Jenkins `_ and `Travis `_ for continuous -integration. All pull requests to synapse get automatically tested by Travis; -the Jenkins builds require an adminstrator to start them. If your change -breaks the build, this will be shown in github, so please keep an eye on the +integration. All pull requests to synapse get automatically tested by Travis; +the Jenkins builds require an adminstrator to start them. If your change +breaks the build, this will be shown in github, so please keep an eye on the pull request for feedback. Code style @@ -56,17 +56,17 @@ entry. These are managed by Towncrier (https://github.com/hawkowl/towncrier). To create a changelog entry, make a new file in the ``changelog.d`` -file named in the format of ``issuenumberOrPR.type``. The type can be +file named in the format of ``PRnumber.type``. The type can be one of ``feature``, ``bugfix``, ``removal`` (also used for deprecations), or ``misc`` (for internal-only changes). The content of the file is your changelog entry, which can contain RestructuredText formatting. A note of contributors is welcomed in changelogs for non-misc changes (the content of misc changes is not displayed). -For example, a fix for a bug reported in #1234 would have its -changelog entry in ``changelog.d/1234.bugfix``, and contain content -like "The security levels of Florbs are now validated when -recieved over federation. Contributed by Jane Matrix". 
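Concretely, under the PR-based naming this hunk adopts, adding an entry is just creating a one-line file; a hypothetical helper (the PR number and entry text are taken from the example in this file):

```python
def write_changelog_entry(pr_number, entry_type, text):
    """Create a towncrier entry named after the pull request.

    entry_type is one of: feature, bugfix, removal, misc.
    """
    with open("changelog.d/%d.%s" % (pr_number, entry_type), "w") as f:
        f.write(text + "\n")


# e.g. for a fix in PR #1234:
write_changelog_entry(
    1234,
    "bugfix",
    "The security levels of Florbs are now validated when received "
    "over federation. Contributed by Jane Matrix.",
)
```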
+For example, a fix in PR #1234 would have its changelog entry in +``changelog.d/1234.bugfix``, and contain content like "The security levels of +Florbs are now validated when recieved over federation. Contributed by Jane +Matrix". Attribution ~~~~~~~~~~~ @@ -125,7 +125,7 @@ the contribution or otherwise have the right to contribute it to Matrix:: personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. - + If you agree to this for your contribution, then all that's needed is to include the line in your commit or pull request comment:: From f4b49152e27593dd6c863e71479a2ab712c4ada2 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 13 Aug 2018 18:00:23 +0100 Subject: [PATCH 103/173] support admin_email config and pass through into blocking errors, return AuthError in all cases --- synapse/api/auth.py | 8 ++++++-- synapse/api/errors.py | 13 +++++++++++-- synapse/config/server.py | 4 ++++ synapse/handlers/register.py | 27 ++++++++++++++------------- tests/api/test_auth.py | 6 +++++- tests/handlers/test_register.py | 8 ++++---- tests/utils.py | 1 + 7 files changed, 45 insertions(+), 22 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9c62ec43742d..4f028078fa27 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -781,11 +781,15 @@ def check_auth_blocking(self): """ if self.hs.config.hs_disabled: raise AuthError( - 403, self.hs.config.hs_disabled_message, errcode=Codes.HS_DISABLED + 403, self.hs.config.hs_disabled_message, + errcode=Codes.HS_DISABLED, + admin_email=self.hs.config.admin_email, ) if self.hs.config.limit_usage_by_mau is True: current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "MAU Limit Exceeded", errcode=Codes.MAU_LIMIT_EXCEEDED + 403, "MAU Limit Exceeded", + admin_email=self.hs.config.admin_email, + errcode=Codes.MAU_LIMIT_EXCEEDED ) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index dc3bed5fcbbd..d74848159e44 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -225,11 +225,20 @@ def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND): class AuthError(SynapseError): """An error raised when there was a problem authorising an event.""" - def __init__(self, *args, **kwargs): if "errcode" not in kwargs: kwargs["errcode"] = Codes.FORBIDDEN - super(AuthError, self).__init__(*args, **kwargs) + self.admin_email = kwargs.get('admin_email') + self.msg = kwargs.get('msg') + self.errcode = kwargs.get('errcode') + super(AuthError, self).__init__(*args, errcode=kwargs["errcode"]) + + def error_dict(self): + return cs_error( + self.msg, + self.errcode, + admin_email=self.admin_email, + ) class EventSizeError(SynapseError): diff --git a/synapse/config/server.py b/synapse/config/server.py index 3b078d72cabd..64a5121a452f 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -82,6 +82,10 @@ def read_config(self, config): self.hs_disabled = config.get("hs_disabled", False) self.hs_disabled_message = config.get("hs_disabled_message", "") + # Admin email to direct users at should their instance become blocked + # due to resource constraints + self.admin_email = config.get("admin_email", None) + # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None federation_domain_whitelist = config.get( diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 
3526b20d5a57..ef7222d7b860 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -144,7 +144,8 @@ def register( Raises: RegistrationError if there was a problem registering. """ - yield self._check_mau_limits() + + yield self.auth.check_auth_blocking() password_hash = None if password: password_hash = yield self.auth_handler().hash(password) @@ -289,7 +290,7 @@ def register_saml2(self, localpart): 400, "User ID can only contain characters a-z, 0-9, or '=_-./'", ) - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() user = UserID(localpart, self.hs.hostname) user_id = user.to_string() @@ -439,7 +440,7 @@ def get_or_create_user(self, requester, localpart, displayname, """ if localpart is None: raise SynapseError(400, "Request must include user id") - yield self._check_mau_limits() + yield self.auth.check_auth_blocking() need_register = True try: @@ -534,13 +535,13 @@ def _join_user_to_room(self, requester, room_identifier): action="join", ) - @defer.inlineCallbacks - def _check_mau_limits(self): - """ - Do not accept registrations if monthly active user limits exceeded - and limiting is enabled - """ - try: - yield self.auth.check_auth_blocking() - except AuthError as e: - raise RegistrationError(e.code, str(e), e.errcode) + # @defer.inlineCallbacks + # def _s(self): + # """ + # Do not accept registrations if monthly active user limits exceeded + # and limiting is enabled + # """ + # try: + # yield self.auth.check_auth_blocking() + # except AuthError as e: + # raise RegistrationError(e.code, str(e), e.errcode) diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a65689ba896e..e8a1894e651c 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -455,8 +455,11 @@ def test_blocking_mau(self): return_value=defer.succeed(lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) + self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + self.assertEquals(e.exception.code, 403) # Ensure does not throw an error self.store.get_monthly_active_count = Mock( @@ -470,5 +473,6 @@ def test_hs_disabled(self): self.hs.config.hs_disabled_message = "Reason for being disabled" with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() + self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) self.assertEquals(e.exception.code, 403) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index d48d40c8dd97..35d1bcab3e9f 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import RegistrationError +from synapse.api.errors import AuthError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,7 +109,7 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -118,7 +118,7 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): 
+ with self.assertRaises(AuthError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -127,5 +127,5 @@ def test_register_saml2_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register_saml2(localpart="local_part") diff --git a/tests/utils.py b/tests/utils.py index 90378326f898..4af81624eb39 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -139,6 +139,7 @@ def setup_test_homeserver( config.hs_disabled_message = "" config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] + config.admin_email = None # we need a sane default_room_version, otherwise attempts to create rooms will # fail. From 1f24d8681b74debeb8842ecf1df9fcc1b25b522e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 13 Aug 2018 21:26:43 +0100 Subject: [PATCH 104/173] set admin email via config --- changelog.d/3687.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3687.feature diff --git a/changelog.d/3687.feature b/changelog.d/3687.feature new file mode 100644 index 000000000000..93b24d1acb46 --- /dev/null +++ b/changelog.d/3687.feature @@ -0,0 +1 @@ +set admin email via config, to be used in error messages where the user should contact the administrator From bdfbd934d6eb42b899779abbe17f72ec4c77e720 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 14 Aug 2018 20:53:43 +1000 Subject: [PATCH 105/173] Implement a new test baseclass to cut down on boilerplate (#3684) --- changelog.d/3684.misc | 1 + tests/rest/client/v1/test_typing.py | 66 ++++++------- tests/unittest.py | 144 ++++++++++++++++++++++++++++ 3 files changed, 174 insertions(+), 37 deletions(-) create mode 100644 changelog.d/3684.misc diff --git a/changelog.d/3684.misc b/changelog.d/3684.misc new file mode 100644 index 000000000000..4c013263c453 --- /dev/null +++ b/changelog.d/3684.misc @@ -0,0 +1 @@ +Implemented a new testing base class to reduce test boilerplate. diff --git a/tests/rest/client/v1/test_typing.py b/tests/rest/client/v1/test_typing.py index 677265edf602..0ad814c5e588 100644 --- a/tests/rest/client/v1/test_typing.py +++ b/tests/rest/client/v1/test_typing.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,41 +18,32 @@ from mock import Mock, NonCallableMock -# twisted imports from twisted.internet import defer -import synapse.rest.client.v1.room +from synapse.rest.client.v1 import room from synapse.types import UserID -from ....utils import MockClock, MockHttpResource, setup_test_homeserver -from .utils import RestTestCase +from tests import unittest PATH_PREFIX = "/_matrix/client/api/v1" -class RoomTypingTestCase(RestTestCase): +class RoomTypingTestCase(unittest.HomeserverTestCase): """ Tests /rooms/$room_id/typing/$user_id REST API. 
""" user_id = "@sid:red" user = UserID.from_string(user_id) + servlets = [room.register_servlets] - @defer.inlineCallbacks - def setUp(self): - self.clock = MockClock() + def make_homeserver(self, reactor, clock): - self.mock_resource = MockHttpResource(prefix=PATH_PREFIX) - self.auth_user_id = self.user_id - - hs = yield setup_test_homeserver( - self.addCleanup, + hs = self.setup_test_homeserver( "red", - clock=self.clock, http_client=None, federation_client=Mock(), ratelimiter=NonCallableMock(spec_set=["send_message"]), ) - self.hs = hs self.event_source = hs.get_event_sources().sources["typing"] @@ -100,25 +92,24 @@ def fetch_room_distributions_into( fetch_room_distributions_into ) - synapse.rest.client.v1.room.register_servlets(hs, self.mock_resource) + return hs - self.room_id = yield self.create_room_as(self.user_id) + def prepare(self, reactor, clock, hs): + self.room_id = self.helper.create_room_as(self.user_id) # Need another user to make notifications actually work - yield self.join(self.room_id, user="@jim:red") + self.helper.join(self.room_id, user="@jim:red") - @defer.inlineCallbacks def test_set_typing(self): - (code, _) = yield self.mock_resource.trigger( + request, channel = self.make_request( "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}', + b'{"typing": true, "timeout": 30000}', ) - self.assertEquals(200, code) + self.render(request) + self.assertEquals(200, channel.code) self.assertEquals(self.event_source.get_current_key(), 1) - events = yield self.event_source.get_new_events( - from_key=0, room_ids=[self.room_id] - ) + events = self.event_source.get_new_events(from_key=0, room_ids=[self.room_id]) self.assertEquals( events[0], [ @@ -130,35 +121,36 @@ def test_set_typing(self): ], ) - @defer.inlineCallbacks def test_set_not_typing(self): - (code, _) = yield self.mock_resource.trigger( + request, channel = self.make_request( "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": false}', + b'{"typing": false}', ) - self.assertEquals(200, code) + self.render(request) + self.assertEquals(200, channel.code) - @defer.inlineCallbacks def test_typing_timeout(self): - (code, _) = yield self.mock_resource.trigger( + request, channel = self.make_request( "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}', + b'{"typing": true, "timeout": 30000}', ) - self.assertEquals(200, code) + self.render(request) + self.assertEquals(200, channel.code) self.assertEquals(self.event_source.get_current_key(), 1) - self.clock.advance_time(36) + self.reactor.advance(36) self.assertEquals(self.event_source.get_current_key(), 2) - (code, _) = yield self.mock_resource.trigger( + request, channel = self.make_request( "PUT", "/rooms/%s/typing/%s" % (self.room_id, self.user_id), - '{"typing": true, "timeout": 30000}', + b'{"typing": true, "timeout": 30000}', ) - self.assertEquals(200, code) + self.render(request) + self.assertEquals(200, channel.code) self.assertEquals(self.event_source.get_current_key(), 3) diff --git a/tests/unittest.py b/tests/unittest.py index f448a6dfbd40..e6afe3b96d61 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2018 New Vector # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,12 +16,19 @@ import logging +from mock import Mock + import twisted import twisted.logger from twisted.trial import unittest +from synapse.http.server import JsonResource +from synapse.server import HomeServer +from synapse.types import UserID, create_requester from synapse.util.logcontext import LoggingContextFilter +from tests.server import get_clock, make_request, render, setup_test_homeserver + # Set up putting Synapse's logs into Trial's. rootLogger = logging.getLogger() @@ -129,3 +137,139 @@ def DEBUG(target): Can apply to either a TestCase or an individual test method.""" target.loglevel = logging.DEBUG return target + + +class HomeserverTestCase(TestCase): + """ + A base TestCase that reduces boilerplate for HomeServer-using test cases. + + Attributes: + servlets (list[function]): List of servlet registration function. + user_id (str): The user ID to assume if auth is hijacked. + hijack_auth (bool): Whether to hijack auth to return the user specified + in user_id. + """ + servlets = [] + hijack_auth = True + + def setUp(self): + """ + Set up the TestCase by calling the homeserver constructor, optionally + hijacking the authentication system to return a fixed user, and then + calling the prepare function. + """ + self.reactor, self.clock = get_clock() + self._hs_args = {"clock": self.clock, "reactor": self.reactor} + self.hs = self.make_homeserver(self.reactor, self.clock) + + if self.hs is None: + raise Exception("No homeserver returned from make_homeserver.") + + if not isinstance(self.hs, HomeServer): + raise Exception("A homeserver wasn't returned, but %r" % (self.hs,)) + + # Register the resources + self.resource = JsonResource(self.hs) + + for servlet in self.servlets: + servlet(self.hs, self.resource) + + if hasattr(self, "user_id"): + from tests.rest.client.v1.utils import RestHelper + + self.helper = RestHelper(self.hs, self.resource, self.user_id) + + if self.hijack_auth: + + def get_user_by_access_token(token=None, allow_guest=False): + return { + "user": UserID.from_string(self.helper.auth_user_id), + "token_id": 1, + "is_guest": False, + } + + def get_user_by_req(request, allow_guest=False, rights="access"): + return create_requester( + UserID.from_string(self.helper.auth_user_id), 1, False, None + ) + + self.hs.get_auth().get_user_by_req = get_user_by_req + self.hs.get_auth().get_user_by_access_token = get_user_by_access_token + self.hs.get_auth().get_access_token_from_request = Mock( + return_value="1234" + ) + + if hasattr(self, "prepare"): + self.prepare(self.reactor, self.clock, self.hs) + + def make_homeserver(self, reactor, clock): + """ + Make and return a homeserver. + + Args: + reactor: A Twisted Reactor, or something that pretends to be one. + clock (synapse.util.Clock): The Clock, associated with the reactor. + + Returns: + A homeserver (synapse.server.HomeServer) suitable for testing. + + Function to be overridden in subclasses. + """ + raise NotImplementedError() + + def prepare(self, reactor, clock, homeserver): + """ + Prepare for the test. This involves things like mocking out parts of + the homeserver, or building test data common across the whole test + suite. + + Args: + reactor: A Twisted Reactor, or something that pretends to be one. + clock (synapse.util.Clock): The Clock, associated with the reactor. + homeserver (synapse.server.HomeServer): The HomeServer to test + against. + + Function to optionally be overridden in subclasses. 
+ """ + + def make_request(self, method, path, content=b""): + """ + Create a SynapseRequest at the path using the method and containing the + given content. + + Args: + method (bytes/unicode): The HTTP request method ("verb"). + path (bytes/unicode): The HTTP path, suitably URL encoded (e.g. + escaped UTF-8 & spaces and such). + content (bytes): The body of the request. + + Returns: + A synapse.http.site.SynapseRequest. + """ + return make_request(method, path, content) + + def render(self, request): + """ + Render a request against the resources registered by the test class's + servlets. + + Args: + request (synapse.http.site.SynapseRequest): The request to render. + """ + render(request, self.resource, self.reactor) + + def setup_test_homeserver(self, *args, **kwargs): + """ + Set up the test homeserver, meant to be called by the overridable + make_homeserver. It automatically passes through the test class's + clock & reactor. + + Args: + See tests.utils.setup_test_homeserver. + + Returns: + synapse.server.HomeServer + """ + kwargs = dict(kwargs) + kwargs.update(self._hs_args) + return setup_test_homeserver(self.addCleanup, *args, **kwargs) From 614e6d517d90922cbc54e59d0117a4daaed9a9fc Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 14 Aug 2018 21:30:34 +1000 Subject: [PATCH 106/173] Dockerised sytest (#3660) --- .circleci/config.yml | 48 +++++++++++++++++++++++++++++++++++++++++++ .dockerignore | 3 +++ MANIFEST.in | 1 + changelog.d/3660.misc | 1 + 4 files changed, 53 insertions(+) create mode 100644 .circleci/config.yml create mode 100644 changelog.d/3660.misc diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000000..e03f01b83703 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,48 @@ +version: 2 +jobs: + sytestpy2: + machine: true + steps: + - checkout + - run: docker pull matrixdotorg/sytest-synapsepy2 + - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs matrixdotorg/sytest-synapsepy2 + - store_artifacts: + path: ~/project/logs + destination: logs + sytestpy2postgres: + machine: true + steps: + - checkout + - run: docker pull matrixdotorg/sytest-synapsepy2 + - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy2 + - store_artifacts: + path: ~/project/logs + destination: logs + sytestpy3: + machine: true + steps: + - checkout + - run: docker pull matrixdotorg/sytest-synapsepy3 + - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs hawkowl/sytestpy3 + - store_artifacts: + path: ~/project/logs + destination: logs + sytestpy3postgres: + machine: true + steps: + - checkout + - run: docker pull matrixdotorg/sytest-synapsepy3 + - run: docker run --rm -it -v $(pwd)\:/src -v $(pwd)/logs\:/logs -e POSTGRES=1 matrixdotorg/sytest-synapsepy3 + - store_artifacts: + path: ~/project/logs + destination: logs + +workflows: + version: 2 + build: + jobs: + - sytestpy2 + - sytestpy2postgres +# Currently broken while the Python 3 port is incomplete +# - sytestpy3 +# - sytestpy3postgres diff --git a/.dockerignore b/.dockerignore index f36f86fbb722..6cdb8532d332 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,3 +3,6 @@ Dockerfile .gitignore demo/etc tox.ini +synctl +.git/* +.tox/* diff --git a/MANIFEST.in b/MANIFEST.in index 1ff98d95dff0..e0826ba544c8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -36,3 +36,4 @@ recursive-include changelog.d * prune .github prune demo/etc prune docker +prune .circleci diff --git a/changelog.d/3660.misc b/changelog.d/3660.misc new file mode 
100644 index 000000000000..acd814c27391 --- /dev/null +++ b/changelog.d/3660.misc @@ -0,0 +1 @@ +Sytests can now be run inside a Docker container. From 99ebaed8e6c5a65872a91f7a50e914ad9e574f7f Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 14:55:55 +0100 Subject: [PATCH 107/173] Update register.py remove comments --- synapse/handlers/register.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index ef7222d7b860..54e343492854 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -535,13 +535,3 @@ def _join_user_to_room(self, requester, room_identifier): action="join", ) - # @defer.inlineCallbacks - # def _s(self): - # """ - # Do not accept registrations if monthly active user limits exceeded - # and limiting is enabled - # """ - # try: - # yield self.auth.check_auth_blocking() - # except AuthError as e: - # raise RegistrationError(e.code, str(e), e.errcode) From ed4bc3d2fc7242e27b3cdd36bc6c27c98fac09c8 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 15:04:48 +0100 Subject: [PATCH 108/173] fix off by 1s on mau --- synapse/handlers/auth.py | 4 ++-- tests/handlers/test_auth.py | 39 ++++++++++++++++++++++++++++++++- tests/handlers/test_register.py | 14 ++++++++---- 3 files changed, 50 insertions(+), 7 deletions(-) diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 7ea8ce9f9479..7baaa394471f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -520,7 +520,7 @@ def get_access_token_for_user_id(self, user_id, device_id=None): """ logger.info("Logging in user %s on device %s", user_id, device_id) access_token = yield self.issue_access_token(user_id, device_id) - yield self.auth.check_auth_blocking() + yield self.auth.check_auth_blocking(user_id) # the device *should* have been registered before we got here; however, # it's possible we raced against a DELETE operation. 
The thing we @@ -734,7 +734,6 @@ def issue_access_token(self, user_id, device_id=None): @defer.inlineCallbacks def validate_short_term_login_token_and_get_user_id(self, login_token): - yield self.auth.check_auth_blocking() auth_api = self.hs.get_auth() user_id = None try: @@ -743,6 +742,7 @@ def validate_short_term_login_token_and_get_user_id(self, login_token): auth_api.validate_macaroon(macaroon, "login", True, user_id) except Exception: raise AuthError(403, "Invalid token", errcode=Codes.FORBIDDEN) + yield self.auth.check_auth_blocking(user_id) defer.returnValue(user_id) @defer.inlineCallbacks diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 56c0f87fb772..9ca7b2ee4ea6 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -124,7 +124,7 @@ def test_mau_limits_disabled(self): ) @defer.inlineCallbacks - def test_mau_limits_exceeded(self): + def test_mau_limits_exceeded_large(self): self.hs.config.limit_usage_by_mau = True self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) @@ -141,6 +141,43 @@ def test_mau_limits_exceeded(self): self._get_macaroon().serialize() ) + @defer.inlineCallbacks + def test_mau_limits_parity(self): + self.hs.config.limit_usage_by_mau = True + + # If not in monthly active cohort + self.hs.get_datastore().get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + with self.assertRaises(AuthError): + yield self.auth_handler.get_access_token_for_user_id('user_a') + + self.hs.get_datastore().get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + with self.assertRaises(AuthError): + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( + self._get_macaroon().serialize() + ) + # If in monthly active cohort + self.hs.get_datastore().user_last_seen_monthly_active = Mock( + return_value=defer.succeed(self.hs.get_clock().time_msec()) + ) + self.hs.get_datastore().get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + yield self.auth_handler.get_access_token_for_user_id('user_a') + self.hs.get_datastore().user_last_seen_monthly_active = Mock( + return_value=defer.succeed(self.hs.get_clock().time_msec()) + ) + self.hs.get_datastore().get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + yield self.auth_handler.validate_short_term_login_token_and_get_user_id( + self._get_macaroon().serialize() + ) + + @defer.inlineCallbacks def test_mau_limits_not_exceeded(self): self.hs.config.limit_usage_by_mau = True diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 35d1bcab3e9f..a821da075035 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import AuthError +from synapse.api.errors import RegistrationError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,7 +109,7 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -118,7 +118,13 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( 
return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): + yield self.handler.register(localpart="local_part") + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + with self.assertRaises(RegistrationError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -127,5 +133,5 @@ def test_register_saml2_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(RegistrationError): yield self.handler.register_saml2(localpart="local_part") From c74c71128d164ce7aa6b50538ab960ec85f7a8da Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 15:06:24 +0100 Subject: [PATCH 109/173] remove blank line --- synapse/handlers/register.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 54e343492854..f03ee1476bac 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -534,4 +534,3 @@ def _join_user_to_room(self, requester, room_identifier): remote_room_hosts=remote_room_hosts, action="join", ) - From 8f9a7eb58de214cd489ba233c381521e9bf79dec Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Mon, 13 Aug 2018 18:00:23 +0100 Subject: [PATCH 110/173] support admin_email config and pass through into blocking errors, return AuthError in all cases --- synapse/handlers/register.py | 1 - tests/handlers/test_register.py | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py index 54e343492854..f03ee1476bac 100644 --- a/synapse/handlers/register.py +++ b/synapse/handlers/register.py @@ -534,4 +534,3 @@ def _join_user_to_room(self, requester, room_identifier): remote_room_hosts=remote_room_hosts, action="join", ) - diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index a821da075035..6699d251217e 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import RegistrationError +from synapse.api.errors import AuthError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,7 +109,7 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -118,7 +118,7 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register(localpart="local_part") self.store.get_monthly_active_count = Mock( @@ -133,5 +133,5 @@ def test_register_saml2_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register_saml2(localpart="local_part") From 06b331ff4035558500e196a5dce79ffe9d2da807 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 15:28:15 +0100 Subject: [PATCH 111/173] fix off by 1 errors --- 
tests/handlers/test_auth.py | 1 - tests/handlers/test_register.py | 16 ++++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 9ca7b2ee4ea6..3046bd609365 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -177,7 +177,6 @@ def test_mau_limits_parity(self): self._get_macaroon().serialize() ) - @defer.inlineCallbacks def test_mau_limits_not_exceeded(self): self.hs.config.limit_usage_by_mau = True diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 6699d251217e..7154816a34a7 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -98,7 +98,7 @@ def test_mau_limits_when_disabled(self): def test_get_or_create_user_mau_not_blocked(self): self.hs.config.limit_usage_by_mau = True self.store.count_monthly_users = Mock( - return_value=defer.succeed(self.small_number_of_users) + return_value=defer.succeed(self.hs.config.max_mau_value - 1) ) # Ensure does not throw exception yield self.handler.get_or_create_user("@user:server", 'c', "User") @@ -112,6 +112,12 @@ def test_get_or_create_user_mau_blocked(self): with self.assertRaises(AuthError): yield self.handler.get_or_create_user("requester", 'b', "display_name") + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + with self.assertRaises(AuthError): + yield self.handler.get_or_create_user("requester", 'b', "display_name") + @defer.inlineCallbacks def test_register_mau_blocked(self): self.hs.config.limit_usage_by_mau = True @@ -124,7 +130,7 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(RegistrationError): + with self.assertRaises(AuthError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -135,3 +141,9 @@ def test_register_saml2_mau_blocked(self): ) with self.assertRaises(AuthError): yield self.handler.register_saml2(localpart="local_part") + + self.store.get_monthly_active_count = Mock( + return_value=defer.succeed(self.hs.config.max_mau_value) + ) + with self.assertRaises(AuthError): + yield self.handler.register_saml2(localpart="local_part") From 2545993ce430606f76926723898c2de3840da94a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 15:48:12 +0100 Subject: [PATCH 112/173] make comments clearer --- synapse/api/auth.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index c31c6a6a0867..18c73f054929 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -780,7 +780,8 @@ def check_auth_blocking(self, user_id=None): such as monthly active user limiting or global disable flag Args: - user_id(str): If present, checks for presence against existing MAU cohort + user_id(str|None): If present, checks for presence against existing + MAU cohort """ if self.hs.config.hs_disabled: raise AuthError( From e7d091fb867ade42889af7b8f9ce6a94a3d5e85e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 16:26:55 +0100 Subject: [PATCH 113/173] combine mau metrics into one group --- synapse/app/homeserver.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 37a9b126a5c1..f13584e190c3 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -303,9 +303,8 @@ def run_startup_checks(self, db_conn, 
database_engine): # Gauges to expose monthly active user control metrics -current_mau_gauge = Gauge("synapse_admin_current_mau", "Current MAU") -max_mau_value_gauge = Gauge("synapse_admin_max_mau_value", "MAU Limit") - +current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU") +max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit") def setup(config_options): """ @@ -532,7 +531,7 @@ def generate_monthly_active_users(): if hs.config.limit_usage_by_mau: count = yield hs.get_datastore().get_monthly_active_count() current_mau_gauge.set(float(count)) - max_mau_value_gauge.set(float(hs.config.max_mau_value)) + max_mau_gauge.set(float(hs.config.max_mau_value)) hs.get_datastore().initialise_reserved_users( hs.config.mau_limits_reserved_threepids From e5962f845c2e93564a2855696e0776bd05c5f126 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 16:36:14 +0100 Subject: [PATCH 114/173] pep8 --- synapse/app/homeserver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index f13584e190c3..a98bb506e5fb 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -306,6 +306,7 @@ def run_startup_checks(self, db_conn, database_engine): current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU") max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit") + def setup(config_options): """ Args: From 629f390035f2c1eb7c9d5e4a881e2586a9fdc3ab Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 16:38:05 +0100 Subject: [PATCH 115/173] Rename MAU prometheus metric --- changelog.d/3690.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3690.misc diff --git a/changelog.d/3690.misc b/changelog.d/3690.misc new file mode 100644 index 000000000000..710add02433d --- /dev/null +++ b/changelog.d/3690.misc @@ -0,0 +1 @@ +Rename MAU prometheus metrics From 1522ed9c07b11a97064a6642c6fd6c3e786f9cb5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 16:52:30 +0100 Subject: [PATCH 116/173] in case max_mau is less than I think --- tests/storage/test_monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 0d58dcebc52c..511acbde9bc2 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -33,7 +33,7 @@ def setUp(self): @defer.inlineCallbacks def test_initialise_reserved_users(self): - + self.hs.config.max_mau_value = 5 user1 = "@user1:server" user1_email = "user1@matrix.org" user2 = "@user2:server" From 9ecbaf8ba8b12537862e3045a650fffd438ba8a5 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 16:55:28 +0100 Subject: [PATCH 117/173] adding missing yield --- synapse/storage/monthly_active_users.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7b36accdb6ca..7e417f811e9e 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -46,7 +46,7 @@ def initialise_reserved_users(self, threepids): tp["medium"], tp["address"] ) if user_id: - self.upsert_monthly_active_user(user_id) + yield self.upsert_monthly_active_user(user_id) reserved_user_list.append(user_id) else: logger.warning( From cd0c749c4f86e9350be1152a91958d0064d41c84 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 17:06:51 +0100 Subject: [PATCH 118/173] Fix 
missing yield in synapse.storage.monthly_active_users.initialise_reserved_users --- changelog.d/3692.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3692.bugfix diff --git a/changelog.d/3692.bugfix b/changelog.d/3692.bugfix new file mode 100644 index 000000000000..f44e13dca154 --- /dev/null +++ b/changelog.d/3692.bugfix @@ -0,0 +1 @@ +Fix missing yield in synapse.storage.monthly_active_users.initialise_reserved_users From 7277216d01261f055886d0ac7b1ae5e5c5fc33cf Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Tue, 14 Aug 2018 17:14:39 +0100 Subject: [PATCH 119/173] fix setup_test_homeserver to be postgres compatible --- tests/handlers/test_sync.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index cfd37f3138d3..8c8b65e04ede 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -29,7 +29,7 @@ class SyncTestCase(tests.unittest.TestCase): @defer.inlineCallbacks def setUp(self): - self.hs = yield setup_test_homeserver() + self.hs = yield setup_test_homeserver(self.addCleanup) self.sync_handler = SyncHandler(self.hs) self.store = self.hs.get_datastore() From ab035bdeaca5019289e6587a625e87c995d3deeb Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 10:16:41 +0100 Subject: [PATCH 120/173] replace admin_email with admin_uri for greater flexibility --- synapse/api/auth.py | 10 +++++----- synapse/api/errors.py | 4 ++-- synapse/config/server.py | 2 +- tests/api/test_auth.py | 4 ++-- tests/utils.py | 2 +- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 108ea0ea093f..3b2a2ab77ade 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -786,8 +786,8 @@ def check_auth_blocking(self, user_id=None): if self.hs.config.hs_disabled: raise AuthError( 403, self.hs.config.hs_disabled_message, - errcode=Codes.HS_DISABLED, - admin_email=self.hs.config.admin_email, + errcode=Codes.RESOURCE_LIMIT_EXCEED, + admin_uri=self.hs.config.admin_uri, ) if self.hs.config.limit_usage_by_mau is True: # If the user is already part of the MAU cohort @@ -799,7 +799,7 @@ def check_auth_blocking(self, user_id=None): current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "MAU Limit Exceeded", - admin_email=self.hs.config.admin_email, - errcode=Codes.MAU_LIMIT_EXCEEDED + 403, "Monthly Active User Limits AU Limit Exceeded", + admin_uri=self.hs.config.admin_uri, + errcode=Codes.RESOURCE_LIMIT_EXCEED ) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index d74848159e44..b677087e7360 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -228,7 +228,7 @@ class AuthError(SynapseError): def __init__(self, *args, **kwargs): if "errcode" not in kwargs: kwargs["errcode"] = Codes.FORBIDDEN - self.admin_email = kwargs.get('admin_email') + self.admin_uri = kwargs.get('admin_uri') self.msg = kwargs.get('msg') self.errcode = kwargs.get('errcode') super(AuthError, self).__init__(*args, errcode=kwargs["errcode"]) @@ -237,7 +237,7 @@ def error_dict(self): return cs_error( self.msg, self.errcode, - admin_email=self.admin_email, + admin_uri=self.admin_uri, ) diff --git a/synapse/config/server.py b/synapse/config/server.py index 64a5121a452f..442ee4fde441 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -84,7 +84,7 @@ def read_config(self, config): # Admin email to direct users at should their instance become blocked # due 
to resource constraints - self.admin_email = config.get("admin_email", None) + self.admin_uri = config.get("admin_uri", None) # FIXME: federation_domain_whitelist needs sytests self.federation_domain_whitelist = None diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index e8a1894e651c..8c1ee9cdcc5c 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -457,7 +457,7 @@ def test_blocking_mau(self): with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() - self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) + self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) self.assertEquals(e.exception.code, 403) @@ -473,6 +473,6 @@ def test_hs_disabled(self): self.hs.config.hs_disabled_message = "Reason for being disabled" with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() - self.assertEquals(e.exception.admin_email, self.hs.config.admin_email) + self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) self.assertEquals(e.exception.code, 403) diff --git a/tests/utils.py b/tests/utils.py index 4af81624eb39..52326d4f670b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -139,7 +139,7 @@ def setup_test_homeserver( config.hs_disabled_message = "" config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] - config.admin_email = None + config.admin_uri = None # we need a sane default_room_version, otherwise attempts to create rooms will # fail. From b8429c7c81f96fdf24f8e297e788fd2b8b3a712a Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 10:19:48 +0100 Subject: [PATCH 121/173] update error codes for resource limiting --- synapse/api/errors.py | 3 +-- tests/api/test_auth.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b677087e7360..10b0356e86f5 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -56,8 +56,7 @@ class Codes(object): SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED" CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN" CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM" - MAU_LIMIT_EXCEEDED = "M_MAU_LIMIT_EXCEEDED" - HS_DISABLED = "M_HS_DISABLED" + RESOURCE_LIMIT_EXCEED = "M_RESOURCE_LIMIT_EXCEED" UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION" INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION" diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 8c1ee9cdcc5c..32a2b5fc3d0f 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -458,7 +458,7 @@ def test_blocking_mau(self): with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) - self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) self.assertEquals(e.exception.code, 403) # Ensure does not throw an error @@ -474,5 +474,5 @@ def test_hs_disabled(self): with self.assertRaises(AuthError) as e: yield self.auth.check_auth_blocking() self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) - self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) self.assertEquals(e.exception.code, 403) From 596ce635764667f1d824eec2cade73ca6063b73e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 10:23:28 
+0100 Subject: [PATCH 122/173] update resource limit error codes --- changelog.d/3697.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3697.misc diff --git a/changelog.d/3697.misc b/changelog.d/3697.misc new file mode 100644 index 000000000000..9082fc466285 --- /dev/null +++ b/changelog.d/3697.misc @@ -0,0 +1 @@ +update resource limit error codes From 75c663c7b97733cf1e217d0a973a6c4c64228444 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 11:27:48 +0100 Subject: [PATCH 123/173] update error codes --- tests/handlers/test_sync.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 8c8b65e04ede..33d861bd64ae 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -51,7 +51,7 @@ def test_wait_for_sync_for_user_auth_blocking(self): self.hs.config.hs_disabled = True with self.assertRaises(AuthError) as e: yield self.sync_handler.wait_for_sync_for_user(sync_config) - self.assertEquals(e.exception.errcode, Codes.HS_DISABLED) + self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) self.hs.config.hs_disabled = False @@ -59,7 +59,7 @@ def test_wait_for_sync_for_user_auth_blocking(self): with self.assertRaises(AuthError) as e: yield self.sync_handler.wait_for_sync_for_user(sync_config) - self.assertEquals(e.exception.errcode, Codes.MAU_LIMIT_EXCEEDED) + self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) def _generate_sync_config(self, user_id): return SyncConfig( From 55afba0fc5164d3be08848668ddec4bba7fba923 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 11:41:18 +0100 Subject: [PATCH 124/173] update admin email to uri --- changelog.d/3687.feature | 2 +- synapse/config/server.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog.d/3687.feature b/changelog.d/3687.feature index 93b24d1acb46..64b89f6411ba 100644 --- a/changelog.d/3687.feature +++ b/changelog.d/3687.feature @@ -1 +1 @@ -set admin email via config, to be used in error messages where the user should contact the administrator +set admin uri via config, to be used in error messages where the user should contact the administrator diff --git a/synapse/config/server.py b/synapse/config/server.py index 442ee4fde441..2190f3210a73 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -82,7 +82,7 @@ def read_config(self, config): self.hs_disabled = config.get("hs_disabled", False) self.hs_disabled_message = config.get("hs_disabled_message", "") - # Admin email to direct users at should their instance become blocked + # Admin uri to direct users at should their instance become blocked # due to resource constraints self.admin_uri = config.get("admin_uri", None) From 87a824bad1c93e8c9cd35d0667ee366c8ea6b41e Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 11:58:03 +0100 Subject: [PATCH 125/173] clean up AuthError --- synapse/api/errors.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 10b0356e86f5..08f0cb55548e 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -224,13 +224,9 @@ def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND): class AuthError(SynapseError): """An error raised when there was a problem authorising an event.""" - def __init__(self, *args, **kwargs): - if "errcode" not in kwargs: - kwargs["errcode"] = Codes.FORBIDDEN - self.admin_uri = kwargs.get('admin_uri') - self.msg = 
kwargs.get('msg') - self.errcode = kwargs.get('errcode') - super(AuthError, self).__init__(*args, errcode=kwargs["errcode"]) + def __init__(self, code, msg, errcode=Codes.FORBIDDEN, admin_uri=None): + self.admin_uri = admin_uri + super(AuthError, self).__init__(code, msg, errcode=errcode) def error_dict(self): return cs_error( From eaaa2248ffdda0c913a915a5c5f3e7ea5e2aa8c4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 15 Aug 2018 08:49:59 +0100 Subject: [PATCH 126/173] Refactor request logging code This commit moves a bunch of the logic for deciding when to log the receipt and completion of HTTP requests into SynapseRequest, rather than in the request handling wrappers. Advantages of this are: * we get logs for *all* requests (including OPTIONS and HEADs), rather than just those that end up hitting handlers we've remembered to decorate correctly. * when a request handler wires up a Producer (as the media stuff does currently, and as other things will do soon), we log at the point that all of the traffic has been sent to the client. --- synapse/http/server.py | 70 +++++--------- synapse/http/site.py | 201 ++++++++++++++++++++++++++++++++--------- 2 files changed, 179 insertions(+), 92 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 6dacb31037aa..4e4c02e1700c 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -25,7 +25,7 @@ from twisted.internet import defer from twisted.python import failure -from twisted.web import resource, server +from twisted.web import resource from twisted.web.server import NOT_DONE_YET from twisted.web.util import redirectTo @@ -37,10 +37,8 @@ SynapseError, UnrecognizedRequestError, ) -from synapse.http.request_metrics import requests_counter from synapse.util.caches import intern_dict -from synapse.util.logcontext import LoggingContext, PreserveLoggingContext -from synapse.util.metrics import Measure +from synapse.util.logcontext import preserve_fn logger = logging.getLogger(__name__) @@ -60,11 +58,10 @@ def wrap_json_request_handler(h): """Wraps a request handler method with exception handling. - Also adds logging as per wrap_request_handler_with_logging. + Also does the wrapping with request.processing as per wrap_async_request_handler. The handler method must have a signature of "handle_foo(self, request)", - where "self" must have a "clock" attribute (and "request" must be a - SynapseRequest). + where "request" must be a SynapseRequest. The handler must return a deferred. If the deferred succeeds we assume that a response has been sent. If the deferred fails with a SynapseError we use @@ -108,24 +105,23 @@ def wrapped_request_handler(self, request): pretty_print=_request_user_agent_is_curl(request), ) - return wrap_request_handler_with_logging(wrapped_request_handler) + return wrap_async_request_handler(wrapped_request_handler) def wrap_html_request_handler(h): """Wraps a request handler method with exception handling. - Also adds logging as per wrap_request_handler_with_logging. + Also does the wrapping with request.processing as per wrap_async_request_handler. The handler method must have a signature of "handle_foo(self, request)", - where "self" must have a "clock" attribute (and "request" must be a - SynapseRequest). + where "request" must be a SynapseRequest. 
""" def wrapped_request_handler(self, request): d = defer.maybeDeferred(h, self, request) d.addErrback(_return_html_error, request) return d - return wrap_request_handler_with_logging(wrapped_request_handler) + return wrap_async_request_handler(wrapped_request_handler) def _return_html_error(f, request): @@ -170,46 +166,26 @@ def _return_html_error(f, request): finish_request(request) -def wrap_request_handler_with_logging(h): - """Wraps a request handler to provide logging and metrics +def wrap_async_request_handler(h): + """Wraps an async request handler so that it calls request.processing. + + This helps ensure that work done by the request handler after the request is completed + is correctly recorded against the request metrics/logs. The handler method must have a signature of "handle_foo(self, request)", - where "self" must have a "clock" attribute (and "request" must be a - SynapseRequest). + where "request" must be a SynapseRequest. - As well as calling `request.processing` (which will log the response and - duration for this request), the wrapped request handler will insert the - request id into the logging context. + The handler may return a deferred, in which case the completion of the request isn't + logged until the deferred completes. """ @defer.inlineCallbacks - def wrapped_request_handler(self, request): - """ - Args: - self: - request (synapse.http.site.SynapseRequest): - """ + def wrapped_async_request_handler(self, request): + with request.processing(): + yield h(self, request) - request_id = request.get_request_id() - with LoggingContext(request_id) as request_context: - request_context.request = request_id - with Measure(self.clock, "wrapped_request_handler"): - # we start the request metrics timer here with an initial stab - # at the servlet name. For most requests that name will be - # JsonResource (or a subclass), and JsonResource._async_render - # will update it once it picks a servlet. - servlet_name = self.__class__.__name__ - with request.processing(servlet_name): - with PreserveLoggingContext(request_context): - d = defer.maybeDeferred(h, self, request) - - # record the arrival of the request *after* - # dispatching to the handler, so that the handler - # can update the servlet name in the request - # metrics - requests_counter.labels(request.method, - request.request_metrics.name).inc() - yield d - return wrapped_request_handler + # we need to preserve_fn here, because the synchronous render method won't yield for + # us (obviously) + return preserve_fn(wrapped_async_request_handler) class HttpServer(object): @@ -272,7 +248,7 @@ def render(self, request): """ This gets called by twisted every time someone sends us a request. """ self._async_render(request) - return server.NOT_DONE_YET + return NOT_DONE_YET @wrap_json_request_handler @defer.inlineCallbacks diff --git a/synapse/http/site.py b/synapse/http/site.py index 5fd30a4c2c4a..f5a8f7840654 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- import contextlib import logging import time @@ -19,8 +18,8 @@ from twisted.web.server import Request, Site from synapse.http import redact_uri -from synapse.http.request_metrics import RequestMetrics -from synapse.util.logcontext import ContextResourceUsage, LoggingContext +from synapse.http.request_metrics import RequestMetrics, requests_counter +from synapse.util.logcontext import LoggingContext, PreserveLoggingContext logger = logging.getLogger(__name__) @@ -34,25 +33,43 @@ class SynapseRequest(Request): It extends twisted's twisted.web.server.Request, and adds: * Unique request ID + * A log context associated with the request * Redaction of access_token query-params in __repr__ * Logging at start and end * Metrics to record CPU, wallclock and DB time by endpoint. - It provides a method `processing` which should be called by the Resource - which is handling the request, and returns a context manager. + It also provides a method `processing`, which returns a context manager. If this + method is called, the request won't be logged until the context manager is closed; + this is useful for asynchronous request handlers which may go on processing the + request even after the client has disconnected. + Attributes: + logcontext(LoggingContext) : the log context for this request """ def __init__(self, site, channel, *args, **kw): Request.__init__(self, channel, *args, **kw) self.site = site - self._channel = channel + self._channel = channel # this is used by the tests self.authenticated_entity = None self.start_time = 0 + # we can't yet create the logcontext, as we don't know the method. + self.logcontext = None + global _next_request_seq self.request_seq = _next_request_seq _next_request_seq += 1 + # whether an asynchronous request handler has called processing() + self._is_processing = False + + # the time when the asynchronous request handler completed its processing + self._processing_finished_time = None + + # what time we finished sending the response to the client (or the connection + # dropped) + self.finish_time = None + def __repr__(self): # We overwrite this so that we don't log ``access_token`` return '<%s at 0x%x method=%r uri=%r clientproto=%r site=%r>' % ( @@ -74,11 +91,116 @@ def get_user_agent(self): return self.requestHeaders.getRawHeaders(b"User-Agent", [None])[-1] def render(self, resrc): + # this is called once a Resource has been found to serve the request; in our + # case the Resource in question will normally be a JsonResource. + + # create a LogContext for this request + request_id = self.get_request_id() + logcontext = self.logcontext = LoggingContext(request_id) + logcontext.request = request_id + # override the Server header which is set by twisted self.setHeader("Server", self.site.server_version_string) - return Request.render(self, resrc) + + with PreserveLoggingContext(self.logcontext): + # we start the request metrics timer here with an initial stab + # at the servlet name. For most requests that name will be + # JsonResource (or a subclass), and JsonResource._async_render + # will update it once it picks a servlet. 
+ servlet_name = resrc.__class__.__name__ + self._started_processing(servlet_name) + + Request.render(self, resrc) + + # record the arrival of the request *after* + # dispatching to the handler, so that the handler + # can update the servlet name in the request + # metrics + requests_counter.labels(self.method, + self.request_metrics.name).inc() + + @contextlib.contextmanager + def processing(self): + """Record the fact that we are processing this request. + + Returns a context manager; the correct way to use this is: + + @defer.inlineCallbacks + def handle_request(request): + with request.processing("FooServlet"): + yield really_handle_the_request() + + Once the context manager is closed, the completion of the request will be logged, + and the various metrics will be updated. + """ + if self._is_processing: + raise RuntimeError("Request is already processing") + self._is_processing = True + + try: + yield + except Exception: + # this should already have been caught, and sent back to the client as a 500. + logger.exception("Asynchronous message handler raised an uncaught exception") + finally: + # the request handler has finished its work and either sent the whole response + # back, or handed over responsibility to a Producer. + + self._processing_finished_time = time.time() + self._is_processing = False + + # if we've already sent the response, log it now; otherwise, we wait for the + # response to be sent. + if self.finish_time is not None: + self._finished_processing() + + def finish(self): + """Called when all response data has been written to this Request. + + Overrides twisted.web.server.Request.finish to record the finish time and do + logging. + """ + self.finish_time = time.time() + Request.finish(self) + if not self._is_processing: + with PreserveLoggingContext(self.logcontext): + self._finished_processing() + + def connectionLost(self, reason): + """Called when the client connection is closed before the response is written. + + Overrides twisted.web.server.Request.connectionLost to record the finish time and + do logging. + """ + self.finish_time = time.time() + Request.connectionLost(self, reason) + + # we only get here if the connection to the client drops before we send + # the response. + # + # It's useful to log it here so that we can get an idea of when + # the client disconnects. + with PreserveLoggingContext(self.logcontext): + logger.warn( + "Error processing request: %s %s", reason.type, reason.value, + ) + + if not self._is_processing: + self._finished_processing() def _started_processing(self, servlet_name): + """Record the fact that we are processing this request. + + This will log the request's arrival. Once the request completes, + be sure to call finished_processing. + + Args: + servlet_name (str): the name of the servlet which will be + processing this request. This is used in the metrics. + + It is possible to update this afterwards by updating + self.request_metrics.name.
+ """ self.start_time = time.time() self.request_metrics = RequestMetrics() self.request_metrics.start( @@ -94,13 +216,21 @@ def _started_processing(self, servlet_name): ) def _finished_processing(self): - try: - context = LoggingContext.current_context() - usage = context.get_resource_usage() - except Exception: - usage = ContextResourceUsage() + """Log the completion of this request and update the metrics + """ + + usage = self.logcontext.get_resource_usage() + + if self._processing_finished_time is None: + # we completed the request without anything calling processing() + self._processing_finished_time = time.time() - end_time = time.time() + # the time between receiving the request and the request handler finishing + processing_time = self._processing_finished_time - self.start_time + + # the time between the request handler finishing and the response being sent + # to the client (nb may be negative) + response_send_time = self.finish_time - self._processing_finished_time # need to decode as it could be raw utf-8 bytes # from a IDN servname in an auth header @@ -116,22 +246,31 @@ def _finished_processing(self): user_agent = self.get_user_agent() if user_agent is not None: user_agent = user_agent.decode("utf-8", "replace") + else: + user_agent = "-" + + code = str(self.code) + if not self.finished: + # we didn't send the full response before we gave up (presumably because + # the connection dropped) + code += "!" self.site.access_logger.info( "%s - %s - {%s}" - " Processed request: %.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)" + " Processed request: %.3fsec/%.3fsec (%.3fsec, %.3fsec) (%.3fsec/%.3fsec/%d)" " %sB %s \"%s %s %s\" \"%s\" [%d dbevts]", self.getClientIP(), self.site.site_tag, authenticated_entity, - end_time - self.start_time, + processing_time, + response_send_time, usage.ru_utime, usage.ru_stime, usage.db_sched_duration_sec, usage.db_txn_duration_sec, int(usage.db_txn_count), self.sentLength, - self.code, + code, self.method, self.get_redacted_uri(), self.clientproto, @@ -140,38 +279,10 @@ def _finished_processing(self): ) try: - self.request_metrics.stop(end_time, self) + self.request_metrics.stop(self.finish_time, self) except Exception as e: logger.warn("Failed to stop metrics: %r", e) - @contextlib.contextmanager - def processing(self, servlet_name): - """Record the fact that we are processing this request. - - Returns a context manager; the correct way to use this is: - - @defer.inlineCallbacks - def handle_request(request): - with request.processing("FooServlet"): - yield really_handle_the_request() - - This will log the request's arrival. Once the context manager is - closed, the completion of the request will be logged, and the various - metrics will be updated. - - Args: - servlet_name (str): the name of the servlet which will be - processing this request. This is used in the metrics. - - It is possible to update this afterwards by updating - self.request_metrics.servlet_name. - """ - # TODO: we should probably just move this into render() and finish(), - # to save having to call a separate method. 
- self._started_processing(servlet_name) - yield - self._finished_processing() class XForwardedForRequest(SynapseRequest): def __init__(self, *args, **kw): From 2de813a61176e4db892aa07dec4cb3a390da5b4b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 15 Aug 2018 13:49:03 +0100 Subject: [PATCH 127/173] changelog --- changelog.d/3700.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3700.bugfix diff --git a/changelog.d/3700.bugfix b/changelog.d/3700.bugfix new file mode 100644 index 000000000000..492cce1dfc33 --- /dev/null +++ b/changelog.d/3700.bugfix @@ -0,0 +1 @@ +Improve HTTP request logging to include all requests \ No newline at end of file From 39176f27f79914ec4608235e1f875b16d0018c92 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 13:53:51 +0100 Subject: [PATCH 128/173] remove changelog referencing another PR --- changelog.d/3697.misc | 1 - 1 file changed, 1 deletion(-) delete mode 100644 changelog.d/3697.misc diff --git a/changelog.d/3697.misc b/changelog.d/3697.misc deleted file mode 100644 index 9082fc466285..000000000000 --- a/changelog.d/3697.misc +++ /dev/null @@ -1 +0,0 @@ -update resource limit error codes From 773db62a22ca8be06c9537ae0296952d0e53837e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 15 Aug 2018 14:10:32 +0100 Subject: [PATCH 129/173] Rename slave TransactionStore to SlavedTransactionStore --- synapse/app/client_reader.py | 4 ++-- synapse/app/event_creator.py | 4 ++-- synapse/app/federation_reader.py | 4 ++-- synapse/app/federation_sender.py | 4 ++-- synapse/app/media_repository.py | 4 ++-- synapse/replication/slave/storage/transactions.py | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index e2c91123db8c..86129ed544bc 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -39,7 +39,7 @@ from synapse.replication.slave.storage.keys import SlavedKeyStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore -from synapse.replication.slave.storage.transactions import TransactionStore +from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.client.v1.room import ( JoinedRoomMemberListRestServlet, @@ -66,7 +66,7 @@ class ClientReaderSlavedStore( DirectoryStore, SlavedApplicationServiceStore, SlavedRegistrationStore, - TransactionStore, + SlavedTransactionStore, SlavedClientIpStore, BaseSlavedStore, ): diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index 374f1156449c..95bfff181bb2 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -43,7 +43,7 @@ from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.room import RoomStore -from synapse.replication.slave.storage.transactions import TransactionStore +from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.client.v1.room import ( JoinRoomAliasServlet, @@ -63,7 +63,7 @@ class EventCreatorSlavedStore( DirectoryStore, - TransactionStore, + SlavedTransactionStore, SlavedProfileStore, SlavedAccountDataStore, SlavedPusherStore, diff --git
a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 0c557a824227..7e342c178594 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -41,7 +41,7 @@ from synapse.replication.slave.storage.pushers import SlavedPusherStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.room import RoomStore -from synapse.replication.slave.storage.transactions import TransactionStore +from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.server import HomeServer from synapse.storage.engines import create_engine @@ -63,7 +63,7 @@ class FederationReaderSlavedStore( SlavedKeyStore, RoomStore, DirectoryStore, - TransactionStore, + SlavedTransactionStore, BaseSlavedStore, ): pass diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 18469013faf0..29911ec68710 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -36,7 +36,7 @@ from synapse.replication.slave.storage.presence import SlavedPresenceStore from synapse.replication.slave.storage.receipts import SlavedReceiptsStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore -from synapse.replication.slave.storage.transactions import TransactionStore +from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.server import HomeServer from synapse.storage.engines import create_engine @@ -50,7 +50,7 @@ class FederationSenderSlaveStore( - SlavedDeviceInboxStore, TransactionStore, SlavedReceiptsStore, SlavedEventStore, + SlavedDeviceInboxStore, SlavedTransactionStore, SlavedReceiptsStore, SlavedEventStore, SlavedRegistrationStore, SlavedDeviceStore, SlavedPresenceStore, ): def __init__(self, db_conn, hs): diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index 749bbf37d021..ee16755660d9 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -34,7 +34,7 @@ from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore from synapse.replication.slave.storage.client_ips import SlavedClientIpStore from synapse.replication.slave.storage.registration import SlavedRegistrationStore -from synapse.replication.slave.storage.transactions import TransactionStore +from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler from synapse.rest.media.v0.content_repository import ContentRepoResource from synapse.server import HomeServer @@ -52,7 +52,7 @@ class MediaRepositorySlavedStore( SlavedApplicationServiceStore, SlavedRegistrationStore, SlavedClientIpStore, - TransactionStore, + SlavedTransactionStore, BaseSlavedStore, MediaRepositoryStore, ): diff --git a/synapse/replication/slave/storage/transactions.py b/synapse/replication/slave/storage/transactions.py index cd6416c47c8a..3527beb3c926 100644 --- a/synapse/replication/slave/storage/transactions.py +++ b/synapse/replication/slave/storage/transactions.py @@ -18,5 +18,5 @@ from ._base import BaseSlavedStore -class TransactionStore(TransactionStore, BaseSlavedStore): +class SlavedTransactionStore(TransactionStore, BaseSlavedStore): pass From 488ffe6fdb36c7479052096489c683777597c2aa Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: 
Wed, 15 Aug 2018 14:17:18 +0100 Subject: [PATCH 130/173] Use federation handler function rather than duplicate This involves renaming _persist_events to be a public function. --- synapse/handlers/federation.py | 14 ++++---- synapse/replication/http/federation.py | 44 ++------------------------ 2 files changed, 10 insertions(+), 48 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index acabca1d258c..9a37d627ca9a 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1175,7 +1175,7 @@ def on_invite_request(self, origin, pdu): ) context = yield self.state_handler.compute_event_context(event) - yield self._persist_events([(event, context)]) + yield self.persist_events_and_notify([(event, context)]) defer.returnValue(event) @@ -1206,7 +1206,7 @@ def do_remotely_reject_invite(self, target_hosts, room_id, user_id): ) context = yield self.state_handler.compute_event_context(event) - yield self._persist_events([(event, context)]) + yield self.persist_events_and_notify([(event, context)]) defer.returnValue(event) @@ -1449,7 +1449,7 @@ def _handle_new_event(self, origin, event, state=None, auth_events=None, event, context ) - yield self._persist_events( + yield self.persist_events_and_notify( [(event, context)], backfilled=backfilled, ) @@ -1487,7 +1487,7 @@ def _handle_new_events(self, origin, event_infos, backfilled=False): ], consumeErrors=True, )) - yield self._persist_events( + yield self.persist_events_and_notify( [ (ev_info["event"], context) for ev_info, context in zip(event_infos, contexts) @@ -1575,7 +1575,7 @@ def _persist_auth_tree(self, origin, auth_events, state, event): raise events_to_context[e.event_id].rejected = RejectedReason.AUTH_ERROR - yield self._persist_events( + yield self.persist_events_and_notify( [ (e, events_to_context[e.event_id]) for e in itertools.chain(auth_events, state) @@ -1586,7 +1586,7 @@ def _persist_auth_tree(self, origin, auth_events, state, event): event, old_state=state ) - yield self._persist_events( + yield self.persist_events_and_notify( [(event, new_event_context)], ) @@ -2327,7 +2327,7 @@ def _check_key_revocation(self, public_key, url): raise AuthError(403, "Third party certificate was invalid") @defer.inlineCallbacks - def _persist_events(self, event_and_contexts, backfilled=False): + def persist_events_and_notify(self, event_and_contexts, backfilled=False): """Persists events and tells the notifier/pushers about them, if necessary. 
diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 7b0b1cd32ed1..2ddd18f73b31 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -17,13 +17,10 @@ from twisted.internet import defer -from synapse.api.constants import EventTypes, Membership from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext from synapse.http.servlet import parse_json_object_from_request from synapse.replication.http._base import ReplicationEndpoint -from synapse.types import UserID -from synapse.util.logcontext import run_in_background from synapse.util.metrics import Measure logger = logging.getLogger(__name__) @@ -55,9 +52,7 @@ def __init__(self, hs): self.store = hs.get_datastore() self.clock = hs.get_clock() - self.is_mine_id = hs.is_mine_id - self.notifier = hs.get_notifier() - self.pusher_pool = hs.get_pusherpool() + self.federation_handler = hs.get_handlers().federation_handler @staticmethod @defer.inlineCallbacks @@ -114,45 +109,12 @@ def _handle_request(self, request): len(event_and_contexts), ) - max_stream_id = yield self.store.persist_events( - event_and_contexts, - backfilled=backfilled + yield self.federation_handler.persist_events_and_notify( + event_and_contexts, backfilled, ) - if not backfilled: - for event, _ in event_and_contexts: - self._notify_persisted_event(event, max_stream_id) - defer.returnValue((200, {})) - def _notify_persisted_event(self, event, max_stream_id): - extra_users = [] - if event.type == EventTypes.Member: - target_user_id = event.state_key - - # We notify for memberships if its an invite for one of our - # users - if event.internal_metadata.is_outlier(): - if event.membership != Membership.INVITE: - if not self.is_mine_id(target_user_id): - return - - target_user = UserID.from_string(target_user_id) - extra_users.append(target_user) - elif event.internal_metadata.is_outlier(): - return - - event_stream_id = event.internal_metadata.stream_ordering - self.notifier.on_new_room_event( - event, event_stream_id, max_stream_id, - extra_users=extra_users - ) - - run_in_background( - self.pusher_pool.on_new_notifications, - event_stream_id, max_stream_id, - ) - class ReplicationFederationSendEduRestServlet(ReplicationEndpoint): """Handles EDUs newly received from federation, including persisting and From afcd655ab67611e3bbac3bc992574baa464bc030 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 14 Aug 2018 16:13:17 +0100 Subject: [PATCH 131/173] Use a producer to stream back responses The problem with dumping all of the json response into the Request object at once is that doing so starts the timeout for the next request to be received: so if it takes longer than 60s to stream back the response to the client, the client never gets it. The correct solution is to use a Producer; then the timeout is only started once all of the content is sent over the TCP connection. 
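For illustration, a minimal sketch of the producer-based approach described above, assuming only stock Twisted APIs (twisted.web.static.NoRangeStaticProducer and a plain twisted.web.server.Request); the helper below is a simplified stand-in for the real respond_with_json_bytes, not the exact code in this patch:

    from io import BytesIO

    from twisted.web.server import NOT_DONE_YET
    from twisted.web.static import NoRangeStaticProducer

    def respond_with_json_bytes(request, code, json_bytes):
        # simplified stand-in for the helper touched by this patch
        request.setResponseCode(code)
        request.setHeader(b"Content-Type", b"application/json")
        request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))

        # Hand the body to a pull producer instead of calling request.write()
        # directly: the producer writes chunks as the TCP transport drains, and
        # calls request.finish() itself once everything has been written, so the
        # connection's idle timeout only starts once the last byte has been
        # handed to the network.
        producer = NoRangeStaticProducer(request, BytesIO(json_bytes))
        producer.start()
        return NOT_DONE_YET

Returning NOT_DONE_YET tells twisted that the response will be completed asynchronously, once the producer has drained the BytesIO.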
--- synapse/http/server.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/synapse/http/server.py b/synapse/http/server.py index 6dacb31037aa..5733abb5dcbb 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -27,6 +27,7 @@ from twisted.python import failure from twisted.web import resource, server from twisted.web.server import NOT_DONE_YET +from twisted.web.static import NoRangeStaticProducer from twisted.web.util import redirectTo import synapse.events @@ -42,6 +43,11 @@ from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.metrics import Measure +if PY3: + from io import BytesIO +else: + from cStringIO import StringIO as BytesIO + logger = logging.getLogger(__name__) HTML_ERROR_TEMPLATE = """ @@ -413,8 +419,7 @@ def respond_with_json(request, code, json_object, send_cors=False, return if pretty_print: - json_bytes = (encode_pretty_printed_json(json_object) + "\n" - ).encode("utf-8") + json_bytes = encode_pretty_printed_json(json_object) + b"\n" else: if canonical_json or synapse.events.USE_FROZEN_DICTS: # canonicaljson already encodes to bytes @@ -450,8 +455,12 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False, if send_cors: set_cors_headers(request) - request.write(json_bytes) - finish_request(request) + # todo: we can almost certainly avoid this copy and encode the json straight into + # the bytesIO, but it would involve faffing around with string->bytes wrappers. + bytes_io = BytesIO(json_bytes) + + producer = NoRangeStaticProducer(request, bytes_io) + producer.start() return NOT_DONE_YET From a87af25fbb9d8938b1557ef42e31edecf1ee731c Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 15 Aug 2018 23:43:41 +1000 Subject: [PATCH 132/173] Fix the tests --- tests/rest/client/v1/test_register.py | 7 ++---- tests/rest/client/v1/utils.py | 8 +++---- tests/rest/client/v2_alpha/test_filter.py | 23 +++++++----------- tests/rest/client/v2_alpha/test_register.py | 26 +++++++-------------- tests/rest/client/v2_alpha/test_sync.py | 5 ++-- tests/server.py | 23 ++++++++++++++---- tests/test_server.py | 17 +++++--------- 7 files changed, 49 insertions(+), 60 deletions(-) diff --git a/tests/rest/client/v1/test_register.py b/tests/rest/client/v1/test_register.py index 4be88b8a39fe..6b7ff813d567 100644 --- a/tests/rest/client/v1/test_register.py +++ b/tests/rest/client/v1/test_register.py @@ -25,7 +25,7 @@ from synapse.util import Clock from tests import unittest -from tests.server import make_request, setup_test_homeserver +from tests.server import make_request, render, setup_test_homeserver class CreateUserServletTestCase(unittest.TestCase): @@ -77,10 +77,7 @@ def test_POST_createuser_with_valid_user(self): ) request, channel = make_request(b"POST", url, request_data) - request.render(res) - - # Advance the clock because it waits - self.clock.advance(1) + render(request, res, self.clock) self.assertEquals(channel.result["code"], b"200") diff --git a/tests/rest/client/v1/utils.py b/tests/rest/client/v1/utils.py index 9f862f9dfa02..40dc4ea256da 100644 --- a/tests/rest/client/v1/utils.py +++ b/tests/rest/client/v1/utils.py @@ -23,7 +23,7 @@ from synapse.api.constants import Membership from tests import unittest -from tests.server import make_request, wait_until_result +from tests.server import make_request, render class RestTestCase(unittest.TestCase): @@ -171,8 +171,7 @@ def create_room_as(self, room_creator, is_public=True, tok=None): request, channel = make_request( "POST", path, 
json.dumps(content).encode('utf8') ) - request.render(self.resource) - wait_until_result(self.hs.get_reactor(), channel) + render(request, self.resource, self.hs.get_reactor()) assert channel.result["code"] == b"200", channel.result self.auth_user_id = temp_id @@ -220,8 +219,7 @@ def change_membership(self, room, src, targ, membership, tok=None, expect_code=2 request, channel = make_request("PUT", path, json.dumps(data).encode('utf8')) - request.render(self.resource) - wait_until_result(self.hs.get_reactor(), channel) + render(request, self.resource, self.hs.get_reactor()) assert int(channel.result["code"]) == expect_code, ( "Expected: %d, got: %d, resp: %r" diff --git a/tests/rest/client/v2_alpha/test_filter.py b/tests/rest/client/v2_alpha/test_filter.py index 8260c130f8e5..6a886ee3b832 100644 --- a/tests/rest/client/v2_alpha/test_filter.py +++ b/tests/rest/client/v2_alpha/test_filter.py @@ -24,8 +24,8 @@ from tests.server import ( ThreadedMemoryReactorClock as MemoryReactorClock, make_request, + render, setup_test_homeserver, - wait_until_result, ) PATH_PREFIX = "/_matrix/client/v2_alpha" @@ -76,8 +76,7 @@ def test_add_filter(self): "/_matrix/client/r0/user/%s/filter" % (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"200") self.assertEqual(channel.json_body, {"filter_id": "0"}) @@ -91,8 +90,7 @@ def test_add_filter_for_other_user(self): "/_matrix/client/r0/user/%s/filter" % ("@watermelon:test"), self.EXAMPLE_FILTER_JSON, ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"403") self.assertEquals(channel.json_body["errcode"], Codes.FORBIDDEN) @@ -105,8 +103,7 @@ def test_add_filter_non_local_user(self): "/_matrix/client/r0/user/%s/filter" % (self.USER_ID), self.EXAMPLE_FILTER_JSON, ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.hs.is_mine = _is_mine self.assertEqual(channel.result["code"], b"403") @@ -121,8 +118,7 @@ def test_get_filter(self): request, channel = make_request( "GET", "/_matrix/client/r0/user/%s/filter/%s" % (self.USER_ID, filter_id) ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"200") self.assertEquals(channel.json_body, self.EXAMPLE_FILTER) @@ -131,8 +127,7 @@ def test_get_filter_non_existant(self): request, channel = make_request( "GET", "/_matrix/client/r0/user/%s/filter/12382148321" % (self.USER_ID) ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"400") self.assertEquals(channel.json_body["errcode"], Codes.NOT_FOUND) @@ -143,8 +138,7 @@ def test_get_filter_invalid_id(self): request, channel = make_request( "GET", "/_matrix/client/r0/user/%s/filter/foobar" % (self.USER_ID) ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"400") @@ -153,7 +147,6 @@ def test_get_filter_no_id(self): request, channel = make_request( "GET", "/_matrix/client/r0/user/%s/filter/" % (self.USER_ID) ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) 
self.assertEqual(channel.result["code"], b"400") diff --git a/tests/rest/client/v2_alpha/test_register.py b/tests/rest/client/v2_alpha/test_register.py index b72bd0fb7fab..1c128e81f5ac 100644 --- a/tests/rest/client/v2_alpha/test_register.py +++ b/tests/rest/client/v2_alpha/test_register.py @@ -11,7 +11,7 @@ from synapse.util import Clock from tests import unittest -from tests.server import make_request, setup_test_homeserver, wait_until_result +from tests.server import make_request, render, setup_test_homeserver class RegisterRestServletTestCase(unittest.TestCase): @@ -72,8 +72,7 @@ def test_POST_appservice_registration_valid(self): request, channel = make_request( b"POST", self.url + b"?access_token=i_am_an_app_service", request_data ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"200", channel.result) det_data = { @@ -89,16 +88,14 @@ def test_POST_appservice_registration_invalid(self): request, channel = make_request( b"POST", self.url + b"?access_token=i_am_an_app_service", request_data ) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"401", channel.result) def test_POST_bad_password(self): request_data = json.dumps({"username": "kermit", "password": 666}) request, channel = make_request(b"POST", self.url, request_data) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals(channel.json_body["error"], "Invalid password") @@ -106,8 +103,7 @@ def test_POST_bad_password(self): def test_POST_bad_username(self): request_data = json.dumps({"username": 777, "password": "monkey"}) request, channel = make_request(b"POST", self.url, request_data) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"400", channel.result) self.assertEquals(channel.json_body["error"], "Invalid username") @@ -126,8 +122,7 @@ def test_POST_user_valid(self): self.device_handler.check_device_registered = Mock(return_value=device_id) request, channel = make_request(b"POST", self.url, request_data) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) det_data = { "user_id": user_id, @@ -149,8 +144,7 @@ def test_POST_disabled_registration(self): self.registration_handler.register = Mock(return_value=("@user:id", "t")) request, channel = make_request(b"POST", self.url, request_data) - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals(channel.json_body["error"], "Registration has been disabled") @@ -162,8 +156,7 @@ def test_POST_guest_registration(self): self.registration_handler.register = Mock(return_value=(user_id, None)) request, channel = make_request(b"POST", self.url + b"?kind=guest", b"{}") - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) det_data = { "user_id": user_id, @@ -177,8 +170,7 @@ def test_POST_disabled_guest_registration(self): self.hs.config.allow_guest_access = False request, channel = make_request(b"POST", self.url + b"?kind=guest", b"{}") - 
request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEquals(channel.result["code"], b"403", channel.result) self.assertEquals(channel.json_body["error"], "Guest access is disabled") diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 2e1d06c50982..9f3d8bd1dbb4 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -23,8 +23,8 @@ from tests.server import ( ThreadedMemoryReactorClock as MemoryReactorClock, make_request, + render, setup_test_homeserver, - wait_until_result, ) PATH_PREFIX = "/_matrix/client/v2_alpha" @@ -69,8 +69,7 @@ def get_user_by_req(request, allow_guest=False, rights="access"): def test_sync_argless(self): request, channel = make_request("GET", "/_matrix/client/r0/sync") - request.render(self.resource) - wait_until_result(self.clock, channel) + render(request, self.resource, self.clock) self.assertEqual(channel.result["code"], b"200") self.assertTrue( diff --git a/tests/server.py b/tests/server.py index beb24cf0320a..c63b2c310000 100644 --- a/tests/server.py +++ b/tests/server.py @@ -24,6 +24,7 @@ class FakeChannel(object): """ result = attr.ib(default=attr.Factory(dict)) + _producer = None @property def json_body(self): @@ -49,6 +50,15 @@ def write(self, content): self.result["body"] += content + def registerProducer(self, producer, streaming): + self._producer = producer + + def unregisterProducer(self): + if self._producer is None: + return + + self._producer = None + def requestDone(self, _self): self.result["done"] = True @@ -111,14 +121,19 @@ def make_request(method, path, content=b""): return req, channel -def wait_until_result(clock, channel, timeout=100): +def wait_until_result(clock, request, timeout=100): """ - Wait until the channel has a result. + Wait until the request is finished. 
""" clock.run() x = 0 - while not channel.result: + while not request.finished: + + # If there's a producer, tell it to resume producing so we get content + if request._channel._producer: + request._channel._producer.resumeProducing() + x += 1 if x > timeout: @@ -129,7 +144,7 @@ def wait_until_result(clock, channel, timeout=100): def render(request, resource, clock): request.render(resource) - wait_until_result(clock, request._channel) + wait_until_result(clock, request) class ThreadedMemoryReactorClock(MemoryReactorClock): diff --git a/tests/test_server.py b/tests/test_server.py index 895e49040688..ef74544e93cc 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -8,7 +8,7 @@ from synapse.util import Clock from tests import unittest -from tests.server import make_request, setup_test_homeserver +from tests.server import make_request, render, setup_test_homeserver class JsonResourceTests(unittest.TestCase): @@ -37,7 +37,7 @@ def _callback(request, **kwargs): ) request, channel = make_request(b"GET", b"/_matrix/foo/%E2%98%83?a=%E2%98%83") - request.render(res) + render(request, res, self.reactor) self.assertEqual(request.args, {b'a': [u"\N{SNOWMAN}".encode('utf8')]}) self.assertEqual(got_kwargs, {u"room_id": u"\N{SNOWMAN}"}) @@ -55,7 +55,7 @@ def _callback(request, **kwargs): res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback) request, channel = make_request(b"GET", b"/_matrix/foo") - request.render(res) + render(request, res, self.reactor) self.assertEqual(channel.result["code"], b'500') @@ -78,13 +78,8 @@ def _callback(request, **kwargs): res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback) request, channel = make_request(b"GET", b"/_matrix/foo") - request.render(res) + render(request, res, self.reactor) - # No error has been raised yet - self.assertTrue("code" not in channel.result) - - # Advance time, now there's an error - self.reactor.advance(1) self.assertEqual(channel.result["code"], b'500') def test_callback_synapseerror(self): @@ -100,7 +95,7 @@ def _callback(request, **kwargs): res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback) request, channel = make_request(b"GET", b"/_matrix/foo") - request.render(res) + render(request, res, self.reactor) self.assertEqual(channel.result["code"], b'403') self.assertEqual(channel.json_body["error"], "Forbidden!!one!") @@ -121,7 +116,7 @@ def _callback(request, **kwargs): res.register_paths("GET", [re.compile("^/_matrix/foo$")], _callback) request, channel = make_request(b"GET", b"/_matrix/foobar") - request.render(res) + render(request, res, self.reactor) self.assertEqual(channel.result["code"], b'400') self.assertEqual(channel.json_body["error"], "Unrecognized request") From d82fa0e9a6e64bf29b727fb73fff0a3aec20ce8a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 15 Aug 2018 15:11:50 +0100 Subject: [PATCH 133/173] changelog --- changelog.d/3701.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3701.bugfix diff --git a/changelog.d/3701.bugfix b/changelog.d/3701.bugfix new file mode 100644 index 000000000000..c22de3453747 --- /dev/null +++ b/changelog.d/3701.bugfix @@ -0,0 +1 @@ +Avoid timing out requests while we are streaming back the response From 2f78f432c421702e3756d029b3c669db837d8bcd Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Wed, 15 Aug 2018 17:35:22 +0200 Subject: [PATCH 134/173] speed up /members and add at= and membership params (#3568) --- changelog.d/3331.feature | 1 + changelog.d/3567.feature | 1 + changelog.d/3568.feature | 
1 + synapse/handlers/message.py | 88 +++++++++++++++++++++++++++++----- synapse/rest/client/v1/room.py | 32 +++++++++++-- synapse/storage/events.py | 2 +- synapse/storage/state.py | 66 ++++++++++++++++++++++++- tests/storage/test_state.py | 2 +- 8 files changed, 174 insertions(+), 19 deletions(-) create mode 100644 changelog.d/3331.feature create mode 100644 changelog.d/3567.feature create mode 100644 changelog.d/3568.feature diff --git a/changelog.d/3331.feature b/changelog.d/3331.feature new file mode 100644 index 000000000000..e574b9bcc3af --- /dev/null +++ b/changelog.d/3331.feature @@ -0,0 +1 @@ +add support for the include_redundant_members filter param as per MSC1227 diff --git a/changelog.d/3567.feature b/changelog.d/3567.feature new file mode 100644 index 000000000000..c74c1f57a90a --- /dev/null +++ b/changelog.d/3567.feature @@ -0,0 +1 @@ +make the /context API filter & lazy-load aware as per MSC1227 diff --git a/changelog.d/3568.feature b/changelog.d/3568.feature new file mode 100644 index 000000000000..247f02ba4e13 --- /dev/null +++ b/changelog.d/3568.feature @@ -0,0 +1 @@ +speed up /members API and add `at` and `membership` params as per MSC1227 diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 01a362360e92..893c9bcdc4db 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -25,7 +25,13 @@ from twisted.internet.defer import succeed from synapse.api.constants import MAX_DEPTH, EventTypes, Membership -from synapse.api.errors import AuthError, Codes, ConsentNotGivenError, SynapseError +from synapse.api.errors import ( + AuthError, + Codes, + ConsentNotGivenError, + NotFoundError, + SynapseError, +) from synapse.api.urls import ConsentURIBuilder from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events.utils import serialize_event @@ -36,6 +42,7 @@ from synapse.util.frozenutils import frozendict_json_encoder from synapse.util.logcontext import run_in_background from synapse.util.metrics import measure_func +from synapse.visibility import filter_events_for_client from ._base import BaseHandler @@ -82,28 +89,85 @@ def get_room_data(self, user_id=None, room_id=None, defer.returnValue(data) @defer.inlineCallbacks - def get_state_events(self, user_id, room_id, is_guest=False): + def get_state_events( + self, user_id, room_id, types=None, filtered_types=None, + at_token=None, is_guest=False, + ): """Retrieve all state events for a given room. If the user is joined to the room then return the current state. If the user has - left the room return the state events from when they left. + left the room return the state events from when they left. If an explicit + 'at' parameter is passed, return the state events as of that event, if + visible. Args: user_id(str): The user requesting state events. room_id(str): The room ID to get all state events from. + types(list[(str, str|None)]|None): List of (type, state_key) tuples + which are used to filter the state fetched. If `state_key` is None, + all events of the given type are returned. + May be None, which matches any key. + filtered_types(list[str]|None): Only apply filtering via `types` to this + list of event types. Other types of events are returned unfiltered. + If None, `types` filtering is applied to all events. + at_token(StreamToken|None): the stream token of the point at which we are requesting + the state. If the user is not allowed to view the state as of that + stream token, we raise a 403 SynapseError.
If None, returns the current + state based on the current_state_events table. + is_guest(bool): whether this user is a guest Returns: A list of dicts representing state events. [{}, {}, {}] + Raises: + NotFoundError (404) if the at token does not yield an event + + AuthError (403) if the user doesn't have permission to view + members of this room. """ - membership, membership_event_id = yield self.auth.check_in_room_or_world_readable( - room_id, user_id - ) + if at_token: + # FIXME this claims to get the state at a stream position, but + # get_recent_events_for_room operates by topo ordering. This therefore + # does not reliably give you the state at the given stream position. + # (https://github.com/matrix-org/synapse/issues/3305) + last_events, _ = yield self.store.get_recent_events_for_room( + room_id, end_token=at_token.room_key, limit=1, + ) - if membership == Membership.JOIN: - room_state = yield self.state.get_current_state(room_id) - elif membership == Membership.LEAVE: - room_state = yield self.store.get_state_for_events( - [membership_event_id], None + if not last_events: + raise NotFoundError("Can't find event for token %s" % (at_token, )) + + visible_events = yield filter_events_for_client( + self.store, user_id, last_events, + ) + + event = last_events[0] + if visible_events: + room_state = yield self.store.get_state_for_events( + [event.event_id], types, filtered_types=filtered_types, + ) + room_state = room_state[event.event_id] + else: + raise AuthError( + 403, + "User %s not allowed to view events in room %s at token %s" % ( + user_id, room_id, at_token, + ) + ) + else: + membership, membership_event_id = ( + yield self.auth.check_in_room_or_world_readable( + room_id, user_id, + ) ) - room_state = room_state[membership_event_id] + + if membership == Membership.JOIN: + state_ids = yield self.store.get_filtered_current_state_ids( + room_id, types, filtered_types=filtered_types, + ) + room_state = yield self.store.get_events(state_ids.values()) + elif membership == Membership.LEAVE: + room_state = yield self.store.get_state_for_events( + [membership_event_id], types, filtered_types=filtered_types, + ) + room_state = room_state[membership_event_id] now = self.clock.time_msec() defer.returnValue( diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index fa5989e74e8c..fcc109176014 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -34,7 +34,7 @@ parse_string, ) from synapse.streams.config import PaginationConfig -from synapse.types import RoomAlias, RoomID, ThirdPartyInstanceID, UserID +from synapse.types import RoomAlias, RoomID, StreamToken, ThirdPartyInstanceID, UserID from .base import ClientV1RestServlet, client_path_patterns @@ -384,15 +384,39 @@ def __init__(self, hs): def on_GET(self, request, room_id): # TODO support Pagination stream API (limit/tokens) requester = yield self.auth.get_user_by_req(request) - events = yield self.message_handler.get_state_events( + handler = self.message_handler + + # request the state as of a given event, as identified by a stream token, + # for consistency with /messages etc. + # useful for getting the membership in retrospect as of a given /sync + # response. + at_token_string = parse_string(request, "at") + if at_token_string is None: + at_token = None + else: + at_token = StreamToken.from_string(at_token_string) + + # let you filter down on particular memberships. 
+ # XXX: this may not be the best shape for this API - we could pass in a filter + # instead, except filters aren't currently aware of memberships. + # See https://github.com/matrix-org/matrix-doc/issues/1337 for more details. + membership = parse_string(request, "membership") + not_membership = parse_string(request, "not_membership") + + events = yield handler.get_state_events( room_id=room_id, user_id=requester.user.to_string(), + at_token=at_token, + types=[(EventTypes.Member, None)], ) chunk = [] for event in events: - if event["type"] != EventTypes.Member: + if ( + (membership and event['content'].get("membership") != membership) or + (not_membership and event['content'].get("membership") == not_membership) + ): continue chunk.append(event) @@ -401,6 +425,8 @@ def on_GET(self, request, room_id): })) +# deprecated in favour of /members?membership=join? +# except it does custom AS logic and has a simpler return format class JoinedRoomMemberListRestServlet(ClientV1RestServlet): PATTERNS = client_path_patterns("/rooms/(?P<room_id>[^/]*)/joined_members$") diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 135af54fa936..025a7fb6d979 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1911,7 +1911,7 @@ def _purge_history_txn( max_depth = max(row[0] for row in rows) if max_depth <= token.topological: - # We need to ensure we don't delete all the events from the datanase + # We need to ensure we don't delete all the events from the database # otherwise we wouldn't be able to send any events (due to not # having any backwards extremeties) raise SynapseError( diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 17b14d464baf..754dfa6973fc 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -116,6 +116,69 @@ def _get_current_state_ids_txn(txn): _get_current_state_ids_txn, ) + # FIXME: how should this be cached? + def get_filtered_current_state_ids(self, room_id, types, filtered_types=None): + """Get the current state event IDs of the given types for a room, based on + the current_state_events table. This may not be as up-to-date as the result + of doing a fresh state resolution as per state_handler.get_current_state. + Args: + room_id (str) + types (list[(str, str|None)]): List of (type, state_key) tuples + which are used to filter the state fetched. `state_key` may be + None, which matches any `state_key` + filtered_types (list[str]|None): List of types to apply the above filter to. + Returns: + deferred: dict of (type, state_key) -> event_id + """ + + include_other_types = filtered_types is not None + + def _get_filtered_current_state_ids_txn(txn): + results = {} + sql = """SELECT type, state_key, event_id FROM current_state_events + WHERE room_id = ? %s""" + # Turns out that postgres doesn't like doing a list of OR's and + # is about 1000x slower, so we just issue a query for each specific + # type separately. + if types: + clause_to_args = [ + ( + "AND type = ? AND state_key = ?", + (etype, state_key) + ) if state_key is not None else ( + "AND type = ?", + (etype,) + ) + for etype, state_key in types + ] + + if include_other_types: + unique_types = set(filtered_types) + clause_to_args.append( + ( + "AND type <> ? " * len(unique_types), + list(unique_types) + ) + ) + else: + # If types is None we fetch all the state, and so just use an + # empty where clause with no extra args.
+ clause_to_args = [("", [])] + for where_clause, where_args in clause_to_args: + args = [room_id] + args.extend(where_args) + txn.execute(sql % (where_clause,), args) + for row in txn: + typ, state_key, event_id = row + key = (intern_string(typ), intern_string(state_key)) + results[key] = event_id + return results + + return self.runInteraction( + "get_filtered_current_state_ids", + _get_filtered_current_state_ids_txn, + ) + @cached(max_entries=10000, iterable=True) def get_state_group_delta(self, state_group): """Given a state group try to return a previous group and a delta between @@ -389,8 +452,7 @@ def get_state_for_events(self, event_ids, types, filtered_types=None): If None, `types` filtering is applied to all events. Returns: - deferred: A list of dicts corresponding to the event_ids given. - The dicts are mappings from (type, state_key) -> state_events + deferred: A dict of (event_id) -> (type, state_key) -> [state_events] """ event_to_groups = yield self._get_state_group_for_events( event_ids, diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index 6168c46248d0..ebfd969b36bb 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -148,7 +148,7 @@ def test_get_state_for_event(self): {(e3.type, e3.state_key): e3, (e5.type, e5.state_key): e5}, state ) - # check we can use filter_types to grab a specific room member + # check we can use filtered_types to grab a specific room member # without filtering out the other event types state = yield self.store.get_state_for_event( e5.event_id, From e2e846628d110e49e4486c9a5b1f56734318d063 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 15 Aug 2018 16:38:46 +0100 Subject: [PATCH 135/173] Remove incorrectly re-added changelog files These were removed for the 0.33.2 release, but were incorrectly re-added in --- changelog.d/3331.feature | 1 - changelog.d/3567.feature | 1 - 2 files changed, 2 deletions(-) delete mode 100644 changelog.d/3331.feature delete mode 100644 changelog.d/3567.feature diff --git a/changelog.d/3331.feature b/changelog.d/3331.feature deleted file mode 100644 index e574b9bcc3af..000000000000 --- a/changelog.d/3331.feature +++ /dev/null @@ -1 +0,0 @@ -add support for the include_redundant_members filter param as per MSC1227 diff --git a/changelog.d/3567.feature b/changelog.d/3567.feature deleted file mode 100644 index c74c1f57a90a..000000000000 --- a/changelog.d/3567.feature +++ /dev/null @@ -1 +0,0 @@ -make the /context API filter & lazy-load aware as per MSC1227 From da7fe43899a2ad743ab067d5e0c239cc109d4339 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 15 Aug 2018 17:03:30 +0100 Subject: [PATCH 136/173] Fix mau blocking calulation bug on login --- changelog.d/3689.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3689.bugfix diff --git a/changelog.d/3689.bugfix b/changelog.d/3689.bugfix new file mode 100644 index 000000000000..934d039836ad --- /dev/null +++ b/changelog.d/3689.bugfix @@ -0,0 +1 @@ +Fix mau blocking calulation bug on login From 3f543dc021dd9456c8ed2da7a1e4769a68c07729 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 16 Aug 2018 10:46:50 +0200 Subject: [PATCH 137/173] initial cut at a room summary API (#3574) --- changelog.d/3574.feature | 1 + synapse/handlers/sync.py | 159 +++++++++++++++++++++++++-- synapse/rest/client/v2_alpha/sync.py | 1 + synapse/storage/_base.py | 7 +- synapse/storage/state.py | 5 +- synapse/storage/stream.py | 4 +- 6 files changed, 159 insertions(+), 18 deletions(-) create mode 100644 
changelog.d/3574.feature diff --git a/changelog.d/3574.feature b/changelog.d/3574.feature new file mode 100644 index 000000000000..87ac32df7249 --- /dev/null +++ b/changelog.d/3574.feature @@ -0,0 +1 @@ +implement `summary` block in /sync response as per MSC688 diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 3b21a04a5d34..ac3edf0cc9bc 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -75,6 +75,7 @@ class JoinedSyncResult(collections.namedtuple("JoinedSyncResult", [ "ephemeral", "account_data", "unread_notifications", + "summary", ])): __slots__ = [] @@ -503,10 +504,142 @@ def get_state_at(self, room_id, stream_position, types=None, filtered_types=None state = {} defer.returnValue(state) + @defer.inlineCallbacks + def compute_summary(self, room_id, sync_config, batch, state, now_token): + """ Works out a room summary block for this room, summarising the number + of joined members in the room, and providing the 'hero' members if the + room has no name so clients can consistently name rooms. Also adds + state events to 'state' if needed to describe the heroes. + + Args: + room_id(str): + sync_config(synapse.handlers.sync.SyncConfig): + batch(synapse.handlers.sync.TimelineBatch): The timeline batch for + the room that will be sent to the user. + state(dict): dict of (type, state_key) -> Event as returned by + compute_state_delta + now_token(str): Token of the end of the current batch. + + Returns: + A deferred dict describing the room summary + """ + + # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305 + last_events, _ = yield self.store.get_recent_event_ids_for_room( + room_id, end_token=now_token.room_key, limit=1, + ) + + if not last_events: + defer.returnValue(None) + return + + last_event = last_events[-1] + state_ids = yield self.store.get_state_ids_for_event( + last_event.event_id, [ + (EventTypes.Member, None), + (EventTypes.Name, ''), + (EventTypes.CanonicalAlias, ''), + ] + ) + + member_ids = { + state_key: event_id + for (t, state_key), event_id in state_ids.iteritems() + if t == EventTypes.Member + } + name_id = state_ids.get((EventTypes.Name, '')) + canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, '')) + + summary = {} + + # FIXME: it feels very heavy to load up every single membership event + # just to calculate the counts. + member_events = yield self.store.get_events(member_ids.values()) + + joined_user_ids = [] + invited_user_ids = [] + + for ev in member_events.values(): + if ev.content.get("membership") == Membership.JOIN: + joined_user_ids.append(ev.state_key) + elif ev.content.get("membership") == Membership.INVITE: + invited_user_ids.append(ev.state_key) + + # TODO: only send these when they change. 
+ summary["m.joined_member_count"] = len(joined_user_ids) + summary["m.invited_member_count"] = len(invited_user_ids) + + if name_id or canonical_alias_id: + defer.returnValue(summary) + + # FIXME: order by stream ordering, not alphabetic + + me = sync_config.user.to_string() + if (joined_user_ids or invited_user_ids): + summary['m.heroes'] = sorted( + [ + user_id + for user_id in (joined_user_ids + invited_user_ids) + if user_id != me + ] + )[0:5] + else: + summary['m.heroes'] = sorted( + [user_id for user_id in member_ids.keys() if user_id != me] + )[0:5] + + if not sync_config.filter_collection.lazy_load_members(): + defer.returnValue(summary) + + # ensure we send membership events for heroes if needed + cache_key = (sync_config.user.to_string(), sync_config.device_id) + cache = self.get_lazy_loaded_members_cache(cache_key) + + # track which members the client should already know about via LL: + # Ones which are already in state... + existing_members = set( + user_id for (typ, user_id) in state.keys() + if typ == EventTypes.Member + ) + + # ...or ones which are in the timeline... + for ev in batch.events: + if ev.type == EventTypes.Member: + existing_members.add(ev.state_key) + + # ...and then ensure any missing ones get included in state. + missing_hero_event_ids = [ + member_ids[hero_id] + for hero_id in summary['m.heroes'] + if ( + cache.get(hero_id) != member_ids[hero_id] and + hero_id not in existing_members + ) + ] + + missing_hero_state = yield self.store.get_events(missing_hero_event_ids) + missing_hero_state = missing_hero_state.values() + + for s in missing_hero_state: + cache.set(s.state_key, s.event_id) + state[(EventTypes.Member, s.state_key)] = s + + defer.returnValue(summary) + + def get_lazy_loaded_members_cache(self, cache_key): + cache = self.lazy_loaded_members_cache.get(cache_key) + if cache is None: + logger.debug("creating LruCache for %r", cache_key) + cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) + self.lazy_loaded_members_cache[cache_key] = cache + else: + logger.debug("found LruCache for %r", cache_key) + return cache + @defer.inlineCallbacks def compute_state_delta(self, room_id, batch, sync_config, since_token, now_token, full_state): - """ Works out the differnce in state between the start of the timeline + """ Works out the difference in state between the start of the timeline and the previous sync. Args: @@ -520,7 +653,7 @@ def compute_state_delta(self, room_id, batch, sync_config, since_token, now_toke full_state(bool): Whether to force returning the full state. 
Returns: - A deferred new event dictionary + A deferred dict of (type, state_key) -> Event """ # TODO(mjark) Check if the state events were received by the server # after the previous sync, since we need to include those state @@ -618,13 +751,7 @@ def compute_state_delta(self, room_id, batch, sync_config, since_token, now_toke if lazy_load_members and not include_redundant_members: cache_key = (sync_config.user.to_string(), sync_config.device_id) - cache = self.lazy_loaded_members_cache.get(cache_key) - if cache is None: - logger.debug("creating LruCache for %r", cache_key) - cache = LruCache(LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE) - self.lazy_loaded_members_cache[cache_key] = cache - else: - logger.debug("found LruCache for %r", cache_key) + cache = self.get_lazy_loaded_members_cache(cache_key) # if it's a new sync sequence, then assume the client has had # amnesia and doesn't want any recent lazy-loaded members @@ -1425,7 +1552,6 @@ def _generate_room_entry(self, sync_result_builder, ignored_users, if events == [] and tags is None: return - since_token = sync_result_builder.since_token now_token = sync_result_builder.now_token sync_config = sync_result_builder.sync_config @@ -1468,6 +1594,18 @@ def _generate_room_entry(self, sync_result_builder, ignored_users, full_state=full_state ) + summary = {} + if ( + sync_config.filter_collection.lazy_load_members() and + ( + any(ev.type == EventTypes.Member for ev in batch.events) or + since_token is None + ) + ): + summary = yield self.compute_summary( + room_id, sync_config, batch, state, now_token + ) + if room_builder.rtype == "joined": unread_notifications = {} room_sync = JoinedSyncResult( @@ -1477,6 +1615,7 @@ def _generate_room_entry(self, sync_result_builder, ignored_users, ephemeral=ephemeral, account_data=account_data_events, unread_notifications=unread_notifications, + summary=summary, ) if room_sync or always_include: diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 8aa06faf23d7..1275baa1ba9a 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -370,6 +370,7 @@ def serialize(event): ephemeral_events = room.ephemeral result["ephemeral"] = {"events": ephemeral_events} result["unread_notifications"] = room.unread_notifications + result["summary"] = room.summary return result diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 44f37b4c1ee8..08dffd774f81 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -1150,17 +1150,16 @@ def get_user_list_paginate(self, table, keyvalues, pagevalues, retcols, defer.returnValue(retval) def get_user_count_txn(self, txn): - """Get a total number of registerd users in the users list. + """Get a total number of registered users in the users list. 
Args: txn : Transaction object Returns: - defer.Deferred: resolves to int + int : number of users """ sql_count = "SELECT COUNT(*) FROM users WHERE is_guest = 0;" txn.execute(sql_count) - count = txn.fetchone()[0] - defer.returnValue(count) + return txn.fetchone()[0] def _simple_search_list(self, table, term, col, retcols, desc="_simple_search_list"): diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 754dfa6973fc..dd03c4168b6c 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -480,7 +480,8 @@ def get_state_for_events(self, event_ids, types, filtered_types=None): @defer.inlineCallbacks def get_state_ids_for_events(self, event_ids, types=None, filtered_types=None): """ - Get the state dicts corresponding to a list of events + Get the state dicts corresponding to a list of events, containing the event_ids + of the state events (as opposed to the events themselves) Args: event_ids(list(str)): events whose state should be returned @@ -493,7 +494,7 @@ def get_state_ids_for_events(self, event_ids, types=None, filtered_types=None): If None, `types` filtering is applied to all events. Returns: - A deferred dict from event_id -> (type, state_key) -> state_event + A deferred dict from event_id -> (type, state_key) -> event_id """ event_to_groups = yield self._get_state_group_for_events( event_ids, diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index b9f2b74ac64b..4c296d72c0f8 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -348,7 +348,7 @@ def get_recent_events_for_room(self, room_id, limit, end_token): end_token (str): The stream token representing now. Returns: - Deferred[tuple[list[FrozenEvent], str]]: Returns a list of + Deferred[tuple[list[FrozenEvent], str]]: Returns a list of events and a token pointing to the start of the returned events. The events returned are in ascending order. @@ -379,7 +379,7 @@ def get_recent_event_ids_for_room(self, room_id, limit, end_token): end_token (str): The stream token representing now. Returns: - Deferred[tuple[list[_EventDictReturn], str]]: Returns a list of + Deferred[tuple[list[_EventDictReturn], str]]: Returns a list of _EventDictReturn and a token pointing to the start of the returned events. The events returned are in ascending order. 
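As an aside on how the new `summary` block might be consumed: per MSC688, a client with no room name or canonical alias can derive a display name from the heroes and the member counts. The sketch below is not part of this patch; the helper name, the fallback string, and the shape of the inputs are illustrative assumptions about a parsed /sync response.

    def room_display_name(summary, display_names):
        # summary: the `summary` dict for one room in a /sync response
        # display_names: dict of user_id -> profile display name
        heroes = summary.get("m.heroes", [])
        joined = summary.get("m.joined_member_count", 0)
        invited = summary.get("m.invited_member_count", 0)

        names = [display_names.get(h, h) for h in heroes]
        if not names:
            return "Empty room"

        # members other than ourselves and the listed heroes
        # (the counts include the requesting user; heroes do not)
        others = max(joined + invited - 1 - len(names), 0)
        if others > 0:
            return "%s and %d others" % (", ".join(names), others)
        return " and ".join(names)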
From 762a758feae40ccb5baf0ba4808308e99e73e13f Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 16 Aug 2018 15:22:47 +0200 Subject: [PATCH 138/173] lazyload aware /messages (#3589) --- changelog.d/3589.feature | 1 + synapse/handlers/pagination.py | 35 ++++++++++++++++++++++++++++++++- synapse/rest/client/versions.py | 13 +++++++++++- 3 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3589.feature diff --git a/changelog.d/3589.feature b/changelog.d/3589.feature new file mode 100644 index 000000000000..a8d7124719e0 --- /dev/null +++ b/changelog.d/3589.feature @@ -0,0 +1 @@ +Add lazy-loading support to /messages as per MSC1227 diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index a97d43550f6c..5170d093e365 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -18,7 +18,7 @@ from twisted.internet import defer from twisted.python.failure import Failure -from synapse.api.constants import Membership +from synapse.api.constants import EventTypes, Membership from synapse.api.errors import SynapseError from synapse.events.utils import serialize_event from synapse.types import RoomStreamToken @@ -251,6 +251,33 @@ def get_messages(self, requester, room_id=None, pagin_config=None, is_peeking=(member_event_id is None), ) + state = None + if event_filter and event_filter.lazy_load_members(): + # TODO: remove redundant members + + types = [ + (EventTypes.Member, state_key) + for state_key in set( + event.sender # FIXME: we also care about invite targets etc. + for event in events + ) + ] + + state_ids = yield self.store.get_state_ids_for_event( + events[0].event_id, types=types, + ) + + if state_ids: + state = yield self.store.get_events(list(state_ids.values())) + + if state: + state = yield filter_events_for_client( + self.store, + user_id, + state.values(), + is_peeking=(member_event_id is None), + ) + time_now = self.clock.time_msec() chunk = { @@ -262,4 +289,10 @@ def get_messages(self, requester, room_id=None, pagin_config=None, "end": next_token.to_string(), } + if state: + chunk["state"] = [ + serialize_event(e, time_now, as_client_event) + for e in state + ] + defer.returnValue(chunk) diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 6ac2987b98c8..29e62bfcddea 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -27,11 +27,22 @@ class VersionsRestServlet(RestServlet): def on_GET(self, request): return (200, { "versions": [ + # XXX: at some point we need to decide whether we need to include + # the previous version numbers, given we've defined r0.3.0 to be + # backwards compatible with r0.2.0. But need to check how + # conscientious we've been in compatibility, and decide whether the + # middle number is the major revision when at 0.X.Y (as opposed to + # X.Y.Z). And we need to decide whether it's fair to make clients + # parse the version string to figure out what's going on. 
"r0.0.1", "r0.1.0", "r0.2.0", "r0.3.0", - ] + ], + # as per MSC1497: + "unstable_features": { + "m.lazy_load_members": True, + } }) From c151b32b1de7a464620d3cc7a827b50dd83388ed Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Thu, 16 Aug 2018 14:23:38 +0100 Subject: [PATCH 139/173] Add GET media/v1/config (#3184) --- changelog.d/3184.feature | 1 + synapse/rest/media/v1/config_resource.py | 48 +++++++++++++++++++++++ synapse/rest/media/v1/media_repository.py | 3 +- 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3184.feature create mode 100644 synapse/rest/media/v1/config_resource.py diff --git a/changelog.d/3184.feature b/changelog.d/3184.feature new file mode 100644 index 000000000000..9f746a57a0fa --- /dev/null +++ b/changelog.d/3184.feature @@ -0,0 +1 @@ +Add /_media/r0/config diff --git a/synapse/rest/media/v1/config_resource.py b/synapse/rest/media/v1/config_resource.py new file mode 100644 index 000000000000..d6605b6027c4 --- /dev/null +++ b/synapse/rest/media/v1/config_resource.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Will Hunt +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from twisted.internet import defer +from twisted.web.resource import Resource +from twisted.web.server import NOT_DONE_YET + +from synapse.http.server import respond_with_json, wrap_json_request_handler + + +class MediaConfigResource(Resource): + isLeaf = True + + def __init__(self, hs): + Resource.__init__(self) + config = hs.get_config() + self.clock = hs.get_clock() + self.auth = hs.get_auth() + self.limits_dict = { + "m.upload.size": config.max_upload_size, + } + + def render_GET(self, request): + self._async_render_GET(request) + return NOT_DONE_YET + + @wrap_json_request_handler + @defer.inlineCallbacks + def _async_render_GET(self, request): + yield self.auth.get_user_by_req(request) + respond_with_json(request, 200, self.limits_dict) + + def render_OPTIONS(self, request): + respond_with_json(request, 200, {}, send_cors=True) + return NOT_DONE_YET diff --git a/synapse/rest/media/v1/media_repository.py b/synapse/rest/media/v1/media_repository.py index 4c589e05e07d..241c972070e0 100644 --- a/synapse/rest/media/v1/media_repository.py +++ b/synapse/rest/media/v1/media_repository.py @@ -42,6 +42,7 @@ from synapse.util.stringutils import is_ascii, random_string from ._base import FileInfo, respond_404, respond_with_responder +from .config_resource import MediaConfigResource from .download_resource import DownloadResource from .filepath import MediaFilePaths from .identicon_resource import IdenticonResource @@ -754,7 +755,6 @@ def __init__(self, hs): Resource.__init__(self) media_repo = hs.get_media_repository() - self.putChild("upload", UploadResource(hs, media_repo)) self.putChild("download", DownloadResource(hs, media_repo)) self.putChild("thumbnail", ThumbnailResource( @@ -765,3 +765,4 @@ def __init__(self, hs): self.putChild("preview_url", PreviewUrlResource( hs, media_repo, media_repo.media_storage, )) + self.putChild("config", MediaConfigResource(hs)) 
From 7d5b1a60a3aa9f10e285e74295f407d19a3a9ad5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 16 Aug 2018 15:32:20 +0100 Subject: [PATCH 140/173] Fix inbound federation on reader worker Inbound federation requires calculating push, which in turn relies on having access to account data. --- synapse/app/federation_reader.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index 52522e9d3350..7d8105778d55 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -32,6 +32,7 @@ from synapse.metrics import RegistryProxy from synapse.metrics.resource import METRICS_PREFIX, MetricsResource from synapse.replication.slave.storage._base import BaseSlavedStore +from synapse.replication.slave.storage.account_data import SlavedAccountDataStore from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore from synapse.replication.slave.storage.directory import DirectoryStore from synapse.replication.slave.storage.events import SlavedEventStore @@ -54,6 +55,7 @@ class FederationReaderSlavedStore( + SlavedAccountDataStore, SlavedProfileStore, SlavedApplicationServiceStore, SlavedPusherStore, From aae86a81eff9e5099efc97078ce35f26332f0ff4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 16 Aug 2018 15:41:48 +0100 Subject: [PATCH 141/173] Newsfile --- changelog.d/3705.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3705.bugfix diff --git a/changelog.d/3705.bugfix b/changelog.d/3705.bugfix new file mode 100644 index 000000000000..6c5422994f34 --- /dev/null +++ b/changelog.d/3705.bugfix @@ -0,0 +1 @@ +Support more federation endpoints on workers From 13ad9930c8799ea54671a6ce00533528d89e061b Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 16 Aug 2018 18:02:02 +0100 Subject: [PATCH 142/173] add new error type ResourceLimit --- synapse/api/auth.py | 10 ++++++---- synapse/api/errors.py | 23 +++++++++++++++++++++-- synapse/config/server.py | 1 + tests/api/test_auth.py | 6 +++--- tests/handlers/test_auth.py | 10 +++++----- tests/handlers/test_register.py | 14 +++++++------- tests/handlers/test_sync.py | 6 +++--- tests/utils.py | 1 + 8 files changed, 47 insertions(+), 24 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 3b2a2ab77ade..6945c118d39d 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -25,7 +25,7 @@ import synapse.types from synapse import event_auth from synapse.api.constants import EventTypes, JoinRules, Membership -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError, Codes, ResourceLimitError from synapse.types import UserID from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache from synapse.util.caches.lrucache import LruCache @@ -784,10 +784,11 @@ def check_auth_blocking(self, user_id=None): MAU cohort """ if self.hs.config.hs_disabled: - raise AuthError( + raise ResourceLimitError( 403, self.hs.config.hs_disabled_message, errcode=Codes.RESOURCE_LIMIT_EXCEED, admin_uri=self.hs.config.admin_uri, + limit_type=self.hs.config.hs_disabled_limit_type ) if self.hs.config.limit_usage_by_mau is True: # If the user is already part of the MAU cohort @@ -798,8 +799,9 @@ def check_auth_blocking(self, user_id=None): # Else if there is no room in the MAU bucket, bail current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: - raise AuthError( + raise ResourceLimitError( 403, "Monthly Active User Limits AU Limit 
Exceeded", admin_uri=self.hs.config.admin_uri, - errcode=Codes.RESOURCE_LIMIT_EXCEED + errcode=Codes.RESOURCE_LIMIT_EXCEED, + limit_type="monthly_active_user" ) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 08f0cb55548e..e26001ab12c4 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -224,15 +224,34 @@ def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND): class AuthError(SynapseError): """An error raised when there was a problem authorising an event.""" - def __init__(self, code, msg, errcode=Codes.FORBIDDEN, admin_uri=None): + + def __init__(self, *args, **kwargs): + if "errcode" not in kwargs: + kwargs["errcode"] = Codes.FORBIDDEN + super(AuthError, self).__init__(*args, **kwargs) + + +class ResourceLimitError(SynapseError): + """ + Any error raised when there is a problem with resource usage. + For instance, the monthly active user limit for the server has been exceeded + """ + def __init__( + self, code, msg, + errcode=Codes.RESOURCE_LIMIT_EXCEED, + admin_uri=None, + limit_type=None, + ): self.admin_uri = admin_uri - super(AuthError, self).__init__(code, msg, errcode=errcode) + self.limit_type = limit_type + super(ResourceLimitError, self).__init__(code, msg, errcode=errcode) def error_dict(self): return cs_error( self.msg, self.errcode, admin_uri=self.admin_uri, + limit_type=self.limit_type ) diff --git a/synapse/config/server.py b/synapse/config/server.py index 2190f3210a73..ae72c872d934 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -81,6 +81,7 @@ def read_config(self, config): # Options to disable HS self.hs_disabled = config.get("hs_disabled", False) self.hs_disabled_message = config.get("hs_disabled_message", "") + self.hs_disabled_limit_type = config.get("hs_disabled_limit_type", "") # Admin uri to direct users at should their instance become blocked # due to resource constraints diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index 32a2b5fc3d0f..022d81ce3e9d 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -21,7 +21,7 @@ import synapse.handlers.auth from synapse.api.auth import Auth -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import AuthError, Codes, ResourceLimitError from synapse.types import UserID from tests import unittest @@ -455,7 +455,7 @@ def test_blocking_mau(self): return_value=defer.succeed(lots_of_users) ) - with self.assertRaises(AuthError) as e: + with self.assertRaises(ResourceLimitError) as e: yield self.auth.check_auth_blocking() self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) @@ -471,7 +471,7 @@ def test_blocking_mau(self): def test_hs_disabled(self): self.hs.config.hs_disabled = True self.hs.config.hs_disabled_message = "Reason for being disabled" - with self.assertRaises(AuthError) as e: + with self.assertRaises(ResourceLimitError) as e: yield self.auth.check_auth_blocking() self.assertEquals(e.exception.admin_uri, self.hs.config.admin_uri) self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 3046bd609365..1e39fe0ec201 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -20,7 +20,7 @@ import synapse import synapse.api.errors -from synapse.api.errors import AuthError +from synapse.api.errors import ResourceLimitError from synapse.handlers.auth import AuthHandler from tests import unittest @@ -130,13 +130,13 @@ def 
test_mau_limits_exceeded_large(self): return_value=defer.succeed(self.large_number_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.auth_handler.get_access_token_for_user_id('user_a') self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.large_number_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) @@ -149,13 +149,13 @@ def test_mau_limits_parity(self): self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.auth_handler.get_access_token_for_user_id('user_a') self.hs.get_datastore().get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.auth_handler.validate_short_term_login_token_and_get_user_id( self._get_macaroon().serialize() ) diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 7154816a34a7..7b4ade3dfbf4 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -17,7 +17,7 @@ from twisted.internet import defer -from synapse.api.errors import AuthError +from synapse.api.errors import ResourceLimitError from synapse.handlers.register import RegistrationHandler from synapse.types import UserID, create_requester @@ -109,13 +109,13 @@ def test_get_or_create_user_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.get_or_create_user("requester", 'b', "display_name") self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.get_or_create_user("requester", 'b', "display_name") @defer.inlineCallbacks @@ -124,13 +124,13 @@ def test_register_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.register(localpart="local_part") self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.register(localpart="local_part") @defer.inlineCallbacks @@ -139,11 +139,11 @@ def test_register_saml2_mau_blocked(self): self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.lots_of_users) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.register_saml2(localpart="local_part") self.store.get_monthly_active_count = Mock( return_value=defer.succeed(self.hs.config.max_mau_value) ) - with self.assertRaises(AuthError): + with self.assertRaises(ResourceLimitError): yield self.handler.register_saml2(localpart="local_part") diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py index 33d861bd64ae..a01ab471f594 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -14,7 +14,7 @@ # limitations under the License. 
from twisted.internet import defer -from synapse.api.errors import AuthError, Codes +from synapse.api.errors import Codes, ResourceLimitError from synapse.api.filtering import DEFAULT_FILTER_COLLECTION from synapse.handlers.sync import SyncConfig, SyncHandler from synapse.types import UserID @@ -49,7 +49,7 @@ def test_wait_for_sync_for_user_auth_blocking(self): # Test that global lock works self.hs.config.hs_disabled = True - with self.assertRaises(AuthError) as e: + with self.assertRaises(ResourceLimitError) as e: yield self.sync_handler.wait_for_sync_for_user(sync_config) self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) @@ -57,7 +57,7 @@ def test_wait_for_sync_for_user_auth_blocking(self): sync_config = self._generate_sync_config(user_id2) - with self.assertRaises(AuthError) as e: + with self.assertRaises(ResourceLimitError) as e: yield self.sync_handler.wait_for_sync_for_user(sync_config) self.assertEquals(e.exception.errcode, Codes.RESOURCE_LIMIT_EXCEED) diff --git a/tests/utils.py b/tests/utils.py index 52326d4f670b..6f8b1de3e799 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -137,6 +137,7 @@ def setup_test_homeserver( config.limit_usage_by_mau = False config.hs_disabled = False config.hs_disabled_message = "" + config.hs_disabled_limit_type = "" config.max_mau_value = 50 config.mau_limits_reserved_threepids = [] config.admin_uri = None From 7edd11623dac8952d6af04b49cc1390cf579cb68 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 16 Aug 2018 21:04:30 +0100 Subject: [PATCH 143/173] add new error type ResourceLimit --- changelog.d/3707.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3707.misc diff --git a/changelog.d/3707.misc b/changelog.d/3707.misc new file mode 100644 index 000000000000..8123ca65430b --- /dev/null +++ b/changelog.d/3707.misc @@ -0,0 +1 @@ +add new error type ResourceLimit From 372bf073c1a5ac5e66cca717ce9aa72c43ba9404 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 16 Aug 2018 21:25:16 +0100 Subject: [PATCH 144/173] block event creation and room creation on hitting resource limits --- synapse/handlers/message.py | 6 +++++- synapse/handlers/room.py | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 893c9bcdc4db..4d006df63c62 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -276,10 +276,14 @@ def create_event(self, requester, event_dict, token_id=None, txn_id=None, where *hashes* is a map from algorithm to hash. If None, they will be requested from the database. - + Raises: + ResourceLimitError if the server is blocked due to a resource limit + being exceeded Returns: Tuple of created event (FrozenEvent), Context """ + yield self.auth.check_auth_blocking(requester.user.to_string()) + builder = self.event_builder_factory.new(event_dict) self.validator.validate_new(builder) diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 6a17c42238ec..c3f820b9753b 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -98,9 +98,13 @@ def create_room(self, requester, config, ratelimit=True, Raises: SynapseError if the room ID couldn't be stored, or something went horribly wrong. + ResourceLimitError if the server is blocked due to a resource limit + being exceeded """ user_id = requester.user.to_string() + yield self.auth.check_auth_blocking(user_id) + if not self.spam_checker.user_may_create_room(user_id): raise SynapseError(403, "You are not permitted to create rooms") From bcfeb44afe750dadd4199e7c02302b0157bdab11 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 16 Aug 2018 22:55:32 +0100 Subject: [PATCH 145/173] call reap on start up and fix under reaping bug --- synapse/api/auth.py | 2 +- synapse/app/homeserver.py | 1 + synapse/storage/monthly_active_users.py | 5 ++++- tests/storage/test_monthly_active_users.py | 13 +++++++++++++ 4 files changed, 19 insertions(+), 2 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 3b2a2ab77ade..ab1e3a4e353f 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -799,7 +799,7 @@ def check_auth_blocking(self, user_id=None): current_mau = yield self.store.get_monthly_active_count() if current_mau >= self.hs.config.max_mau_value: raise AuthError( - 403, "Monthly Active User Limits AU Limit Exceeded", + 403, "Monthly Active User Limit Exceeded", admin_uri=self.hs.config.admin_uri, errcode=Codes.RESOURCE_LIMIT_EXCEED ) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index a98bb506e5fb..800b9c0e3c64 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -525,6 +525,7 @@ def generate_user_daily_visit_stats(): clock.looping_call( hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) + yield hs.get_datastore().reap_monthly_active_users() @defer.inlineCallbacks def generate_monthly_active_users(): diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index 7e417f811e9e..06f9a75a9779 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -96,7 +96,10 @@ def _reap_users(txn): # While Postgres does not require 'LIMIT', but also does not support # negative LIMIT values. So there is no way to write it that both can # support - query_args = [self.hs.config.max_mau_value] + safe_guard = self.hs.config.max_mau_value - len(self.reserved_users) + # Must be greater than zero for postgres + safe_guard = safe_guard if safe_guard > 0 else 0 + query_args = [safe_guard] base_sql = """ DELETE FROM monthly_active_users diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 511acbde9bc2..f2ed866ae7fc 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -75,6 +75,19 @@ def test_initialise_reserved_users(self): active_count = yield self.store.get_monthly_active_count() self.assertEquals(active_count, user_num) + # Test that regular users are removed from the db + ru_count = 2 + yield self.store.upsert_monthly_active_user("@ru1:server") + yield self.store.upsert_monthly_active_user("@ru2:server") + active_count = yield self.store.get_monthly_active_count() + + self.assertEqual(active_count, user_num + ru_count) + self.hs.config.max_mau_value = user_num + yield self.store.reap_monthly_active_users() + + active_count = yield self.store.get_monthly_active_count() + self.assertEquals(active_count, user_num) + @defer.inlineCallbacks def test_can_insert_and_count_mau(self): count = yield self.store.get_monthly_active_count() From 7e513421969738428c40d9d6032c798edd9ebab6 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 16 Aug 2018 23:05:20 +0100 Subject: [PATCH 146/173] For resource limit blocked users, prevent writing into rooms --- changelog.d/3708.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3708.feature diff --git a/changelog.d/3708.feature b/changelog.d/3708.feature new file mode 100644 index 000000000000..2f146ba62b34 --- /dev/null +++ b/changelog.d/3708.feature @@ -0,0 +1 @@ +For resource limit blocked users, prevent writing into rooms From 66f7dc8c87a6c585b4d456c5b2c4999452fc82c2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 17 Aug 2018 00:32:39 +0100 Subject: [PATCH 147/173] Fix logcontexts for running pushers First of all, avoid resetting the logcontext before running the pushers, to fix the "Starting db txn 'get_all_updated_receipts' from sentinel context" warning. Instead, give them their own "background process" logcontexts.
--- synapse/app/pusher.py | 4 ++-- synapse/handlers/federation.py | 3 +-- synapse/handlers/message.py | 7 ++----- synapse/handlers/receipts.py | 18 ++++++++---------- synapse/push/pusherpool.py | 17 +++++++++++++++-- 5 files changed, 28 insertions(+), 21 deletions(-) diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 9295a51d5b74..0089a7c64f54 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -162,11 +162,11 @@ def poke_pushers(self, stream_name, token, rows): else: yield self.start_pusher(row.user_id, row.app_id, row.pushkey) elif stream_name == "events": - yield self.pusher_pool.on_new_notifications( + self.pusher_pool.on_new_notifications( token, token, ) elif stream_name == "receipts": - yield self.pusher_pool.on_new_receipts( + self.pusher_pool.on_new_receipts( token, token, set(row.room_id for row in rows) ) except Exception: diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index f38b393e4ab4..3dd107a2850b 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -2386,8 +2386,7 @@ def _notify_persisted_event(self, event, max_stream_id): extra_users=extra_users ) - logcontext.run_in_background( - self.pusher_pool.on_new_notifications, + self.pusher_pool.on_new_notifications( event_stream_id, max_stream_id, ) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 893c9bcdc4db..f21d740968bf 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -774,11 +774,8 @@ def is_inviter_member_event(e): event, context=context ) - # this intentionally does not yield: we don't care about the result - # and don't need to wait for it. - run_in_background( - self.pusher_pool.on_new_notifications, - event_stream_id, max_stream_id + self.pusher_pool.on_new_notifications( + event_stream_id, max_stream_id, ) def _notify(): diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index cb905a390343..a6f3181f099b 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -18,7 +18,6 @@ from synapse.types import get_domain_from_id from synapse.util import logcontext -from synapse.util.logcontext import PreserveLoggingContext from ._base import BaseHandler @@ -116,16 +115,15 @@ def _handle_new_receipts(self, receipts): affected_room_ids = list(set([r["room_id"] for r in receipts])) - with PreserveLoggingContext(): - self.notifier.on_new_event( - "receipt_key", max_batch_id, rooms=affected_room_ids - ) - # Note that the min here shouldn't be relied upon to be accurate. - self.hs.get_pusherpool().on_new_receipts( - min_batch_id, max_batch_id, affected_room_ids - ) + self.notifier.on_new_event( + "receipt_key", max_batch_id, rooms=affected_room_ids + ) + # Note that the min here shouldn't be relied upon to be accurate. 
+ self.hs.get_pusherpool().on_new_receipts( + min_batch_id, max_batch_id, affected_room_ids, + ) - defer.returnValue(True) + defer.returnValue(True) @logcontext.preserve_fn # caller should not yield on this @defer.inlineCallbacks diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 36bb5bbc6543..9f7d5ef217db 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -18,6 +18,7 @@ from twisted.internet import defer +from synapse.metrics.background_process_metrics import run_as_background_process from synapse.push.pusher import PusherFactory from synapse.util.logcontext import make_deferred_yieldable, run_in_background @@ -122,8 +123,14 @@ def remove_pushers_by_access_token(self, user_id, access_tokens): p['app_id'], p['pushkey'], p['user_name'], ) - @defer.inlineCallbacks def on_new_notifications(self, min_stream_id, max_stream_id): + run_as_background_process( + "on_new_notifications", + self._on_new_notifications, min_stream_id, max_stream_id, + ) + + @defer.inlineCallbacks + def _on_new_notifications(self, min_stream_id, max_stream_id): try: users_affected = yield self.store.get_push_action_users_in_range( min_stream_id, max_stream_id @@ -147,8 +154,14 @@ def on_new_notifications(self, min_stream_id, max_stream_id): except Exception: logger.exception("Exception in pusher on_new_notifications") - @defer.inlineCallbacks def on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids): + run_as_background_process( + "on_new_receipts", + self._on_new_receipts, min_stream_id, max_stream_id, affected_room_ids, + ) + + @defer.inlineCallbacks + def _on_new_receipts(self, min_stream_id, max_stream_id, affected_room_ids): try: # Need to subtract 1 from the minimum because the lower bound here # is not inclusive From 0e8d78f6aa56020ffb948a4b2c6feadba2d16712 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 17 Aug 2018 00:43:43 +0100 Subject: [PATCH 148/173] Logcontexts for replication command handlers Run the handlers for replication commands as background processes. This should improve the visibility in our metrics, and reduce the number of "running db transaction from sentinel context" warnings. Ideally it means converting the things that fire off deferreds into the night into things that actually return a Deferred when they are done. I've made a bit of a stab at this, but it will probably be leaky. 
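The crux of the change, shown in isolation (an excerpt of the protocol.py hunk below): rather than invoking the handler directly from lineReceived, each incoming command is dispatched as a background process named after the command, so that, for example, RDATA handling for different streams can be distinguished in the metrics.

    run_as_background_process(
        "replication-" + cmd.get_logcontext_id(),
        getattr(self, "on_%s" % (cmd_name,)),
        cmd,
    )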
--- synapse/app/appservice.py | 3 ++- synapse/app/federation_sender.py | 3 ++- synapse/app/pusher.py | 3 ++- synapse/app/synchrotron.py | 3 ++- synapse/app/user_dir.py | 3 ++- synapse/replication/tcp/client.py | 4 +-- synapse/replication/tcp/commands.py | 12 +++++++++ synapse/replication/tcp/protocol.py | 42 ++++++++++++++++++++--------- 8 files changed, 53 insertions(+), 20 deletions(-) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index 9a37384fb7b5..3348a8ec6dbc 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -117,8 +117,9 @@ def __init__(self, hs): super(ASReplicationHandler, self).__init__(hs.get_datastore()) self.appservice_handler = hs.get_application_service_handler() + @defer.inlineCallbacks def on_rdata(self, stream_name, token, rows): - super(ASReplicationHandler, self).on_rdata(stream_name, token, rows) + yield super(ASReplicationHandler, self).on_rdata(stream_name, token, rows) if stream_name == "events": max_stream_id = self.store.get_room_max_stream_ordering() diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 7a4310ca1828..d59007099bec 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -144,8 +144,9 @@ def __init__(self, hs): super(FederationSenderReplicationHandler, self).__init__(hs.get_datastore()) self.send_handler = FederationSenderHandler(hs, self) + @defer.inlineCallbacks def on_rdata(self, stream_name, token, rows): - super(FederationSenderReplicationHandler, self).on_rdata( + yield super(FederationSenderReplicationHandler, self).on_rdata( stream_name, token, rows ) self.send_handler.process_replication_rows(stream_name, token, rows) diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index 9295a51d5b74..aa0938c376d6 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -148,8 +148,9 @@ def __init__(self, hs): self.pusher_pool = hs.get_pusherpool() + @defer.inlineCallbacks def on_rdata(self, stream_name, token, rows): - super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows) + yield super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows) run_in_background(self.poke_pushers, stream_name, token, rows) @defer.inlineCallbacks diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index e201f18efdc5..39c7cbc1bae3 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -332,8 +332,9 @@ def __init__(self, hs): self.presence_handler = hs.get_presence_handler() self.notifier = hs.get_notifier() + @defer.inlineCallbacks def on_rdata(self, stream_name, token, rows): - super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows) + yield super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows) run_in_background(self.process_and_notify, stream_name, token, rows) def get_streams_to_replicate(self): diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index cb78de88340a..1388a42b59aa 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -169,8 +169,9 @@ def __init__(self, hs): super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore()) self.user_directory = hs.get_user_directory_handler() + @defer.inlineCallbacks def on_rdata(self, stream_name, token, rows): - super(UserDirectoryReplicationHandler, self).on_rdata( + yield super(UserDirectoryReplicationHandler, self).on_rdata( stream_name, token, rows ) if stream_name == "current_state_deltas": diff --git a/synapse/replication/tcp/client.py 
b/synapse/replication/tcp/client.py index 970e94313e31..cbe964581711 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -107,7 +107,7 @@ def on_rdata(self, stream_name, token, rows): Can be overriden in subclasses to handle more. """ logger.info("Received rdata %s -> %s", stream_name, token) - self.store.process_replication_rows(stream_name, token, rows) + return self.store.process_replication_rows(stream_name, token, rows) def on_position(self, stream_name, token): """Called when we get new position data. By default this just pokes @@ -115,7 +115,7 @@ def on_position(self, stream_name, token): Can be overriden in subclasses to handle more. """ - self.store.process_replication_rows(stream_name, token, []) + return self.store.process_replication_rows(stream_name, token, []) def on_sync(self, data): """When we received a SYNC we wake up any deferreds that were waiting diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index f3908df642b5..327556f6a1e6 100644 --- a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -59,6 +59,12 @@ def to_line(self): """ return self.data + def get_logcontext_id(self): + """Get a suitable string for the logcontext when processing this command""" + + # by default, we just use the command name. + return self.NAME + class ServerCommand(Command): """Sent by the server on new connection and includes the server_name. @@ -116,6 +122,9 @@ def to_line(self): _json_encoder.encode(self.row), )) + def get_logcontext_id(self): + return "RDATA-" + self.stream_name + class PositionCommand(Command): """Sent by the client to tell the client the stream postition without @@ -190,6 +199,9 @@ def from_line(cls, line): def to_line(self): return " ".join((self.stream_name, str(self.token),)) + def get_logcontext_id(self): + return "REPLICATE-" + self.stream_name + class UserSyncCommand(Command): """Sent by the client to inform the server that a user has started or diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index dec5ac091326..74e892c10416 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -63,6 +63,8 @@ from twisted.python.failure import Failure from synapse.metrics import LaterGauge +from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.util.logcontext import make_deferred_yieldable, run_in_background from synapse.util.stringutils import random_string from .commands import ( @@ -222,7 +224,11 @@ def lineReceived(self, line): # Now lets try and call on_ function try: - getattr(self, "on_%s" % (cmd_name,))(cmd) + run_as_background_process( + "replication-" + cmd.get_logcontext_id(), + getattr(self, "on_%s" % (cmd_name,)), + cmd, + ) except Exception: logger.exception("[%s] Failed to handle line: %r", self.id(), line) @@ -387,7 +393,7 @@ def on_NAME(self, cmd): self.name = cmd.data def on_USER_SYNC(self, cmd): - self.streamer.on_user_sync( + return self.streamer.on_user_sync( self.conn_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms, ) @@ -397,22 +403,33 @@ def on_REPLICATE(self, cmd): if stream_name == "ALL": # Subscribe to all streams we're publishing to. 
- for stream in iterkeys(self.streamer.streams_by_name): - self.subscribe_to_stream(stream, token) + deferreds = [ + run_in_background( + self.subscribe_to_stream, + stream, token, + ) + for stream in iterkeys(self.streamer.streams_by_name) + ] + + return make_deferred_yieldable( + defer.gatherResults(deferreds, consumeErrors=True) + ) else: - self.subscribe_to_stream(stream_name, token) + return self.subscribe_to_stream(stream_name, token) def on_FEDERATION_ACK(self, cmd): - self.streamer.federation_ack(cmd.token) + return self.streamer.federation_ack(cmd.token) def on_REMOVE_PUSHER(self, cmd): - self.streamer.on_remove_pusher(cmd.app_id, cmd.push_key, cmd.user_id) + return self.streamer.on_remove_pusher( + cmd.app_id, cmd.push_key, cmd.user_id, + ) def on_INVALIDATE_CACHE(self, cmd): - self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys) + return self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys) def on_USER_IP(self, cmd): - self.streamer.on_user_ip( + return self.streamer.on_user_ip( cmd.user_id, cmd.access_token, cmd.ip, cmd.user_agent, cmd.device_id, cmd.last_seen, ) @@ -542,14 +559,13 @@ def on_RDATA(self, cmd): # Check if this is the last of a batch of updates rows = self.pending_batches.pop(stream_name, []) rows.append(row) - - self.handler.on_rdata(stream_name, cmd.token, rows) + return self.handler.on_rdata(stream_name, cmd.token, rows) def on_POSITION(self, cmd): - self.handler.on_position(cmd.stream_name, cmd.token) + return self.handler.on_position(cmd.stream_name, cmd.token) def on_SYNC(self, cmd): - self.handler.on_sync(cmd.data) + return self.handler.on_sync(cmd.data) def replicate(self, stream_name, token): """Send the subscription request to the server From d9efd87d55fb7766ba332bedbecd5efe1ca7067b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 17 Aug 2018 00:49:22 +0100 Subject: [PATCH 149/173] changelog --- changelog.d/3709.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3709.misc diff --git a/changelog.d/3709.misc b/changelog.d/3709.misc new file mode 100644 index 000000000000..bbda357d4481 --- /dev/null +++ b/changelog.d/3709.misc @@ -0,0 +1 @@ +Logcontexts for replication command handlers From 4c9da1440f9ab26b79e5efc2a0f774916f18aeba Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 17 Aug 2018 00:50:36 +0100 Subject: [PATCH 150/173] changelog --- changelog.d/3710.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3710.bugfix diff --git a/changelog.d/3710.bugfix b/changelog.d/3710.bugfix new file mode 100644 index 000000000000..c28e17766770 --- /dev/null +++ b/changelog.d/3710.bugfix @@ -0,0 +1 @@ +Fix "Starting db txn 'get_all_updated_receipts' from sentinel context" warning \ No newline at end of file From a8ffc27db76b9805d7c91e7256c278306ab54761 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 17 Aug 2018 02:46:25 -0600 Subject: [PATCH 151/173] Update the admin register documentation to return a real user ID Presumably this is the intention anyways. I've also updated the domain part to be something more along the lines of what people might expect. 
--- docs/admin_api/register_api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin_api/register_api.rst b/docs/admin_api/register_api.rst index 209cd140fdf2..16d65c86b367 100644 --- a/docs/admin_api/register_api.rst +++ b/docs/admin_api/register_api.rst @@ -33,7 +33,7 @@ As an example:: < { "access_token": "token_here", - "user_id": "@pepper_roni@test", + "user_id": "@pepper_roni:localhost", "home_server": "test", "device_id": "device_id_here" } From b99a0f39415e42aeebb1dbc0e860324bb8f5b37e Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 17 Aug 2018 02:47:31 -0600 Subject: [PATCH 152/173] Create 3712.misc --- changelog.d/3712.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3712.misc diff --git a/changelog.d/3712.misc b/changelog.d/3712.misc new file mode 100644 index 000000000000..30f8c2af21c0 --- /dev/null +++ b/changelog.d/3712.misc @@ -0,0 +1 @@ +Update admin register API documentation to reference a real user ID. From 521d369e7a6d6497427a03f3e4f81d20bd2e5761 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 17 Aug 2018 10:12:11 +0100 Subject: [PATCH 153/173] remove errant yield --- synapse/app/homeserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 800b9c0e3c64..005921dcf733 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -525,7 +525,7 @@ def generate_user_daily_visit_stats(): clock.looping_call( hs.get_datastore().reap_monthly_active_users, 1000 * 60 * 60 ) - yield hs.get_datastore().reap_monthly_active_users() + hs.get_datastore().reap_monthly_active_users() @defer.inlineCallbacks def generate_monthly_active_users(): From 3b2dcfff78d5c330937c4b75f34f79f8e991d8e3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 17 Aug 2018 11:11:06 +0100 Subject: [PATCH 154/173] Fix logging bug in EDU handling over replication --- synapse/replication/http/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/replication/http/federation.py b/synapse/replication/http/federation.py index 2ddd18f73b31..64a79da1627a 100644 --- a/synapse/replication/http/federation.py +++ b/synapse/replication/http/federation.py @@ -156,7 +156,7 @@ def _handle_request(self, request, edu_type): edu_content = content["content"] logger.info( - "Got %r edu from $s", + "Got %r edu from %s", edu_type, origin, ) From 73737bd0f9be0197dfe456db5b7887087c41f009 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 17 Aug 2018 11:14:05 +0100 Subject: [PATCH 155/173] Newsfile --- changelog.d/3713.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3713.bugfix diff --git a/changelog.d/3713.bugfix b/changelog.d/3713.bugfix new file mode 100644 index 000000000000..6c5422994f34 --- /dev/null +++ b/changelog.d/3713.bugfix @@ -0,0 +1 @@ +Support more federation endpoints on workers From 0195dfbf52f3cd8e93068e7b108b64f426dba2ff Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Fri, 17 Aug 2018 13:58:25 +0100 Subject: [PATCH 156/173] server limits config docs --- synapse/config/server.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/synapse/config/server.py b/synapse/config/server.py index ae72c872d934..a41c48e69c70 100644 --- a/synapse/config/server.py +++ b/synapse/config/server.py @@ -341,6 +341,32 @@ def default_config(self, server_name, **kwargs): # - port: 9000 # bind_addresses: ['::1', '127.0.0.1'] # type: manhole + + + # Homeserver blocking + # + # How to reach the 
server admin, used in ResourceLimitError + # admin_uri: 'mailto:admin@server.com' + # + # Global block config + # + # hs_disabled: False + # hs_disabled_message: 'Human readable reason for why the HS is blocked' + # hs_disabled_limit_type: 'error code(str), to help clients decode reason' + # + # Monthly Active User Blocking + # + # Enables monthly active user checking + # limit_usage_by_mau: False + # max_mau_value: 50 + # + # Sometimes the server admin will want to ensure certain accounts are + # never blocked by mau checking. These accounts are specified here. + # + # mau_limit_reserved_threepids: + # - medium: 'email' + # address: 'reserved_user@example.com' + """ % locals() def read_arguments(self, args): From 04f5d2db62be5fd8ace903fb3c964bd50f6811cc Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Sat, 18 Aug 2018 00:55:01 +1000 Subject: [PATCH 157/173] Remove v1/register's broken shared secret functionality (#3703) --- changelog.d/3703.removal | 1 + synapse/rest/client/v1_only/register.py | 54 ------------------------- 2 files changed, 1 insertion(+), 54 deletions(-) create mode 100644 changelog.d/3703.removal diff --git a/changelog.d/3703.removal b/changelog.d/3703.removal new file mode 100644 index 000000000000..d13ce5adb408 --- /dev/null +++ b/changelog.d/3703.removal @@ -0,0 +1 @@ +The Shared-Secret registration method of the legacy v1/register REST endpoint has been removed. For a replacement, please see [the admin/register API documentation](https://github.com/matrix-org/synapse/blob/master/docs/admin_api/register_api.rst). diff --git a/synapse/rest/client/v1_only/register.py b/synapse/rest/client/v1_only/register.py index 3439c3c6d454..5e99cffbcbd1 100644 --- a/synapse/rest/client/v1_only/register.py +++ b/synapse/rest/client/v1_only/register.py @@ -129,12 +129,9 @@ def on_POST(self, request): login_type = register_json["type"] is_application_server = login_type == LoginType.APPLICATION_SERVICE - is_using_shared_secret = login_type == LoginType.SHARED_SECRET - can_register = ( self.enable_registration or is_application_server - or is_using_shared_secret ) if not can_register: raise SynapseError(403, "Registration has been disabled") @@ -144,7 +141,6 @@ def on_POST(self, request): LoginType.PASSWORD: self._do_password, LoginType.EMAIL_IDENTITY: self._do_email_identity, LoginType.APPLICATION_SERVICE: self._do_app_service, - LoginType.SHARED_SECRET: self._do_shared_secret, } session_info = self._get_session_info(request, session) @@ -325,56 +321,6 @@ def _do_app_service(self, request, register_json, session): "home_server": self.hs.hostname, }) - @defer.inlineCallbacks - def _do_shared_secret(self, request, register_json, session): - assert_params_in_dict(register_json, ["mac", "user", "password"]) - - if not self.hs.config.registration_shared_secret: - raise SynapseError(400, "Shared secret registration is not enabled") - - user = register_json["user"].encode("utf-8") - password = register_json["password"].encode("utf-8") - admin = register_json.get("admin", None) - - # Its important to check as we use null bytes as HMAC field separators - if b"\x00" in user: - raise SynapseError(400, "Invalid user") - if b"\x00" in password: - raise SynapseError(400, "Invalid password") - - # str() because otherwise hmac complains that 'unicode' does not - # have the buffer interface - got_mac = str(register_json["mac"]) - - want_mac = hmac.new( - key=self.hs.config.registration_shared_secret.encode(), - digestmod=sha1, - ) - want_mac.update(user) - want_mac.update(b"\x00") - 
want_mac.update(password)
-        want_mac.update(b"\x00")
-        want_mac.update(b"admin" if admin else b"notadmin")
-        want_mac = want_mac.hexdigest()
-
-        if compare_digest(want_mac, got_mac):
-            handler = self.handlers.registration_handler
-            user_id, token = yield handler.register(
-                localpart=user.lower(),
-                password=password,
-                admin=bool(admin),
-            )
-            self._remove_session(session)
-            defer.returnValue({
-                "user_id": user_id,
-                "access_token": token,
-                "home_server": self.hs.hostname,
-            })
-        else:
-            raise SynapseError(
-                403, "HMAC incorrect",
-            )
-
 
 class CreateUserRestServlet(ClientV1RestServlet):
     """Handles user creation via a server-to-server interface

From c334ca67bb89039b3a00b7c9a1ce610e99859653 Mon Sep 17 00:00:00 2001
From: Amber Brown
Date: Sat, 18 Aug 2018 01:08:45 +1000
Subject: [PATCH 158/173] Integrate presence from hotfixes (#3694)

---
 changelog.d/3694.feature                |  1 +
 docs/workers.rst                        |  8 +++
 synapse/app/_base.py                    |  6 +-
 synapse/app/frontend_proxy.py           | 39 ++++++++++-
 synapse/app/synchrotron.py              | 16 +++--
 synapse/config/server.py                |  6 ++
 synapse/federation/transaction_queue.py |  4 ++
 synapse/handlers/initial_sync.py        |  4 ++
 synapse/handlers/presence.py            | 26 ++++++--
 synapse/handlers/sync.py                |  3 +-
 synapse/rest/client/v1/presence.py      |  3 +-
 tests/app/__init__.py                   |  0
 tests/app/test_frontend_proxy.py        | 88 +++++++++++++++++++++++++
 tests/rest/client/v1/test_presence.py   | 72 ++++++++++++++++++++
 tests/rest/client/v2_alpha/test_sync.py | 79 ++++++++++------------
 tests/unittest.py                       |  8 ++-
 tests/utils.py                          |  7 +-
 17 files changed, 303 insertions(+), 67 deletions(-)
 create mode 100644 changelog.d/3694.feature
 create mode 100644 tests/app/__init__.py
 create mode 100644 tests/app/test_frontend_proxy.py
 create mode 100644 tests/rest/client/v1/test_presence.py

diff --git a/changelog.d/3694.feature b/changelog.d/3694.feature
new file mode 100644
index 000000000000..916a342ff48f
--- /dev/null
+++ b/changelog.d/3694.feature
@@ -0,0 +1 @@
+Synapse's presence functionality can now be disabled with the "use_presence" configuration option.
diff --git a/docs/workers.rst b/docs/workers.rst
index ac9efb621f06..aec319dd8458 100644
--- a/docs/workers.rst
+++ b/docs/workers.rst
@@ -241,6 +241,14 @@ regular expressions::
 
     ^/_matrix/client/(api/v1|r0|unstable)/keys/upload
 
+If ``use_presence`` is False in the homeserver config, it can also handle REST
+endpoints matching the following regular expressions::
+
+    ^/_matrix/client/(api/v1|r0|unstable)/presence/[^/]+/status
+
+This "stub" presence handler will pass through ``GET`` requests but make the
+``PUT`` effectively a no-op.
+
 It will proxy any requests it cannot handle to the main synapse instance.
 It must therefore be configured with the location of the main instance, via
 the ``worker_main_http_uri`` setting in the frontend_proxy worker configuration
diff --git a/synapse/app/_base.py b/synapse/app/_base.py
index 391bd14c5c9c..7c866e246af8 100644
--- a/synapse/app/_base.py
+++ b/synapse/app/_base.py
@@ -140,7 +140,7 @@ def listen_metrics(bind_addresses, port):
     logger.info("Metrics now reporting on %s:%d", host, port)
 
 
-def listen_tcp(bind_addresses, port, factory, backlog=50):
+def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
     """
     Create a TCP socket for a port and several addresses
     """
@@ -156,7 +156,9 @@ def listen_tcp(bind_addresses, port, factory, backlog=50):
             check_bind_error(e, address, bind_addresses)
 
 
-def listen_ssl(bind_addresses, port, factory, context_factory, backlog=50):
+def listen_ssl(
+    bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50
+):
     """
     Create an SSL socket for a port and several addresses
     """
diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py
index 671fbbcb2a9d..8d484c1cd4a0 100644
--- a/synapse/app/frontend_proxy.py
+++ b/synapse/app/frontend_proxy.py
@@ -38,6 +38,7 @@ from synapse.replication.slave.storage.devices import SlavedDeviceStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.rest.client.v1.base import ClientV1RestServlet, client_path_patterns
 from synapse.rest.client.v2_alpha._base import client_v2_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
@@ -49,6 +50,35 @@
 logger = logging.getLogger("synapse.app.frontend_proxy")
 
 
+class PresenceStatusStubServlet(ClientV1RestServlet):
+    PATTERNS = client_path_patterns("/presence/(?P<user_id>[^/]*)/status")
+
+    def __init__(self, hs):
+        super(PresenceStatusStubServlet, self).__init__(hs)
+        self.http_client = hs.get_simple_http_client()
+        self.auth = hs.get_auth()
+        self.main_uri = hs.config.worker_main_http_uri
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id):
+        # Pass through the auth headers, if any, in case the access token
+        # is there.
+        auth_headers = request.requestHeaders.getRawHeaders("Authorization", [])
+        headers = {
+            "Authorization": auth_headers,
+        }
+        result = yield self.http_client.get_json(
+            self.main_uri + request.uri,
+            headers=headers,
+        )
+        defer.returnValue((200, result))
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request, user_id):
+        yield self.auth.get_user_by_req(request)
+        defer.returnValue((200, {}))
+
+
 class KeyUploadServlet(RestServlet):
     PATTERNS = client_v2_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")
 
@@ -135,6 +165,12 @@ def _listen_http(self, listener_config):
         elif name == "client":
             resource = JsonResource(self, canonical_json=False)
             KeyUploadServlet(self).register(resource)
+
+            # If presence is disabled, use the stub servlet that does
+            # not allow sending presence
+            if not self.config.use_presence:
+                PresenceStatusStubServlet(self).register(resource)
+
             resources.update({
                 "/_matrix/client/r0": resource,
                 "/_matrix/client/unstable": resource,
@@ -153,7 +189,8 @@ def _listen_http(self, listener_config):
                 listener_config, root_resource, self.version_string,
-            )
+            ),
+            reactor=self.get_reactor()
         )
         logger.info("Synapse client reader now listening on port %d", port)
diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py
index e201f18efdc5..cade09d60e37 100644
--- a/synapse/app/synchrotron.py
+++ b/synapse/app/synchrotron.py
@@ -114,7 +114,10 @@ def __init__(self, hs):
         logger.info("Presence process_id is %r", self.process_id)
 
     def send_user_sync(self, user_id, is_syncing, last_sync_ms):
-        self.hs.get_tcp_replication().send_user_sync(user_id, is_syncing, last_sync_ms)
+        if self.hs.config.use_presence:
+            self.hs.get_tcp_replication().send_user_sync(
+                user_id, is_syncing, last_sync_ms
+            )
 
     def mark_as_coming_online(self, user_id):
         """A user has started syncing. Send a UserSync to the master, unless they
@@ -211,10 +214,13 @@ def process_replication_rows(self, token, rows):
         yield self.notify_from_replication(states, stream_id)
 
     def get_currently_syncing_users(self):
-        return [
-            user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
-            if count > 0
-        ]
+        if self.hs.config.use_presence:
+            return [
+                user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
+                if count > 0
+            ]
+        else:
+            return set()
 
 
 class SynchrotronTyping(object):
diff --git a/synapse/config/server.py b/synapse/config/server.py
index a41c48e69c70..68a612e59482 100644
--- a/synapse/config/server.py
+++ b/synapse/config/server.py
@@ -49,6 +49,9 @@ def read_config(self, config):
         # "disable" federation
         self.send_federation = config.get("send_federation", True)
 
+        # Whether to enable user presence.
+        self.use_presence = config.get("use_presence", True)
+
         # Whether to update the user directory or not. This should be set to
        # false only if we are updating the user directory in a worker
         self.update_user_directory = config.get("update_user_directory", True)
@@ -250,6 +253,9 @@ def default_config(self, server_name, **kwargs):
         # hard limit.
         soft_file_limit: 0
 
+        # Set to false to disable presence tracking on this homeserver.
+ use_presence: true + # The GC threshold parameters to pass to `gc.set_threshold`, if defined # gc_thresholds: [700, 10, 10] diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index f603c8a368aa..94d7423d016a 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -58,6 +58,7 @@ class TransactionQueue(object): """ def __init__(self, hs): + self.hs = hs self.server_name = hs.hostname self.store = hs.get_datastore() @@ -308,6 +309,9 @@ def send_presence(self, states): Args: states (list(UserPresenceState)) """ + if not self.hs.config.use_presence: + # No-op if presence is disabled. + return # First we queue up the new presence by user ID, so multiple presence # updates in quick successtion are correctly handled diff --git a/synapse/handlers/initial_sync.py b/synapse/handlers/initial_sync.py index 1fb17fd9a5b1..e0093952074f 100644 --- a/synapse/handlers/initial_sync.py +++ b/synapse/handlers/initial_sync.py @@ -372,6 +372,10 @@ def _room_initial_sync_joined(self, user_id, room_id, pagin_config, @defer.inlineCallbacks def get_presence(): + # If presence is disabled, return an empty list + if not self.hs.config.use_presence: + defer.returnValue([]) + states = yield presence_handler.get_states( [m.user_id for m in room_members], as_event=True, diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 3671d24f60d0..ba3856674d0e 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -395,6 +395,10 @@ def bump_presence_active_time(self, user): """We've seen the user do something that indicates they're interacting with the app. """ + # If presence is disabled, no-op + if not self.hs.config.use_presence: + return + user_id = user.to_string() bump_active_time_counter.inc() @@ -424,6 +428,11 @@ def user_syncing(self, user_id, affect_presence=True): Useful for streams that are not associated with an actual client that is being used by a user. """ + # Override if it should affect the user's presence, if presence is + # disabled. + if not self.hs.config.use_presence: + affect_presence = False + if affect_presence: curr_sync = self.user_to_num_current_syncs.get(user_id, 0) self.user_to_num_current_syncs[user_id] = curr_sync + 1 @@ -469,13 +478,16 @@ def get_currently_syncing_users(self): Returns: set(str): A set of user_id strings. 
""" - syncing_user_ids = { - user_id for user_id, count in self.user_to_num_current_syncs.items() - if count - } - for user_ids in self.external_process_to_current_syncs.values(): - syncing_user_ids.update(user_ids) - return syncing_user_ids + if self.hs.config.use_presence: + syncing_user_ids = { + user_id for user_id, count in self.user_to_num_current_syncs.items() + if count + } + for user_ids in self.external_process_to_current_syncs.values(): + syncing_user_ids.update(user_ids) + return syncing_user_ids + else: + return set() @defer.inlineCallbacks def update_external_syncs_row(self, process_id, user_id, is_syncing, sync_time_msec): diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index ac3edf0cc9bc..648debc8aa96 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -185,6 +185,7 @@ def __nonzero__(self): class SyncHandler(object): def __init__(self, hs): + self.hs_config = hs.config self.store = hs.get_datastore() self.notifier = hs.get_notifier() self.presence_handler = hs.get_presence_handler() @@ -860,7 +861,7 @@ def generate_sync_result(self, sync_config, since_token=None, full_state=False): since_token is None and sync_config.filter_collection.blocks_all_presence() ) - if not block_all_presence_data: + if self.hs_config.use_presence and not block_all_presence_data: yield self._generate_sync_entry_for_presence( sync_result_builder, newly_joined_rooms, newly_joined_users ) diff --git a/synapse/rest/client/v1/presence.py b/synapse/rest/client/v1/presence.py index a14f0c807e2e..b5a6d6aebfc7 100644 --- a/synapse/rest/client/v1/presence.py +++ b/synapse/rest/client/v1/presence.py @@ -84,7 +84,8 @@ def on_PUT(self, request, user_id): except Exception: raise SynapseError(400, "Unable to parse state") - yield self.presence_handler.set_state(user, state) + if self.hs.config.use_presence: + yield self.presence_handler.set_state(user, state) defer.returnValue((200, {})) diff --git a/tests/app/__init__.py b/tests/app/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py new file mode 100644 index 000000000000..76b5090fffd1 --- /dev/null +++ b/tests/app/test_frontend_proxy.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 New Vector Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.app.frontend_proxy import FrontendProxyServer + +from tests.unittest import HomeserverTestCase + + +class FrontendProxyTests(HomeserverTestCase): + def make_homeserver(self, reactor, clock): + + hs = self.setup_test_homeserver( + http_client=None, homeserverToUse=FrontendProxyServer + ) + + return hs + + def test_listen_http_with_presence_enabled(self): + """ + When presence is on, the stub servlet will not register. 
+        """
+        # Presence is on
+        self.hs.config.use_presence = True
+
+        config = {
+            "port": 8080,
+            "bind_addresses": ["0.0.0.0"],
+            "resources": [{"names": ["client"]}],
+        }
+
+        # Listen with the config
+        self.hs._listen_http(config)
+
+        # Grab the resource from the site that was told to listen
+        self.assertEqual(len(self.reactor.tcpServers), 1)
+        site = self.reactor.tcpServers[0][1]
+        self.resource = (
+            site.resource.children["_matrix"].children["client"].children["r0"]
+        )
+
+        request, channel = self.make_request("PUT", "presence/a/status")
+        self.render(request)
+
+        # 400 + unrecognised, because nothing is registered
+        self.assertEqual(channel.code, 400)
+        self.assertEqual(channel.json_body["errcode"], "M_UNRECOGNIZED")
+
+    def test_listen_http_with_presence_disabled(self):
+        """
+        When presence is off, the stub servlet will register.
+        """
+        # Presence is off
+        self.hs.config.use_presence = False
+
+        config = {
+            "port": 8080,
+            "bind_addresses": ["0.0.0.0"],
+            "resources": [{"names": ["client"]}],
+        }
+
+        # Listen with the config
+        self.hs._listen_http(config)
+
+        # Grab the resource from the site that was told to listen
+        self.assertEqual(len(self.reactor.tcpServers), 1)
+        site = self.reactor.tcpServers[0][1]
+        self.resource = (
+            site.resource.children["_matrix"].children["client"].children["r0"]
+        )
+
+        request, channel = self.make_request("PUT", "presence/a/status")
+        self.render(request)
+
+        # 401, because the stub servlet still checks authentication
+        self.assertEqual(channel.code, 401)
+        self.assertEqual(channel.json_body["errcode"], "M_MISSING_TOKEN")
diff --git a/tests/rest/client/v1/test_presence.py b/tests/rest/client/v1/test_presence.py
new file mode 100644
index 000000000000..66c2b687079c
--- /dev/null
+++ b/tests/rest/client/v1/test_presence.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 New Vector Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mock import Mock
+
+from synapse.rest.client.v1 import presence
+from synapse.types import UserID
+
+from tests import unittest
+
+
+class PresenceTestCase(unittest.HomeserverTestCase):
+    """ Tests presence REST API. """
+
+    user_id = "@sid:red"
+
+    user = UserID.from_string(user_id)
+    servlets = [presence.register_servlets]
+
+    def make_homeserver(self, reactor, clock):
+
+        hs = self.setup_test_homeserver(
+            "red", http_client=None, federation_client=Mock()
+        )
+
+        hs.presence_handler = Mock()
+
+        return hs
+
+    def test_put_presence(self):
+        """
+        PUT to the status endpoint with use_presence enabled will call
+        set_state on the presence handler.
+        """
+        self.hs.config.use_presence = True
+
+        body = {"presence": "here", "status_msg": "beep boop"}
+        request, channel = self.make_request(
+            "PUT", "/presence/%s/status" % (self.user_id,), body
+        )
+        self.render(request)
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(self.hs.presence_handler.set_state.call_count, 1)
+
+    def test_put_presence_disabled(self):
+        """
+        PUT to the status endpoint with use_presence disabled will NOT call
+        set_state on the presence handler.
+        """
+        self.hs.config.use_presence = False
+
+        body = {"presence": "here", "status_msg": "beep boop"}
+        request, channel = self.make_request(
+            "PUT", "/presence/%s/status" % (self.user_id,), body
+        )
+        self.render(request)
+
+        self.assertEqual(channel.code, 200)
+        self.assertEqual(self.hs.presence_handler.set_state.call_count, 0)
diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py
index 9f3d8bd1dbb4..560b1fba967a 100644
--- a/tests/rest/client/v2_alpha/test_sync.py
+++ b/tests/rest/client/v2_alpha/test_sync.py
@@ -13,71 +13,58 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import synapse.types
-from synapse.http.server import JsonResource
+from mock import Mock
+
 from synapse.rest.client.v2_alpha import sync
-from synapse.types import UserID
-from synapse.util import Clock
 
 from tests import unittest
-from tests.server import (
-    ThreadedMemoryReactorClock as MemoryReactorClock,
-    make_request,
-    render,
-    setup_test_homeserver,
-)
-
-PATH_PREFIX = "/_matrix/client/v2_alpha"
 
 
-class FilterTestCase(unittest.TestCase):
+class FilterTestCase(unittest.HomeserverTestCase):
 
-    USER_ID = "@apple:test"
-    TO_REGISTER = [sync]
+    user_id = "@apple:test"
+    servlets = [sync.register_servlets]
 
-    def setUp(self):
-        self.clock = MemoryReactorClock()
-        self.hs_clock = Clock(self.clock)
+    def make_homeserver(self, reactor, clock):
 
-        self.hs = setup_test_homeserver(
-            self.addCleanup, http_client=None, clock=self.hs_clock, reactor=self.clock
+        hs = self.setup_test_homeserver(
+            "red", http_client=None, federation_client=Mock()
         )
+        return hs
 
-        self.auth = self.hs.get_auth()
-
-        def get_user_by_access_token(token=None, allow_guest=False):
-            return {
-                "user": UserID.from_string(self.USER_ID),
-                "token_id": 1,
-                "is_guest": False,
-            }
-
-        def get_user_by_req(request, allow_guest=False, rights="access"):
-            return synapse.types.create_requester(
-                UserID.from_string(self.USER_ID), 1, False, None
-            )
-
-        self.auth.get_user_by_access_token = get_user_by_access_token
-        self.auth.get_user_by_req = get_user_by_req
+    def test_sync_argless(self):
+        request, channel = self.make_request("GET", "/sync")
+        self.render(request)
 
-        self.store = self.hs.get_datastore()
-        self.filtering = self.hs.get_filtering()
-        self.resource = JsonResource(self.hs)
+        self.assertEqual(channel.code, 200)
+        self.assertTrue(
+            set(
+                [
+                    "next_batch",
+                    "rooms",
+                    "presence",
+                    "account_data",
+                    "to_device",
+                    "device_lists",
+                ]
+            ).issubset(set(channel.json_body.keys()))
+        )
 
-        for r in self.TO_REGISTER:
-            r.register_servlets(self.hs, self.resource)
+    def test_sync_presence_disabled(self):
+        """
+        When presence is disabled, the key does not appear in /sync.
+        """
+        self.hs.config.use_presence = False
 
-    def test_sync_argless(self):
-        request, channel = make_request("GET", "/_matrix/client/r0/sync")
-        render(request, self.resource, self.clock)
+        request, channel = self.make_request("GET", "/sync")
+        self.render(request)
 
-        self.assertEqual(channel.result["code"], b"200")
+        self.assertEqual(channel.code, 200)
         self.assertTrue(
             set(
                 [
                     "next_batch",
                     "rooms",
-                    "presence",
                     "account_data",
                     "to_device",
                     "device_lists",
diff --git a/tests/unittest.py b/tests/unittest.py
index e6afe3b96d61..d852e2465a06 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -18,6 +18,8 @@
 
 from mock import Mock
 
+from canonicaljson import json
+
 import twisted
 import twisted.logger
 from twisted.trial import unittest
@@ -241,11 +243,15 @@ def make_request(self, method, path, content=b""):
             method (bytes/unicode): The HTTP request method ("verb").
             path (bytes/unicode): The HTTP path, suitably URL encoded (e.g.
             escaped UTF-8 & spaces and such).
-            content (bytes): The body of the request.
+            content (bytes or dict): The body of the request. JSON-encoded, if
+                a dict.
 
         Returns:
             A synapse.http.site.SynapseRequest.
         """
+        if isinstance(content, dict):
+            content = json.dumps(content).encode('utf8')
+
         return make_request(method, path, content)
 
     def render(self, request):
diff --git a/tests/utils.py b/tests/utils.py
index 6f8b1de3e799..bb0fc740540c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -93,7 +93,8 @@ def _cleanup():
 
 @defer.inlineCallbacks
 def setup_test_homeserver(
-    cleanup_func, name="test", datastore=None, config=None, reactor=None, **kargs
+    cleanup_func, name="test", datastore=None, config=None, reactor=None,
+    homeserverToUse=HomeServer, **kargs
 ):
     """
     Setup a homeserver suitable for running tests against. Keyword arguments
@@ -192,7 +193,7 @@ def setup_test_homeserver(
     config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection
 
     if datastore is None:
-        hs = HomeServer(
+        hs = homeserverToUse(
             name,
             config=config,
             db_config=config.database_config,
@@ -235,7 +236,7 @@ def cleanup():
 
         hs.setup()
     else:
-        hs = HomeServer(
+        hs = homeserverToUse(
             name,
             db_pool=None,
             datastore=datastore,

From f2a48d87df3129bd32597cb7e28496ade0fa8250 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 20 Aug 2018 13:01:46 +0100
Subject: [PATCH 159/173] Use get_cache_factor_for function for `state_cache`

This allows the cache factor for `state_cache` to be individually
specified in the environment

---
 synapse/state.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/synapse/state.py b/synapse/state.py
index 8b92d4057a2a..0f2bedb69449 100644
--- a/synapse/state.py
+++ b/synapse/state.py
@@ -29,7 +29,7 @@
 from synapse.api.errors import AuthError
 from synapse.events.snapshot import EventContext
 from synapse.util.async_helpers import Linearizer
-from synapse.util.caches import CACHE_SIZE_FACTOR
+from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
@@ -40,7 +40,7 @@
 KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))
 
-SIZE_OF_CACHE = int(100000 * CACHE_SIZE_FACTOR)
+SIZE_OF_CACHE = 100000 * get_cache_factor_for("state_cache")
 
 EVICTION_TIMEOUT_SECONDS = 60 * 60
 
From 48a910e128a560a8572de403eeed19734367b8f6 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 20 Aug 2018 13:04:28 +0100
Subject: [PATCH 160/173] Newsfile

---
 changelog.d/3719.bugfix | 1 +
 1 file changed, 1 insertion(+)
create mode 100644 changelog.d/3719.bugfix diff --git a/changelog.d/3719.bugfix b/changelog.d/3719.bugfix new file mode 100644 index 000000000000..095d9288e897 --- /dev/null +++ b/changelog.d/3719.bugfix @@ -0,0 +1 @@ +Fix bug where `state_cache` cache factor ignored environment variables From 324525f40ca4df19c43971ca82db0d3478114885 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Mon, 20 Aug 2018 23:54:49 +1000 Subject: [PATCH 161/173] Port over enough to get some sytests running on Python 3 (#3668) --- changelog.d/3668.misc | 1 + synapse/api/auth.py | 14 +++--- synapse/api/ratelimiting.py | 2 +- synapse/config/logger.py | 3 +- synapse/http/servlet.py | 56 ++++++++++++++++++------ synapse/http/site.py | 4 +- synapse/rest/client/transactions.py | 2 +- synapse/rest/media/v1/upload_resource.py | 10 ++--- synapse/secrets.py | 2 +- synapse/util/logutils.py | 10 ++++- synapse/util/stringutils.py | 15 +++++++ synapse/util/versionstring.py | 12 ++--- 12 files changed, 91 insertions(+), 40 deletions(-) create mode 100644 changelog.d/3668.misc diff --git a/changelog.d/3668.misc b/changelog.d/3668.misc new file mode 100644 index 000000000000..f9265addf1a0 --- /dev/null +++ b/changelog.d/3668.misc @@ -0,0 +1 @@ +Port over enough to Python 3 to allow the sytests to start. diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 022211e34e0f..6502a6be7b9b 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -211,7 +211,7 @@ def get_user_by_req(self, request, allow_guest=False, rights="access"): user_agent = request.requestHeaders.getRawHeaders( b"User-Agent", default=[b""] - )[0] + )[0].decode('ascii', 'surrogateescape') if user and access_token and ip_addr: yield self.store.insert_client_ip( user_id=user.to_string(), @@ -682,7 +682,7 @@ def has_access_token(request): Returns: bool: False if no access_token was given, True otherwise. """ - query_params = request.args.get("access_token") + query_params = request.args.get(b"access_token") auth_headers = request.requestHeaders.getRawHeaders(b"Authorization") return bool(query_params) or bool(auth_headers) @@ -698,7 +698,7 @@ def get_access_token_from_request(request, token_not_found_http_status=401): 401 since some of the old clients depended on auth errors returning 403. Returns: - str: The access_token + unicode: The access_token Raises: AuthError: If there isn't an access_token in the request. 
""" @@ -720,9 +720,9 @@ def get_access_token_from_request(request, token_not_found_http_status=401): "Too many Authorization headers.", errcode=Codes.MISSING_TOKEN, ) - parts = auth_headers[0].split(" ") - if parts[0] == "Bearer" and len(parts) == 2: - return parts[1] + parts = auth_headers[0].split(b" ") + if parts[0] == b"Bearer" and len(parts) == 2: + return parts[1].decode('ascii') else: raise AuthError( token_not_found_http_status, @@ -738,7 +738,7 @@ def get_access_token_from_request(request, token_not_found_http_status=401): errcode=Codes.MISSING_TOKEN ) - return query_params[0] + return query_params[0].decode('ascii') @defer.inlineCallbacks def check_in_room_or_world_readable(self, room_id, user_id): diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 06cc8d90b87e..3bb5b3da37c5 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -72,7 +72,7 @@ def send_message(self, user_id, time_now_s, msg_rate_hz, burst_count, update=Tru return allowed, time_allowed def prune_message_counts(self, time_now_s): - for user_id in self.message_counts.keys(): + for user_id in list(self.message_counts.keys()): message_count, time_start, msg_rate_hz = ( self.message_counts[user_id] ) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index cfc20dcccf31..3f187adfc8cd 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -168,7 +168,8 @@ def setup_logging(config, use_worker_options=False): if log_file: # TODO: Customisable file size / backup count handler = logging.handlers.RotatingFileHandler( - log_file, maxBytes=(1000 * 1000 * 100), backupCount=3 + log_file, maxBytes=(1000 * 1000 * 100), backupCount=3, + encoding='utf8' ) def sighup(signum, stack): diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 69f70852911c..a1e4b88e6ddc 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -29,7 +29,7 @@ def parse_integer(request, name, default=None, required=False): Args: request: the twisted HTTP request. - name (str): the name of the query parameter. + name (bytes/unicode): the name of the query parameter. default (int|None): value to use if the parameter is absent, defaults to None. required (bool): whether to raise a 400 SynapseError if the @@ -46,6 +46,10 @@ def parse_integer(request, name, default=None, required=False): def parse_integer_from_args(args, name, default=None, required=False): + + if not isinstance(name, bytes): + name = name.encode('ascii') + if name in args: try: return int(args[name][0]) @@ -65,7 +69,7 @@ def parse_boolean(request, name, default=None, required=False): Args: request: the twisted HTTP request. - name (str): the name of the query parameter. + name (bytes/unicode): the name of the query parameter. default (bool|None): value to use if the parameter is absent, defaults to None. required (bool): whether to raise a 400 SynapseError if the @@ -83,11 +87,15 @@ def parse_boolean(request, name, default=None, required=False): def parse_boolean_from_args(args, name, default=None, required=False): + + if not isinstance(name, bytes): + name = name.encode('ascii') + if name in args: try: return { - "true": True, - "false": False, + b"true": True, + b"false": False, }[args[name][0]] except Exception: message = ( @@ -104,21 +112,29 @@ def parse_boolean_from_args(args, name, default=None, required=False): def parse_string(request, name, default=None, required=False, - allowed_values=None, param_type="string"): - """Parse a string parameter from the request query string. 
+                 allowed_values=None, param_type="string", encoding='ascii'):
+    """
+    Parse a string parameter from the request query string.
+
+    If encoding is not None, the content of the query param will be
+    decoded to Unicode using the encoding, otherwise it will be returned as bytes.
 
     Args:
         request: the twisted HTTP request.
-        name (str): the name of the query parameter.
-        default (str|None): value to use if the parameter is absent, defaults
-            to None.
+        name (bytes/unicode): the name of the query parameter.
+        default (bytes/unicode|None): value to use if the parameter is absent,
+            defaults to None. Must be bytes if encoding is None.
         required (bool): whether to raise a 400 SynapseError if the
             parameter is absent, defaults to False.
-        allowed_values (list[str]): List of allowed values for the string,
-            or None if any value is allowed, defaults to None
+        allowed_values (list[bytes/unicode]): List of allowed values for the
+            string, or None if any value is allowed, defaults to None. Must be
+            the same type as name, if given.
+        encoding: The encoding to decode the string content with, or None to
+            return the raw bytes.
 
     Returns:
-        str|None: A string value or the default.
+        bytes/unicode|None: A string value or the default. Unicode if encoding
+        was given, bytes otherwise.
 
     Raises:
         SynapseError if the parameter is absent and required, or if the
@@ -126,14 +142,22 @@ def parse_string(request, name, default=None, required=False,
         is not one of those allowed values.
     """
     return parse_string_from_args(
-        request.args, name, default, required, allowed_values, param_type,
+        request.args, name, default, required, allowed_values, param_type, encoding
     )
 
 
 def parse_string_from_args(args, name, default=None, required=False,
-                           allowed_values=None, param_type="string"):
+                           allowed_values=None, param_type="string", encoding='ascii'):
+
+    if not isinstance(name, bytes):
+        name = name.encode('ascii')
+
     if name in args:
         value = args[name][0]
+
+        if encoding:
+            value = value.decode(encoding)
+
         if allowed_values is not None and value not in allowed_values:
             message = "Query parameter %r must be one of [%s]" % (
                 name, ", ".join(repr(v) for v in allowed_values)
@@ -146,6 +170,10 @@ def parse_string_from_args(args, name, default=None, required=False,
             message = "Missing %s query parameter %r" % (param_type, name)
             raise SynapseError(400, message, errcode=Codes.MISSING_PARAM)
     else:
+
+        if encoding and isinstance(default, bytes):
+            return default.decode(encoding)
+
         return default
 
diff --git a/synapse/http/site.py b/synapse/http/site.py
index f5a8f7840654..ad2a98468e62 100644
--- a/synapse/http/site.py
+++ b/synapse/http/site.py
@@ -235,7 +235,7 @@ def _finished_processing(self):
         # need to decode as it could be raw utf-8 bytes
         # from a IDN servname in an auth header
         authenticated_entity = self.authenticated_entity
-        if authenticated_entity is not None:
+        if authenticated_entity is not None and isinstance(authenticated_entity, bytes):
             authenticated_entity = authenticated_entity.decode("utf-8", "replace")
 
         # ...or could be raw utf-8 bytes in the User-Agent header.
@@ -328,7 +328,7 @@ def __init__(self, logger_name, site_tag, config, resource, proxied = config.get("x_forwarded", False) self.requestFactory = SynapseRequestFactory(self, proxied) self.access_logger = logging.getLogger(logger_name) - self.server_version_string = server_version_string + self.server_version_string = server_version_string.encode('ascii') def log(self, request): pass diff --git a/synapse/rest/client/transactions.py b/synapse/rest/client/transactions.py index 511e96ab00d9..48c17f1b6d41 100644 --- a/synapse/rest/client/transactions.py +++ b/synapse/rest/client/transactions.py @@ -53,7 +53,7 @@ def _get_transaction_key(self, request): str: A transaction key """ token = self.auth.get_access_token_from_request(request) - return request.path + "/" + token + return request.path.decode('utf8') + "/" + token def fetch_or_execute_request(self, request, fn, *args, **kwargs): """A helper function for fetch_or_execute which extracts diff --git a/synapse/rest/media/v1/upload_resource.py b/synapse/rest/media/v1/upload_resource.py index 9b22d204a6d1..c1240e1963dd 100644 --- a/synapse/rest/media/v1/upload_resource.py +++ b/synapse/rest/media/v1/upload_resource.py @@ -55,7 +55,7 @@ def _async_render_POST(self, request): requester = yield self.auth.get_user_by_req(request) # TODO: The checks here are a bit late. The content will have # already been uploaded to a tmp file at this point - content_length = request.getHeader("Content-Length") + content_length = request.getHeader(b"Content-Length").decode('ascii') if content_length is None: raise SynapseError( msg="Request must specify a Content-Length", code=400 @@ -66,10 +66,10 @@ def _async_render_POST(self, request): code=413, ) - upload_name = parse_string(request, "filename") + upload_name = parse_string(request, b"filename", encoding=None) if upload_name: try: - upload_name = upload_name.decode('UTF-8') + upload_name = upload_name.decode('utf8') except UnicodeDecodeError: raise SynapseError( msg="Invalid UTF-8 filename parameter: %r" % (upload_name), @@ -78,8 +78,8 @@ def _async_render_POST(self, request): headers = request.requestHeaders - if headers.hasHeader("Content-Type"): - media_type = headers.getRawHeaders(b"Content-Type")[0] + if headers.hasHeader(b"Content-Type"): + media_type = headers.getRawHeaders(b"Content-Type")[0].decode('ascii') else: raise SynapseError( msg="Upload request missing 'Content-Type'", diff --git a/synapse/secrets.py b/synapse/secrets.py index f05e9ea53595..f6280f951cdf 100644 --- a/synapse/secrets.py +++ b/synapse/secrets.py @@ -38,4 +38,4 @@ def token_bytes(self, nbytes=32): return os.urandom(nbytes) def token_hex(self, nbytes=32): - return binascii.hexlify(self.token_bytes(nbytes)) + return binascii.hexlify(self.token_bytes(nbytes)).decode('ascii') diff --git a/synapse/util/logutils.py b/synapse/util/logutils.py index 62a00189ccd1..ef31458226b3 100644 --- a/synapse/util/logutils.py +++ b/synapse/util/logutils.py @@ -20,6 +20,8 @@ from functools import wraps from inspect import getcallargs +from six import PY3 + _TIME_FUNC_ID = 0 @@ -28,8 +30,12 @@ def _log_debug_as_f(f, msg, msg_args): logger = logging.getLogger(name) if logger.isEnabledFor(logging.DEBUG): - lineno = f.func_code.co_firstlineno - pathname = f.func_code.co_filename + if PY3: + lineno = f.__code__.co_firstlineno + pathname = f.__code__.co_filename + else: + lineno = f.func_code.co_firstlineno + pathname = f.func_code.co_filename record = logging.LogRecord( name=name, diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 
43d9db67ecdd..6f318c6a299a 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -16,6 +16,7 @@ import random import string +from six import PY3 from six.moves import range _string_with_symbols = ( @@ -34,6 +35,17 @@ def random_string_with_symbols(length): def is_ascii(s): + + if PY3: + if isinstance(s, bytes): + try: + s.decode('ascii').encode('ascii') + except UnicodeDecodeError: + return False + except UnicodeEncodeError: + return False + return True + try: s.encode("ascii") except UnicodeEncodeError: @@ -49,6 +61,9 @@ def to_ascii(s): If given None then will return None. """ + if PY3: + return s + if s is None: return None diff --git a/synapse/util/versionstring.py b/synapse/util/versionstring.py index 1fbcd411153e..3baba3225afa 100644 --- a/synapse/util/versionstring.py +++ b/synapse/util/versionstring.py @@ -30,7 +30,7 @@ def get_version_string(module): ['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stderr=null, cwd=cwd, - ).strip() + ).strip().decode('ascii') git_branch = "b=" + git_branch except subprocess.CalledProcessError: git_branch = "" @@ -40,7 +40,7 @@ def get_version_string(module): ['git', 'describe', '--exact-match'], stderr=null, cwd=cwd, - ).strip() + ).strip().decode('ascii') git_tag = "t=" + git_tag except subprocess.CalledProcessError: git_tag = "" @@ -50,7 +50,7 @@ def get_version_string(module): ['git', 'rev-parse', '--short', 'HEAD'], stderr=null, cwd=cwd, - ).strip() + ).strip().decode('ascii') except subprocess.CalledProcessError: git_commit = "" @@ -60,7 +60,7 @@ def get_version_string(module): ['git', 'describe', '--dirty=' + dirty_string], stderr=null, cwd=cwd, - ).strip().endswith(dirty_string) + ).strip().decode('ascii').endswith(dirty_string) git_dirty = "dirty" if is_dirty else "" except subprocess.CalledProcessError: @@ -77,8 +77,8 @@ def get_version_string(module): "%s (%s)" % ( module.__version__, git_version, ) - ).encode("ascii") + ) except Exception as e: logger.info("Failed to check for git repository: %s", e) - return module.__version__.encode("ascii") + return module.__version__ From 9a2f960736a1d1aeb6c9c094887253b280a59a37 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 21 Aug 2018 00:00:19 +1000 Subject: [PATCH 162/173] version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index a14d578e36a7..5175a1161d5a 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.33.2" +__version__ = "0.33.3rc1" From 80bf7d35802003cee6e495421ec54c1c08bce525 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 21 Aug 2018 00:01:14 +1000 Subject: [PATCH 163/173] changelog --- CHANGES.md | 64 ++++++++++++++++++++++++++++++++++++++++ changelog.d/1491.feature | 1 - changelog.d/3184.feature | 1 - changelog.d/3423.misc | 1 - changelog.d/3568.feature | 1 - changelog.d/3574.feature | 1 - changelog.d/3589.feature | 1 - changelog.d/3632.misc | 1 - changelog.d/3633.feature | 1 - changelog.d/3647.misc | 1 - changelog.d/3653.feature | 1 - changelog.d/3654.feature | 1 - changelog.d/3655.feature | 1 - changelog.d/3658.bugfix | 1 - changelog.d/3660.misc | 1 - changelog.d/3661.bugfix | 1 - changelog.d/3662.feature | 1 - changelog.d/3664.feature | 1 - changelog.d/3668.misc | 1 - changelog.d/3669.misc | 1 - changelog.d/3670.feature | 1 - changelog.d/3676.bugfix | 1 - changelog.d/3677.bugfix | 1 - changelog.d/3678.misc | 1 - changelog.d/3679.misc | 1 - changelog.d/3681.bugfix | 1 - changelog.d/3684.misc | 1 - changelog.d/3687.feature | 1 - changelog.d/3689.bugfix | 1 - changelog.d/3690.misc | 1 - changelog.d/3692.bugfix | 1 - changelog.d/3694.feature | 1 - changelog.d/3700.bugfix | 1 - changelog.d/3701.bugfix | 1 - changelog.d/3703.removal | 1 - changelog.d/3705.bugfix | 1 - changelog.d/3707.misc | 1 - changelog.d/3708.feature | 1 - changelog.d/3709.misc | 1 - changelog.d/3710.bugfix | 1 - changelog.d/3712.misc | 1 - changelog.d/3713.bugfix | 1 - changelog.d/3719.bugfix | 1 - 43 files changed, 64 insertions(+), 42 deletions(-) delete mode 100644 changelog.d/1491.feature delete mode 100644 changelog.d/3184.feature delete mode 100644 changelog.d/3423.misc delete mode 100644 changelog.d/3568.feature delete mode 100644 changelog.d/3574.feature delete mode 100644 changelog.d/3589.feature delete mode 100644 changelog.d/3632.misc delete mode 100644 changelog.d/3633.feature delete mode 100644 changelog.d/3647.misc delete mode 100644 changelog.d/3653.feature delete mode 100644 changelog.d/3654.feature delete mode 100644 changelog.d/3655.feature delete mode 100644 changelog.d/3658.bugfix delete mode 100644 changelog.d/3660.misc delete mode 100644 changelog.d/3661.bugfix delete mode 100644 changelog.d/3662.feature delete mode 100644 changelog.d/3664.feature delete mode 100644 changelog.d/3668.misc delete mode 100644 changelog.d/3669.misc delete mode 100644 changelog.d/3670.feature delete mode 100644 changelog.d/3676.bugfix delete mode 100644 changelog.d/3677.bugfix delete mode 100644 changelog.d/3678.misc delete mode 100644 changelog.d/3679.misc delete mode 100644 changelog.d/3681.bugfix delete mode 100644 changelog.d/3684.misc delete mode 100644 changelog.d/3687.feature delete mode 100644 changelog.d/3689.bugfix delete mode 100644 changelog.d/3690.misc delete mode 100644 changelog.d/3692.bugfix delete mode 100644 changelog.d/3694.feature delete mode 100644 changelog.d/3700.bugfix delete mode 100644 changelog.d/3701.bugfix delete mode 100644 changelog.d/3703.removal delete mode 100644 changelog.d/3705.bugfix delete mode 100644 changelog.d/3707.misc delete mode 100644 changelog.d/3708.feature delete mode 100644 changelog.d/3709.misc delete mode 100644 changelog.d/3710.bugfix delete mode 100644 changelog.d/3712.misc delete mode 100644 changelog.d/3713.bugfix delete mode 100644 changelog.d/3719.bugfix diff --git a/CHANGES.md b/CHANGES.md index a299110a6b1b..68ce5d14718c 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,67 @@ +Synapse 0.33.3rc1 (2018-08-21) 
+============================== + +Features +-------- + +- Add support for the SNI extension to federation TLS connections ([\#1491](https://github.com/matrix-org/synapse/issues/1491)) +- Add /_media/r0/config ([\#3184](https://github.com/matrix-org/synapse/issues/3184)) +- speed up /members API and add `at` and `membership` params as per MSC1227 ([\#3568](https://github.com/matrix-org/synapse/issues/3568)) +- implement `summary` block in /sync response as per MSC688 ([\#3574](https://github.com/matrix-org/synapse/issues/3574)) +- Add lazy-loading support to /messages as per MSC1227 ([\#3589](https://github.com/matrix-org/synapse/issues/3589)) +- Add ability to limit number of monthly active users on the server ([\#3633](https://github.com/matrix-org/synapse/issues/3633)) +- Support more federation endpoints on workers ([\#3653](https://github.com/matrix-org/synapse/issues/3653)) +- Basic support for room versioning ([\#3654](https://github.com/matrix-org/synapse/issues/3654)) +- Ability to disable client/server Synapse via conf toggle ([\#3655](https://github.com/matrix-org/synapse/issues/3655)) +- Ability to whitelist specific threepids against monthly active user limiting ([\#3662](https://github.com/matrix-org/synapse/issues/3662)) +- Add some metrics for the appservice and federation event sending loops ([\#3664](https://github.com/matrix-org/synapse/issues/3664)) +- Where server is disabled, block ability for locked out users to read new messages ([\#3670](https://github.com/matrix-org/synapse/issues/3670)) +- set admin uri via config, to be used in error messages where the user should contact the administrator ([\#3687](https://github.com/matrix-org/synapse/issues/3687)) +- Synapse's presence functionality can now be disabled with the "use_presence" configuration option. 
([\#3694](https://github.com/matrix-org/synapse/issues/3694))
+- For resource limit blocked users, prevent writing into rooms ([\#3708](https://github.com/matrix-org/synapse/issues/3708))
+
+
+Bugfixes
+--------
+
+- Fix occasional glitches in the synapse_event_persisted_position metric ([\#3658](https://github.com/matrix-org/synapse/issues/3658))
+- Fix bug on deleting 3pid when using identity servers that don't support unbind API ([\#3661](https://github.com/matrix-org/synapse/issues/3661))
+- Make the tests pass on Twisted < 18.7.0 ([\#3676](https://github.com/matrix-org/synapse/issues/3676))
+- Don’t ship recaptcha_ajax.js, use it directly from Google ([\#3677](https://github.com/matrix-org/synapse/issues/3677))
+- Fixes test_reap_monthly_active_users so it passes under postgres ([\#3681](https://github.com/matrix-org/synapse/issues/3681))
+- Fix mau blocking calculation bug on login ([\#3689](https://github.com/matrix-org/synapse/issues/3689))
+- Fix missing yield in synapse.storage.monthly_active_users.initialise_reserved_users ([\#3692](https://github.com/matrix-org/synapse/issues/3692))
+- Improve HTTP request logging to include all requests ([\#3700](https://github.com/matrix-org/synapse/issues/3700))
+- Avoid timing out requests while we are streaming back the response ([\#3701](https://github.com/matrix-org/synapse/issues/3701))
+- Support more federation endpoints on workers ([\#3705](https://github.com/matrix-org/synapse/issues/3705), [\#3713](https://github.com/matrix-org/synapse/issues/3713))
+- Fix "Starting db txn 'get_all_updated_receipts' from sentinel context" warning ([\#3710](https://github.com/matrix-org/synapse/issues/3710))
+- Fix bug where `state_cache` cache factor ignored environment variables ([\#3719](https://github.com/matrix-org/synapse/issues/3719))
+
+
+Deprecations and Removals
+-------------------------
+
+- The Shared-Secret registration method of the legacy v1/register REST endpoint has been removed. For a replacement, please see [the admin/register API documentation](https://github.com/matrix-org/synapse/blob/master/docs/admin_api/register_api.rst). ([\#3703](https://github.com/matrix-org/synapse/issues/3703))
+
+
+Internal Changes
+----------------
+
+- The test suite now can run under PostgreSQL. ([\#3423](https://github.com/matrix-org/synapse/issues/3423))
+- Refactor HTTP replication endpoints to reduce code duplication ([\#3632](https://github.com/matrix-org/synapse/issues/3632))
+- Tests now correctly execute on Python 3. ([\#3647](https://github.com/matrix-org/synapse/issues/3647))
+- Sytests can now be run inside a Docker container. ([\#3660](https://github.com/matrix-org/synapse/issues/3660))
+- Port over enough to Python 3 to allow the sytests to start. ([\#3668](https://github.com/matrix-org/synapse/issues/3668))
+- Update docker base image from alpine 3.7 to 3.8. ([\#3669](https://github.com/matrix-org/synapse/issues/3669))
+- Rename synapse.util.async to synapse.util.async_helpers to mitigate async becoming a keyword on Python 3.7. ([\#3678](https://github.com/matrix-org/synapse/issues/3678))
+- Synapse's tests are now formatted with the black autoformatter. ([\#3679](https://github.com/matrix-org/synapse/issues/3679))
+- Implemented a new testing base class to reduce test boilerplate.
([\#3684](https://github.com/matrix-org/synapse/issues/3684)) +- Rename MAU prometheus metrics ([\#3690](https://github.com/matrix-org/synapse/issues/3690)) +- add new error type ResourceLimit ([\#3707](https://github.com/matrix-org/synapse/issues/3707)) +- Logcontexts for replication command handlers ([\#3709](https://github.com/matrix-org/synapse/issues/3709)) +- Update admin register API documentation to reference a real user ID. ([\#3712](https://github.com/matrix-org/synapse/issues/3712)) + + Synapse 0.33.2 (2018-08-09) =========================== diff --git a/changelog.d/1491.feature b/changelog.d/1491.feature deleted file mode 100644 index 77b6d6ca09ea..000000000000 --- a/changelog.d/1491.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for the SNI extension to federation TLS connections \ No newline at end of file diff --git a/changelog.d/3184.feature b/changelog.d/3184.feature deleted file mode 100644 index 9f746a57a0fa..000000000000 --- a/changelog.d/3184.feature +++ /dev/null @@ -1 +0,0 @@ -Add /_media/r0/config diff --git a/changelog.d/3423.misc b/changelog.d/3423.misc deleted file mode 100644 index 51768c6d14ef..000000000000 --- a/changelog.d/3423.misc +++ /dev/null @@ -1 +0,0 @@ -The test suite now can run under PostgreSQL. diff --git a/changelog.d/3568.feature b/changelog.d/3568.feature deleted file mode 100644 index 247f02ba4e13..000000000000 --- a/changelog.d/3568.feature +++ /dev/null @@ -1 +0,0 @@ -speed up /members API and add `at` and `membership` params as per MSC1227 diff --git a/changelog.d/3574.feature b/changelog.d/3574.feature deleted file mode 100644 index 87ac32df7249..000000000000 --- a/changelog.d/3574.feature +++ /dev/null @@ -1 +0,0 @@ -implement `summary` block in /sync response as per MSC688 diff --git a/changelog.d/3589.feature b/changelog.d/3589.feature deleted file mode 100644 index a8d7124719e0..000000000000 --- a/changelog.d/3589.feature +++ /dev/null @@ -1 +0,0 @@ -Add lazy-loading support to /messages as per MSC1227 diff --git a/changelog.d/3632.misc b/changelog.d/3632.misc deleted file mode 100644 index 9d64bbe83b65..000000000000 --- a/changelog.d/3632.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor HTTP replication endpoints to reduce code duplication diff --git a/changelog.d/3633.feature b/changelog.d/3633.feature deleted file mode 100644 index 8007a048405b..000000000000 --- a/changelog.d/3633.feature +++ /dev/null @@ -1 +0,0 @@ -Add ability to limit number of monthly active users on the server diff --git a/changelog.d/3647.misc b/changelog.d/3647.misc deleted file mode 100644 index dbc66dae6087..000000000000 --- a/changelog.d/3647.misc +++ /dev/null @@ -1 +0,0 @@ -Tests now correctly execute on Python 3. 
diff --git a/changelog.d/3653.feature b/changelog.d/3653.feature deleted file mode 100644 index 6c5422994f34..000000000000 --- a/changelog.d/3653.feature +++ /dev/null @@ -1 +0,0 @@ -Support more federation endpoints on workers diff --git a/changelog.d/3654.feature b/changelog.d/3654.feature deleted file mode 100644 index 35c95580bc45..000000000000 --- a/changelog.d/3654.feature +++ /dev/null @@ -1 +0,0 @@ -Basic support for room versioning diff --git a/changelog.d/3655.feature b/changelog.d/3655.feature deleted file mode 100644 index 1134e549e7b3..000000000000 --- a/changelog.d/3655.feature +++ /dev/null @@ -1 +0,0 @@ -Ability to disable client/server Synapse via conf toggle diff --git a/changelog.d/3658.bugfix b/changelog.d/3658.bugfix deleted file mode 100644 index 556011a15060..000000000000 --- a/changelog.d/3658.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix occasional glitches in the synapse_event_persisted_position metric diff --git a/changelog.d/3660.misc b/changelog.d/3660.misc deleted file mode 100644 index acd814c27391..000000000000 --- a/changelog.d/3660.misc +++ /dev/null @@ -1 +0,0 @@ -Sytests can now be run inside a Docker container. diff --git a/changelog.d/3661.bugfix b/changelog.d/3661.bugfix deleted file mode 100644 index f2b4703d806b..000000000000 --- a/changelog.d/3661.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug on deleting 3pid when using identity servers that don't support unbind API diff --git a/changelog.d/3662.feature b/changelog.d/3662.feature deleted file mode 100644 index daacef086d03..000000000000 --- a/changelog.d/3662.feature +++ /dev/null @@ -1 +0,0 @@ -Ability to whitelist specific threepids against monthly active user limiting diff --git a/changelog.d/3664.feature b/changelog.d/3664.feature deleted file mode 100644 index 184dde99393d..000000000000 --- a/changelog.d/3664.feature +++ /dev/null @@ -1 +0,0 @@ -Add some metrics for the appservice and federation event sending loops diff --git a/changelog.d/3668.misc b/changelog.d/3668.misc deleted file mode 100644 index f9265addf1a0..000000000000 --- a/changelog.d/3668.misc +++ /dev/null @@ -1 +0,0 @@ -Port over enough to Python 3 to allow the sytests to start. diff --git a/changelog.d/3669.misc b/changelog.d/3669.misc deleted file mode 100644 index fc579ddc607d..000000000000 --- a/changelog.d/3669.misc +++ /dev/null @@ -1 +0,0 @@ -Update docker base image from alpine 3.7 to 3.8. diff --git a/changelog.d/3670.feature b/changelog.d/3670.feature deleted file mode 100644 index ba00f2d2ecb8..000000000000 --- a/changelog.d/3670.feature +++ /dev/null @@ -1 +0,0 @@ -Where server is disabled, block ability for locked out users to read new messages diff --git a/changelog.d/3676.bugfix b/changelog.d/3676.bugfix deleted file mode 100644 index 7b23a2773a55..000000000000 --- a/changelog.d/3676.bugfix +++ /dev/null @@ -1 +0,0 @@ -Make the tests pass on Twisted < 18.7.0 diff --git a/changelog.d/3677.bugfix b/changelog.d/3677.bugfix deleted file mode 100644 index caa551627beb..000000000000 --- a/changelog.d/3677.bugfix +++ /dev/null @@ -1 +0,0 @@ -Don’t ship recaptcha_ajax.js, use it directly from Google diff --git a/changelog.d/3678.misc b/changelog.d/3678.misc deleted file mode 100644 index 0d7c8da64aa7..000000000000 --- a/changelog.d/3678.misc +++ /dev/null @@ -1 +0,0 @@ -Rename synapse.util.async to synapse.util.async_helpers to mitigate async becoming a keyword on Python 3.7. 
diff --git a/changelog.d/3679.misc b/changelog.d/3679.misc deleted file mode 100644 index 1de0a0f2b41c..000000000000 --- a/changelog.d/3679.misc +++ /dev/null @@ -1 +0,0 @@ -Synapse's tests are now formatted with the black autoformatter. diff --git a/changelog.d/3681.bugfix b/changelog.d/3681.bugfix deleted file mode 100644 index d18a69cd0c58..000000000000 --- a/changelog.d/3681.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixes test_reap_monthly_active_users so it passes under postgres diff --git a/changelog.d/3684.misc b/changelog.d/3684.misc deleted file mode 100644 index 4c013263c453..000000000000 --- a/changelog.d/3684.misc +++ /dev/null @@ -1 +0,0 @@ -Implemented a new testing base class to reduce test boilerplate. diff --git a/changelog.d/3687.feature b/changelog.d/3687.feature deleted file mode 100644 index 64b89f6411ba..000000000000 --- a/changelog.d/3687.feature +++ /dev/null @@ -1 +0,0 @@ -set admin uri via config, to be used in error messages where the user should contact the administrator diff --git a/changelog.d/3689.bugfix b/changelog.d/3689.bugfix deleted file mode 100644 index 934d039836ad..000000000000 --- a/changelog.d/3689.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix mau blocking calulation bug on login diff --git a/changelog.d/3690.misc b/changelog.d/3690.misc deleted file mode 100644 index 710add02433d..000000000000 --- a/changelog.d/3690.misc +++ /dev/null @@ -1 +0,0 @@ -Rename MAU prometheus metrics diff --git a/changelog.d/3692.bugfix b/changelog.d/3692.bugfix deleted file mode 100644 index f44e13dca154..000000000000 --- a/changelog.d/3692.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix missing yield in synapse.storage.monthly_active_users.initialise_reserved_users diff --git a/changelog.d/3694.feature b/changelog.d/3694.feature deleted file mode 100644 index 916a342ff48f..000000000000 --- a/changelog.d/3694.feature +++ /dev/null @@ -1 +0,0 @@ -Synapse's presence functionality can now be disabled with the "use_presence" configuration option. diff --git a/changelog.d/3700.bugfix b/changelog.d/3700.bugfix deleted file mode 100644 index 492cce1dfc33..000000000000 --- a/changelog.d/3700.bugfix +++ /dev/null @@ -1 +0,0 @@ -Improve HTTP request logging to include all requests \ No newline at end of file diff --git a/changelog.d/3701.bugfix b/changelog.d/3701.bugfix deleted file mode 100644 index c22de3453747..000000000000 --- a/changelog.d/3701.bugfix +++ /dev/null @@ -1 +0,0 @@ -Avoid timing out requests while we are streaming back the response diff --git a/changelog.d/3703.removal b/changelog.d/3703.removal deleted file mode 100644 index d13ce5adb408..000000000000 --- a/changelog.d/3703.removal +++ /dev/null @@ -1 +0,0 @@ -The Shared-Secret registration method of the legacy v1/register REST endpoint has been removed. For a replacement, please see [the admin/register API documentation](https://github.com/matrix-org/synapse/blob/master/docs/admin_api/register_api.rst). 
diff --git a/changelog.d/3705.bugfix b/changelog.d/3705.bugfix deleted file mode 100644 index 6c5422994f34..000000000000 --- a/changelog.d/3705.bugfix +++ /dev/null @@ -1 +0,0 @@ -Support more federation endpoints on workers diff --git a/changelog.d/3707.misc b/changelog.d/3707.misc deleted file mode 100644 index 8123ca65430b..000000000000 --- a/changelog.d/3707.misc +++ /dev/null @@ -1 +0,0 @@ -add new error type ResourceLimit diff --git a/changelog.d/3708.feature b/changelog.d/3708.feature deleted file mode 100644 index 2f146ba62b34..000000000000 --- a/changelog.d/3708.feature +++ /dev/null @@ -1 +0,0 @@ -For resource limit blocked users, prevent writing into rooms diff --git a/changelog.d/3709.misc b/changelog.d/3709.misc deleted file mode 100644 index bbda357d4481..000000000000 --- a/changelog.d/3709.misc +++ /dev/null @@ -1 +0,0 @@ -Logcontexts for replication command handlers diff --git a/changelog.d/3710.bugfix b/changelog.d/3710.bugfix deleted file mode 100644 index c28e17766770..000000000000 --- a/changelog.d/3710.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix "Starting db txn 'get_all_updated_receipts' from sentinel context" warning \ No newline at end of file diff --git a/changelog.d/3712.misc b/changelog.d/3712.misc deleted file mode 100644 index 30f8c2af21c0..000000000000 --- a/changelog.d/3712.misc +++ /dev/null @@ -1 +0,0 @@ -Update admin register API documentation to reference a real user ID. diff --git a/changelog.d/3713.bugfix b/changelog.d/3713.bugfix deleted file mode 100644 index 6c5422994f34..000000000000 --- a/changelog.d/3713.bugfix +++ /dev/null @@ -1 +0,0 @@ -Support more federation endpoints on workers diff --git a/changelog.d/3719.bugfix b/changelog.d/3719.bugfix deleted file mode 100644 index 095d9288e897..000000000000 --- a/changelog.d/3719.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where `state_cache` cache factor ignored environment variables From 55e6bdf28798153de5c2a6cf9bf0a6618f59168a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 20 Aug 2018 18:20:07 +0100 Subject: [PATCH 164/173] Robustness fix for logcontext filter Make the logcontext filter not explode if it somehow ends up with a logcontext of None, since that infinite-loops the whole logging system. --- synapse/util/logcontext.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 07e83fadda0c..848765e53041 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -385,7 +385,13 @@ def filter(self, record): context = LoggingContext.current_context() for key, value in self.defaults.items(): setattr(record, key, value) - context.copy_to(record) + + # context should never be None, but if it somehow ends up being, then + # we end up in a death spiral of infinite loops, so let's check, for + # robustness' sake. + if context is not None: + context.copy_to(record) + return True From be6527325a829e7d5e4540d7a1983a6e6fda2c0f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 20 Aug 2018 18:21:10 +0100 Subject: [PATCH 165/173] Fix exceptions when a connection is closed before we read the headers This fixes bugs introduced in #3700, by making sure that we behave sanely when an incoming connection is closed before the headers are read. 
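The failure mode both of these logcontext patches guard against is the same: a logging filter (or request-teardown hook) that assumes a context object is always present raises when it is None, and because that error is itself reported through the logging machinery, the failure can feed back into the code that caused it. Below is a minimal, self-contained sketch of the defensive pattern; the RequestContext and ContextFilter names and the thread-local storage are illustrative stand-ins for Synapse's actual logcontext types, not its real API.

    import logging
    import threading

    _local = threading.local()  # stand-in for synapse's per-thread context storage


    class RequestContext:
        """Toy context carrying per-request fields (e.g. a request id)."""

        def __init__(self, **fields):
            self.fields = fields

        def copy_to(self, record):
            for key, value in self.fields.items():
                setattr(record, key, value)


    class ContextFilter(logging.Filter):
        """Copies fields from the current request context onto each log record."""

        def __init__(self, **defaults):
            super().__init__()
            self.defaults = defaults

        def filter(self, record):
            for key, value in self.defaults.items():
                setattr(record, key, value)

            context = getattr(_local, "context", None)
            # The context should never be None in normal operation, but if a
            # record is ever emitted with no context set, raising from inside
            # a logging filter would itself be logged, re-entering this filter
            # and looping. An explicit None check keeps the logging system alive.
            if context is not None:
                context.copy_to(record)
            return True

With this guard, a record logged outside any request (for example from a connection torn down before the request was read, as in the site.py change below) passes through with only the defaults set, instead of taking down the whole logging pipeline.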
--- synapse/http/site.py | 8 +++++++- synapse/util/logcontext.py | 4 +++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/synapse/http/site.py b/synapse/http/site.py index ad2a98468e62..88ed3714f9be 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -182,7 +182,7 @@ def connectionLost(self, reason): # the client disconnects. with PreserveLoggingContext(self.logcontext): logger.warn( - "Error processing request: %s %s", reason.type, reason.value, + "Error processing request %r: %s %s", self, reason.type, reason.value, ) if not self._is_processing: @@ -219,6 +219,12 @@ def _finished_processing(self): """Log the completion of this request and update the metrics """ + if self.logcontext is None: + # this can happen if the connection closed before we read the + # headers (so render was never called). In that case we'll already + # have logged a warning, so just bail out. + return + usage = self.logcontext.get_resource_usage() if self._processing_finished_time is None: diff --git a/synapse/util/logcontext.py b/synapse/util/logcontext.py index 848765e53041..a0c2d37610ea 100644 --- a/synapse/util/logcontext.py +++ b/synapse/util/logcontext.py @@ -402,7 +402,9 @@ class PreserveLoggingContext(object): __slots__ = ["current_context", "new_context", "has_parent"] - def __init__(self, new_context=LoggingContext.sentinel): + def __init__(self, new_context=None): + if new_context is None: + new_context = LoggingContext.sentinel self.new_context = new_context def __enter__(self): From 012d612f9d3c8ca58538fffe4207309bf1644cd8 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 20 Aug 2018 18:26:27 +0100 Subject: [PATCH 166/173] changelog --- changelog.d/3723.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3723.bugfix diff --git a/changelog.d/3723.bugfix b/changelog.d/3723.bugfix new file mode 100644 index 000000000000..7c21083691f3 --- /dev/null +++ b/changelog.d/3723.bugfix @@ -0,0 +1 @@ +Fix bug in v0.33.3rc1 which caused infinite loops and OOMs From 3b5b64ac99c1a253cc24b6a1063247115ba18cd3 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 21 Aug 2018 03:48:55 +1000 Subject: [PATCH 167/173] changelog --- CHANGES.md | 9 +++++++++ changelog.d/3723.bugfix | 1 - synapse/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/3723.bugfix diff --git a/CHANGES.md b/CHANGES.md index 68ce5d14718c..df01178971ca 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.33.3rc2 (2018-08-21) +============================== + +Bugfixes +-------- + +- Fix bug in v0.33.3rc1 which caused infinite loops and OOMs ([\#3723](https://github.com/matrix-org/synapse/issues/3723)) + + Synapse 0.33.3rc1 (2018-08-21) ============================== diff --git a/changelog.d/3723.bugfix b/changelog.d/3723.bugfix deleted file mode 100644 index 7c21083691f3..000000000000 --- a/changelog.d/3723.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug in v0.33.3rc1 which caused infinite loops and OOMs diff --git a/synapse/__init__.py b/synapse/__init__.py index 5175a1161d5a..252c49ca821c 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.33.3rc1" +__version__ = "0.33.3rc2" From f7baff6f7b4af039254ec16e9272b90adb58dab3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 21 Aug 2018 22:41:41 +0100 Subject: [PATCH 168/173] Fix 500 error from /consent form Fixes #3731 --- synapse/rest/consent/consent_resource.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 147ff7d79b01..52954ada12c8 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -175,7 +175,7 @@ def _async_render_POST(self, request): """ version = parse_string(request, "v", required=True) username = parse_string(request, "u", required=True) - userhmac = parse_string(request, "h", required=True) + userhmac = parse_string(request, "h", required=True, encoding=None) self._check_hash(username, userhmac) @@ -210,9 +210,18 @@ def _render_template(self, request, template_name, **template_args): finish_request(request) def _check_hash(self, userid, userhmac): + """ + Args: + userid (unicode): + userhmac (bytes): + + Raises: + SynapseError if the hash doesn't match + + """ want_mac = hmac.new( key=self._hmac_secret, - msg=userid, + msg=userid.encode('utf-8'), digestmod=sha256, ).hexdigest() From f7bf181a909384b649d2b615569921ae6add0505 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 21 Aug 2018 23:14:25 +0100 Subject: [PATCH 169/173] fix another consent encoding fail --- synapse/rest/consent/consent_resource.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/rest/consent/consent_resource.py b/synapse/rest/consent/consent_resource.py index 52954ada12c8..7362e1858db6 100644 --- a/synapse/rest/consent/consent_resource.py +++ b/synapse/rest/consent/consent_resource.py @@ -140,7 +140,7 @@ def _async_render_GET(self, request): version = parse_string(request, "v", default=self._default_consent_version) username = parse_string(request, "u", required=True) - userhmac = parse_string(request, "h", required=True) + userhmac = parse_string(request, "h", required=True, encoding=None) self._check_hash(username, userhmac) From afb4b490a417aec9223cfd1dcf5aa36c0fc5a6d1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 21 Aug 2018 23:19:14 +0100 Subject: [PATCH 170/173] changelog --- changelog.d/3732.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3732.bugfix diff --git a/changelog.d/3732.bugfix b/changelog.d/3732.bugfix new file mode 100644 index 000000000000..638b6334b024 --- /dev/null +++ b/changelog.d/3732.bugfix @@ -0,0 +1 @@ +Fix bug introduced in v0.33.3rc1 which made the ToS give a 500 error \ No newline at end of file From 9b7d9d8ba056393d73e924785f479c7fc601e868 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 22 Aug 2018 14:06:20 +0100 Subject: [PATCH 171/173] Update attributions and PR links in changelog --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index df01178971ca..57f9d9a826e9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -13,7 +13,7 @@ Synapse 0.33.3rc1 (2018-08-21) Features -------- -- Add support for the SNI extension to federation TLS connections ([\#1491](https://github.com/matrix-org/synapse/issues/1491)) +- Add support for the SNI extension to federation TLS connections. Thanks to @vojeroen! 
([\#3439](https://github.com/matrix-org/synapse/issues/3439)) - Add /_media/r0/config ([\#3184](https://github.com/matrix-org/synapse/issues/3184)) - speed up /members API and add `at` and `membership` params as per MSC1227 ([\#3568](https://github.com/matrix-org/synapse/issues/3568)) - implement `summary` block in /sync response as per MSC688 ([\#3574](https://github.com/matrix-org/synapse/issues/3574)) @@ -97,7 +97,7 @@ Features Bugfixes -------- -- Make /directory/list API return 404 for room not found instead of 400 ([\#2952](https://github.com/matrix-org/synapse/issues/2952)) +- Make /directory/list API return 404 for room not found instead of 400. Thanks to @fuzzmz! ([\#3620](https://github.com/matrix-org/synapse/issues/3620)) - Default inviter_display_name to mxid for email invites ([\#3391](https://github.com/matrix-org/synapse/issues/3391)) - Don't generate TURN credentials if no TURN config options are set ([\#3514](https://github.com/matrix-org/synapse/issues/3514)) - Correctly announce deleted devices over federation ([\#3520](https://github.com/matrix-org/synapse/issues/3520)) From 4e5a4549b68d3cbc8e74dda9025321c4b40b0222 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 22 Aug 2018 14:07:10 +0100 Subject: [PATCH 172/173] bump version to 0.33.3 --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 252c49ca821c..e62901b7614b 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.3rc2" +__version__ = "0.33.3" From 3504982cb7918a42881618cb20e40257d3f9e4f9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 22 Aug 2018 14:07:44 +0100 Subject: [PATCH 173/173] changelog for 0.33.3 --- CHANGES.md | 9 +++++++++ changelog.d/3732.bugfix | 1 - 2 files changed, 9 insertions(+), 1 deletion(-) delete mode 100644 changelog.d/3732.bugfix diff --git a/CHANGES.md b/CHANGES.md index 57f9d9a826e9..a35f5aebc795 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.33.3 (2018-08-22) +=========================== + +Bugfixes +-------- + +- Fix bug introduced in v0.33.3rc1 which made the ToS give a 500 error ([\#3732](https://github.com/matrix-org/synapse/issues/3732)) + + Synapse 0.33.3rc2 (2018-08-21) ============================== diff --git a/changelog.d/3732.bugfix b/changelog.d/3732.bugfix deleted file mode 100644 index 638b6334b024..000000000000 --- a/changelog.d/3732.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug introduced in v0.33.3rc1 which made the ToS give a 500 error \ No newline at end of file
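Both consent fixes above come down to Python 3's bytes/str split: hmac.new() rejects text strings, so the user ID must be encoded before hashing, and the client-supplied MAC must be read as raw bytes (the encoding=None argument to parse_string) so it can be compared against the encoded hex digest. Below is a minimal standalone sketch of the check under those assumptions; the function names are illustrative rather than Synapse's, and the constant-time comparison is the standard-library idiom, not code shown in the patch.

    import hmac
    from hashlib import sha256


    def make_consent_mac(hmac_secret, userid):
        # hmac.new() raises TypeError on Python 3 if key or msg is str,
        # hence the explicit UTF-8 encode of the user ID.
        return hmac.new(
            key=hmac_secret,
            msg=userid.encode("utf-8"),
            digestmod=sha256,
        ).hexdigest().encode("ascii")


    def check_consent_mac(hmac_secret, userid, userhmac):
        # userhmac is the raw bytes taken from the "h" query parameter;
        # compare in constant time to avoid a timing side channel.
        want_mac = make_consent_mac(hmac_secret, userid)
        return hmac.compare_digest(want_mac, userhmac)


    secret = b"form_secret"  # hypothetical shared secret from the homeserver config
    mac = make_consent_mac(secret, "@user:example.com")
    assert check_consent_mac(secret, "@user:example.com", mac)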