Move logging utilities out of the side drawer of util/ and into logging/ (#5606)
commit 463b072b12
parent cb8d568cf9
@@ -0,0 +1 @@
+Move logging code out of `synapse.util` and into `synapse.logging`.
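For code that imports these helpers, the change is purely an import-path move: the hunks below drop imports from `synapse.util.logcontext` / `synapse.util.logutils` and add the same names from `synapse.logging.context` / `synapse.logging.utils`. A minimal before/after sketch (module paths taken from the hunks in this diff; the specific names imported vary by file):

    # before this commit
    from synapse.util.logcontext import LoggingContext, run_in_background
    from synapse.util.logutils import log_function

    # after this commit
    from synapse.logging.context import LoggingContext, run_in_background
    from synapse.logging.utils import log_function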
@@ -12,7 +12,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
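The `():` key in this YAML fragment is standard `logging.config.dictConfig` syntax: it names a factory for the filter, and the remaining keys are passed to that factory as keyword arguments. A minimal sketch of the equivalent configuration done directly from Python (the handler and root wiring here are illustrative, not taken from this commit; only the filter entry mirrors the YAML above):

    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "filters": {
            "context": {
                # factory named by the "()" key, as in the YAML above
                "()": "synapse.logging.context.LoggingContextFilter",
                "request": "",
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "filters": ["context"],
            }
        },
        "root": {"handlers": ["console"], "level": "INFO"},
    })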
@@ -36,7 +36,7 @@ from synapse.util import origin_from_ucid
 
 from synapse.app.homeserver import SynapseHomeServer
 
-# from synapse.util.logutils import log_function
+# from synapse.logging.utils import log_function
 
 from twisted.internet import reactor, defer
 from twisted.python import log
@@ -8,7 +8,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
@@ -3,6 +3,9 @@ matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium
   [ Silke Hofstra ]
   * Include systemd-python to allow logging to the systemd journal.
 
+  [ Amber Brown ]
+  * Update logging config defaults to match API changes in Synapse.
+
  -- Silke Hofstra <silke@slxh.eu>  Wed, 29 May 2019 09:45:29 +0200
 
 matrix-synapse-py3 (1.0.0) stable; urgency=medium
@@ -7,7 +7,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
@@ -6,7 +6,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
@@ -1,4 +1,4 @@
-Log contexts
+Log Contexts
 ============
 
 .. contents::
@@ -12,7 +12,7 @@ record.
 Logcontexts are also used for CPU and database accounting, so that we can track
 which requests were responsible for high CPU use or database activity.
 
-The ``synapse.util.logcontext`` module provides a facilities for managing the
+The ``synapse.logging.context`` module provides a facilities for managing the
 current log context (as well as providing the ``LoggingContextFilter`` class).
 
 Deferreds make the whole thing complicated, so this document describes how it
@@ -27,19 +27,19 @@ found them:
 
 .. code:: python
 
-    from synapse.util import logcontext  # omitted from future snippets
+    from synapse.logging import context  # omitted from future snippets
 
     def handle_request(request_id):
-        request_context = logcontext.LoggingContext()
+        request_context = context.LoggingContext()
 
-        calling_context = logcontext.LoggingContext.current_context()
-        logcontext.LoggingContext.set_current_context(request_context)
+        calling_context = context.LoggingContext.current_context()
+        context.LoggingContext.set_current_context(request_context)
         try:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
         finally:
-            logcontext.LoggingContext.set_current_context(calling_context)
+            context.LoggingContext.set_current_context(calling_context)
 
     def do_request_handling():
         logger.debug("phew")  # this will be logged against request_id
@@ -51,7 +51,7 @@ written much more succinctly as:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
@@ -74,7 +74,7 @@ blocking operation, and returns a deferred:
 
     @defer.inlineCallbacks
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             yield do_request_handling()
             logger.debug("finished")
@@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from
 external code. We need to make it follow our rules.
 
 The easy way to do it is with a combination of ``defer.inlineCallbacks``, and
-``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
+``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
 which returns a deferred which will run its callbacks after a given number of
 seconds. That might look like:
 
@@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with
 This technique works equally for external functions which return deferreds,
 or deferreds we have made ourselves.
 
-You can also use ``logcontext.make_deferred_yieldable``, which just does the
+You can also use ``context.make_deferred_yieldable``, which just does the
 boilerplate for you, so the above could be written:
 
 .. code:: python
 
     def sleep(seconds):
-        return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds))
+        return context.make_deferred_yieldable(get_sleep_deferred(seconds))
 
 
 Fire-and-forget
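Neither ``get_sleep_deferred`` nor the ``PreserveLoggingContext`` version of ``sleep`` is touched by this commit, so they do not appear in the hunks; the following is a sketch of what the surrounding documentation describes, written against the renamed module (the helper bodies are illustrative, not the file's exact code):

    from twisted.internet import defer, reactor

    from synapse.logging.context import PreserveLoggingContext

    def get_sleep_deferred(seconds):
        # A deferred that does not follow the logcontext rules: its
        # callbacks run in whatever context the reactor happens to be in.
        d = defer.Deferred()
        reactor.callLater(seconds, d.callback, None)
        return d

    @defer.inlineCallbacks
    def sleep(seconds):
        # Drop into the sentinel context while parked, so the calling
        # request is not billed for the wait; the context manager restores
        # the calling context once the deferred fires.
        with PreserveLoggingContext():
            yield get_sleep_deferred(seconds)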
@@ -279,7 +279,7 @@ Obviously that option means that the operations done in
 that might be fixed by setting a different logcontext via a ``with
 LoggingContext(...)`` in ``background_operation``).
 
-The second option is to use ``logcontext.run_in_background``, which wraps a
+The second option is to use ``context.run_in_background``, which wraps a
 function so that it doesn't reset the logcontext even when it returns an
 incomplete deferred, and adds a callback to the returned deferred to reset the
 logcontext. In other words, it turns a function that follows the Synapse rules
@@ -293,7 +293,7 @@ It can be used like this:
     def do_request_handling():
         yield foreground_operation()
 
-        logcontext.run_in_background(background_operation)
+        context.run_in_background(background_operation)
 
         # this will now be logged against the request context
         logger.debug("Request handling complete")
@@ -332,7 +332,7 @@ gathered:
             result = yield defer.gatherResults([d1, d2])
 
 In this case particularly, though, option two, of using
-``logcontext.preserve_fn`` almost certainly makes more sense, so that
+``context.preserve_fn`` almost certainly makes more sense, so that
 ``operation1`` and ``operation2`` are both logged against the original
 logcontext. This looks like:
 
@@ -340,8 +340,8 @@ logcontext. This looks like:
 
     @defer.inlineCallbacks
     def do_request_handling():
-        d1 = logcontext.preserve_fn(operation1)()
-        d2 = logcontext.preserve_fn(operation2)()
+        d1 = context.preserve_fn(operation1)()
+        d2 = context.preserve_fn(operation2)()
 
         with PreserveLoggingContext():
             result = yield defer.gatherResults([d1, d2])
@@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             d = do_request_handling()
 
@@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy.
 
 The business of a Deferred which runs its callbacks in the original logcontext
 isn't hard to achieve — we have it today, in the shape of
-``logcontext._PreservingContextDeferred``:
+``context._PreservingContextDeferred``:
 
 .. code:: python
 
@@ -27,7 +27,7 @@ from twisted.protocols.tls import TLSMemoryBIOFactory
 import synapse
 from synapse.app import check_bind_error
 from synapse.crypto import context_factory
-from synapse.util import PreserveLoggingContext
+from synapse.logging.context import PreserveLoggingContext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string
@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
@@ -36,7 +37,6 @@ from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -64,7 +65,6 @@ from synapse.rest.client.versions import VersionsRestServlet
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -59,7 +60,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -48,7 +49,6 @@ from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation import send_queue
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -44,7 +45,6 @@ from synapse.storage.engines import create_engine
 from synapse.types import ReadReceipt
 from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -29,6 +29,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -41,7 +42,6 @@ from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -54,6 +54,7 @@ from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -72,7 +73,6 @@ from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
 from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.module_loader import load_module
 from synapse.util.rlimit import change_resource_limit
@@ -27,6 +27,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -40,7 +41,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.media_repository import MediaRepositoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import __func__
@@ -38,7 +39,6 @@ from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -31,6 +31,7 @@ from synapse.config.logger import setup_logging
 from synapse.handlers.presence import PresenceHandler, get_interested_parties
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
@@ -57,7 +58,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.presence import UserPresenceState
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.stringutils import random_string
 from synapse.util.versionstring import get_version_string
@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -46,7 +47,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 
@@ -53,8 +53,8 @@ import logging
 from twisted.internet import defer
 
 from synapse.appservice import ApplicationServiceState
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 
@@ -24,7 +24,7 @@ from twisted.logger import STDLibLogObserver, globalLogBeginner
 
 import synapse
 from synapse.app import _base as appbase
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 from synapse.util.versionstring import get_version_string
 
 from ._base import Config
@@ -40,7 +40,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:
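For orientation, the filter being renamed here is a standard ``logging.Filter``: it stamps each record with the current request id so formatters can reference ``%(request)s``. A sketch of that idea with a stand-in class (this is not Synapse's implementation; the real one reads the id from the current ``LoggingContext`` rather than a module-level variable):

    import logging

    _current_request = "sentinel"  # stand-in for the per-request logcontext state

    class RequestTagFilter(logging.Filter):
        def __init__(self, request=""):
            super(RequestTagFilter, self).__init__()
            self._default = request

        def filter(self, record):
            # copy the request id onto the record so %(request)s can be formatted
            record.request = _current_request or self._default
            return True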
@@ -44,15 +44,16 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
-from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async_helpers import yieldable_gather_results
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
+    make_deferred_yieldable,
     preserve_fn,
     run_in_background,
 )
+from synapse.storage.keys import FetchKeyResult
+from synapse.util import unwrapFirstError
+from synapse.util.async_helpers import yieldable_gather_results
 from synapse.util.metrics import Measure
 from synapse.util.retryutils import NotRetryingDestination
 
@@ -140,7 +141,7 @@ class Keyring(object):
         """
         req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
         requests = (req,)
-        return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0])
+        return make_deferred_yieldable(self._verify_objects(requests)[0])
 
     def verify_json_objects_for_server(self, server_and_json):
         """Bulk verifies signatures of json objects, bulk fetching keys as
@@ -557,7 +558,7 @@ class BaseV2KeyFetcher(object):
 
         signed_key_json_bytes = encode_canonical_json(signed_key_json)
 
-        yield logcontext.make_deferred_yieldable(
+        yield make_deferred_yieldable(
             defer.gatherResults(
                 [
                     run_in_background(
@@ -612,7 +613,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
 
             defer.returnValue({})
 
-        results = yield logcontext.make_deferred_yieldable(
+        results = yield make_deferred_yieldable(
             defer.gatherResults(
                 [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
@@ -19,7 +19,7 @@ from frozendict import frozendict
 
 from twisted.internet import defer
 
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 
 
 class EventContext(object):
@@ -27,8 +27,14 @@ from synapse.crypto.event_signing import check_event_content_hash
 from synapse.events import event_type_from_format_version
 from synapse.events.utils import prune_event
 from synapse.http.servlet import assert_params_in_dict
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    preserve_fn,
+)
 from synapse.types import get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -73,7 +79,7 @@ class FederationBase(object):
         @defer.inlineCallbacks
         def handle_check_result(pdu, deferred):
             try:
-                res = yield logcontext.make_deferred_yieldable(deferred)
+                res = yield make_deferred_yieldable(deferred)
             except SynapseError:
                 res = None
 
@@ -102,10 +108,10 @@ class FederationBase(object):
 
             defer.returnValue(res)
 
-        handle = logcontext.preserve_fn(handle_check_result)
+        handle = preserve_fn(handle_check_result)
         deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]
 
-        valid_pdus = yield logcontext.make_deferred_yieldable(
+        valid_pdus = yield make_deferred_yieldable(
             defer.gatherResults(deferreds2, consumeErrors=True)
         ).addErrback(unwrapFirstError)
 
@@ -115,7 +121,7 @@ class FederationBase(object):
         defer.returnValue([p for p in valid_pdus if p])
 
     def _check_sigs_and_hash(self, room_version, pdu):
-        return logcontext.make_deferred_yieldable(
+        return make_deferred_yieldable(
             self._check_sigs_and_hashes(room_version, [pdu])[0]
         )
 
@@ -133,14 +139,14 @@ class FederationBase(object):
              * returns a redacted version of the event (if the signature
                matched but the hash did not)
              * throws a SynapseError if the signature check failed.
-        The deferreds run their callbacks in the sentinel logcontext.
+        The deferreds run their callbacks in the sentinel
         """
         deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus)
 
-        ctx = logcontext.LoggingContext.current_context()
+        ctx = LoggingContext.current_context()
 
         def callback(_, pdu):
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
                     # let's try to distinguish between failures because the event was
                     # redacted (which are somewhat expected) vs actual ball-tampering
@@ -178,7 +184,7 @@ class FederationBase(object):
 
         def errback(failure, pdu):
             failure.trap(SynapseError)
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 logger.warn(
                     "Signature check failed for %s: %s",
                     pdu.event_id,
@@ -39,10 +39,10 @@ from synapse.api.room_versions import (
 )
 from synapse.events import builder, room_version_to_event_format
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.logging.utils import log_function
+from synapse.util import unwrapFirstError
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)
@@ -207,7 +207,7 @@ class FederationClient(FederationBase):
         ]
 
         # FIXME: We should handle signature failures more gracefully.
-        pdus[:] = yield logcontext.make_deferred_yieldable(
+        pdus[:] = yield make_deferred_yieldable(
             defer.gatherResults(
                 self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
             ).addErrback(unwrapFirstError)
@@ -42,6 +42,8 @@ from synapse.federation.federation_base import FederationBase, event_from_pdu_json
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.logging.context import nested_logging_context
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationFederationSendEduRestServlet,
     ReplicationGetQueryRestServlet,
@@ -50,8 +52,6 @@ from synapse.types import get_domain_from_id
 from synapse.util import glob_to_regex
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import nested_logging_context
-from synapse.util.logutils import log_function
 
 # when processing incoming transactions, we try to handle multiple rooms in
 # parallel, up to this limit.
@@ -23,7 +23,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 
@@ -26,6 +26,11 @@ from synapse.federation.sender.per_destination_queue import PerDestinationQueue
 from synapse.federation.sender.transaction_manager import TransactionManager
 from synapse.federation.units import Edu
 from synapse.handlers.presence import get_interested_remotes
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    preserve_fn,
+    run_in_background,
+)
 from synapse.metrics import (
     LaterGauge,
     event_processing_loop_counter,
@@ -33,7 +38,6 @@ from synapse.metrics import (
     events_processed_counter,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.metrics import measure_func
 
 logger = logging.getLogger(__name__)
@@ -210,10 +214,10 @@ class FederationSender(object):
             for event in events:
                 events_by_room.setdefault(event.room_id, []).append(event)
 
-            yield logcontext.make_deferred_yieldable(
+            yield make_deferred_yieldable(
                 defer.gatherResults(
                     [
-                        logcontext.run_in_background(handle_room_events, evs)
+                        run_in_background(handle_room_events, evs)
                         for evs in itervalues(events_by_room)
                     ],
                     consumeErrors=True,
@@ -360,7 +364,7 @@ class FederationSender(object):
         for queue in queues:
             queue.flush_read_receipts_for_room(room_id)
 
-    @logcontext.preserve_fn  # the caller should not yield on this
+    @preserve_fn  # the caller should not yield on this
     @defer.inlineCallbacks
     def send_presence(self, states):
         """Send the new presence states to the appropriate destinations.
@@ -22,7 +22,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import Membership
 from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 
@@ -36,8 +36,8 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string_from_args,
 )
+from synapse.logging.context import run_in_background
 from synapse.types import ThirdPartyInstanceID, get_domain_from_id
-from synapse.util.logcontext import run_in_background
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string
 
@@ -43,9 +43,9 @@ from signedjson.sign import sign_json
 from twisted.internet import defer
 
 from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 
@@ -22,10 +22,10 @@ from email.mime.text import MIMEText
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import UserID
 from synapse.util import stringutils
-from synapse.util.logcontext import make_deferred_yieldable
 
 try:
     from synapse.push.mailer import load_jinja2_templates
@@ -23,13 +23,13 @@ from twisted.internet import defer
 
 import synapse
 from synapse.api.constants import EventTypes
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import (
     event_processing_loop_counter,
     event_processing_loop_room_count,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util import log_failure
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
@@ -36,9 +36,9 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.ratelimiting import Ratelimiter
+from synapse.logging.context import defer_to_thread
 from synapse.module_api import ModuleApi
 from synapse.types import UserID
-from synapse.util import logcontext
 from synapse.util.caches.expiringcache import ExpiringCache
 
 from ._base import BaseHandler
@@ -987,7 +987,7 @@ class AuthHandler(BaseHandler):
                 bcrypt.gensalt(self.bcrypt_rounds),
             ).decode("ascii")
 
-        return logcontext.defer_to_thread(self.hs.get_reactor(), _do_hash)
+        return defer_to_thread(self.hs.get_reactor(), _do_hash)
 
     def validate_hash(self, password, stored_hash):
         """Validates that self.hash(password) == stored_hash.
@@ -1013,7 +1013,7 @@ class AuthHandler(BaseHandler):
             if not isinstance(stored_hash, bytes):
                 stored_hash = stored_hash.encode("ascii")
 
-            return logcontext.defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
+            return defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
         else:
            return defer.succeed(False)
 
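These two call sites show the pattern the renamed ``defer_to_thread`` supports: push blocking password-hashing work onto the reactor's thread pool and get back a deferred that follows the logcontext rules. A standalone sketch of the same pattern (``check_password`` and its body are illustrative stand-ins, not Synapse's code; only ``defer_to_thread`` and its ``(reactor, fn)`` calling convention come from the diff):

    import hashlib

    from twisted.internet import defer, reactor

    from synapse.logging.context import defer_to_thread

    @defer.inlineCallbacks
    def check_password(password, stored_hash):
        def _do_validate_hash():
            # blocking, CPU-bound work runs off the reactor thread
            return hashlib.sha256(password.encode("utf8")).hexdigest() == stored_hash

        valid = yield defer_to_thread(reactor, _do_validate_hash)
        defer.returnValue(valid)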
@@ -23,8 +23,8 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 
 from synapse.api.errors import CodeMessageException, FederationDeniedError, SynapseError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)
@@ -21,8 +21,8 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, SynapseError
 from synapse.events import EventBase
+from synapse.logging.utils import log_function
 from synapse.types import UserID
-from synapse.util.logutils import log_function
 from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler
@@ -45,6 +45,13 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.event_auth import auth_types_for_event
 from synapse.events.validator import EventValidator
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    nested_logging_context,
+    preserve_fn,
+    run_in_background,
+)
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationCleanRoomRestServlet,
     ReplicationFederationSendEventsRestServlet,
@@ -52,10 +59,9 @@ from synapse.replication.http.federation import (
 from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import StateResolutionStore, resolve_events_with_store
 from synapse.types import UserID, get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.visibility import filter_events_for_server
 
@@ -338,7 +344,7 @@ class FederationHandler(BaseHandler):
 
                 room_version = yield self.store.get_room_version(room_id)
 
-                with logcontext.nested_logging_context(p):
+                with nested_logging_context(p):
                     # note that if any of the missing prevs share missing state or
                     # auth events, the requests to fetch those events are deduped
                     # by the get_pdu_cache in federation_client.
@@ -532,7 +538,7 @@ class FederationHandler(BaseHandler):
                         event_id,
                         ev.event_id,
                     )
-                    with logcontext.nested_logging_context(ev.event_id):
+                    with nested_logging_context(ev.event_id):
                         try:
                             yield self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                         except FederationError as e:
@ -725,10 +731,10 @@ class FederationHandler(BaseHandler):
|
||||||
missing_auth - failed_to_fetch,
|
missing_auth - failed_to_fetch,
|
||||||
)
|
)
|
||||||
|
|
||||||
results = yield logcontext.make_deferred_yieldable(
|
results = yield make_deferred_yieldable(
|
||||||
defer.gatherResults(
|
defer.gatherResults(
|
||||||
[
|
[
|
||||||
logcontext.run_in_background(
|
run_in_background(
|
||||||
self.federation_client.get_pdu,
|
self.federation_client.get_pdu,
|
||||||
[dest],
|
[dest],
|
||||||
event_id,
|
event_id,
|
||||||
|
@ -994,10 +1000,8 @@ class FederationHandler(BaseHandler):
|
||||||
event_ids = list(extremities.keys())
|
event_ids = list(extremities.keys())
|
||||||
|
|
||||||
logger.debug("calling resolve_state_groups in _maybe_backfill")
|
logger.debug("calling resolve_state_groups in _maybe_backfill")
|
||||||
resolve = logcontext.preserve_fn(
|
resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
|
||||||
self.state_handler.resolve_state_groups_for_events
|
states = yield make_deferred_yieldable(
|
||||||
)
|
|
||||||
states = yield logcontext.make_deferred_yieldable(
|
|
||||||
defer.gatherResults(
|
defer.gatherResults(
|
||||||
[resolve(room_id, [e]) for e in event_ids], consumeErrors=True
|
[resolve(room_id, [e]) for e in event_ids], consumeErrors=True
|
||||||
)
|
)
|
||||||
|
@ -1171,7 +1175,7 @@ class FederationHandler(BaseHandler):
|
||||||
# lots of requests for missing prev_events which we do actually
|
# lots of requests for missing prev_events which we do actually
|
||||||
# have. Hence we fire off the deferred, but don't wait for it.
|
# have. Hence we fire off the deferred, but don't wait for it.
|
||||||
|
|
||||||
logcontext.run_in_background(self._handle_queued_pdus, room_queue)
|
run_in_background(self._handle_queued_pdus, room_queue)
|
||||||
|
|
||||||
defer.returnValue(True)
|
defer.returnValue(True)
|
||||||
|
|
||||||
|
@ -1191,7 +1195,7 @@ class FederationHandler(BaseHandler):
|
||||||
p.event_id,
|
p.event_id,
|
||||||
p.room_id,
|
p.room_id,
|
||||||
)
|
)
|
||||||
with logcontext.nested_logging_context(p.event_id):
|
with nested_logging_context(p.event_id):
|
||||||
yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
|
yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
|
@ -1610,7 +1614,7 @@ class FederationHandler(BaseHandler):
|
||||||
success = True
|
success = True
|
||||||
finally:
|
finally:
|
||||||
if not success:
|
if not success:
|
||||||
logcontext.run_in_background(
|
run_in_background(
|
||||||
self.store.remove_push_actions_from_staging, event.event_id
|
self.store.remove_push_actions_from_staging, event.event_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -1629,7 +1633,7 @@ class FederationHandler(BaseHandler):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def prep(ev_info):
|
def prep(ev_info):
|
||||||
event = ev_info["event"]
|
event = ev_info["event"]
|
||||||
with logcontext.nested_logging_context(suffix=event.event_id):
|
with nested_logging_context(suffix=event.event_id):
|
||||||
res = yield self._prep_event(
|
res = yield self._prep_event(
|
||||||
origin,
|
origin,
|
||||||
event,
|
event,
|
||||||
|
@ -1639,12 +1643,9 @@ class FederationHandler(BaseHandler):
|
||||||
)
|
)
|
||||||
defer.returnValue(res)
|
defer.returnValue(res)
|
||||||
|
|
||||||
contexts = yield logcontext.make_deferred_yieldable(
|
contexts = yield make_deferred_yieldable(
|
||||||
defer.gatherResults(
|
defer.gatherResults(
|
||||||
[
|
[run_in_background(prep, ev_info) for ev_info in event_infos],
|
||||||
logcontext.run_in_background(prep, ev_info)
|
|
||||||
for ev_info in event_infos
|
|
||||||
],
|
|
||||||
consumeErrors=True,
|
consumeErrors=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -2106,10 +2107,10 @@ class FederationHandler(BaseHandler):
|
||||||
|
|
||||||
room_version = yield self.store.get_room_version(event.room_id)
|
room_version = yield self.store.get_room_version(event.room_id)
|
||||||
|
|
||||||
different_events = yield logcontext.make_deferred_yieldable(
|
different_events = yield make_deferred_yieldable(
|
||||||
defer.gatherResults(
|
defer.gatherResults(
|
||||||
[
|
[
|
||||||
logcontext.run_in_background(
|
run_in_background(
|
||||||
self.store.get_event, d, allow_none=True, allow_rejected=False
|
self.store.get_event, d, allow_none=True, allow_rejected=False
|
||||||
)
|
)
|
||||||
for d in different_auth
|
for d in different_auth
|
||||||
|
|
|
@ -21,12 +21,12 @@ from synapse.api.constants import EventTypes, Membership
|
||||||
from synapse.api.errors import AuthError, Codes, SynapseError
|
from synapse.api.errors import AuthError, Codes, SynapseError
|
||||||
from synapse.events.validator import EventValidator
|
from synapse.events.validator import EventValidator
|
||||||
from synapse.handlers.presence import format_user_presence_state
|
from synapse.handlers.presence import format_user_presence_state
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.streams.config import PaginationConfig
|
from synapse.streams.config import PaginationConfig
|
||||||
from synapse.types import StreamToken, UserID
|
from synapse.types import StreamToken, UserID
|
||||||
from synapse.util import unwrapFirstError
|
from synapse.util import unwrapFirstError
|
||||||
from synapse.util.async_helpers import concurrently_execute
|
from synapse.util.async_helpers import concurrently_execute
|
||||||
from synapse.util.caches.snapshot_cache import SnapshotCache
|
from synapse.util.caches.snapshot_cache import SnapshotCache
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
from ._base import BaseHandler
|
from ._base import BaseHandler
|
||||||
|
|
|
@ -34,13 +34,13 @@ from synapse.api.errors import (
|
||||||
from synapse.api.room_versions import RoomVersions
|
from synapse.api.room_versions import RoomVersions
|
||||||
from synapse.api.urls import ConsentURIBuilder
|
from synapse.api.urls import ConsentURIBuilder
|
||||||
from synapse.events.validator import EventValidator
|
from synapse.events.validator import EventValidator
|
||||||
|
from synapse.logging.context import run_in_background
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.replication.http.send_event import ReplicationSendEventRestServlet
|
from synapse.replication.http.send_event import ReplicationSendEventRestServlet
|
||||||
from synapse.storage.state import StateFilter
|
from synapse.storage.state import StateFilter
|
||||||
from synapse.types import RoomAlias, UserID, create_requester
|
from synapse.types import RoomAlias, UserID, create_requester
|
||||||
from synapse.util.async_helpers import Linearizer
|
from synapse.util.async_helpers import Linearizer
|
||||||
from synapse.util.frozenutils import frozendict_json_encoder
|
from synapse.util.frozenutils import frozendict_json_encoder
|
||||||
from synapse.util.logcontext import run_in_background
|
|
||||||
from synapse.util.metrics import measure_func
|
from synapse.util.metrics import measure_func
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
|
|
|
@ -20,10 +20,10 @@ from twisted.python.failure import Failure
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes, Membership
|
from synapse.api.constants import EventTypes, Membership
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
|
from synapse.logging.context import run_in_background
|
||||||
from synapse.storage.state import StateFilter
|
from synapse.storage.state import StateFilter
|
||||||
from synapse.types import RoomStreamToken
|
from synapse.types import RoomStreamToken
|
||||||
from synapse.util.async_helpers import ReadWriteLock
|
from synapse.util.async_helpers import ReadWriteLock
|
||||||
from synapse.util.logcontext import run_in_background
|
|
||||||
from synapse.util.stringutils import random_string
|
from synapse.util.stringutils import random_string
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
|
|
|
@ -34,14 +34,14 @@ from twisted.internet import defer
|
||||||
import synapse.metrics
|
import synapse.metrics
|
||||||
from synapse.api.constants import EventTypes, Membership, PresenceState
|
from synapse.api.constants import EventTypes, Membership, PresenceState
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
|
from synapse.logging.context import run_in_background
|
||||||
|
from synapse.logging.utils import log_function
|
||||||
from synapse.metrics import LaterGauge
|
from synapse.metrics import LaterGauge
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.storage.presence import UserPresenceState
|
from synapse.storage.presence import UserPresenceState
|
||||||
from synapse.types import UserID, get_domain_from_id
|
from synapse.types import UserID, get_domain_from_id
|
||||||
from synapse.util.async_helpers import Linearizer
|
from synapse.util.async_helpers import Linearizer
|
||||||
from synapse.util.caches.descriptors import cachedInlineCallbacks
|
from synapse.util.caches.descriptors import cachedInlineCallbacks
|
||||||
from synapse.util.logcontext import run_in_background
|
|
||||||
from synapse.util.logutils import log_function
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
from synapse.util.wheel_timer import WheelTimer
|
from synapse.util.wheel_timer import WheelTimer
|
||||||
|
|
||||||
|
|
|
@ -25,6 +25,7 @@ from prometheus_client import Counter
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes, Membership
|
from synapse.api.constants import EventTypes, Membership
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.push.clientformat import format_push_rules_for_user
|
from synapse.push.clientformat import format_push_rules_for_user
|
||||||
from synapse.storage.roommember import MemberSummary
|
from synapse.storage.roommember import MemberSummary
|
||||||
from synapse.storage.state import StateFilter
|
from synapse.storage.state import StateFilter
|
||||||
|
@ -33,7 +34,6 @@ from synapse.util.async_helpers import concurrently_execute
|
||||||
from synapse.util.caches.expiringcache import ExpiringCache
|
from synapse.util.caches.expiringcache import ExpiringCache
|
||||||
from synapse.util.caches.lrucache import LruCache
|
from synapse.util.caches.lrucache import LruCache
|
||||||
from synapse.util.caches.response_cache import ResponseCache
|
from synapse.util.caches.response_cache import ResponseCache
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
from synapse.util.metrics import Measure, measure_func
|
from synapse.util.metrics import Measure, measure_func
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
|
|
|
@ -19,9 +19,9 @@ from collections import namedtuple
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import AuthError, SynapseError
|
from synapse.api.errors import AuthError, SynapseError
|
||||||
|
from synapse.logging.context import run_in_background
|
||||||
from synapse.types import UserID, get_domain_from_id
|
from synapse.types import UserID, get_domain_from_id
|
||||||
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
||||||
from synapse.util.logcontext import run_in_background
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
from synapse.util.wheel_timer import WheelTimer
|
from synapse.util.wheel_timer import WheelTimer
|
||||||
|
|
||||||
|
|
|
@ -45,9 +45,9 @@ from synapse.http import (
|
||||||
cancelled_to_request_timed_out_error,
|
cancelled_to_request_timed_out_error,
|
||||||
redact_uri,
|
redact_uri,
|
||||||
)
|
)
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.util.async_helpers import timeout_deferred
|
from synapse.util.async_helpers import timeout_deferred
|
||||||
from synapse.util.caches import CACHE_SIZE_FACTOR
|
from synapse.util.caches import CACHE_SIZE_FACTOR
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -30,9 +30,9 @@ from twisted.web.http_headers import Headers
|
||||||
from twisted.web.iweb import IAgent
|
from twisted.web.iweb import IAgent
|
||||||
|
|
||||||
from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list
|
from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.caches.ttlcache import TTLCache
|
from synapse.util.caches.ttlcache import TTLCache
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
|
|
||||||
# period to cache .well-known results for by default
|
# period to cache .well-known results for by default
|
||||||
|
|
|
@ -25,7 +25,7 @@ from twisted.internet.error import ConnectError
|
||||||
from twisted.names import client, dns
|
from twisted.names import client, dns
|
||||||
from twisted.names.error import DNSNameError, DomainError
|
from twisted.names.error import DNSNameError, DomainError
|
||||||
|
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -48,8 +48,8 @@ from synapse.api.errors import (
|
||||||
from synapse.http import QuieterFileBodyProducer
|
from synapse.http import QuieterFileBodyProducer
|
||||||
from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
|
from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
|
||||||
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
|
from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.util.async_helpers import timeout_deferred
|
from synapse.util.async_helpers import timeout_deferred
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
|
@ -19,8 +19,8 @@ import threading
|
||||||
|
|
||||||
from prometheus_client.core import Counter, Histogram
|
from prometheus_client.core import Counter, Histogram
|
||||||
|
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.metrics import LaterGauge
|
from synapse.metrics import LaterGauge
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -39,8 +39,8 @@ from synapse.api.errors import (
|
||||||
SynapseError,
|
SynapseError,
|
||||||
UnrecognizedRequestError,
|
UnrecognizedRequestError,
|
||||||
)
|
)
|
||||||
|
from synapse.logging.context import preserve_fn
|
||||||
from synapse.util.caches import intern_dict
|
from synapse.util.caches import intern_dict
|
||||||
from synapse.util.logcontext import preserve_fn
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -19,7 +19,7 @@ from twisted.web.server import Request, Site
|
||||||
|
|
||||||
from synapse.http import redact_uri
|
from synapse.http import redact_uri
|
||||||
from synapse.http.request_metrics import RequestMetrics, requests_counter
|
from synapse.http.request_metrics import RequestMetrics, requests_counter
|
||||||
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
|
from synapse.logging.context import LoggingContext, PreserveLoggingContext
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
|
from synapse.logging.context import LoggingContext, PreserveLoggingContext
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -23,12 +23,12 @@ from twisted.internet import defer
|
||||||
from synapse.api.constants import EventTypes, Membership
|
from synapse.api.constants import EventTypes, Membership
|
||||||
from synapse.api.errors import AuthError
|
from synapse.api.errors import AuthError
|
||||||
from synapse.handlers.presence import format_user_presence_state
|
from synapse.handlers.presence import format_user_presence_state
|
||||||
|
from synapse.logging.context import PreserveLoggingContext
|
||||||
|
from synapse.logging.utils import log_function
|
||||||
from synapse.metrics import LaterGauge
|
from synapse.metrics import LaterGauge
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.types import StreamToken
|
from synapse.types import StreamToken
|
||||||
from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
|
from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
|
||||||
from synapse.util.logcontext import PreserveLoggingContext
|
|
||||||
from synapse.util.logutils import log_function
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
|
|
|
@ -29,6 +29,7 @@ from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes
|
from synapse.api.constants import EventTypes
|
||||||
from synapse.api.errors import StoreError
|
from synapse.api.errors import StoreError
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.push.presentable_names import (
|
from synapse.push.presentable_names import (
|
||||||
calculate_room_name,
|
calculate_room_name,
|
||||||
descriptor_from_member_events,
|
descriptor_from_member_events,
|
||||||
|
@ -36,7 +37,6 @@ from synapse.push.presentable_names import (
|
||||||
)
|
)
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
from synapse.util.async_helpers import concurrently_execute
|
from synapse.util.async_helpers import concurrently_execute
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
from synapse.visibility import filter_events_for_client
|
from synapse.visibility import filter_events_for_client
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
|
@ -62,9 +62,9 @@ from twisted.internet import defer
|
||||||
from twisted.protocols.basic import LineOnlyReceiver
|
from twisted.protocols.basic import LineOnlyReceiver
|
||||||
from twisted.python.failure import Failure
|
from twisted.python.failure import Failure
|
||||||
|
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.metrics import LaterGauge
|
from synapse.metrics import LaterGauge
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
from synapse.util.stringutils import random_string
|
from synapse.util.stringutils import random_string
|
||||||
|
|
||||||
from .commands import (
|
from .commands import (
|
||||||
|
|
|
@ -17,8 +17,8 @@
|
||||||
to ensure idempotency when performing PUTs using the REST API."""
|
to ensure idempotency when performing PUTs using the REST API."""
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.util.async_helpers import ObservableDeferred
|
from synapse.util.async_helpers import ObservableDeferred
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ from twisted.protocols.basic import FileSender
|
||||||
|
|
||||||
from synapse.api.errors import Codes, SynapseError, cs_error
|
from synapse.api.errors import Codes, SynapseError, cs_error
|
||||||
from synapse.http.server import finish_request, respond_with_json
|
from synapse.http.server import finish_request, respond_with_json
|
||||||
from synapse.util import logcontext
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.util.stringutils import is_ascii
|
from synapse.util.stringutils import is_ascii
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -75,9 +75,7 @@ def respond_with_file(request, media_type, file_path, file_size=None, upload_nam
|
||||||
add_file_headers(request, media_type, file_size, upload_name)
|
add_file_headers(request, media_type, file_size, upload_name)
|
||||||
|
|
||||||
with open(file_path, "rb") as f:
|
with open(file_path, "rb") as f:
|
||||||
yield logcontext.make_deferred_yieldable(
|
yield make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
|
||||||
FileSender().beginFileTransfer(f, request)
|
|
||||||
)
|
|
||||||
|
|
||||||
finish_request(request)
|
finish_request(request)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -33,8 +33,8 @@ from synapse.api.errors import (
|
||||||
RequestSendFailed,
|
RequestSendFailed,
|
||||||
SynapseError,
|
SynapseError,
|
||||||
)
|
)
|
||||||
|
from synapse.logging.context import defer_to_thread
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.util import logcontext
|
|
||||||
from synapse.util.async_helpers import Linearizer
|
from synapse.util.async_helpers import Linearizer
|
||||||
from synapse.util.retryutils import NotRetryingDestination
|
from synapse.util.retryutils import NotRetryingDestination
|
||||||
from synapse.util.stringutils import random_string
|
from synapse.util.stringutils import random_string
|
||||||
|
@ -463,7 +463,7 @@ class MediaRepository(object):
|
||||||
)
|
)
|
||||||
|
|
||||||
thumbnailer = Thumbnailer(input_path)
|
thumbnailer = Thumbnailer(input_path)
|
||||||
t_byte_source = yield logcontext.defer_to_thread(
|
t_byte_source = yield defer_to_thread(
|
||||||
self.hs.get_reactor(),
|
self.hs.get_reactor(),
|
||||||
self._generate_thumbnail,
|
self._generate_thumbnail,
|
||||||
thumbnailer,
|
thumbnailer,
|
||||||
|
@ -511,7 +511,7 @@ class MediaRepository(object):
|
||||||
)
|
)
|
||||||
|
|
||||||
thumbnailer = Thumbnailer(input_path)
|
thumbnailer = Thumbnailer(input_path)
|
||||||
t_byte_source = yield logcontext.defer_to_thread(
|
t_byte_source = yield defer_to_thread(
|
||||||
self.hs.get_reactor(),
|
self.hs.get_reactor(),
|
||||||
self._generate_thumbnail,
|
self._generate_thumbnail,
|
||||||
thumbnailer,
|
thumbnailer,
|
||||||
|
@ -596,7 +596,7 @@ class MediaRepository(object):
|
||||||
return
|
return
|
||||||
|
|
||||||
if thumbnailer.transpose_method is not None:
|
if thumbnailer.transpose_method is not None:
|
||||||
m_width, m_height = yield logcontext.defer_to_thread(
|
m_width, m_height = yield defer_to_thread(
|
||||||
self.hs.get_reactor(), thumbnailer.transpose
|
self.hs.get_reactor(), thumbnailer.transpose
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -616,11 +616,11 @@ class MediaRepository(object):
|
||||||
for (t_width, t_height, t_type), t_method in iteritems(thumbnails):
|
for (t_width, t_height, t_type), t_method in iteritems(thumbnails):
|
||||||
# Generate the thumbnail
|
# Generate the thumbnail
|
||||||
if t_method == "crop":
|
if t_method == "crop":
|
||||||
t_byte_source = yield logcontext.defer_to_thread(
|
t_byte_source = yield defer_to_thread(
|
||||||
self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type
|
self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type
|
||||||
)
|
)
|
||||||
elif t_method == "scale":
|
elif t_method == "scale":
|
||||||
t_byte_source = yield logcontext.defer_to_thread(
|
t_byte_source = yield defer_to_thread(
|
||||||
self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type
|
self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -24,9 +24,8 @@ import six
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from twisted.protocols.basic import FileSender
|
from twisted.protocols.basic import FileSender
|
||||||
|
|
||||||
from synapse.util import logcontext
|
from synapse.logging.context import defer_to_thread, make_deferred_yieldable
|
||||||
from synapse.util.file_consumer import BackgroundFileConsumer
|
from synapse.util.file_consumer import BackgroundFileConsumer
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
|
|
||||||
from ._base import Responder
|
from ._base import Responder
|
||||||
|
|
||||||
|
@ -65,7 +64,7 @@ class MediaStorage(object):
|
||||||
|
|
||||||
with self.store_into_file(file_info) as (f, fname, finish_cb):
|
with self.store_into_file(file_info) as (f, fname, finish_cb):
|
||||||
# Write to the main repository
|
# Write to the main repository
|
||||||
yield logcontext.defer_to_thread(
|
yield defer_to_thread(
|
||||||
self.hs.get_reactor(), _write_file_synchronously, source, f
|
self.hs.get_reactor(), _write_file_synchronously, source, f
|
||||||
)
|
)
|
||||||
yield finish_cb()
|
yield finish_cb()
|
||||||
|
|
|
@ -42,11 +42,11 @@ from synapse.http.server import (
|
||||||
wrap_json_request_handler,
|
wrap_json_request_handler,
|
||||||
)
|
)
|
||||||
from synapse.http.servlet import parse_integer, parse_string
|
from synapse.http.servlet import parse_integer, parse_string
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.rest.media.v1._base import get_filename_from_headers
|
from synapse.rest.media.v1._base import get_filename_from_headers
|
||||||
from synapse.util.async_helpers import ObservableDeferred
|
from synapse.util.async_helpers import ObservableDeferred
|
||||||
from synapse.util.caches.expiringcache import ExpiringCache
|
from synapse.util.caches.expiringcache import ExpiringCache
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
from synapse.util.stringutils import random_string
|
from synapse.util.stringutils import random_string
|
||||||
|
|
||||||
from ._base import FileInfo
|
from ._base import FileInfo
|
||||||
|
|
|
@ -20,8 +20,7 @@ import shutil
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.config._base import Config
|
from synapse.config._base import Config
|
||||||
from synapse.util import logcontext
|
from synapse.logging.context import defer_to_thread, run_in_background
|
||||||
from synapse.util.logcontext import run_in_background
|
|
||||||
|
|
||||||
from .media_storage import FileResponder
|
from .media_storage import FileResponder
|
||||||
|
|
||||||
|
@ -125,7 +124,7 @@ class FileStorageProviderBackend(StorageProvider):
|
||||||
if not os.path.exists(dirname):
|
if not os.path.exists(dirname):
|
||||||
os.makedirs(dirname)
|
os.makedirs(dirname)
|
||||||
|
|
||||||
return logcontext.defer_to_thread(
|
return defer_to_thread(
|
||||||
self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname
|
self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -28,11 +28,11 @@ from twisted.internet import defer
|
||||||
from synapse.api.constants import EventTypes
|
from synapse.api.constants import EventTypes
|
||||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions
|
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions
|
||||||
from synapse.events.snapshot import EventContext
|
from synapse.events.snapshot import EventContext
|
||||||
|
from synapse.logging.utils import log_function
|
||||||
from synapse.state import v1, v2
|
from synapse.state import v1, v2
|
||||||
from synapse.util.async_helpers import Linearizer
|
from synapse.util.async_helpers import Linearizer
|
||||||
from synapse.util.caches import get_cache_factor_for
|
from synapse.util.caches import get_cache_factor_for
|
||||||
from synapse.util.caches.expiringcache import ExpiringCache
|
from synapse.util.caches.expiringcache import ExpiringCache
|
||||||
from synapse.util.logutils import log_function
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
|
@ -30,12 +30,12 @@ from prometheus_client import Histogram
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import StoreError
|
from synapse.api.errors import StoreError
|
||||||
|
from synapse.logging.context import LoggingContext, PreserveLoggingContext
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
from synapse.storage.engines import PostgresEngine, Sqlite3Engine
|
||||||
from synapse.types import get_domain_from_id
|
from synapse.types import get_domain_from_id
|
||||||
from synapse.util import batch_iter
|
from synapse.util import batch_iter
|
||||||
from synapse.util.caches.descriptors import Cache
|
from synapse.util.caches.descriptors import Cache
|
||||||
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
|
|
||||||
from synapse.util.stringutils import exception_to_unicode
|
from synapse.util.stringutils import exception_to_unicode
|
||||||
|
|
||||||
# import a function which will return a monotonic time, in seconds
|
# import a function which will return a monotonic time, in seconds
|
||||||
|
|
|
@ -33,6 +33,8 @@ from synapse.api.constants import EventTypes
|
||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.events import EventBase # noqa: F401
|
from synapse.events import EventBase # noqa: F401
|
||||||
from synapse.events.snapshot import EventContext # noqa: F401
|
from synapse.events.snapshot import EventContext # noqa: F401
|
||||||
|
from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
|
||||||
|
from synapse.logging.utils import log_function
|
||||||
from synapse.metrics import BucketCollector
|
from synapse.metrics import BucketCollector
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.state import StateResolutionStore
|
from synapse.state import StateResolutionStore
|
||||||
|
@ -45,8 +47,6 @@ from synapse.util import batch_iter
|
||||||
from synapse.util.async_helpers import ObservableDeferred
|
from synapse.util.async_helpers import ObservableDeferred
|
||||||
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
||||||
from synapse.util.frozenutils import frozendict_json_encoder
|
from synapse.util.frozenutils import frozendict_json_encoder
|
||||||
from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
|
|
||||||
from synapse.util.logutils import log_function
|
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
|
@ -29,14 +29,14 @@ from synapse.api.room_versions import EventFormatVersions
|
||||||
from synapse.events import FrozenEvent, event_type_from_format_version # noqa: F401
|
from synapse.events import FrozenEvent, event_type_from_format_version # noqa: F401
|
||||||
from synapse.events.snapshot import EventContext # noqa: F401
|
from synapse.events.snapshot import EventContext # noqa: F401
|
||||||
from synapse.events.utils import prune_event
|
from synapse.events.utils import prune_event
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.logging.context import (
|
||||||
from synapse.types import get_domain_from_id
|
|
||||||
from synapse.util.logcontext import (
|
|
||||||
LoggingContext,
|
LoggingContext,
|
||||||
PreserveLoggingContext,
|
PreserveLoggingContext,
|
||||||
make_deferred_yieldable,
|
make_deferred_yieldable,
|
||||||
run_in_background,
|
run_in_background,
|
||||||
)
|
)
|
||||||
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
|
from synapse.types import get_domain_from_id
|
||||||
from synapse.util.metrics import Measure
|
from synapse.util.metrics import Measure
|
||||||
|
|
||||||
from ._base import SQLBaseStore
|
from ._base import SQLBaseStore
|
||||||
|
|
|
@ -41,12 +41,12 @@ from six.moves import range
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.storage._base import SQLBaseStore
|
from synapse.storage._base import SQLBaseStore
|
||||||
from synapse.storage.engines import PostgresEngine
|
from synapse.storage.engines import PostgresEngine
|
||||||
from synapse.storage.events_worker import EventsWorkerStore
|
from synapse.storage.events_worker import EventsWorkerStore
|
||||||
from synapse.types import RoomStreamToken
|
from synapse.types import RoomStreamToken
|
||||||
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
from synapse.util.caches.stream_change_cache import StreamChangeCache
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -21,10 +21,14 @@ import attr
|
||||||
|
|
||||||
from twisted.internet import defer, task
|
from twisted.internet import defer, task
|
||||||
|
|
||||||
from synapse.util.logcontext import PreserveLoggingContext
|
from synapse.logging import context, formatter
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Compatibility alias, for existing logconfigs.
|
||||||
|
logcontext = context
|
||||||
|
logformatter = formatter
|
||||||
|
|
||||||
|
|
||||||
def unwrapFirstError(failure):
|
def unwrapFirstError(failure):
|
||||||
# defer.gatherResults and DeferredLists wrap failures.
|
# defer.gatherResults and DeferredLists wrap failures.
|
||||||
|
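The "Compatibility alias, for existing logconfigs." lines in the hunk above keep old dotted paths importable: `synapse.util.logcontext` now resolves, via attribute lookup on `synapse.util`, to `synapse.logging.context`. A minimal sketch of why that matters for logging configs (the config dict below is hypothetical and only assumes a Synapse install; it is not taken from this commit):

    # Old-style dict config naming the pre-move filter path. logging.config
    # resolves "synapse.util.logcontext.LoggingContextFilter" by importing
    # "synapse.util" and then getattr()-ing "logcontext", which hits the alias
    # added above and lands on synapse.logging.context.
    import logging.config

    OLD_STYLE_CONFIG = {
        "version": 1,
        "filters": {
            "context": {
                "()": "synapse.util.logcontext.LoggingContextFilter",
                "request": "",
            }
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "filters": ["context"]}
        },
        "root": {"handlers": ["console"], "level": "INFO"},
    }

    logging.config.dictConfig(OLD_STYLE_CONFIG)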
@@ -46,7 +50,7 @@ class Clock(object):
     @defer.inlineCallbacks
     def sleep(self, seconds):
         d = defer.Deferred()
-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             self._reactor.callLater(seconds, d.callback, seconds)
             res = yield d
         defer.returnValue(res)
@@ -91,10 +95,10 @@ class Clock(object):
         """

         def wrapped_callback(*args, **kwargs):
-            with PreserveLoggingContext():
+            with context.PreserveLoggingContext():
                 callback(*args, **kwargs)

-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs)

     def cancel_call_later(self, timer, ignore_errs=False):

@@ -23,13 +23,12 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError
 from twisted.python import failure

-from synapse.util import Clock, logcontext, unwrapFirstError
-
-from .logcontext import (
+from synapse.logging.context import (
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.util import Clock, unwrapFirstError

 logger = logging.getLogger(__name__)

@@ -153,7 +152,7 @@ def concurrently_execute(func, args, limit):
         except StopIteration:
             pass

-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(_concurrently_execute_inner) for _ in range(limit)],
             consumeErrors=True,
@@ -174,7 +173,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs):
         Deferred[list]: Resolved when all functions have been invoked, or errors if
             one of the function calls fails.
     """
-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(func, item, *args, **kwargs) for item in iter],
             consumeErrors=True,

@@ -24,7 +24,8 @@ from six import itervalues, string_types

 from twisted.internet import defer

-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, preserve_fn
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
@@ -388,7 +389,7 @@ class CacheDescriptor(_CacheDescriptorBase):

             except KeyError:
                 ret = defer.maybeDeferred(
-                    logcontext.preserve_fn(self.function_to_call), obj, *args, **kwargs
+                    preserve_fn(self.function_to_call), obj, *args, **kwargs
                 )

                 def onErr(f):
@@ -408,7 +409,7 @@ class CacheDescriptor(_CacheDescriptorBase):
             observer = result_d.observe()

             if isinstance(observer, defer.Deferred):
-                return logcontext.make_deferred_yieldable(observer)
+                return make_deferred_yieldable(observer)
             else:
                 return observer

@@ -563,7 +564,7 @@ class CacheListDescriptor(_CacheDescriptorBase):

                 cached_defers.append(
                     defer.maybeDeferred(
-                        logcontext.preserve_fn(self.function_to_call), **args_to_call
+                        preserve_fn(self.function_to_call), **args_to_call
                     ).addCallbacks(complete_all, errback)
                 )

@@ -571,7 +572,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
             d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks(
                 lambda _: results, unwrapFirstError
             )
-            return logcontext.make_deferred_yieldable(d)
+            return make_deferred_yieldable(d)
         else:
             return results

@@ -16,9 +16,9 @@ import logging

 from twisted.internet import defer

+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background

 logger = logging.getLogger(__name__)

@@ -78,7 +78,7 @@ class ResponseCache(object):

         *deferred* should run its callbacks in the sentinel logcontext (ie,
         you should wrap normal synapse deferreds with
-        logcontext.run_in_background).
+        synapse.logging.context.run_in_background).

         Can return either a new Deferred (which also doesn't follow the synapse
         logcontext rules), or, if *deferred* was already complete, the actual
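The docstring change above is only a rename, but the pattern it describes is worth spelling out. A minimal, self-contained sketch of that pattern under the new module path (the function names below are illustrative, not part of this commit):

    from twisted.internet import defer

    from synapse.logging.context import make_deferred_yieldable, run_in_background


    @defer.inlineCallbacks
    def expensive_lookup():
        # stand-in for real work
        yield defer.succeed(None)
        defer.returnValue(42)


    @defer.inlineCallbacks
    def handle_request():
        # run_in_background starts the work now and hands back a deferred that
        # does not follow the usual Synapse logcontext rules, which is what the
        # ResponseCache docstring above asks for
        d = run_in_background(expensive_lookup)
        # make_deferred_yieldable restores our own logcontext when we resume
        result = yield make_deferred_yieldable(d)
        defer.returnValue(result)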
@ -17,8 +17,8 @@ import logging
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,7 @@ from six.moves import queue
|
||||||
|
|
||||||
from twisted.internet import threads
|
from twisted.internet import threads
|
||||||
|
|
||||||
from synapse.util.logcontext import make_deferred_yieldable, run_in_background
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
||||||
|
|
||||||
|
|
||||||
class BackgroundFileConsumer(object):
|
class BackgroundFileConsumer(object):
|
||||||
|
|
|
@ -20,8 +20,8 @@ from prometheus_client import Counter
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.metrics import InFlightGauge
|
from synapse.metrics import InFlightGauge
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
|
@ -20,7 +20,7 @@ import logging
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.errors import LimitExceededError
|
from synapse.api.errors import LimitExceededError
|
||||||
from synapse.util.logcontext import (
|
from synapse.logging.context import (
|
||||||
PreserveLoggingContext,
|
PreserveLoggingContext,
|
||||||
make_deferred_yieldable,
|
make_deferred_yieldable,
|
||||||
run_in_background,
|
run_in_background,
|
||||||
|
|
|
@ -17,7 +17,7 @@ import random
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
import synapse.util.logcontext
|
import synapse.logging.context
|
||||||
from synapse.api.errors import CodeMessageException
|
from synapse.api.errors import CodeMessageException
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -225,4 +225,4 @@ class RetryDestinationLimiter(object):
|
||||||
logger.exception("Failed to store destination_retry_timings")
|
logger.exception("Failed to store destination_retry_timings")
|
||||||
|
|
||||||
# we deliberately do this in the background.
|
# we deliberately do this in the background.
|
||||||
synapse.util.logcontext.run_in_background(store_retry_timings)
|
synapse.logging.context.run_in_background(store_retry_timings)
|
||||||
|
|
|
@ -22,7 +22,7 @@ from synapse.appservice.scheduler import (
|
||||||
_ServiceQueuer,
|
_ServiceQueuer,
|
||||||
_TransactionController,
|
_TransactionController,
|
||||||
)
|
)
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
|
|
||||||
|
|
|
@ -30,9 +30,12 @@ from synapse.crypto.keyring import (
|
||||||
ServerKeyFetcher,
|
ServerKeyFetcher,
|
||||||
StoreKeyFetcher,
|
StoreKeyFetcher,
|
||||||
)
|
)
|
||||||
|
from synapse.logging.context import (
|
||||||
|
LoggingContext,
|
||||||
|
PreserveLoggingContext,
|
||||||
|
make_deferred_yieldable,
|
||||||
|
)
|
||||||
from synapse.storage.keys import FetchKeyResult
|
from synapse.storage.keys import FetchKeyResult
|
||||||
from synapse.util import logcontext
|
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
|
|
||||||
|
@ -131,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_perspectives(**kwargs):
|
def get_perspectives(**kwargs):
|
||||||
self.assertEquals(LoggingContext.current_context().request, "11")
|
self.assertEquals(LoggingContext.current_context().request, "11")
|
||||||
with logcontext.PreserveLoggingContext():
|
with PreserveLoggingContext():
|
||||||
yield persp_deferred
|
yield persp_deferred
|
||||||
defer.returnValue(persp_resp)
|
defer.returnValue(persp_resp)
|
||||||
|
|
||||||
|
@ -158,7 +161,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
|
||||||
self.assertFalse(res_deferreds[0].called)
|
self.assertFalse(res_deferreds[0].called)
|
||||||
res_deferreds[0].addBoth(self.check_context, None)
|
res_deferreds[0].addBoth(self.check_context, None)
|
||||||
|
|
||||||
yield logcontext.make_deferred_yieldable(res_deferreds[0])
|
yield make_deferred_yieldable(res_deferreds[0])
|
||||||
|
|
||||||
# let verify_json_objects_for_server finish its work before we kill the
|
# let verify_json_objects_for_server finish its work before we kill the
|
||||||
# logcontext
|
# logcontext
|
||||||
|
@ -184,7 +187,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
|
||||||
[("server10", json1, 0, "test")]
|
[("server10", json1, 0, "test")]
|
||||||
)
|
)
|
||||||
res_deferreds_2[0].addBoth(self.check_context, None)
|
res_deferreds_2[0].addBoth(self.check_context, None)
|
||||||
yield logcontext.make_deferred_yieldable(res_deferreds_2[0])
|
yield make_deferred_yieldable(res_deferreds_2[0])
|
||||||
|
|
||||||
# let verify_json_objects_for_server finish its work before we kill the
|
# let verify_json_objects_for_server finish its work before we kill the
|
||||||
# logcontext
|
# logcontext
|
||||||
|
|
|
@ -36,8 +36,8 @@ from synapse.http.federation.matrix_federation_agent import (
|
||||||
_cache_period_from_headers,
|
_cache_period_from_headers,
|
||||||
)
|
)
|
||||||
from synapse.http.federation.srv_resolver import Server
|
from synapse.http.federation.srv_resolver import Server
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.util.caches.ttlcache import TTLCache
|
from synapse.util.caches.ttlcache import TTLCache
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file
|
from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file
|
||||||
from tests.server import FakeTransport, ThreadedMemoryReactorClock
|
from tests.server import FakeTransport, ThreadedMemoryReactorClock
|
||||||
|
|
|
@ -22,7 +22,7 @@ from twisted.internet.error import ConnectError
|
||||||
from twisted.names import dns, error
|
from twisted.names import dns, error
|
||||||
|
|
||||||
from synapse.http.federation.srv_resolver import SrvResolver
|
from synapse.http.federation.srv_resolver import SrvResolver
|
||||||
from synapse.util.logcontext import LoggingContext
|
from synapse.logging.context import LoggingContext
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.utils import MockClock
|
from tests.utils import MockClock
|
||||||
|
|
|
@ -29,7 +29,7 @@ from synapse.http.matrixfederationclient import (
|
||||||
MatrixFederationHttpClient,
|
MatrixFederationHttpClient,
|
||||||
MatrixFederationRequest,
|
MatrixFederationRequest,
|
||||||
)
|
)
|
||||||
from synapse.util.logcontext import LoggingContext
|
from synapse.logging.context import LoggingContext
|
||||||
|
|
||||||
from tests.server import FakeTransport
|
from tests.server import FakeTransport
|
||||||
from tests.unittest import HomeserverTestCase
|
from tests.unittest import HomeserverTestCase
|
||||||
|
|
|
@ -28,7 +28,7 @@ def do_patch():
|
||||||
Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit
|
Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from synapse.util.logcontext import LoggingContext
|
from synapse.logging.context import LoggingContext
|
||||||
|
|
||||||
orig_inline_callbacks = defer.inlineCallbacks
|
orig_inline_callbacks = defer.inlineCallbacks
|
||||||
|
|
||||||
|
|
|
@ -18,8 +18,8 @@ from mock import Mock
|
||||||
from twisted.internet.defer import Deferred
|
from twisted.internet.defer import Deferred
|
||||||
|
|
||||||
import synapse.rest.admin
|
import synapse.rest.admin
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.rest.client.v1 import login, room
|
from synapse.rest.client.v1 import login, room
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
|
|
||||||
from tests.unittest import HomeserverTestCase
|
from tests.unittest import HomeserverTestCase
|
||||||
|
|
||||||
|
|
|
@ -2,9 +2,9 @@ from mock import Mock, call
|
||||||
|
|
||||||
from twisted.internet import defer, reactor
|
from twisted.internet import defer, reactor
|
||||||
|
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
|
from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.utils import MockClock
|
from tests.utils import MockClock
|
||||||
|
|
|
@ -24,11 +24,11 @@ from six.moves.urllib import parse
|
||||||
|
|
||||||
from twisted.internet.defer import Deferred
|
from twisted.internet.defer import Deferred
|
||||||
|
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.rest.media.v1._base import FileInfo
|
from synapse.rest.media.v1._base import FileInfo
|
||||||
from synapse.rest.media.v1.filepath import MediaFilePaths
|
from synapse.rest.media.v1.filepath import MediaFilePaths
|
||||||
from synapse.rest.media.v1.media_storage import MediaStorage
|
from synapse.rest.media.v1.media_storage import MediaStorage
|
||||||
from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
|
from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
|
|
||||||
|
|
|
@ -3,9 +3,9 @@ from mock import Mock
|
||||||
from twisted.internet.defer import maybeDeferred, succeed
|
from twisted.internet.defer import maybeDeferred, succeed
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.logging.context import LoggingContext
|
||||||
from synapse.types import Requester, UserID
|
from synapse.types import Requester, UserID
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver
|
from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver
|
||||||
|
|
|
@ -26,8 +26,8 @@ from twisted.web.server import NOT_DONE_YET
|
||||||
from synapse.api.errors import Codes, SynapseError
|
from synapse.api.errors import Codes, SynapseError
|
||||||
from synapse.http.server import JsonResource
|
from synapse.http.server import JsonResource
|
||||||
from synapse.http.site import SynapseSite, logger
|
from synapse.http.site import SynapseSite, logger
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.logcontext import make_deferred_yieldable
|
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.server import (
|
from tests.server import (
|
||||||
|
|
|
@ -17,7 +17,7 @@ import os
|
||||||
|
|
||||||
import twisted.logger
|
import twisted.logger
|
||||||
|
|
||||||
from synapse.util.logcontext import LoggingContextFilter
|
from synapse.logging.context import LoggingContextFilter
|
||||||
|
|
||||||
|
|
||||||
class ToTwistedHandler(logging.Handler):
|
class ToTwistedHandler(logging.Handler):
|
||||||
|
|
|
@@ -33,9 +33,9 @@ from synapse.api.constants import EventTypes
 from synapse.config.homeserver import HomeServerConfig
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.types import Requester, UserID, create_requester
-from synapse.util.logcontext import LoggingContext
 
 from tests.server import get_clock, make_request, render, setup_test_homeserver
 from tests.test_utils.logging_setup import setup_logging
@@ -21,7 +21,11 @@ import mock
 from twisted.internet import defer, reactor
 
 from synapse.api.errors import SynapseError
-from synapse.util import logcontext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+)
 from synapse.util.caches import descriptors
 
 from tests import unittest
@@ -32,7 +36,7 @@ logger = logging.getLogger(__name__)
 def run_on_reactor():
     d = defer.Deferred()
     reactor.callLater(0, d.callback, 0)
-    return logcontext.make_deferred_yieldable(d)
+    return make_deferred_yieldable(d)
 
 
 class CacheTestCase(unittest.TestCase):
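The `run_on_reactor` helper above is itself a compact illustration of `make_deferred_yieldable`: it wraps a bare Deferred fired by the reactor so that callers can safely `yield` on it. A minimal sketch of the same pattern under the new import path — the function and request names are invented for the example:

    from twisted.internet import defer, reactor

    from synapse.logging.context import LoggingContext, make_deferred_yieldable


    @defer.inlineCallbacks
    def sleep_a_bit():
        # A bare Deferred fired by the reactor: wrap it so the caller's
        # logcontext is dropped while we wait and restored when we resume.
        d = defer.Deferred()
        reactor.callLater(0, d.callback, None)
        yield make_deferred_yieldable(d)


    @defer.inlineCallbacks
    def handle_request():
        with LoggingContext("example") as ctx:
            ctx.request = "GET-1234"
            yield sleep_a_bit()
            # Still running under "example" here, so subsequent log lines
            # keep the right request id.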
@@ -153,7 +157,7 @@ class DescriptorTestCase(unittest.TestCase):
             def fn(self, arg1):
                 @defer.inlineCallbacks
                 def inner_fn():
-                    with logcontext.PreserveLoggingContext():
+                    with PreserveLoggingContext():
                         yield complete_lookup
                     defer.returnValue(1)
 
@@ -161,10 +165,10 @@ class DescriptorTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 r = yield obj.fn(1)
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
             defer.returnValue(r)
 
         def check_result(r):
@@ -174,18 +178,12 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d1.addCallback(check_result)
 
         # and another
         d2 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d2.addCallback(check_result)
 
         # let the lookup complete
@@ -210,29 +208,25 @@ class DescriptorTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 try:
                     d = obj.fn(1)
                     self.assertEqual(
-                        logcontext.LoggingContext.current_context(),
-                        logcontext.LoggingContext.sentinel,
+                        LoggingContext.current_context(), LoggingContext.sentinel
                     )
                     yield d
                     self.fail("No exception thrown")
                 except SynapseError:
                     pass
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
 
         obj = Cls()
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
 
         return d1
@@ -288,23 +282,20 @@ class CachedListDescriptorTestCase(unittest.TestCase):
 
             @descriptors.cachedList("fn", "args1", inlineCallbacks=True)
             def list_fn(self, args1, arg2):
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 # we want this to behave like an asynchronous function
                 yield run_on_reactor()
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 defer.returnValue(self.mock(args1, arg2))
 
-        with logcontext.LoggingContext() as c1:
+        with LoggingContext() as c1:
             c1.request = "c1"
             obj = Cls()
             obj.mock.return_value = {10: "fish", 20: "chips"}
             d1 = obj.list_fn([10, 20], 2)
-            self.assertEqual(
-                logcontext.LoggingContext.current_context(),
-                logcontext.LoggingContext.sentinel,
-            )
+            self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
             r = yield d1
-            self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+            self.assertEqual(LoggingContext.current_context(), c1)
             obj.mock.assert_called_once_with([10, 20], 2)
             self.assertEqual(r, {10: "fish", 20: "chips"})
             obj.mock.reset_mock()
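The simplified assertions in these descriptor tests all check the same invariant; stated on its own (the helper name is made up for illustration):

    from synapse.logging.context import LoggingContext


    def assert_outside_any_context():
        # Outside every `with LoggingContext(...)` block the "current" context
        # is the sentinel singleton, which is what the tests expect while a
        # cached lookup is still in flight.
        assert LoggingContext.current_context() is LoggingContext.sentinel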
@@ -16,9 +16,8 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError, Deferred
 from twisted.internet.task import Clock
 
-from synapse.util import logcontext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import LoggingContext
 
 from tests.unittest import TestCase
 
@@ -69,14 +68,14 @@ class TimeoutDeferredTest(TestCase):
         @defer.inlineCallbacks
         def blocking():
             non_completing_d = Deferred()
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 try:
                     yield non_completing_d
                 except CancelledError:
                     blocking_was_cancelled[0] = True
                     raise
 
-        with logcontext.LoggingContext("one") as context_one:
+        with LoggingContext("one") as context_one:
             # the errbacks should be run in the test logcontext
             def errback(res, deferred_name):
                 self.assertIs(
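`PreserveLoggingContext` (now imported from `synapse.logging.context`) is what this test uses to wait on a Deferred that may never fire without leaking its logcontext. A sketch of the general pattern, with invented names:

    from twisted.internet import defer

    from synapse.logging.context import PreserveLoggingContext


    @defer.inlineCallbacks
    def wait_for(d):
        # Switch to the sentinel context while waiting on a Deferred that was
        # not created under our logcontext, then restore our own context when
        # the yield resumes.
        with PreserveLoggingContext():
            result = yield d
        defer.returnValue(result)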
@@ -19,7 +19,8 @@ from six.moves import range
 from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
-from synapse.util import Clock, logcontext
+from synapse.logging.context import LoggingContext
+from synapse.util import Clock
 from synapse.util.async_helpers import Linearizer
 
 from tests import unittest
@@ -51,13 +52,13 @@ class LinearizerTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def func(i, sleep=False):
-            with logcontext.LoggingContext("func(%s)" % i) as lc:
+            with LoggingContext("func(%s)" % i) as lc:
                 with (yield linearizer.queue("")):
-                    self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                    self.assertEqual(LoggingContext.current_context(), lc)
                     if sleep:
                         yield Clock(reactor).sleep(0)
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                self.assertEqual(LoggingContext.current_context(), lc)
 
         func(0, sleep=True)
         for i in range(1, 100):
@@ -1,8 +1,14 @@
 import twisted.python.failure
 from twisted.internet import defer, reactor
 
-from synapse.util import Clock, logcontext
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    nested_logging_context,
+    run_in_background,
+)
+from synapse.util import Clock
 
 from .. import unittest
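The widened import list above names the helpers these tests exercise. A rough sketch of how `run_in_background` and `make_deferred_yieldable` combine under the new module; the functions below are invented for illustration:

    from twisted.internet import defer

    from synapse.logging.context import (
        LoggingContext,
        make_deferred_yieldable,
        run_in_background,
    )


    @defer.inlineCallbacks
    def background_work():
        yield defer.succeed(None)


    @defer.inlineCallbacks
    def handler():
        with LoggingContext() as ctx:
            ctx.request = "one"
            # Kick the work off under the current context without waiting...
            d = run_in_background(background_work)
            # ...and wait for it later without losing our own context.
            yield make_deferred_yieldable(d)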
@@ -43,7 +49,7 @@ class LoggingContextTestCase(unittest.TestCase):
             context_one.request = "one"
 
             # fire off function, but don't wait on it.
-            d2 = logcontext.run_in_background(function)
+            d2 = run_in_background(function)
 
             def cb(res):
                 callback_completed[0] = True
@@ -85,7 +91,7 @@ class LoggingContextTestCase(unittest.TestCase):
     def test_run_in_background_with_non_blocking_fn(self):
        @defer.inlineCallbacks
         def nonblocking_function():
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 yield defer.succeed(None)
 
         return self._test_run_in_background(nonblocking_function)
@@ -94,7 +100,7 @@ class LoggingContextTestCase(unittest.TestCase):
         # a function which returns a deferred which looks like it has been
         # called, but is actually paused
         def testfunc():
-            return logcontext.make_deferred_yieldable(_chained_deferred_function())
+            return make_deferred_yieldable(_chained_deferred_function())
 
         return self._test_run_in_background(testfunc)
 
@@ -128,7 +134,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(blocking_function())
+            d1 = make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -144,7 +150,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(_chained_deferred_function())
+            d1 = make_deferred_yieldable(_chained_deferred_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -161,7 +167,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable("bum")
+            d1 = make_deferred_yieldable("bum")
             self._check_test_key("one")
 
             r = yield d1
@@ -170,7 +176,7 @@ class LoggingContextTestCase(unittest.TestCase):
 
     def test_nested_logging_context(self):
         with LoggingContext(request="foo"):
-            nested_context = logcontext.nested_logging_context(suffix="bar")
+            nested_context = nested_logging_context(suffix="bar")
             self.assertEqual(nested_context.request, "foo-bar")
 
 
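As the assertion above shows, `nested_logging_context` derives a child context whose request id is the parent's plus a suffix. A small sketch; the names are arbitrary:

    from synapse.logging.context import LoggingContext, nested_logging_context

    with LoggingContext(request="main"):
        child = nested_logging_context(suffix="retry1")
        # child.request == "main-retry1"
        with child:
            pass  # work done here is logged and accounted against "main-retry1"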
@@ -14,7 +14,7 @@
 # limitations under the License.
 import sys
 
-from synapse.util.logformatter import LogFormatter
+from synapse.logging.formatter import LogFormatter
 
 from tests import unittest
 
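The formatter test only tracks the rename to `synapse.logging.formatter.LogFormatter`. A sketch of attaching it to a handler, assuming it can be constructed like a stock `logging.Formatter` (the handler and logger name are illustrative):

    import logging

    from synapse.logging.formatter import LogFormatter

    handler = logging.StreamHandler()
    # Assumption: LogFormatter subclasses logging.Formatter, so it can be
    # dropped in wherever a formatter is expected.
    handler.setFormatter(LogFormatter())
    logging.getLogger("synapse").addHandler(handler)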
@@ -34,6 +34,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.federation.transport import server as federation_server
 from synapse.http.server import HttpServer
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import PostgresEngine, create_engine
@@ -42,7 +43,6 @@ from synapse.storage.prepare_database import (
     _setup_new_database,
     prepare_database,
 )
-from synapse.util.logcontext import LoggingContext
 from synapse.util.ratelimitutils import FederationRateLimiter
 
 # set this to True to run the tests against postgres instead of sqlite.