Merge branch 'master' of github.com:matrix-org/synapse into develop
commit fa72803490
CHANGES.rst (11 lines changed)

@@ -1,11 +1,10 @@
-Unreleased
-==========
+Changes in synapse v0.26.1 (2018-03-15)
+=======================================
 
-synctl no longer starts the main synapse when using ``-a`` option with workers.
-A new worker file should be added with ``worker_app: synapse.app.homeserver``.
+Bug fixes:
 
-This release also begins the process of renaming a number of the metrics
-reported to prometheus. See `docs/metrics-howto.rst <docs/metrics-howto.rst#block-and-response-metrics-renamed-for-0-27-0>`_.
+* Fix bug where an invalid event caused server to stop functioning correctly,
+  due to parsing and serializing bugs in ujson library.
 
 Changes in synapse v0.26.0 (2018-01-05)
 =======================================
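For context, the fix swaps ujson for simplejson throughout the tree (see the hunks below); the event-creation handler round-trips content before persisting so that bad serialization fails loudly rather than corrupting the server. A minimal sketch of that round-trip guard, using illustrative names rather than Synapse's actual code::

    import simplejson

    def check_roundtrips(content):
        """Raise if `content` cannot be serialized to JSON and parsed back."""
        dump = simplejson.dumps(content)
        simplejson.loads(dump)
        return dump

    check_roundtrips({"msgtype": "m.text", "body": "hello"})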
README.rst (15 lines changed)

@@ -354,6 +354,10 @@ https://matrix.org/docs/projects/try-matrix-now.html (or build your own with one
 Fedora
 ------
 
+Synapse is in the Fedora repositories as ``matrix-synapse``::
+
+    sudo dnf install matrix-synapse
+
 Oleg Girko provides Fedora RPMs at
 https://obs.infoserver.lv/project/monitor/matrix-synapse
 
@@ -890,6 +894,17 @@ This should end with a 'PASSED' result::
 
     PASSED (successes=143)
 
+Running the Integration Tests
+=============================
+
+Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
+a Matrix homeserver integration testing suite, which uses HTTP requests to
+access the API as a Matrix client would. It is able to run Synapse directly from
+the source tree, so installation of the server is not required.
+
+Testing with SyTest is recommended for verifying that changes related to the
+Client-Server API are functioning correctly. See the `installation instructions
+<https://github.com/matrix-org/sytest#installing>`_ for details.
+
 Building Internal API Documentation
 ===================================
 
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.26.0"
+__version__ = "0.26.1"
@@ -17,7 +17,7 @@ from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, RoomID
 from twisted.internet import defer
 
-import ujson as json
+import simplejson as json
 import jsonschema
 from jsonschema import FormatChecker
 
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import ujson as json
+import simplejson as json
 import logging
 
 from canonicaljson import encode_canonical_json
@@ -38,7 +38,7 @@ from canonicaljson import encode_canonical_json
 
 import logging
 import random
-import ujson
+import simplejson
 
 logger = logging.getLogger(__name__)
 
@@ -678,8 +678,8 @@ class EventCreationHandler(object):
 
         # Ensure that we can round trip before trying to persist in db
         try:
-            dump = ujson.dumps(unfreeze(event.content))
-            ujson.loads(dump)
+            dump = simplejson.dumps(unfreeze(event.content))
+            simplejson.loads(dump)
         except Exception:
             logger.exception("Failed to encode content: %r", event.content)
             raise
@@ -37,7 +37,6 @@ from twisted.web.util import redirectTo
 import collections
 import logging
 import urllib
-import ujson
 import simplejson
 
 logger = logging.getLogger(__name__)
@@ -462,7 +461,6 @@ def respond_with_json(request, code, json_object, send_cors=False,
     if canonical_json or synapse.events.USE_FROZEN_DICTS:
         json_bytes = encode_canonical_json(json_object)
     else:
-        # ujson doesn't like frozen_dicts.
         json_bytes = simplejson.dumps(json_object)
 
     return respond_with_json_bytes(
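The comment dropped in this hunk appears to date from when this code path used ujson, which could not serialize Synapse's frozen dicts; simplejson handles dict subclasses as ordinary mappings, so the note no longer applies (the canonical-JSON branch above is unchanged). A minimal illustration, with a stand-in class rather than Synapse's real frozendict::

    import simplejson

    class FrozenDict(dict):
        """Stand-in for an immutable dict subclass; not Synapse's frozendict."""
        def __setitem__(self, key, value):
            raise TypeError("frozen")

    content = FrozenDict({"msgtype": "m.text", "body": "hello"})
    # simplejson treats dict subclasses as plain mappings, so this serializes.
    print(simplejson.dumps(content))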
@@ -19,7 +19,7 @@ allowed to be sent by which side.
 """
 
 import logging
-import ujson as json
+import simplejson as json
 
 
 logger = logging.getLogger(__name__)
@@ -30,7 +30,7 @@ from synapse.http.servlet import (
 
 import logging
 import urllib
-import ujson as json
+import simplejson as json
 
 logger = logging.getLogger(__name__)
 
@@ -23,7 +23,7 @@ import re
 import shutil
 import sys
 import traceback
-import ujson as json
+import simplejson as json
 import urlparse
 
 from twisted.web.server import NOT_DONE_YET
@@ -23,7 +23,7 @@ from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks
 
 import abc
-import ujson as json
+import simplejson as json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -19,7 +19,7 @@ from . import engines
 
 from twisted.internet import defer
 
-import ujson as json
+import simplejson as json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 import logging
-import ujson
+import simplejson
 
 from twisted.internet import defer
 
@@ -85,7 +85,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             )
             rows = []
             for destination, edu in remote_messages_by_destination.items():
-                edu_json = ujson.dumps(edu)
+                edu_json = simplejson.dumps(edu)
                 rows.append((destination, stream_id, now_ms, edu_json))
             txn.executemany(sql, rows)
 
@@ -177,7 +177,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
                 " WHERE user_id = ?"
             )
             txn.execute(sql, (user_id,))
-            message_json = ujson.dumps(messages_by_device["*"])
+            message_json = simplejson.dumps(messages_by_device["*"])
             for row in txn:
                 # Add the message for all devices for this user on this
                 # server.
@@ -199,7 +199,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
                 # Only insert into the local inbox if the device exists on
                 # this server
                 device = row[0]
-                message_json = ujson.dumps(messages_by_device[device])
+                message_json = simplejson.dumps(messages_by_device[device])
                 messages_json_for_user[device] = message_json
 
             if messages_json_for_user:
@@ -253,7 +253,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             messages = []
             for row in txn:
                 stream_pos = row[0]
-                messages.append(ujson.loads(row[1]))
+                messages.append(simplejson.loads(row[1]))
             if len(messages) < limit:
                 stream_pos = current_stream_id
             return (messages, stream_pos)
@@ -389,7 +389,7 @@ class DeviceInboxStore(BackgroundUpdateStore):
             messages = []
             for row in txn:
                 stream_pos = row[0]
-                messages.append(ujson.loads(row[1]))
+                messages.append(simplejson.loads(row[1]))
             if len(messages) < limit:
                 stream_pos = current_stream_id
             return (messages, stream_pos)
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-import ujson as json
+import simplejson as json
 
 from twisted.internet import defer
 
@@ -17,7 +17,7 @@ from twisted.internet import defer
 from synapse.util.caches.descriptors import cached
 
 from canonicaljson import encode_canonical_json
-import ujson as json
+import simplejson as json
 
 from ._base import SQLBaseStore
 
@@ -22,7 +22,7 @@ from synapse.types import RoomStreamToken
 from .stream import lower_bound
 
 import logging
-import ujson as json
+import simplejson as json
 
 logger = logging.getLogger(__name__)
 
@@ -56,7 +56,6 @@ event_counter = metrics.register_counter(
 
 def encode_json(json_object):
     if USE_FROZEN_DICTS:
-        # ujson doesn't like frozen_dicts
        return encode_canonical_json(json_object)
     else:
         return json.dumps(json_object, ensure_ascii=False)
@@ -23,7 +23,7 @@ from twisted.internet import defer
 
 import abc
 import logging
-import ujson as json
+import simplejson as json
 
 
 logger = logging.getLogger(__name__)
@@ -22,7 +22,7 @@ from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 
 import collections
 import logging
-import ujson as json
+import simplejson as json
 import re
 
 logger = logging.getLogger(__name__)
@@ -28,7 +28,7 @@ from synapse.api.constants import Membership, EventTypes
 from synapse.types import get_domain_from_id
 
 import logging
-import ujson as json
+import simplejson as json
 
 logger = logging.getLogger(__name__)
 
@@ -17,7 +17,7 @@ import logging
 from synapse.storage.prepare_database import get_statements
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 
-import ujson
+import simplejson
 
 logger = logging.getLogger(__name__)
 
|
||||||
"max_stream_id_exclusive": max_stream_id + 1,
|
"max_stream_id_exclusive": max_stream_id + 1,
|
||||||
"rows_inserted": 0,
|
"rows_inserted": 0,
|
||||||
}
|
}
|
||||||
progress_json = ujson.dumps(progress)
|
progress_json = simplejson.dumps(progress)
|
||||||
|
|
||||||
sql = (
|
sql = (
|
||||||
"INSERT into background_updates (update_name, progress_json)"
|
"INSERT into background_updates (update_name, progress_json)"
|
||||||
|
|
|
@@ -16,7 +16,7 @@ import logging
 
 from synapse.storage.prepare_database import get_statements
 
-import ujson
+import simplejson
 
 logger = logging.getLogger(__name__)
 
@@ -45,7 +45,7 @@ def run_create(cur, database_engine, *args, **kwargs):
         "max_stream_id_exclusive": max_stream_id + 1,
         "rows_inserted": 0,
     }
-    progress_json = ujson.dumps(progress)
+    progress_json = simplejson.dumps(progress)
 
     sql = (
         "INSERT into background_updates (update_name, progress_json)"
|
@ -16,7 +16,7 @@ from synapse.storage.engines import PostgresEngine
|
||||||
from synapse.storage.prepare_database import get_statements
|
from synapse.storage.prepare_database import get_statements
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import ujson
|
import simplejson
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -49,7 +49,7 @@ def run_create(cur, database_engine, *args, **kwargs):
|
||||||
"rows_inserted": 0,
|
"rows_inserted": 0,
|
||||||
"have_added_indexes": False,
|
"have_added_indexes": False,
|
||||||
}
|
}
|
||||||
progress_json = ujson.dumps(progress)
|
progress_json = simplejson.dumps(progress)
|
||||||
|
|
||||||
sql = (
|
sql = (
|
||||||
"INSERT into background_updates (update_name, progress_json)"
|
"INSERT into background_updates (update_name, progress_json)"
|
||||||
|
|
|
@ -15,7 +15,7 @@
|
||||||
from synapse.storage.prepare_database import get_statements
|
from synapse.storage.prepare_database import get_statements
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import ujson
|
import simplejson
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -44,7 +44,7 @@ def run_create(cur, database_engine, *args, **kwargs):
|
||||||
"max_stream_id_exclusive": max_stream_id + 1,
|
"max_stream_id_exclusive": max_stream_id + 1,
|
||||||
"rows_inserted": 0,
|
"rows_inserted": 0,
|
||||||
}
|
}
|
||||||
progress_json = ujson.dumps(progress)
|
progress_json = simplejson.dumps(progress)
|
||||||
|
|
||||||
sql = (
|
sql = (
|
||||||
"INSERT into background_updates (update_name, progress_json)"
|
"INSERT into background_updates (update_name, progress_json)"
|
||||||
|
|
|
@@ -16,7 +16,7 @@
 from collections import namedtuple
 import logging
 import re
-import ujson as json
+import simplejson as json
 
 from twisted.internet import defer
 
@@ -19,7 +19,7 @@ from synapse.storage.account_data import AccountDataWorkerStore
 from synapse.util.caches.descriptors import cached
 from twisted.internet import defer
 
-import ujson as json
+import simplejson as json
 import logging
 
 logger = logging.getLogger(__name__)
@@ -23,7 +23,7 @@ from canonicaljson import encode_canonical_json
 from collections import namedtuple
 
 import logging
-import ujson as json
+import simplejson as json
 
 logger = logging.getLogger(__name__)
 