Merge branch 'develop' into client_v2_filter

commit 0484d7f6e9
@@ -89,31 +89,31 @@ def prune_event(event):
     return type(event)(allowed_fields)
 
 
-def serialize_event(hs, e, client_event=True):
+def serialize_event(e, time_now_ms, client_event=True):
     # FIXME(erikj): To handle the case of presence events and the like
     if not isinstance(e, EventBase):
         return e
 
+    time_now_ms = int(time_now_ms)
+
     # Should this strip out None's?
     d = {k: v for k, v in e.get_dict().items()}
     if not client_event:
         # set the age and keep all other keys
         if "age_ts" in d["unsigned"]:
-            now = int(hs.get_clock().time_msec())
-            d["unsigned"]["age"] = now - d["unsigned"]["age_ts"]
+            d["unsigned"]["age"] = time_now_ms - d["unsigned"]["age_ts"]
         return d
 
     if "age_ts" in d["unsigned"]:
-        now = int(hs.get_clock().time_msec())
-        d["age"] = now - d["unsigned"]["age_ts"]
+        d["age"] = time_now_ms - d["unsigned"]["age_ts"]
         del d["unsigned"]["age_ts"]
 
     d["user_id"] = d.pop("sender", None)
 
     if "redacted_because" in e.unsigned:
         d["redacted_because"] = serialize_event(
-            hs, e.unsigned["redacted_because"]
+            e.unsigned["redacted_because"], time_now_ms
        )
 
         del d["unsigned"]["redacted_because"]

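The hunk above changes the serializer contract: callers now sample the clock themselves and pass time_now_ms in, rather than serialize_event reaching into hs.get_clock(). A minimal, self-contained sketch of that contract follows; FakeEvent and serialize_event_sketch are illustrative stand-ins, not Synapse code.

import time


class FakeEvent(object):
    """Stand-in for synapse.events.EventBase, just enough for this sketch."""

    def __init__(self, sender, content, age_ts):
        self.sender = sender
        self.content = content
        self.unsigned = {"age_ts": age_ts}

    def get_dict(self):
        return {
            "sender": self.sender,
            "content": self.content,
            "unsigned": dict(self.unsigned),
        }


def serialize_event_sketch(e, time_now_ms, client_event=True):
    # Mirrors the new contract: the timestamp comes from the caller; the
    # serializer never touches hs.get_clock() itself.
    time_now_ms = int(time_now_ms)
    d = e.get_dict()
    if "age_ts" in d["unsigned"]:
        d["age"] = time_now_ms - d["unsigned"]["age_ts"]
        del d["unsigned"]["age_ts"]
    d["user_id"] = d.pop("sender", None)
    return d


if __name__ == "__main__":
    now_ms = int(time.time() * 1000)
    ev = FakeEvent("@alice:example.com", {"body": "hi"}, age_ts=now_ms - 5000)
    print(serialize_event_sketch(ev, now_ms))  # "age" comes out around 5000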
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.util.logutils import log_function
 from synapse.types import UserID
+from synapse.events.utils import serialize_event
 
 from ._base import BaseHandler
 

@@ -78,8 +79,10 @@ class EventStreamHandler(BaseHandler):
                 auth_user, room_ids, pagin_config, timeout
             )
 
+            time_now = self.clock.time_msec()
+
             chunks = [
-                self.hs.serialize_event(e, as_client_event) for e in events
+                serialize_event(e, time_now, as_client_event) for e in events
             ]
 
             chunk = {

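This hunk, and the handler hunks that follow, all apply the same call-site pattern: sample self.clock.time_msec() once per request and reuse it for every event in the chunk, so each serialized event's "age" is measured against the same instant. A hedged, standalone sketch of that pattern; the clock helper and event dicts are stand-ins, not Synapse objects.

import time


def time_msec():
    return int(time.time() * 1000)


def serialize_event(e, time_now_ms, client_event=True):
    # Stand-in for synapse.events.utils.serialize_event.
    return {"event_id": e["event_id"], "age": time_now_ms - e["age_ts"]}


events = [
    {"event_id": "$a:example.com", "age_ts": time_msec() - 2000},
    {"event_id": "$b:example.com", "age_ts": time_msec() - 1000},
]

time_now = time_msec()  # sampled once, like self.clock.time_msec() above
chunk = [serialize_event(e, time_now) for e in events]
print(chunk)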
@@ -18,6 +18,7 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import RoomError
 from synapse.streams.config import PaginationConfig
+from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.util.logcontext import PreserveLoggingContext
 from synapse.types import UserID

@@ -100,9 +101,11 @@ class MessageHandler(BaseHandler):
                 "room_key", next_key
             )
 
+        time_now = self.clock.time_msec()
+
         chunk = {
             "chunk": [
-                self.hs.serialize_event(e, as_client_event) for e in events
+                serialize_event(e, time_now, as_client_event) for e in events
             ],
             "start": pagin_config.from_token.to_string(),
             "end": next_token.to_string(),

@@ -211,7 +214,8 @@ class MessageHandler(BaseHandler):
 
         # TODO: This is duplicating logic from snapshot_all_rooms
         current_state = yield self.state_handler.get_current_state(room_id)
-        defer.returnValue([self.hs.serialize_event(c) for c in current_state])
+        now = self.clock.time_msec()
+        defer.returnValue([serialize_event(c, now) for c in current_state])
 
     @defer.inlineCallbacks
     def snapshot_all_rooms(self, user_id=None, pagin_config=None,

@@ -283,10 +287,11 @@ class MessageHandler(BaseHandler):
 
                 start_token = now_token.copy_and_replace("room_key", token[0])
                 end_token = now_token.copy_and_replace("room_key", token[1])
+                time_now = self.clock.time_msec()
 
                 d["messages"] = {
                     "chunk": [
-                        self.hs.serialize_event(m, as_client_event)
+                        serialize_event(m, time_now, as_client_event)
                         for m in messages
                     ],
                     "start": start_token.to_string(),

@@ -297,7 +302,8 @@ class MessageHandler(BaseHandler):
                     event.room_id
                 )
                 d["state"] = [
-                    self.hs.serialize_event(c) for c in current_state
+                    serialize_event(c, time_now, as_client_event)
+                    for c in current_state
                 ]
             except:
                 logger.exception("Failed to get snapshot")

@@ -320,8 +326,9 @@ class MessageHandler(BaseHandler):
         auth_user = UserID.from_string(user_id)
 
         # TODO: These concurrently
+        time_now = self.clock.time_msec()
         state_tuples = yield self.state_handler.get_current_state(room_id)
-        state = [self.hs.serialize_event(x) for x in state_tuples]
+        state = [serialize_event(x, time_now) for x in state_tuples]
 
         member_event = (yield self.store.get_room_member(
             user_id=user_id,

@@ -360,11 +367,13 @@ class MessageHandler(BaseHandler):
                     "Failed to get member presence of %r", m.user_id
                 )
 
+        time_now = self.clock.time_msec()
+
         defer.returnValue({
             "membership": member_event.membership,
             "room_id": room_id,
             "messages": {
-                "chunk": [self.hs.serialize_event(m) for m in messages],
+                "chunk": [serialize_event(m, time_now) for m in messages],
                 "start": start_token.to_string(),
                 "end": end_token.to_string(),
             },

@@ -16,12 +16,14 @@
 """Contains functions for performing events on rooms."""
 from twisted.internet import defer
 
+from ._base import BaseHandler
+
 from synapse.types import UserID, RoomAlias, RoomID
 from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import StoreError, SynapseError
 from synapse.util import stringutils
 from synapse.util.async import run_on_reactor
-from ._base import BaseHandler
+from synapse.events.utils import serialize_event
 
 import logging
 

@@ -293,8 +295,9 @@ class RoomMemberHandler(BaseHandler):
         yield self.auth.check_joined_room(room_id, user_id)
 
         member_list = yield self.store.get_room_members(room_id=room_id)
+        time_now = self.clock.time_msec()
         event_list = [
-            self.hs.serialize_event(entry)
+            serialize_event(entry, time_now)
             for entry in member_list
         ]
         chunk_data = {

@@ -19,6 +19,7 @@ from twisted.internet import defer
 from synapse.api.errors import SynapseError
 from synapse.streams.config import PaginationConfig
 from .base import ClientV1RestServlet, client_path_pattern
+from synapse.events.utils import serialize_event
 
 import logging
 

@@ -64,14 +65,19 @@ class EventStreamRestServlet(ClientV1RestServlet):
 class EventRestServlet(ClientV1RestServlet):
     PATTERN = client_path_pattern("/events/(?P<event_id>[^/]*)$")
 
+    def __init__(self, hs):
+        super(EventRestServlet, self).__init__(hs)
+        self.clock = hs.get_clock()
+
     @defer.inlineCallbacks
     def on_GET(self, request, event_id):
         auth_user = yield self.auth.get_user_by_req(request)
         handler = self.handlers.event_handler
         event = yield handler.get_event(auth_user, event_id)
 
+        time_now = self.clock.time_msec()
         if event:
-            defer.returnValue((200, self.hs.serialize_event(event)))
+            defer.returnValue((200, serialize_event(event, time_now)))
         else:
             defer.returnValue((404, "Event not found."))
 

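Because the homeserver no longer serializes events itself, servlets that need a timestamp now cache the clock in their constructor, as EventRestServlet does above and RoomTriggerBackfill does below. A hedged sketch of that pattern; BaseServlet, FakeClock and FakeHS are minimal stand-ins for the Synapse classes so the snippet runs on its own.

import time


class FakeClock(object):
    def time_msec(self):
        return int(time.time() * 1000)


class FakeHS(object):
    def get_clock(self):
        return FakeClock()


class BaseServlet(object):
    def __init__(self, hs):
        self.hs = hs


class EventServletSketch(BaseServlet):
    def __init__(self, hs):
        super(EventServletSketch, self).__init__(hs)
        self.clock = hs.get_clock()  # cached once, like EventRestServlet above

    def on_GET(self):
        time_now = self.clock.time_msec()
        return (200, {"served_at_ms": time_now})


print(EventServletSketch(FakeHS()).on_GET())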
@@ -21,6 +21,7 @@ from synapse.api.errors import SynapseError, Codes
 from synapse.streams.config import PaginationConfig
 from synapse.api.constants import EventTypes, Membership
 from synapse.types import UserID, RoomID, RoomAlias
+from synapse.events.utils import serialize_event
 
 import json
 import logging

@@ -363,6 +364,10 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):
 class RoomTriggerBackfill(ClientV1RestServlet):
     PATTERN = client_path_pattern("/rooms/(?P<room_id>[^/]*)/backfill$")
 
+    def __init__(self, hs):
+        super(RoomTriggerBackfill, self).__init__(hs)
+        self.clock = hs.get_clock()
+
     @defer.inlineCallbacks
     def on_GET(self, request, room_id):
         remote_server = urllib.unquote(

@@ -374,7 +379,9 @@ class RoomTriggerBackfill(ClientV1RestServlet):
         handler = self.handlers.federation_handler
         events = yield handler.backfill(remote_server, room_id, limit)
 
-        res = [self.hs.serialize_event(event) for event in events]
+        time_now = self.clock.time_msec()
+
+        res = [serialize_event(event, time_now) for event in events]
         defer.returnValue((200, res))
 
 

@@ -20,7 +20,6 @@
 
 # Imports required for the default HomeServer() implementation
 from synapse.federation import initialize_http_replication
-from synapse.events.utils import serialize_event
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
 from synapse.handlers import Handlers

@@ -124,9 +123,6 @@ class BaseHomeServer(object):
 
         setattr(BaseHomeServer, "get_%s" % (depname), _get)
 
-    def serialize_event(self, e, as_client_event=True):
-        return serialize_event(self, e, as_client_event)
-
     def get_ip_from_request(self, request):
         # May be an X-Forwarding-For header depending on config
         ip_addr = request.getClientIP()

@@ -13,3 +13,48 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+from tests import unittest
+
+from mock import Mock
+
+from ....utils import MockHttpResource, MockKey
+
+from synapse.server import HomeServer
+from synapse.types import UserID
+
+
+PATH_PREFIX = "/_matrix/client/v2_alpha"
+
+
+class V2AlphaRestTestCase(unittest.TestCase):
+    # Consumer must define
+    #   USER_ID = <some string>
+    #   TO_REGISTER = [<list of REST servlets to register>]
+
+    def setUp(self):
+        self.mock_resource = MockHttpResource(prefix=PATH_PREFIX)
+
+        mock_config = Mock()
+        mock_config.signing_key = [MockKey()]
+
+        hs = HomeServer("test",
+            db_pool=None,
+            datastore=Mock(spec=[
+                "insert_client_ip",
+            ]),
+            http_client=None,
+            resource_for_client=self.mock_resource,
+            resource_for_federation=self.mock_resource,
+            config=mock_config,
+        )
+
+        def _get_user_by_token(token=None):
+            return {
+                "user": UserID.from_string(self.USER_ID),
+                "admin": False,
+                "device_id": None,
+            }
+        hs.get_auth().get_user_by_token = _get_user_by_token
+
+        for r in self.TO_REGISTER:
+            r.register_servlets(hs, self.mock_resource)
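The new test base class deliberately leaves USER_ID and TO_REGISTER to the subclass. Purely as a hypothetical illustration, a consumer might look like the following; the import paths and the filter servlet module are assumptions for this sketch, not something this commit adds.

# Hypothetical consumer of V2AlphaRestTestCase. The package path of the base
# class and the `filter` servlet module are assumed here for illustration;
# this diff only introduces the base class itself.
from tests.rest.client.v2_alpha import V2AlphaRestTestCase

from synapse.rest.client.v2_alpha import filter  # assumed servlet module


class FilterTestCase(V2AlphaRestTestCase):
    USER_ID = "@apple:test"
    TO_REGISTER = [filter]

    def test_base_class_wired_up(self):
        # setUp() in V2AlphaRestTestCase has already built the mock
        # homeserver and registered every servlet in TO_REGISTER.
        self.assertNotEqual(self.mock_resource, None)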