Add a replication stream for direct to device messages
parent 55fc17cf4b
commit 1aa3e1d287

@@ -42,6 +42,7 @@ STREAM_NAMES = (
     ("pushers",),
     ("state",),
     ("caches",),
+    ("to_device",),
 )
 
 
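
Adding the stream name here is what makes "to_device" requestable from the replication HTTP resource. Below is a minimal sketch of a polling consumer for just this stream; the endpoint path, the query parameters, and the {position, field_names, rows} response layout are assumptions inferred from the rest of this resource, and the helper names (poll_to_device, handle_message) are invented for illustration.

# Hypothetical polling client for the new "to_device" stream (illustrative only).
import json

import requests

REPLICATION_URL = "http://localhost:8008/_synapse/replication"  # assumed mount point


def handle_message(user_id, device_id, content):
    print("to-device message for %s / %s: %r" % (user_id, device_id, content))


def poll_to_device(last_token, limit=100):
    """Fetch to-device rows written after last_token; return the new token."""
    resp = requests.get(
        REPLICATION_URL,
        params={"to_device": last_token, "limit": limit},
    )
    stream = resp.json().get("to_device")
    if not stream:
        return last_token
    for position, user_id, device_id, message_json in stream["rows"]:
        # Field order mirrors write_header_and_rows() in the to_device handler below.
        handle_message(user_id, device_id, json.loads(message_json))
    return stream["position"]

# Usage against a running homeserver: next_token = poll_to_device(0)
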
@@ -144,6 +145,7 @@ class ReplicationResource(Resource):
             pushers_token,
             state_token,
             caches_token,
+            int(stream_token.to_device_key),
         ))
 
     @request_handler()

@@ -193,6 +195,7 @@ class ReplicationResource(Resource):
         yield self.pushers(writer, current_token, limit, request_streams)
         yield self.state(writer, current_token, limit, request_streams)
         yield self.caches(writer, current_token, limit, request_streams)
+        yield self.to_device(writer, current_token, limit, request_streams)
         self.streams(writer, current_token, request_streams)
 
         logger.info("Replicated %d rows", writer.total)

@@ -398,6 +401,20 @@ class ReplicationResource(Resource):
                 "position", "cache_func", "keys", "invalidation_ts"
             ))
 
+    @defer.inlineCallbacks
+    def to_device(self, writer, current_token, limit, request_streams):
+        current_position = current_token.to_device
+
+        to_device = request_streams.get("to_device")
+
+        if to_device is not None:
+            to_device_rows = yield self.store.get_all_new_device_messages(
+                to_device, current_position, limit
+            )
+            writer.write_header_and_rows("to_device", to_device_rows, (
+                "position", "user_id", "device_id", "message_json"
+            ))
+
 
 class _Writer(object):
     """Writes the streams as a JSON object as the response to the request"""
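
For concreteness, the "to_device" section this handler asks the writer to emit would look roughly like the invented example below, assuming _Writer keeps the same {position, field_names, rows} layout it uses for the other streams; the values are made up.

# Invented example of the serialised "to_device" stream section (illustrative only).
example_response_fragment = {
    "to_device": {
        "position": 53,  # highest stream_id included in this batch
        "field_names": ["position", "user_id", "device_id", "message_json"],
        "rows": [
            [52, "@alice:example.com", "ALICEDEV", '{"type": "m.example.event"}'],
            [53, "@bob:example.com", "BOBDEV", '{"type": "m.example.event"}'],
        ],
    },
}
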
@@ -426,7 +443,7 @@ class _Writer(object):
 
 class _ReplicationToken(collections.namedtuple("_ReplicationToken", (
     "events", "presence", "typing", "receipts", "account_data", "backfill",
-    "push_rules", "pushers", "state", "caches",
+    "push_rules", "pushers", "state", "caches", "to_device",
 ))):
     __slots__ = []
 
@@ -28,3 +28,15 @@ class SlavedDeviceInboxStore(BaseSlavedStore):
     get_to_device_stream_token = DataStore.get_to_device_stream_token.__func__
     get_new_messages_for_device = DataStore.get_new_messages_for_device.__func__
     delete_messages_for_device = DataStore.delete_messages_for_device.__func__
+
+    def stream_positions(self):
+        result = super(SlavedDeviceInboxStore, self).stream_positions()
+        result["to_device"] = self._device_inbox_id_gen.get_current_token()
+        return result
+
+    def process_replication(self, result):
+        stream = result.get("to_device")
+        if stream:
+            self._device_inbox_id_gen.advance(int(stream["position"]))
+
+        return super(SlavedDeviceInboxStore, self).process_replication(result)
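
On a worker, these two hooks plug into the generic replication loop: stream_positions() contributes the token sent with the next replication request, and process_replication() advances the local ID generator when rows for the stream come back. The sketch below is a much-simplified, hypothetical driver for that loop (URL assumed, error handling and deferreds omitted), not the real worker code.

# Hypothetical, simplified replication driver for a slaved store (illustrative only).
import time

import requests

REPLICATION_URL = "http://localhost:8008/_synapse/replication"  # assumed mount point


def replication_loop(slaved_store):
    while True:
        # stream_positions() now includes "to_device" -> current device inbox token.
        args = slaved_store.stream_positions()
        args["limit"] = 100
        result = requests.get(REPLICATION_URL, params=args).json()
        # process_replication() advances _device_inbox_id_gen when a
        # "to_device" section is present in the response.
        slaved_store.process_replication(result)
        time.sleep(1)
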
@@ -40,6 +40,7 @@ class SendToDeviceRestServlet(servlet.RestServlet):
         self.hs = hs
         self.auth = hs.get_auth()
         self.store = hs.get_datastore()
+        self.notifier = hs.get_notifier()
         self.is_mine_id = hs.is_mine_id
         self.txns = HttpTransactionStore()
 
@@ -71,9 +72,14 @@ class SendToDeviceRestServlet(servlet.RestServlet):
                 }
                 for device_id, message_content in by_device.items()
             }
-            local_messages[user_id] = messages_by_device
+            if messages_by_device:
+                local_messages[user_id] = messages_by_device
 
-        yield self.store.add_messages_to_device_inbox(local_messages)
+        stream_id = yield self.store.add_messages_to_device_inbox(local_messages)
+
+        self.notifier.on_new_event(
+            "to_device", stream_id, users=local_messages.keys()
+        )
 
         response = (200, {})
         self.txns.store_client_transaction(request, txn_id, response)
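
The other change in this hunk, the if messages_by_device: guard, means a user whose device map ends up empty no longer gets an entry in local_messages, so they are neither written to the inbox nor included in the notifier poke that follows. A toy illustration of that guard, with the loop copied in simplified form from the servlet and all data invented:

# Toy illustration of the new guard; all values are invented.
content = {
    "messages": {
        "@alice:example.com": {"ALICEDEV": {"body": "hi"}},
        "@bob:example.com": {},  # no devices -> now skipped entirely
    }
}

local_messages = {}
for user_id, by_device in content["messages"].items():
    messages_by_device = {
        device_id: {
            "content": message_content,
            "type": "m.example.event",
            "sender": "@carol:example.com",
        }
        for device_id, message_content in by_device.items()
    }
    if messages_by_device:
        local_messages[user_id] = messages_by_device

assert "@bob:example.com" not in local_messages
assert list(local_messages) == ["@alice:example.com"]
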
@@ -136,5 +136,44 @@ class DeviceInboxStore(SQLBaseStore):
             "delete_messages_for_device", delete_messages_for_device_txn
         )
 
+    def get_all_new_device_messages(self, last_pos, current_pos, limit):
+        """
+        Args:
+            last_pos(int):
+            current_pos(int):
+            limit(int):
+        Returns:
+            A deferred list of rows from the device inbox
+        """
+        if last_pos == current_pos:
+            return defer.succeed([])
+
+        def get_all_new_device_messages_txn(txn):
+            sql = (
+                "SELECT stream_id FROM device_inbox"
+                " WHERE ? < stream_id AND stream_id <= ?"
+                " GROUP BY stream_id"
+                " ORDER BY stream_id ASC"
+                " LIMIT ?"
+            )
+            txn.execute(sql, (last_pos, current_pos, limit))
+            stream_ids = txn.fetchall()
+            if not stream_ids:
+                return []
+            max_stream_id_in_limit = stream_ids[-1]
+
+            sql = (
+                "SELECT stream_id, user_id, device_id, message_json"
+                " FROM device_inbox"
+                " WHERE ? < stream_id AND stream_id <= ?"
+                " ORDER BY stream_id ASC"
+            )
+            txn.execute(sql, (last_pos, max_stream_id_in_limit))
+            return txn.fetchall()
+
+        return self.runInteraction(
+            "get_all_new_device_messages", get_all_new_device_messages_txn
+        )
+
     def get_to_device_stream_token(self):
         return self._device_inbox_id_gen.get_current_token()
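
The two-pass query above exists so a batch never splits rows that share a stream_id: the first SELECT picks at most limit distinct stream_ids in the window, and the second fetches every row up to the highest of those ids. Below is a standalone sketch of the same pattern against an in-memory SQLite table; the schema and data are invented for illustration, and the first-pass ids are unpacked to plain integers before being reused as a bound parameter.

# Self-contained illustration of the limit-by-stream_id pattern (illustrative only).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE device_inbox ("
    " stream_id INTEGER, user_id TEXT, device_id TEXT, message_json TEXT)"
)
# Two rows share stream_id 3: they must always travel in the same batch.
conn.executemany(
    "INSERT INTO device_inbox VALUES (?, ?, ?, ?)",
    [
        (1, "@a:hs", "D1", "{}"),
        (2, "@a:hs", "D1", "{}"),
        (3, "@a:hs", "D1", "{}"),
        (3, "@b:hs", "D2", "{}"),
        (4, "@b:hs", "D2", "{}"),
    ],
)


def get_all_new_device_messages(last_pos, current_pos, limit):
    txn = conn.cursor()
    # Pass 1: find up to `limit` distinct stream_ids in the window.
    txn.execute(
        "SELECT stream_id FROM device_inbox"
        " WHERE ? < stream_id AND stream_id <= ?"
        " GROUP BY stream_id ORDER BY stream_id ASC LIMIT ?",
        (last_pos, current_pos, limit),
    )
    stream_ids = [row[0] for row in txn.fetchall()]
    if not stream_ids:
        return []
    # Pass 2: fetch every row up to the highest selected stream_id, so rows
    # sharing a stream_id are never split across batches.
    txn.execute(
        "SELECT stream_id, user_id, device_id, message_json FROM device_inbox"
        " WHERE ? < stream_id AND stream_id <= ? ORDER BY stream_id ASC",
        (last_pos, stream_ids[-1]),
    )
    return txn.fetchall()


# With limit=3 this returns stream_ids 1, 2 and 3 -- including *both* rows for 3.
print(get_all_new_device_messages(0, 10, 3))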