2015-12-10 10:51:15 -07:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-04 06:39:29 -07:00
|
|
|
# Copyright 2015 OpenMarket Ltd
|
2015-12-10 10:51:15 -07:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from ._base import SQLBaseStore
|
|
|
|
from twisted.internet import defer
|
2016-01-21 08:02:07 -07:00
|
|
|
from synapse.util.caches.descriptors import cachedInlineCallbacks
|
2016-07-04 12:44:55 -06:00
|
|
|
from synapse.types import RoomStreamToken
|
|
|
|
from .stream import lower_bound
|
2015-12-10 10:51:15 -07:00
|
|
|
|
|
|
|
import logging
|
2016-01-19 04:35:50 -07:00
|
|
|
import ujson as json
|
2015-12-10 10:51:15 -07:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per the logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2016-01-04 07:05:37 -07:00
|
|
|
class EventPushActionsStore(SQLBaseStore):
    """Storage for per-user push-notification actions attached to events.

    Each row in the ``event_push_actions`` table records, for one
    (event, user) pair, the push-rule actions that matched (JSON-encoded),
    together with the event's room, stream and topological orderings and
    precomputed ``notif`` / ``highlight`` flags so unread counts can be
    computed relative to a user's read receipt with a simple ``sum()``.
    """

    def __init__(self, hs):
        # Stream ordering of the first event received within the last
        # month; populated by _find_stream_orderings_for_times_txn and
        # used as an upper bound when purging stale push actions.
        self.stream_ordering_month_ago = None
        super(EventPushActionsStore, self).__init__(hs)

    def _set_push_actions_for_event_and_users_txn(self, txn, event, tuples):
        """Insert push actions for an event, one row per interested user,
        and invalidate the unread-count cache for each (room, user) pair.

        Args:
            txn: the database transaction
            event: the event to set actions for
            tuples: list of tuples of (user_id, actions)
        """
        values = []
        for uid, actions in tuples:
            values.append({
                'room_id': event.room_id,
                'event_id': event.event_id,
                'user_id': uid,
                # Actions are stored JSON-encoded.
                'actions': json.dumps(actions),
                'stream_ordering': event.internal_metadata.stream_ordering,
                'topological_ordering': event.depth,
                # Every inserted row counts as one notification; the
                # highlight flag is precomputed so that counting later is
                # a plain SQL sum().
                'notif': 1,
                'highlight': 1 if _action_has_highlight(actions) else 0,
            })

        for uid, __ in tuples:
            txn.call_after(
                self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
                (event.room_id, uid)
            )
        self._simple_insert_many_txn(txn, "event_push_actions", values)

    @cachedInlineCallbacks(num_args=3, lru=True, tree=True, max_entries=5000)
    def get_unread_event_push_actions_by_room_for_user(
        self, room_id, user_id, last_read_event_id
    ):
        """Count a user's unread notifications and highlights in a room.

        Counts event_push_actions rows for the room that are strictly
        after the position (topological, stream) of the user's last-read
        event.

        Args:
            room_id: the room to count in
            user_id: the user to count for
            last_read_event_id: event ID of the user's read receipt

        Returns:
            Deferred dict with integer "notify_count" and
            "highlight_count" keys; both zero when the read-receipt
            event is not found in the events table.
        """
        def _get_unread_event_push_actions_by_room(txn):
            # Look up the orderings of the event the user last read.
            sql = (
                "SELECT stream_ordering, topological_ordering"
                " FROM events"
                " WHERE room_id = ? AND event_id = ?"
            )
            txn.execute(
                sql, (room_id, last_read_event_id)
            )
            results = txn.fetchall()
            if len(results) == 0:
                return {"notify_count": 0, "highlight_count": 0}

            stream_ordering = results[0][0]
            topological_ordering = results[0][1]
            token = RoomStreamToken(
                topological_ordering, stream_ordering
            )

            # Sum the precomputed flags for everything strictly after the
            # read-receipt position (lower_bound with inclusive=False
            # renders the comparison for the right database engine).
            sql = (
                "SELECT sum(notif), sum(highlight)"
                " FROM event_push_actions ea"
                " WHERE"
                " user_id = ?"
                " AND room_id = ?"
                " AND %s"
            ) % (lower_bound(token, self.database_engine, inclusive=False),)

            txn.execute(sql, (user_id, room_id))
            row = txn.fetchone()
            if row:
                return {
                    # sum() yields NULL (None) when no rows match.
                    "notify_count": row[0] or 0,
                    "highlight_count": row[1] or 0,
                }
            else:
                return {"notify_count": 0, "highlight_count": 0}

        ret = yield self.runInteraction(
            "get_unread_event_push_actions_by_room",
            _get_unread_event_push_actions_by_room
        )
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def get_push_action_users_in_range(self, min_stream_ordering, max_stream_ordering):
        """Return the distinct user IDs that have push actions in the
        given (inclusive on both ends) stream-ordering range.
        """
        def f(txn):
            sql = (
                "SELECT DISTINCT(user_id) FROM event_push_actions WHERE"
                " stream_ordering >= ? AND stream_ordering <= ?"
            )
            txn.execute(sql, (min_stream_ordering, max_stream_ordering))
            return [r[0] for r in txn.fetchall()]
        ret = yield self.runInteraction("get_push_action_users_in_range", f)
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def get_unread_push_actions_for_user_in_range(self, user_id,
                                                  min_stream_ordering,
                                                  max_stream_ordering=None,
                                                  limit=20):
        """Fetch a user's unread push actions in a stream-ordering range.

        Two queries are combined: actions in rooms where the user has a
        read receipt (only actions after the receipt position count), and
        actions in rooms where the user has no read receipt at all.

        Args:
            user_id: the user to fetch actions for
            min_stream_ordering: exclusive lower bound on stream ordering
            max_stream_ordering: optional inclusive upper bound
            limit: maximum number of results to return

        Returns:
            Deferred list of dicts with keys "event_id", "room_id",
            "stream_ordering", "actions" (JSON-decoded) and
            "received_ts", sorted newest-first by received_ts and capped
            at `limit`.
        """
        def get_after_receipt(txn):
            # For each room the user has a read receipt in, find the
            # receipt's (topological, stream) position and keep only
            # push actions strictly after it.
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions, "
                "e.received_ts "
                "FROM ("
                " SELECT room_id, user_id, "
                " max(topological_ordering) as topological_ordering, "
                " max(stream_ordering) as stream_ordering "
                " FROM events"
                " NATURAL JOIN receipts_linearized WHERE receipt_type = 'm.read'"
                " GROUP BY room_id, user_id"
                ") AS rl,"
                " event_push_actions AS ep"
                " INNER JOIN events AS e USING (room_id, event_id)"
                " WHERE"
                " ep.room_id = rl.room_id"
                " AND ("
                " ep.topological_ordering > rl.topological_ordering"
                " OR ("
                " ep.topological_ordering = rl.topological_ordering"
                " AND ep.stream_ordering > rl.stream_ordering"
                " )"
                " )"
                " AND ep.stream_ordering > ?"
                " AND ep.user_id = ?"
                " AND ep.user_id = rl.user_id"
            )
            args = [min_stream_ordering, user_id]
            if max_stream_ordering is not None:
                sql += " AND ep.stream_ordering <= ?"
                args.append(max_stream_ordering)
            sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
            args.append(limit)
            txn.execute(sql, args)
            return txn.fetchall()
        after_read_receipt = yield self.runInteraction(
            "get_unread_push_actions_for_user_in_range", get_after_receipt
        )

        def get_no_receipt(txn):
            # Push actions in rooms where the user has no read receipt
            # at all: everything there is unread.
            sql = (
                "SELECT ep.event_id, ep.room_id, ep.stream_ordering, ep.actions,"
                " e.received_ts"
                " FROM event_push_actions AS ep"
                " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id"
                " WHERE ep.room_id not in ("
                " SELECT room_id FROM events NATURAL JOIN receipts_linearized"
                " WHERE receipt_type = 'm.read' AND user_id = ?"
                " GROUP BY room_id"
                ") AND ep.user_id = ? AND ep.stream_ordering > ?"
            )
            args = [user_id, user_id, min_stream_ordering]
            if max_stream_ordering is not None:
                sql += " AND ep.stream_ordering <= ?"
                args.append(max_stream_ordering)
            sql += " ORDER BY ep.stream_ordering DESC LIMIT ?"
            args.append(limit)
            txn.execute(sql, args)
            return txn.fetchall()
        no_read_receipt = yield self.runInteraction(
            "get_unread_push_actions_for_user_in_range", get_no_receipt
        )

        # Make a list of dicts from the two sets of results.
        notifs = [
            {
                "event_id": row[0],
                "room_id": row[1],
                "stream_ordering": row[2],
                "actions": json.loads(row[3]),
                "received_ts": row[4],
            } for row in after_read_receipt + no_read_receipt
        ]

        # Now sort it so it's ordered correctly, since currently it will
        # contain results from the first query, correctly ordered, followed
        # by results from the second query, but we want them all ordered
        # by received_ts (newest first; NULL received_ts sorts last).
        notifs.sort(key=lambda r: -(r['received_ts'] or 0))

        # Now return the first `limit`
        defer.returnValue(notifs[:limit])

    @defer.inlineCallbacks
    def get_time_of_last_push_action_before(self, stream_ordering):
        """Return the received_ts of the earliest event carrying a push
        action with a stream ordering strictly greater than
        `stream_ordering`, or None when there is no such action.

        NOTE(review): despite the name saying "before", the SQL selects
        the first push action *after* the given stream ordering
        (``stream_ordering > ?`` ordered ascending) — confirm callers
        rely on this behavior before renaming or changing it.
        """
        def f(txn):
            sql = (
                "SELECT e.received_ts"
                " FROM event_push_actions AS ep"
                " JOIN events e ON ep.room_id = e.room_id AND ep.event_id = e.event_id"
                " WHERE ep.stream_ordering > ?"
                " ORDER BY ep.stream_ordering ASC"
                " LIMIT 1"
            )
            txn.execute(sql, (stream_ordering,))
            return txn.fetchone()
        result = yield self.runInteraction("get_time_of_last_push_action_before", f)
        defer.returnValue(result[0] if result else None)

    @defer.inlineCallbacks
    def get_latest_push_action_stream_ordering(self):
        """Return the highest stream_ordering in event_push_actions, or
        0 when the table is empty (MAX() returns NULL then).
        """
        def f(txn):
            txn.execute("SELECT MAX(stream_ordering) FROM event_push_actions")
            return txn.fetchone()
        result = yield self.runInteraction(
            "get_latest_push_action_stream_ordering", f
        )
        defer.returnValue(result[0] or 0)

    def _remove_push_actions_for_event_id_txn(self, txn, room_id, event_id):
        """Delete all push actions for a single event.

        Args:
            txn: the database transaction
            room_id: room the event belongs to
            event_id: event to remove actions for
        """
        # Sad that we have to blow away the cache for the whole room here
        txn.call_after(
            self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
            (room_id,)
        )
        txn.execute(
            "DELETE FROM event_push_actions WHERE room_id = ? AND event_id = ?",
            (room_id, event_id)
        )

    def _remove_old_push_actions_before_txn(self, txn, room_id, user_id,
                                            topological_ordering):
        """
        Purges old, stale push actions for a user and room before a given
        topological_ordering

        Args:
            txn: The transaction
            room_id: Room ID to delete from
            user_id: user ID to delete for
            topological_ordering: The lowest topological ordering which will
                not be deleted.
        """
        txn.call_after(
            self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
            (room_id, user_id, )
        )

        # We need to join on the events table to get the received_ts for
        # event_push_actions and sqlite won't let us use a join in a delete so
        # we can't just delete where received_ts < x. Furthermore we can
        # only identify event_push_actions by a tuple of room_id, event_id,
        # so we can't use a subquery.
        # Instead, we look up the stream ordering for the last event in that
        # room received before the threshold time and delete event_push_actions
        # in the room with a stream_ordering before that.
        txn.execute(
            "DELETE FROM event_push_actions "
            " WHERE user_id = ? AND room_id = ? AND "
            " topological_ordering < ? AND stream_ordering < ?",
            (user_id, room_id, topological_ordering, self.stream_ordering_month_ago)
        )

    @defer.inlineCallbacks
    def _find_stream_orderings_for_times(self):
        """Recompute stream_ordering_month_ago inside a database
        interaction.
        """
        yield self.runInteraction(
            "_find_stream_orderings_for_times",
            self._find_stream_orderings_for_times_txn
        )

    def _find_stream_orderings_for_times_txn(self, txn):
        """Look up and cache (on self) the stream ordering of the first
        event received within the last 30 days.
        """
        logger.info("Searching for stream ordering 1 month ago")
        self.stream_ordering_month_ago = self._find_first_stream_ordering_after_ts_txn(
            txn, self._clock.time_msec() - 30 * 24 * 60 * 60 * 1000
        )
        logger.info(
            "Found stream ordering 1 month ago: it's %d",
            self.stream_ordering_month_ago
        )

    def _find_first_stream_ordering_after_ts_txn(self, txn, ts):
        """
        Find the stream_ordering of the first event that was received after
        a given timestamp. This is relatively slow as there is no index on
        received_ts but we can then use this to delete push actions before
        this.

        received_ts must necessarily be in the same order as stream_ordering
        and stream_ordering is indexed, so we manually binary search using
        stream_ordering
        """
        txn.execute("SELECT MAX(stream_ordering) FROM events")
        max_stream_ordering = txn.fetchone()[0]

        # No events at all: nothing could need purging.
        if max_stream_ordering is None:
            return 0

        # Binary search over [0, max_stream_ordering]: narrow the range
        # until range_end is the first ordering received at/after `ts`.
        range_start = 0
        range_end = max_stream_ordering

        sql = (
            "SELECT received_ts FROM events"
            " WHERE stream_ordering > ?"
            " ORDER BY stream_ordering"
            " LIMIT 1"
        )

        while range_end - range_start > 1:
            middle = int((range_end + range_start) / 2)
            txn.execute(sql, (middle,))
            middle_ts = txn.fetchone()[0]
            if ts > middle_ts:
                range_start = middle
            else:
                range_end = middle

        return range_end
|
|
|
|
|
2016-02-03 03:50:49 -07:00
|
|
|
|
|
|
|
def _action_has_highlight(actions):
|
|
|
|
for action in actions:
|
|
|
|
try:
|
|
|
|
if action.get("set_tweak", None) == "highlight":
|
|
|
|
return action.get("value", True)
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return False
|