# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
import yaml
import simplejson as json
from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.appservice import ApplicationService, AppServiceTransaction
from synapse.config._base import ConfigError
from synapse.storage.roommember import RoomsForUser
from synapse.types import UserID
from ._base import SQLBaseStore


logger = logging.getLogger(__name__)
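
# This module provides two stores:
#  * ApplicationServiceStore: loads application service registrations from
#    the config files in hs.config.app_service_config_files and answers
#    lookups by user ID, token and room interest.
#  * ApplicationServiceTransactionStore: tracks per-AS delivery state via the
#    application_services_state and application_services_txns tables.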


class ApplicationServiceStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceStore, self).__init__(hs)
        self.hostname = hs.hostname
        self.services_cache = []
        self._populate_appservice_cache(
            hs.config.app_service_config_files
        )

    def get_app_services(self):
        return defer.succeed(self.services_cache)

    def get_app_service_by_user_id(self, user_id):
        """Retrieve an application service from its user ID.

        Every application service has a particular user ID associated with
        it, but there is nothing about the user ID itself which marks it as
        belonging to an application service. This function lets you map from
        a user ID to an application service.

        Args:
            user_id(str): The user ID to look up.
        Returns:
            synapse.appservice.ApplicationService or None.
        """
        for service in self.services_cache:
            if service.sender == user_id:
                return defer.succeed(service)
        return defer.succeed(None)
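
    # Illustrative example (names are made up): an AS registered with
    # sender_localpart "irc_bridge" on the homeserver "example.com" has
    # service.sender == "@irc_bridge:example.com", so that is the user ID
    # which would match here (see _load_appservice below).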

    def get_app_service_by_token(self, token):
        """Get the application service with the given appservice token.

        Args:
            token (str): The application service token.
        Returns:
            synapse.appservice.ApplicationService or None.
        """
        for service in self.services_cache:
            if service.token == token:
                return defer.succeed(service)
        return defer.succeed(None)

    def get_app_service_rooms(self, service):
        """Get a list of RoomsForUser for this application service.

        Application services may be "interested" in lots of rooms depending
        on the room ID, the room aliases, or the members in the room. This
        function takes all of these into account and returns a list of
        RoomsForUser covering every room ID this application service wants to
        know about.

        Args:
            service: The application service to get a room list for.
        Returns:
            A list of RoomsForUser.
        """
        return self.runInteraction(
            "get_app_service_rooms",
            self._get_app_service_rooms_txn,
            service,
        )

    def _get_app_service_rooms_txn(self, txn, service):
        # get all rooms matching the room ID regex.
        room_entries = self._simple_select_list_txn(
            txn=txn, table="rooms", keyvalues=None, retcols=["room_id"]
        )
        matching_room_list = set([
            r["room_id"] for r in room_entries if
            service.is_interested_in_room(r["room_id"])
        ])

        # resolve room IDs for matching room alias regex.
        room_alias_mappings = self._simple_select_list_txn(
            txn=txn, table="room_aliases", keyvalues=None,
            retcols=["room_id", "room_alias"]
        )
        matching_room_list |= set([
            r["room_id"] for r in room_alias_mappings if
            service.is_interested_in_alias(r["room_alias"])
        ])

        # get all rooms for every user for this AS. This is scoped to users
        # on this HS only.
        user_list = self._simple_select_list_txn(
            txn=txn, table="users", keyvalues=None, retcols=["name"]
        )
        user_list = [
            u["name"] for u in user_list if
            service.is_interested_in_user(u["name"])
        ]
        rooms_for_user_matching_user_id = set()  # RoomsForUser list
        for user_id in user_list:
            # FIXME: This assumes this store is linked with RoomMemberStore :(
            rooms_for_user = self._get_rooms_for_user_where_membership_is_txn(
                txn=txn,
                user_id=user_id,
                membership_list=[Membership.JOIN]
            )
            rooms_for_user_matching_user_id |= set(rooms_for_user)

        # make RoomsForUser tuples for room IDs and aliases which are not in
        # the set built above, i.e. rooms which do not have any AS-registered
        # users in them.
        known_room_ids = [r.room_id for r in rooms_for_user_matching_user_id]
        missing_rooms_for_user = [
            RoomsForUser(r, service.sender, "join") for r in
            matching_room_list if r not in known_room_ids
        ]
        rooms_for_user_matching_user_id |= set(missing_rooms_for_user)

        return rooms_for_user_matching_user_id

    def _load_appservice(self, as_info):
        required_string_fields = [
            # TODO: Add id here when it's stable to release
            "url", "as_token", "hs_token", "sender_localpart"
        ]
        for field in required_string_fields:
            if not isinstance(as_info.get(field), basestring):
                raise KeyError("Required string field: '%s'" % (field,))

        localpart = as_info["sender_localpart"]
        if urllib.quote(localpart) != localpart:
            raise ValueError(
                "sender_localpart must not contain characters which need "
                "URL encoding."
            )
        user = UserID(localpart, self.hostname)
        user_id = user.to_string()

        # namespace checks
        if not isinstance(as_info.get("namespaces"), dict):
            raise KeyError("Requires 'namespaces' object.")
        for ns in ApplicationService.NS_LIST:
            # specific namespaces are optional
            if ns in as_info["namespaces"]:
                # expect a list of dicts with exclusive and regex keys
                for regex_obj in as_info["namespaces"][ns]:
                    if not isinstance(regex_obj, dict):
                        raise ValueError(
                            "Expected namespace entry in %s to be an object,"
                            " but got %s" % (ns, regex_obj)
                        )
                    if not isinstance(regex_obj.get("regex"), basestring):
                        raise ValueError(
                            "Missing/bad type 'regex' key in %s" % (regex_obj,)
                        )
                    if not isinstance(regex_obj.get("exclusive"), bool):
                        raise ValueError(
                            "Missing/bad type 'exclusive' key in %s" % (
                                regex_obj,
                            )
                        )
        return ApplicationService(
            token=as_info["as_token"],
            url=as_info["url"],
            namespaces=as_info["namespaces"],
            hs_token=as_info["hs_token"],
            sender=user_id,
            id=as_info["id"] if "id" in as_info else as_info["as_token"],
        )
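
    # For illustration only: the registration dicts consumed by
    # _load_appservice come from YAML files shaped roughly like this (all
    # values here are made up):
    #
    #   url: "https://bridge.example.com"
    #   as_token: "<token the AS uses when talking to the HS>"
    #   hs_token: "<token the HS uses when talking to the AS>"
    #   sender_localpart: "irc_bridge"
    #   namespaces:
    #     users:
    #       - exclusive: true
    #         regex: "@irc_.*"
    #
    # An optional "id" field is used when present; otherwise as_token doubles
    # as the service ID (see the return statement above).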

    def _populate_appservice_cache(self, config_files):
        """Populates a cache of Application Services from the config files."""
        if not isinstance(config_files, list):
            logger.warning(
                "Expected %s to be a list of AS config files.", config_files
            )
            return

        # Dicts of value -> filename
        seen_as_tokens = {}
        seen_ids = {}

        for config_file in config_files:
            try:
                with open(config_file, 'r') as f:
                    appservice = self._load_appservice(yaml.load(f))
                    if appservice.id in seen_ids:
                        raise ConfigError(
                            "Cannot reuse ID across application services: "
                            "%s (files: %s, %s)" % (
                                appservice.id, config_file,
                                seen_ids[appservice.id],
                            )
                        )
                    seen_ids[appservice.id] = config_file
                    if appservice.token in seen_as_tokens:
                        raise ConfigError(
                            "Cannot reuse as_token across application services: "
                            "%s (files: %s, %s)" % (
                                appservice.token,
                                config_file,
                                seen_as_tokens[appservice.token],
                            )
                        )
                    seen_as_tokens[appservice.token] = config_file
                    logger.info("Loaded application service: %s", appservice)
                    self.services_cache.append(appservice)
            except Exception as e:
                logger.error("Failed to load appservice from '%s'", config_file)
                logger.exception(e)
                raise


class ApplicationServiceTransactionStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceTransactionStore, self).__init__(hs)

    @defer.inlineCallbacks
    def get_appservices_by_state(self, state):
        """Get a list of application services based on their state.

        Args:
            state(ApplicationServiceState): The state to filter on.
        Returns:
            A Deferred which resolves to a list of ApplicationServices, which
            may be empty.
        """
        results = yield self._simple_select_list(
            "application_services_state",
            dict(state=state),
            ["as_id"]
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        as_list = yield self.get_app_services()
        services = []

        for res in results:
            for service in as_list:
                if service.id == res["as_id"]:
                    services.append(service)
        defer.returnValue(services)

    @defer.inlineCallbacks
    def get_appservice_state(self, service):
        """Get the application service state.

        Args:
            service(ApplicationService): The service whose state to get.
        Returns:
            A Deferred which resolves to ApplicationServiceState.
        """
        result = yield self._simple_select_one(
            "application_services_state",
            dict(as_id=service.id),
            ["state"],
            allow_none=True,
            desc="get_appservice_state",
        )
        if result:
            defer.returnValue(result.get("state"))
            return
        defer.returnValue(None)

    def set_appservice_state(self, service, state):
        """Set the application service state.

        Args:
            service(ApplicationService): The service whose state to set.
            state(ApplicationServiceState): The connectivity state to apply.
        Returns:
            A Deferred which resolves when the state was set successfully.
        """
        return self._simple_upsert(
            "application_services_state",
            dict(as_id=service.id),
            dict(state=state)
        )

    def create_appservice_txn(self, service, events):
        """Atomically creates a new transaction for this application service
        with the given list of events.

        Args:
            service(ApplicationService): The service the transaction is for.
            events(list<Event>): A list of events to put in the transaction.
        Returns:
            AppServiceTransaction: A new transaction.
        """
        return self.runInteraction(
            "create_appservice_txn",
            self._create_appservice_txn,
            service, events
        )

    def _create_appservice_txn(self, txn, service, events):
        # work out new txn id (highest txn id for this service += 1)
        # The highest id may be the last one sent (in which case it is
        # last_txn) or it may be the highest in the txns list (which are
        # waiting to be/are being sent)
        last_txn_id = self._get_last_txn(txn, service.id)

        txn.execute(
            "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        highest_txn_id = txn.fetchone()[0]
        if highest_txn_id is None:
            highest_txn_id = 0

        new_txn_id = max(highest_txn_id, last_txn_id) + 1
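        # Worked example (numbers are made up): if the last completed txn
        # (last_txn) is 5 but txns 6 and 7 are still queued in
        # application_services_txns, then highest_txn_id is 7 and the new
        # transaction gets ID 8 rather than 6.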

        # Insert new txn into txn table
        event_ids = json.dumps([e.event_id for e in events])
        txn.execute(
            "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
            "VALUES(?,?,?)",
            (service.id, new_txn_id, event_ids)
        )
        return AppServiceTransaction(
            service=service, id=new_txn_id, events=events
        )

    def complete_appservice_txn(self, txn_id, service):
        """Completes an application service transaction.

        Args:
            txn_id(str): The transaction ID being completed.
            service(ApplicationService): The application service which was
            sent this transaction.
        Returns:
            A Deferred which resolves once the transaction has been marked
            as completed.
        """
        return self.runInteraction(
            "complete_appservice_txn",
            self._complete_appservice_txn,
            txn_id, service
        )

    def _complete_appservice_txn(self, txn, txn_id, service):
        txn_id = int(txn_id)

        # Debugging query: Make sure the txn being completed is EXACTLY +1 from
        # what was there before. If it isn't, we've got problems (e.g. the AS
        # has probably missed some events), so whine loudly but still continue,
        # since it shouldn't fail completion of the transaction.
        last_txn_id = self._get_last_txn(txn, service.id)
        if (last_txn_id + 1) != txn_id:
            logger.error(
                "appservice: Completing a transaction whose ID is not exactly "
                "1 more than the last ID sent to this AS. We've either "
                "dropped events or sent them to the AS out of order. FIX ME. "
                "last_txn=%s completing_txn=%s service_id=%s",
                last_txn_id, txn_id, service.id
            )

        # Set current txn_id for AS to 'txn_id'
        self._simple_upsert_txn(
            txn, "application_services_state", dict(as_id=service.id),
            dict(last_txn=txn_id)
        )

        # Delete txn
        self._simple_delete_txn(
            txn, "application_services_txns",
            dict(txn_id=txn_id, as_id=service.id)
        )
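
    # Lifecycle sketch (descriptive, based on the methods in this class):
    # create_appservice_txn inserts a row into application_services_txns;
    # once the transaction has been delivered (delivery itself happens
    # outside this store), complete_appservice_txn bumps last_txn in
    # application_services_state and deletes the row again, so only unsent
    # transactions remain in application_services_txns.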

    def get_oldest_unsent_txn(self, service):
        """Get the oldest transaction which has not been sent for this
        service.

        Args:
            service(ApplicationService): The app service whose oldest unsent
            transaction to get.
        Returns:
            A Deferred which resolves to an AppServiceTransaction or
            None.
        """
        return self.runInteraction(
            "get_oldest_unsent_appservice_txn",
            self._get_oldest_unsent_txn,
            service
        )

    def _get_oldest_unsent_txn(self, txn, service):
        # Monotonically increasing txn ids, so just select the smallest
        # one in the txns table (we delete them when they are sent)
        txn.execute(
            "SELECT * FROM application_services_txns WHERE as_id=?"
            " ORDER BY txn_id ASC LIMIT 1",
            (service.id,)
        )
        rows = self.cursor_to_dict(txn)
        if not rows:
            return None

        entry = rows[0]

        event_ids = json.loads(entry["event_ids"])
        events = self._get_events_txn(txn, event_ids)

        return AppServiceTransaction(
            service=service, id=entry["txn_id"], events=events
        )

    def _get_last_txn(self, txn, service_id):
        txn.execute(
            "SELECT last_txn FROM application_services_state WHERE as_id=?",
            (service_id,)
        )
        last_txn_id = txn.fetchone()
        if last_txn_id is None or last_txn_id[0] is None:  # no row exists
            return 0
        else:
            return int(last_txn_id[0])  # select 'last_txn' col