# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import urllib

import yaml
from simplejson import JSONDecodeError
import simplejson as json
from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.appservice import ApplicationService, AppServiceTransaction
from synapse.storage.roommember import RoomsForUser
from synapse.types import UserID

from ._base import SQLBaseStore


logger = logging.getLogger(__name__)


class ApplicationServiceStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceStore, self).__init__(hs)
        self.hostname = hs.hostname
        self.services_cache = []
        self._populate_appservice_cache(
            hs.config.app_service_config_files
        )

    def get_app_services(self):
        return defer.succeed(self.services_cache)

    def get_app_service_by_user_id(self, user_id):
        """Retrieve an application service from their user ID.

        All application services have associated with them a particular user ID.
        There is no distinguishing feature on the user ID which indicates it
        represents an application service. This function allows you to map from
        a user ID to an application service.

        Args:
            user_id(str): The user ID to check for an associated application
            service.
        Returns:
            synapse.appservice.ApplicationService or None.
        """
        for service in self.services_cache:
            if service.sender == user_id:
                return defer.succeed(service)
        return defer.succeed(None)

    def get_app_service_by_token(self, token):
        """Get the application service with the given appservice token.

        Args:
            token (str): The application service token.
        Returns:
            synapse.appservice.ApplicationService or None.
        """
        for service in self.services_cache:
            if service.token == token:
                return defer.succeed(service)
        return defer.succeed(None)

    def get_app_service_rooms(self, service):
        """Get a list of RoomsForUser for this application service.

        Application services may be "interested" in lots of rooms depending on
        the room ID, the room aliases, or the members in the room. This function
        takes all of these into account and returns a list of RoomsForUser which
        represent the entire list of room IDs that this application service
        wants to know about.

        Args:
            service(ApplicationService): The application service to get a
            room list for.
        Returns:
            A list of RoomsForUser.
        """
        return self.runInteraction(
            "get_app_service_rooms",
            self._get_app_service_rooms_txn,
            service,
        )

    def _get_app_service_rooms_txn(self, txn, service):
        # get all rooms matching the room ID regex.
        room_entries = self._simple_select_list_txn(
            txn=txn, table="rooms", keyvalues=None, retcols=["room_id"]
        )
        matching_room_list = set([
            r["room_id"] for r in room_entries if
            service.is_interested_in_room(r["room_id"])
        ])

        # resolve room IDs for matching room alias regex.
        room_alias_mappings = self._simple_select_list_txn(
            txn=txn, table="room_aliases", keyvalues=None,
            retcols=["room_id", "room_alias"]
        )
        matching_room_list |= set([
            r["room_id"] for r in room_alias_mappings if
            service.is_interested_in_alias(r["room_alias"])
        ])

        # get all rooms for every user for this AS. This is scoped to users on
        # this HS only.
        user_list = self._simple_select_list_txn(
            txn=txn, table="users", keyvalues=None, retcols=["name"]
        )
        user_list = [
            u["name"] for u in user_list if
            service.is_interested_in_user(u["name"])
        ]
        rooms_for_user_matching_user_id = set()  # RoomsForUser list
        for user_id in user_list:
            # FIXME: This assumes this store is linked with RoomMemberStore :(
            rooms_for_user = self._get_rooms_for_user_where_membership_is_txn(
                txn=txn,
                user_id=user_id,
                membership_list=[Membership.JOIN]
            )
            rooms_for_user_matching_user_id |= set(rooms_for_user)

        # make RoomsForUser tuples for room IDs and aliases which are not in
        # the main rooms_for_user set - i.e. rooms which do not have any AS
        # registered users in them.
        known_room_ids = [r.room_id for r in rooms_for_user_matching_user_id]
        missing_rooms_for_user = [
            RoomsForUser(r, service.sender, "join") for r in
            matching_room_list if r not in known_room_ids
        ]
        rooms_for_user_matching_user_id |= set(missing_rooms_for_user)

        return rooms_for_user_matching_user_id

    def _parse_services_dict(self, results):
        # SQL results in the form:
        # [
        #   {
        #     'regex': "something",
        #     'url': "something",
        #     'namespace': enum,
        #     'as_id': 0,
        #     'token': "something",
        #     'hs_token': "otherthing",
        #     'id': 0
        #   }
        # ]
        services = {}
        for res in results:
            as_token = res["token"]
            if as_token is None:
                continue
            if as_token not in services:
                # add the service
                services[as_token] = {
                    "id": res["id"],
                    "url": res["url"],
                    "token": as_token,
                    "hs_token": res["hs_token"],
                    "sender": res["sender"],
                    "namespaces": {
                        ApplicationService.NS_USERS: [],
                        ApplicationService.NS_ALIASES: [],
                        ApplicationService.NS_ROOMS: []
                    }
                }
            # add the namespace regex if one exists
            ns_int = res["namespace"]
            if ns_int is None:
                continue
            try:
                services[as_token]["namespaces"][
                    ApplicationService.NS_LIST[ns_int]].append(
                    json.loads(res["regex"])
                )
            except IndexError:
                logger.error("Bad namespace enum '%s'. %s", ns_int, res)
            except JSONDecodeError:
                logger.error("Bad regex object '%s'", res["regex"])

        service_list = []
        for service in services.values():
            service_list.append(ApplicationService(
                token=service["token"],
                url=service["url"],
                namespaces=service["namespaces"],
                hs_token=service["hs_token"],
                sender=service["sender"],
                id=service["id"]
            ))
        return service_list

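    # A minimal, illustrative sketch of the YAML structure _load_appservice
    # expects (values below are examples only; the namespace keys correspond
    # to ApplicationService.NS_USERS/NS_ALIASES/NS_ROOMS):
    #
    #   url: "http://localhost:8009"
    #   as_token: "<application service token>"
    #   hs_token: "<homeserver token>"
    #   sender_localpart: "irc_bridge"
    #   namespaces:
    #     users:
    #       - regex: "@irc_.*"
    #         exclusive: true
    #     aliases: []
    #     rooms: []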
    def _load_appservice(self, as_info):
        required_string_fields = [
            "url", "as_token", "hs_token", "sender_localpart"
        ]
        for field in required_string_fields:
            if not isinstance(as_info.get(field), basestring):
                raise KeyError("Required string field: '%s'" % (field,))

        localpart = as_info["sender_localpart"]
        if urllib.quote(localpart) != localpart:
            raise ValueError(
                "sender_localpart must only contain characters which do not "
                "get URL encoded."
            )
        user = UserID(localpart, self.hostname)
        user_id = user.to_string()
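        # e.g. a sender_localpart of "irc_bridge" on the homeserver
        # "example.com" yields the AS user ID "@irc_bridge:example.com"
        # (illustrative values).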

        # namespace checks
        if not isinstance(as_info.get("namespaces"), dict):
            raise KeyError("Requires 'namespaces' object.")
        for ns in ApplicationService.NS_LIST:
            # specific namespaces are optional
            if ns in as_info["namespaces"]:
                # expect a list of dicts with exclusive and regex keys
                for regex_obj in as_info["namespaces"][ns]:
                    if not isinstance(regex_obj, dict):
                        raise ValueError(
                            "Expected namespace entry in %s to be an object,"
                            " but got %s" % (ns, regex_obj)
                        )
                    if not isinstance(regex_obj.get("regex"), basestring):
                        raise ValueError(
                            "Missing/bad type 'regex' key in %s" % (regex_obj,)
                        )
                    if not isinstance(regex_obj.get("exclusive"), bool):
                        raise ValueError(
                            "Missing/bad type 'exclusive' key in %s" % (
                                regex_obj,
                            )
                        )
        return ApplicationService(
            token=as_info["as_token"],
            url=as_info["url"],
            namespaces=as_info["namespaces"],
            hs_token=as_info["hs_token"],
            sender=user_id,
            id=as_info["as_token"]  # the token is the only unique thing here
        )

    def _populate_appservice_cache(self, config_files):
        """Populates a cache of Application Services from the config files."""
        if not isinstance(config_files, list):
            logger.warning(
                "Expected %s to be a list of AS config files.", config_files
            )
            return

        for config_file in config_files:
            try:
                with open(config_file, 'r') as f:
                    appservice = self._load_appservice(yaml.load(f))
                    logger.info("Loaded application service: %s", appservice)
                    self.services_cache.append(appservice)
            except Exception as e:
                logger.error("Failed to load appservice from '%s'", config_file)
                logger.exception(e)


class ApplicationServiceTransactionStore(SQLBaseStore):

    def __init__(self, hs):
        super(ApplicationServiceTransactionStore, self).__init__(hs)

    @defer.inlineCallbacks
    def get_appservices_by_state(self, state):
        """Get a list of application services based on their state.

        Args:
            state(ApplicationServiceState): The state to filter on.
        Returns:
            A Deferred which resolves to a list of ApplicationServices, which
            may be empty.
        """
        results = yield self._simple_select_list(
            "application_services_state",
            dict(state=state),
            ["as_id"]
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        as_list = yield self.get_app_services()
        services = []

        for res in results:
            for service in as_list:
                if service.id == res["as_id"]:
                    services.append(service)
        defer.returnValue(services)

    @defer.inlineCallbacks
    def get_appservice_state(self, service):
        """Get the application service state.

        Args:
            service(ApplicationService): The service whose state to get.
        Returns:
            A Deferred which resolves to ApplicationServiceState.
        """
        result = yield self._simple_select_one(
            "application_services_state",
            dict(as_id=service.id),
            ["state"],
            allow_none=True
        )
        if result:
            defer.returnValue(result.get("state"))
            return
        defer.returnValue(None)

    def set_appservice_state(self, service, state):
        """Set the application service state.

        Args:
            service(ApplicationService): The service whose state to set.
            state(ApplicationServiceState): The connectivity state to apply.
        Returns:
            A Deferred which resolves when the state was set successfully.
        """
        return self._simple_upsert(
            "application_services_state",
            dict(as_id=service.id),
            dict(state=state)
        )

    def create_appservice_txn(self, service, events):
        """Atomically creates a new transaction for this application service
        with the given list of events.

        Args:
            service(ApplicationService): The service the transaction is for.
            events(list<Event>): A list of events to put in the transaction.
        Returns:
            AppServiceTransaction: A new transaction.
        """
        return self.runInteraction(
            "create_appservice_txn",
            self._create_appservice_txn,
            service, events
        )

    def _create_appservice_txn(self, txn, service, events):
        # work out new txn id (highest txn id for this service += 1)
        # The highest id may be the last one sent (in which case it is last_txn)
        # or it may be the highest in the txns list (which are waiting to be/are
        # being sent)
        last_txn_id = self._get_last_txn(txn, service.id)

        result = txn.execute(
            "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        highest_txn_id = result.fetchone()[0]
        if highest_txn_id is None:
            highest_txn_id = 0

        new_txn_id = max(highest_txn_id, last_txn_id) + 1
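        # e.g. if the last completed txn was 4 (last_txn) and txns 5 and 6 are
        # still queued in application_services_txns, highest_txn_id is 6 and
        # the new txn id becomes 7 (illustrative numbers).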

        # Insert new txn into txn table
        event_ids = [e.event_id for e in events]
        txn.execute(
            "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
            "VALUES(?,?,?)",
            (service.id, new_txn_id, json.dumps(event_ids))
        )
        return AppServiceTransaction(
            service=service, id=new_txn_id, events=events
        )

    def complete_appservice_txn(self, txn_id, service):
        """Completes an application service transaction.

        Args:
            txn_id(str): The transaction ID being completed.
            service(ApplicationService): The application service which was sent
            this transaction.
        Returns:
            A Deferred which resolves when the transaction has been marked as
            completed.
        """
        return self.runInteraction(
            "complete_appservice_txn",
            self._complete_appservice_txn,
            txn_id, service
        )

    def _complete_appservice_txn(self, txn, txn_id, service):
        txn_id = int(txn_id)

        # Debugging query: Make sure the txn being completed is EXACTLY +1 from
        # what was there before. If it isn't, we've got problems (e.g. the AS
        # has probably missed some events), so whine loudly but still continue,
        # since it shouldn't fail completion of the transaction.
        last_txn_id = self._get_last_txn(txn, service.id)
        if (last_txn_id + 1) != txn_id:
            logger.error(
                "appservice: Completing a transaction whose ID is not exactly "
                "1 more than the last ID sent to this AS. We've either "
                "dropped events or sent it to the AS out of order. FIX ME. "
                "last_txn=%s completing_txn=%s service_id=%s",
                last_txn_id, txn_id, service.id
            )

        # Set current txn_id for AS to 'txn_id'
        self._simple_upsert_txn(
            txn, "application_services_state", dict(as_id=service.id),
            dict(last_txn=txn_id)
        )

        # Delete txn
        self._simple_delete_txn(
            txn, "application_services_txns",
            dict(txn_id=txn_id, as_id=service.id)
        )

    def get_oldest_unsent_txn(self, service):
        """Get the oldest transaction which has not been sent for this
        service.

        Args:
            service(ApplicationService): The app service to get the oldest txn
            for.
        Returns:
            A Deferred which resolves to an AppServiceTransaction or
            None.
        """
        return self.runInteraction(
            "get_oldest_unsent_appservice_txn",
            self._get_oldest_unsent_txn,
            service
        )

    def _get_oldest_unsent_txn(self, txn, service):
        # Monotonically increasing txn ids, so just select the smallest
        # one in the txns table (we delete them when they are sent)
        result = txn.execute(
            "SELECT MIN(txn_id), * FROM application_services_txns WHERE as_id=?",
            (service.id,)
        )
        entry = self.cursor_to_dict(result)[0]
        if not entry or entry["txn_id"] is None:
            # the MIN(txn_id) aggregate forces a row to be returned even when
            # nothing matches, so check the txn_id column for None rather than
            # relying on an empty result.
            return None

        event_ids = json.loads(entry["event_ids"])
        events = self._get_events_txn(txn, event_ids)

        return AppServiceTransaction(
            service=service, id=entry["txn_id"], events=events
        )

    def _get_last_txn(self, txn, service_id):
        result = txn.execute(
            "SELECT last_txn FROM application_services_state WHERE as_id=?",
            (service_id,)
        )
        last_txn_id = result.fetchone()
        if last_txn_id is None or last_txn_id[0] is None:  # no row exists
            return 0
        else:
            return int(last_txn_id[0])  # select 'last_txn' col