2014-08-12 08:10:52 -06:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 06:21:39 -07:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-08-12 08:10:52 -06:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
import logging
|
|
|
|
|
|
|
|
from synapse.api.errors import StoreError
|
2014-12-05 09:20:48 -07:00
|
|
|
from synapse.events import FrozenEvent
|
|
|
|
from synapse.events.utils import prune_event
|
2014-09-12 10:11:00 -06:00
|
|
|
from synapse.util.logutils import log_function
|
2014-10-29 19:21:33 -06:00
|
|
|
from synapse.util.logcontext import PreserveLoggingContext, LoggingContext
|
2015-02-11 08:01:15 -07:00
|
|
|
from synapse.util.lrucache import LruCache
|
2015-03-04 09:04:46 -07:00
|
|
|
import synapse.metrics
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2014-11-14 04:16:50 -07:00
|
|
|
from twisted.internet import defer
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2015-02-23 11:41:58 -07:00
|
|
|
from collections import namedtuple, OrderedDict
|
2015-03-17 11:19:22 -06:00
|
|
|
import functools
|
2015-02-11 07:23:10 -07:00
|
|
|
import simplejson as json
|
2014-10-29 10:59:24 -06:00
|
|
|
import sys
|
2014-10-28 04:34:05 -06:00
|
|
|
import time
|
2014-08-13 09:27:14 -06:00
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
# Module-level loggers and metrics used by every storage class below.
logger = logging.getLogger(__name__)

# Raw SQL statements/args/timings go to a dedicated logger so they can be
# enabled independently of the rest of the storage logging.
sql_logger = logging.getLogger("synapse.storage.SQL")
# Transaction start/end lines (see SQLBaseStore.runInteraction).
transaction_logger = logging.getLogger("synapse.storage.txn")

metrics = synapse.metrics.get_metrics_for("synapse.storage")

# Time (ms) a transaction spent queued before the db pool ran it.
sql_scheduling_timer = metrics.register_distribution("schedule_time")

# Per-statement execution time, labelled by the SQL verb (SELECT, INSERT, ...).
sql_query_timer = metrics.register_distribution("query_time", labels=["verb"])
# Whole-transaction time, labelled by the caller-supplied description.
sql_txn_timer = metrics.register_distribution("transaction_time", labels=["desc"])
# Time spent in the various phases of _get_event_txn / _get_event_from_row_txn.
sql_getevents_timer = metrics.register_distribution("getEvents_time", labels=["desc"])

# Registry of every @cached function's cache (plus the "*getEvent*" LruCache),
# exported as a single labelled size metric below.
caches_by_name = {}
cache_counter = metrics.register_cache(
    "cache",
    lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
    labels=["name"],
)
|
2015-03-04 12:45:16 -07:00
|
|
|
|
|
|
|
|
2015-02-19 10:29:39 -07:00
|
|
|
# TODO(paul):
|
|
|
|
# * more generic key management
|
2015-02-19 11:36:02 -07:00
|
|
|
# * consider other eviction strategies - LRU?
|
|
|
|
def cached(max_entries=1000):
    """ A method decorator that applies a memoizing cache around the function.

    The function is presumed to take one additional argument, which is used as
    the key for the cache. Cache hits are served directly from the cache;
    misses use the function body to generate the value.

    The wrapped function has an additional member, a callable called
    "invalidate". This can be used to remove individual entries from the cache.

    The wrapped function has another additional callable, called "prefill",
    which can be used to insert values into the cache specifically, without
    calling the calculation function.
    """
    def wrap(orig):
        # Insertion-ordered dict gives us cheap FIFO eviction via
        # popitem(last=False).
        cache = OrderedDict()
        name = orig.__name__

        # Expose the cache to the module-level size metric.
        caches_by_name[name] = cache

        def prefill(key, value):
            # BUG FIX: this previously evicted while len(cache) > max_entries,
            # which only shrank the cache *to* max_entries before inserting,
            # leaving max_entries + 1 entries afterwards. Evicting while
            # len(cache) >= max_entries guarantees the cache never exceeds
            # max_entries once `value` is inserted.
            while len(cache) >= max_entries:
                cache.popitem(last=False)

            cache[key] = value

        @functools.wraps(orig)
        @defer.inlineCallbacks
        def wrapped(self, key):
            if key in cache:
                cache_counter.inc_hits(name)
                defer.returnValue(cache[key])

            cache_counter.inc_misses(name)
            # Note: the resolved value is cached, not the Deferred itself.
            ret = yield orig(self, key)
            prefill(key, ret)
            defer.returnValue(ret)

        def invalidate(key):
            # Missing keys are ignored on purpose.
            cache.pop(key, None)

        wrapped.invalidate = invalidate
        wrapped.prefill = prefill
        return wrapped

    return wrap
|
2015-02-19 10:29:39 -07:00
|
|
|
|
|
|
|
|
2014-09-12 07:37:55 -06:00
|
|
|
class LoggingTransaction(object):
    """An object that almost-transparently proxies for the 'txn' object
    passed to the constructor. Adds logging and metrics to the .execute()
    method."""
    # __slots__ keeps the proxy lightweight and makes sure only these two
    # attributes live on the proxy itself; everything else is forwarded.
    __slots__ = ["txn", "name"]

    def __init__(self, txn, name):
        # Must use object.__setattr__ here: our own __setattr__ (below)
        # forwards writes to the wrapped txn, which would be wrong for the
        # proxy's own attributes.
        object.__setattr__(self, "txn", txn)
        object.__setattr__(self, "name", name)

    def __getattr__(self, name):
        # Any attribute not defined on the proxy is read from the real txn.
        return getattr(self.txn, name)

    def __setattr__(self, name, value):
        # Attribute writes always go to the real txn, never the proxy.
        setattr(self.txn, name, value)

    def execute(self, sql, *args, **kwargs):
        # TODO(paul): Maybe use 'info' and 'debug' for values?
        sql_logger.debug("[SQL] {%s} %s", self.name, sql)

        try:
            if args and args[0]:
                values = args[0]
                sql_logger.debug(
                    "[SQL values] {%s} " + ", ".join(("<%r>",) * len(values)),
                    self.name,
                    *values
                )
        except:
            # Don't let logging failures stop SQL from working
            pass

        # All timings in this module are in milliseconds.
        start = time.time() * 1000
        try:
            return self.txn.execute(
                sql, *args, **kwargs
            )
        except:
            logger.exception("[SQL FAIL] {%s}", self.name)
            raise
        finally:
            # Runs on both success and failure, so failed statements are
            # timed too. The metric is labelled by the SQL verb (first word).
            msecs = (time.time() * 1000) - start
            sql_logger.debug("[SQL time] {%s} %f", self.name, msecs)
            sql_query_timer.inc_by(msecs, sql.split()[0])
|
2014-09-12 07:37:55 -06:00
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2015-02-10 07:50:53 -07:00
|
|
|
class PerformanceCounters(object):
    """Accumulates per-key call counts and cumulative durations, and renders
    the busiest keys over an interval as a human-readable summary string."""

    def __init__(self):
        # Running totals since construction, and the snapshot taken at the
        # end of the previous interval() call (used to compute deltas).
        self.current_counters = {}
        self.previous_counters = {}

    def update(self, key, start_time, end_time=None):
        """Record one call under `key` lasting from start_time to end_time
        (both in ms). If end_time is omitted, "now" is used. Returns the
        end time so callers can chain measurements."""
        if end_time is None:
            end_time = time.time() * 1000
        duration = end_time - start_time
        count, cum_time = self.current_counters.get(key, (0, 0))
        self.current_counters[key] = (count + 1, cum_time + duration)
        return end_time

    def interval(self, interval_duration, limit=3):
        """Return a summary of the top `limit` keys by share of
        `interval_duration` spent in them since the last call, e.g.
        "foo(12): 34.500%, bar(3): 1.200%". Resets the interval baseline."""
        stats = []
        for key, (count, cum_time) in self.current_counters.items():
            prev_count, prev_time = self.previous_counters.get(key, (0, 0))
            stats.append((
                (cum_time - prev_time) / interval_duration,
                count - prev_count,
                key,
            ))

        # Snapshot totals so the next interval measures only new activity.
        self.previous_counters = dict(self.current_counters)

        busiest = sorted(stats, reverse=True)[:limit]
        return ", ".join(
            "%s(%d): %.3f%%" % (key, count, 100 * ratio)
            for ratio, count, key in busiest
        )
|
|
|
|
|
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
class SQLBaseStore(object):
    # Counter used to build per-transaction log names in runInteraction;
    # wrapped modulo (sys.maxint - 1) there, so uniqueness is best-effort.
    _TXN_ID = 0
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def __init__(self, hs):
    """Set up db pool, clock, perf counters and the per-event cache from
    the given homeserver object."""
    self.hs = hs
    self._db_pool = hs.get_db_pool()
    self._clock = hs.get_clock()

    # Running totals (ms) of time spent inside db transactions; sampled by
    # the loop in start_profiling() to compute a utilisation ratio.
    self._previous_txn_total_time = 0
    self._current_txn_total_time = 0
    self._previous_loop_ts = 0

    # TODO(paul): These can eventually be removed once the metrics code
    # is running in mainline, and we have some nice monitoring frontends
    # to watch it
    self._txn_perf_counters = PerformanceCounters()
    self._get_event_counters = PerformanceCounters()

    # LRU cache of event_id -> per-flag result dicts (see _get_event_txn).
    self._get_event_cache = LruCache(hs.config.event_cache_size)

    # Pretend the getEventCache is just another named cache
    caches_by_name["*getEvent*"] = self._get_event_cache
|
2015-02-11 08:01:15 -07:00
|
|
|
|
2015-02-09 07:22:52 -07:00
|
|
|
def start_profiling(self):
    """Start a periodic (10s) log line summarising the fraction of wall
    time spent in database transactions and the busiest transaction /
    get-event counters over the interval."""
    self._previous_loop_ts = self._clock.time_msec()

    def loop():
        # Snapshot the running total *before* overwriting the previous
        # value; the delta is the txn time accrued this interval.
        curr = self._current_txn_total_time
        prev = self._previous_txn_total_time
        self._previous_txn_total_time = curr

        time_now = self._clock.time_msec()
        time_then = self._previous_loop_ts
        self._previous_loop_ts = time_now

        # Fraction of the elapsed interval spent inside transactions.
        ratio = (curr - prev)/(time_now - time_then)

        top_three_counters = self._txn_perf_counters.interval(
            time_now - time_then, limit=3
        )

        top_3_event_counters = self._get_event_counters.interval(
            time_now - time_then, limit=3
        )

        logger.info(
            "Total database time: %.3f%% {%s} {%s}",
            ratio * 100, top_three_counters, top_3_event_counters
        )

    # 10000ms interval between log lines.
    self._clock.looping_call(loop, 10000)
|
2015-02-09 07:22:52 -07:00
|
|
|
|
2014-10-29 19:21:33 -06:00
|
|
|
@defer.inlineCallbacks
def runInteraction(self, desc, func, *args, **kwargs):
    """Wraps the .runInteraction() method on the underlying db_pool.

    `func` is called with a LoggingTransaction (plus *args/**kwargs) inside
    a db thread; `desc` labels the transaction in logs and metrics.
    Returns a Deferred firing with func's result.
    """
    # Captured now (on the reactor thread) so the db thread can re-attach
    # the caller's logging context inside inner_func.
    current_context = LoggingContext.current_context()

    # Used below to measure how long the transaction sat queued in the
    # pool before actually starting (ms).
    start_time = time.time() * 1000

    def inner_func(txn, *args, **kwargs):
        # Runs on a db-pool thread, hence the fresh LoggingContext with the
        # caller's context copied in.
        with LoggingContext("runInteraction") as context:
            current_context.copy_to(context)
            start = time.time() * 1000
            txn_id = self._TXN_ID

            # We don't really need these to be unique, so lets stop it from
            # growing really large.
            self._TXN_ID = (self._TXN_ID + 1) % (sys.maxint - 1)

            name = "%s-%x" % (desc, txn_id, )

            # Queue time = now minus when runInteraction was invoked.
            sql_scheduling_timer.inc_by(time.time() * 1000 - start_time)
            transaction_logger.debug("[TXN START] {%s}", name)
            try:
                return func(LoggingTransaction(txn, name), *args, **kwargs)
            except:
                logger.exception("[TXN FAIL] {%s}", name)
                raise
            finally:
                # Timing/metrics run for both success and failure paths.
                end = time.time() * 1000
                duration = end - start

                transaction_logger.debug("[TXN END] {%s} %f", name, duration)

                self._current_txn_total_time += duration
                self._txn_perf_counters.update(desc, start, end)
                sql_txn_timer.inc_by(duration, desc)

    # PreserveLoggingContext stops the context being lost while we yield
    # on the thread-pool Deferred.
    with PreserveLoggingContext():
        result = yield self._db_pool.runInteraction(
            inner_func, *args, **kwargs
        )
    defer.returnValue(result)
|
2014-09-12 06:57:24 -06:00
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
def cursor_to_dict(self, cursor):
    """Converts a SQL cursor into a list of dicts.

    Args:
        cursor : The DBAPI cursor which has executed a query.
    Returns:
        A list of dicts, one per fetched row, keyed by column header.
    """
    # Column names are the first element of each entry in
    # cursor.description (PEP 249).
    headers = [column[0] for column in cursor.description]
    return [dict(zip(headers, row)) for row in cursor.fetchall()]
|
|
|
|
|
2015-03-11 11:19:17 -06:00
|
|
|
def _execute(self, desc, decoder, query, *args):
|
2014-08-12 08:10:52 -06:00
|
|
|
"""Runs a single query for a result set.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
decoder - The function which can resolve the cursor results to
|
|
|
|
something meaningful.
|
|
|
|
query - The query string to execute
|
|
|
|
*args - Query args.
|
|
|
|
Returns:
|
|
|
|
The result of decoder(results)
|
|
|
|
"""
|
|
|
|
def interaction(txn):
|
|
|
|
cursor = txn.execute(query, args)
|
2014-08-14 09:02:10 -06:00
|
|
|
if decoder:
|
|
|
|
return decoder(cursor)
|
|
|
|
else:
|
2014-08-15 03:26:35 -06:00
|
|
|
return cursor.fetchall()
|
2014-08-14 09:02:10 -06:00
|
|
|
|
2015-03-11 11:19:17 -06:00
|
|
|
return self.runInteraction(desc, interaction)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2015-03-11 11:08:57 -06:00
|
|
|
def _execute_and_decode(self, desc, query, *args):
    # Convenience wrapper: run `query` and return the rows as a list of
    # dicts keyed by column name (via cursor_to_dict).
    return self._execute(desc, self.cursor_to_dict, query, *args)
|
2014-08-13 09:27:14 -06:00
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
# "Simple" SQL API methods that operate on a single table with no JOINs,
|
|
|
|
# no complex WHERE clauses, just a dict of values for columns.
|
|
|
|
|
2014-10-13 09:39:15 -06:00
|
|
|
def _simple_insert(self, table, values, or_replace=False, or_ignore=False):
    """Executes an INSERT query on the named table.

    Args:
        table : string giving the table name
        values : dict of new column names and values for them
        or_replace : bool; if True performs an INSERT OR REPLACE
        or_ignore : bool; if True (and or_replace is False) performs an
            INSERT OR IGNORE
    Returns:
        A deferred yielding the lastrowid of the insert.
    """
    return self.runInteraction(
        "_simple_insert",
        self._simple_insert_txn,
        table,
        values,
        or_replace=or_replace,
        or_ignore=or_ignore,
    )
|
|
|
|
|
2014-09-12 10:11:00 -06:00
|
|
|
@log_function
def _simple_insert_txn(self, txn, table, values, or_replace=False,
                       or_ignore=False):
    """Transaction-scoped INSERT of one row; returns txn.lastrowid.
    or_replace takes precedence over or_ignore if both are set."""
    if or_replace:
        verb = "INSERT OR REPLACE"
    elif or_ignore:
        verb = "INSERT OR IGNORE"
    else:
        verb = "INSERT"

    sql = "%s INTO %s (%s) VALUES(%s)" % (
        verb,
        table,
        ", ".join(column for column in values),
        ", ".join("?" for column in values),
    )

    logger.debug(
        "[SQL] %s Args=%s",
        sql, values.values(),
    )

    # dict ordering is consistent between the two joins above and this
    # values() call, so placeholders line up with their columns.
    txn.execute(sql, values.values())
    return txn.lastrowid
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2014-12-18 07:49:22 -07:00
|
|
|
def _simple_upsert(self, table, keyvalues, values):
    """UPDATE the row matching `keyvalues`, or INSERT it if absent.

    Args:
        table (str): The table to upsert into
        keyvalues (dict): The unique key columns and their new values
        values (dict): The nonunique columns and their new values
    Returns: A deferred
    """
    return self.runInteraction(
        "_simple_upsert",
        self._simple_upsert_txn,
        table,
        keyvalues,
        values,
    )
|
|
|
|
|
|
|
|
def _simple_upsert_txn(self, txn, table, keyvalues, values):
    """Transaction body for _simple_upsert: UPDATE first; if no row was
    touched, INSERT a row built from keyvalues + values.

    Note: the UPDATE-then-INSERT pattern is not atomic against concurrent
    writers outside this transaction's isolation guarantees.
    """
    # Try to update
    sql = "UPDATE %s SET %s WHERE %s" % (
        table,
        ", ".join("%s = ?" % (k,) for k in values),
        " AND ".join("%s = ?" % (k,) for k in keyvalues)
    )
    sqlargs = values.values() + keyvalues.values()
    logger.debug(
        "[SQL] %s Args=%s",
        sql, sqlargs,
    )

    txn.execute(sql, sqlargs)
    if txn.rowcount == 0:
        # We didn't update any rows so insert a new one
        allvalues = {}
        allvalues.update(keyvalues)
        allvalues.update(values)

        sql = "INSERT INTO %s (%s) VALUES (%s)" % (
            table,
            ", ".join(k for k in allvalues),
            ", ".join("?" for _ in allvalues)
        )
        # BUG FIX: this previously logged keyvalues.values(), which is not
        # what gets executed below; log the actual INSERT args instead.
        logger.debug(
            "[SQL] %s Args=%s",
            sql, allvalues.values(),
        )
        txn.execute(sql, allvalues.values())
|
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
def _simple_select_one(self, table, keyvalues, retcols,
                       allow_none=False):
    """Executes a SELECT query on the named table, which is expected to
    return a single row, returning the requested columns from it.

    Args:
        table : string giving the table name
        keyvalues : dict of column names and values to select the row with
        retcols : list of strings giving the names of the columns to return
        allow_none : If true, return None instead of failing if the SELECT
            statement returns no rows
    """
    # Implemented as a select-only call to the combined select/update
    # helper (no updatevalues supplied).
    return self._simple_selectupdate_one(
        table,
        keyvalues,
        retcols=retcols,
        allow_none=allow_none,
    )
|
|
|
|
|
|
|
|
def _simple_select_one_onecol(self, table, keyvalues, retcol,
                              allow_none=False):
    """Executes a SELECT query on the named table, which is expected to
    return a single row, returning a single column from it.

    Args:
        table : string giving the table name
        keyvalues : dict of column names and values to select the row with
        retcol : string giving the name of the column to return
        allow_none : if True, yield None rather than raising when no row
            matches
    """
    return self.runInteraction(
        "_simple_select_one_onecol",
        self._simple_select_one_onecol_txn,
        table,
        keyvalues,
        retcol,
        allow_none=allow_none,
    )
|
|
|
|
|
|
|
|
def _simple_select_one_onecol_txn(self, txn, table, keyvalues, retcol,
                                  allow_none=False):
    """Transaction body for _simple_select_one_onecol: returns the first
    matching value of `retcol`, None (if allow_none) or raises a 404
    StoreError when nothing matches."""
    rows = self._simple_select_onecol_txn(
        txn,
        table=table,
        keyvalues=keyvalues,
        retcol=retcol,
    )

    if not rows:
        if allow_none:
            return None
        raise StoreError(404, "No row found")

    # More than one match is silently truncated to the first row.
    return rows[0]
|
|
|
|
|
|
|
|
def _simple_select_onecol_txn(self, txn, table, keyvalues, retcol):
|
2014-11-24 03:50:28 -07:00
|
|
|
sql = (
|
|
|
|
"SELECT %(retcol)s FROM %(table)s WHERE %(where)s "
|
|
|
|
"ORDER BY rowid asc"
|
|
|
|
) % {
|
2014-10-28 10:42:35 -06:00
|
|
|
"retcol": retcol,
|
|
|
|
"table": table,
|
|
|
|
"where": " AND ".join("%s = ?" % k for k in keyvalues.keys()),
|
|
|
|
}
|
|
|
|
|
|
|
|
txn.execute(sql, keyvalues.values())
|
|
|
|
|
|
|
|
return [r[0] for r in txn.fetchall()]
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def _simple_select_onecol(self, table, keyvalues, retcol):
    """Executes a SELECT query on the named table, which returns a list
    comprising of the values of the named column from the selected rows.

    Args:
        table (str): table name
        keyvalues (dict): column names and values to select the rows with
        retcol (str): column whose value we wish to retrieve.

    Returns:
        Deferred: Results in a list
    """
    return self.runInteraction(
        "_simple_select_onecol",
        self._simple_select_onecol_txn,
        table,
        keyvalues,
        retcol,
    )
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def _simple_select_list(self, table, keyvalues, retcols):
    """Executes a SELECT query on the named table, which may return zero or
    more rows, returning the result as a list of dicts.

    Args:
        table : string giving the table name
        keyvalues : dict of column names and values to select the rows with,
            or None to not apply a WHERE clause.
        retcols : list of strings giving the names of the columns to return
    """
    return self.runInteraction(
        "_simple_select_list",
        self._simple_select_list_txn,
        table,
        keyvalues,
        retcols,
    )
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2014-11-06 08:10:55 -07:00
|
|
|
def _simple_select_list_txn(self, txn, table, keyvalues, retcols):
|
2014-08-12 08:10:52 -06:00
|
|
|
"""Executes a SELECT query on the named table, which may return zero or
|
|
|
|
more rows, returning the result as a list of dicts.
|
|
|
|
|
|
|
|
Args:
|
2014-11-06 08:10:55 -07:00
|
|
|
txn : Transaction object
|
2014-08-12 08:10:52 -06:00
|
|
|
table : string giving the table name
|
|
|
|
keyvalues : dict of column names and values to select the rows with
|
|
|
|
retcols : list of strings giving the names of the columns to return
|
|
|
|
"""
|
2015-03-02 03:16:24 -07:00
|
|
|
if keyvalues:
|
|
|
|
sql = "SELECT %s FROM %s WHERE %s ORDER BY rowid asc" % (
|
|
|
|
", ".join(retcols),
|
|
|
|
table,
|
|
|
|
" AND ".join("%s = ?" % (k, ) for k in keyvalues)
|
|
|
|
)
|
|
|
|
txn.execute(sql, keyvalues.values())
|
|
|
|
else:
|
|
|
|
sql = "SELECT %s FROM %s ORDER BY rowid asc" % (
|
|
|
|
", ".join(retcols),
|
|
|
|
table
|
|
|
|
)
|
|
|
|
txn.execute(sql)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2014-11-06 08:10:55 -07:00
|
|
|
return self.cursor_to_dict(txn)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def _simple_update_one(self, table, keyvalues, updatevalues,
                       retcols=None):
    """Executes an UPDATE query on the named table, setting new values for
    columns in a row matching the key values.

    Args:
        table : string giving the table name
        keyvalues : dict of column names and values to select the row with
        updatevalues : dict giving column names and values to update
        retcols : optional list of column names to return

    If present, retcols gives a list of column names on which to perform
    a SELECT statement *before* performing the UPDATE statement. The values
    of these will be returned in a dict.

    These are performed within the same transaction, allowing an atomic
    get-and-set. This can be used to implement compare-and-set by putting
    the update column in the 'keyvalues' dict as well.
    """
    return self._simple_selectupdate_one(
        table, keyvalues, updatevalues, retcols=retcols
    )
|
|
|
|
|
|
|
|
def _simple_selectupdate_one(self, table, keyvalues, updatevalues=None,
                             retcols=None, allow_none=False):
    """ Combined SELECT then UPDATE.

    If retcols is given, the matching row's retcols are read first and
    returned as a dict; if updatevalues is given, the row is then updated
    in the same transaction. Raises StoreError(404) when no row matches
    (unless allow_none and no update was requested) and StoreError(500)
    when more than one row matches.
    """
    # Both SQL strings are only built when needed; func below closes over
    # whichever ones exist.
    if retcols:
        select_sql = "SELECT %s FROM %s WHERE %s ORDER BY rowid asc" % (
            ", ".join(retcols),
            table,
            " AND ".join("%s = ?" % (k) for k in keyvalues)
        )

    if updatevalues:
        update_sql = "UPDATE %s SET %s WHERE %s" % (
            table,
            ", ".join("%s = ?" % (k,) for k in updatevalues),
            " AND ".join("%s = ?" % (k,) for k in keyvalues)
        )

    def func(txn):
        ret = None
        if retcols:
            txn.execute(select_sql, keyvalues.values())

            row = txn.fetchone()
            if not row:
                # Note: allow_none short-circuits the whole call here, so
                # no UPDATE is attempted either when the row is missing.
                if allow_none:
                    return None
                raise StoreError(404, "No row found")
            # NOTE(review): rowcount after a SELECT is driver-dependent
            # (often -1 per PEP 249); this multi-row check may be a no-op
            # on some drivers — confirm against the db driver in use.
            if txn.rowcount > 1:
                raise StoreError(500, "More than one row matched")

            ret = dict(zip(retcols, row))

        if updatevalues:
            txn.execute(
                update_sql,
                updatevalues.values() + keyvalues.values()
            )

            if txn.rowcount == 0:
                raise StoreError(404, "No row found")
            if txn.rowcount > 1:
                raise StoreError(500, "More than one row matched")

        return ret

    return self.runInteraction("_simple_selectupdate_one", func)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def _simple_delete_one(self, table, keyvalues):
    """Executes a DELETE query on the named table, expecting to delete a
    single row.

    Args:
        table : string giving the table name
        keyvalues : dict of column names and values to select the row with
    Raises:
        StoreError(404) if nothing was deleted, StoreError(500) if more
        than one row was deleted.
    """
    sql = "DELETE FROM %s WHERE %s" % (
        table,
        " AND ".join("%s = ?" % (column, ) for column in keyvalues)
    )

    def delete_one_txn(txn):
        txn.execute(sql, keyvalues.values())
        if txn.rowcount == 0:
            raise StoreError(404, "No row found")
        if txn.rowcount > 1:
            raise StoreError(500, "more than one row matched")

    return self.runInteraction("_simple_delete_one", delete_one_txn)
|
|
|
|
|
|
|
|
def _simple_delete(self, table, keyvalues):
|
|
|
|
"""Executes a DELETE query on the named table.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
table : string giving the table name
|
|
|
|
keyvalues : dict of column names and values to select the row with
|
|
|
|
"""
|
|
|
|
|
|
|
|
return self.runInteraction("_simple_delete", self._simple_delete_txn)
|
|
|
|
|
|
|
|
def _simple_delete_txn(self, txn, table, keyvalues):
|
|
|
|
sql = "DELETE FROM %s WHERE %s" % (
|
|
|
|
table,
|
|
|
|
" AND ".join("%s = ?" % (k, ) for k in keyvalues)
|
|
|
|
)
|
|
|
|
|
|
|
|
return txn.execute(sql, keyvalues.values())
|
2014-08-12 08:10:52 -06:00
|
|
|
|
|
|
|
def _simple_max_id(self, table):
|
|
|
|
"""Executes a SELECT query on the named table, expecting to return the
|
|
|
|
max value for the column "id".
|
|
|
|
|
|
|
|
Args:
|
|
|
|
table : string giving the table name
|
|
|
|
"""
|
|
|
|
sql = "SELECT MAX(id) AS id FROM %s" % table
|
|
|
|
|
|
|
|
def func(txn):
|
|
|
|
txn.execute(sql)
|
|
|
|
max_id = self.cursor_to_dict(txn)[0]["id"]
|
|
|
|
if max_id is None:
|
|
|
|
return 0
|
|
|
|
return max_id
|
|
|
|
|
2014-10-28 05:18:04 -06:00
|
|
|
return self.runInteraction("_simple_max_id", func)
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2015-01-06 06:03:23 -07:00
|
|
|
def _get_events(self, event_ids, check_redacted=True,
                get_prev_content=False):
    """Load several events by id in a single transaction; yields the list
    of events (ids that could not be loaded are dropped)."""
    return self.runInteraction(
        "_get_events",
        self._get_events_txn,
        event_ids,
        check_redacted=check_redacted,
        get_prev_content=get_prev_content,
    )
|
2014-09-05 19:23:36 -06:00
|
|
|
|
2015-01-06 06:03:23 -07:00
|
|
|
def _get_events_txn(self, txn, event_ids, check_redacted=True,
                    get_prev_content=False):
    """Transaction body for _get_events: fetch each id individually and
    drop any falsy results (unknown or filtered-out events)."""
    if not event_ids:
        return []

    fetched = (
        self._get_event_txn(
            txn, event_id,
            check_redacted=check_redacted,
            get_prev_content=get_prev_content,
        )
        for event_id in event_ids
    )

    return [event for event in fetched if event]
|
|
|
|
|
2014-12-11 08:56:01 -07:00
|
|
|
def _get_event_txn(self, txn, event_id, check_redacted=True,
                   get_prev_content=False, allow_rejected=False):
    """Load a single event by id, consulting the per-event cache first.

    Returns None when the event is unknown, or when it was rejected and
    allow_rejected is False. Results are cached per (check_redacted,
    get_prev_content, allow_rejected) flag combination.
    """
    # All timings below are in ms; update_counter both advances the
    # per-phase perf counter and feeds the getEvents_time metric.
    start_time = time.time() * 1000

    def update_counter(desc, last_time):
        curr_time = self._get_event_counters.update(desc, last_time)
        sql_getevents_timer.inc_by(curr_time - last_time, desc)
        return curr_time

    # One dict per event id inside the LruCache, keyed by flag tuple.
    cache = self._get_event_cache.setdefault(event_id, {})

    try:
        # Separate cache entries for each way to invoke _get_event_txn
        ret = cache[(check_redacted, get_prev_content, allow_rejected)]

        cache_counter.inc_hits("*getEvent*")
        # The finally block still runs before this returns, so the
        # "event_cache" phase is timed on hits as well as misses.
        return ret
    except KeyError:
        cache_counter.inc_misses("*getEvent*")
        pass
    finally:
        start_time = update_counter("event_cache", start_time)

    # Single-row lookup joining in any redaction and rejection info.
    sql = (
        "SELECT e.internal_metadata, e.json, r.event_id, rej.reason "
        "FROM event_json as e "
        "LEFT JOIN redactions as r ON e.event_id = r.redacts "
        "LEFT JOIN rejections as rej on rej.event_id = e.event_id "
        "WHERE e.event_id = ? "
        "LIMIT 1 "
    )

    txn.execute(sql, (event_id,))

    res = txn.fetchone()

    if not res:
        # Unknown event id; deliberately not cached.
        return None

    internal_metadata, js, redacted, rejected_reason = res

    start_time = update_counter("select_event", start_time)

    if allow_rejected or not rejected_reason:
        result = self._get_event_from_row_txn(
            txn, internal_metadata, js, redacted,
            check_redacted=check_redacted,
            get_prev_content=get_prev_content,
        )
        cache[(check_redacted, get_prev_content, allow_rejected)] = result
        return result
    else:
        # Rejected event and caller didn't opt in; also not cached.
        return None
|
2014-12-17 16:37:08 -07:00
|
|
|
|
|
|
|
def _get_event_from_row_txn(self, txn, internal_metadata, js, redacted,
                            check_redacted=True, get_prev_content=False):
    """Build a FrozenEvent from its stored JSON, optionally applying
    redaction pruning and fetching the previous state content.

    Args:
        txn: Database transaction, used for the follow-up event lookups.
        internal_metadata (str): JSON-encoded internal metadata dict.
        js (str): JSON-encoded event body.
        redacted: event_id of the redacting event, or a falsy value if
            the event has not been redacted.
        check_redacted (bool): If True and `redacted` is set, prune the
            event and attach the redaction event in `ev.unsigned`.
        get_prev_content (bool): If True and the event replaces state,
            attach the replaced event's content in `ev.unsigned`.

    Returns:
        FrozenEvent: The decoded (and possibly pruned) event.
    """
    # Millisecond timestamp; each phase below reports its elapsed time
    # to the perf counters via update_counter.
    start_time = time.time() * 1000

    def update_counter(desc, last_time):
        # Accumulate time spent since `last_time` under `desc` and
        # return the new timestamp for the next phase.
        curr_time = self._get_event_counters.update(desc, last_time)
        sql_getevents_timer.inc_by(curr_time - last_time, desc)
        return curr_time

    d = json.loads(js)
    start_time = update_counter("decode_json", start_time)

    internal_metadata = json.loads(internal_metadata)
    start_time = update_counter("decode_internal", start_time)

    ev = FrozenEvent(d, internal_metadata_dict=internal_metadata)
    start_time = update_counter("build_frozen_event", start_time)

    if check_redacted and redacted:
        # Replace the event content with its pruned (redacted) form and
        # record which event performed the redaction.
        ev = prune_event(ev)

        ev.unsigned["redacted_by"] = redacted
        # Get the redaction event.

        # check_redacted=False: we want the redaction event itself, not
        # a pruned version of it.
        because = self._get_event_txn(
            txn,
            redacted,
            check_redacted=False
        )

        if because:
            ev.unsigned["redacted_because"] = because
        start_time = update_counter("redact_event", start_time)

    if get_prev_content and "replaces_state" in ev.unsigned:
        # get_prev_content=False on the recursive lookup avoids chasing
        # the full chain of replaced state events.
        prev = self._get_event_txn(
            txn,
            ev.unsigned["replaces_state"],
            get_prev_content=False,
        )
        if prev:
            ev.unsigned["prev_content"] = prev.get_dict()["content"]
        start_time = update_counter("get_prev_content", start_time)

    return ev
|
2014-08-12 08:10:52 -06:00
|
|
|
|
2014-12-09 06:35:26 -07:00
|
|
|
def _parse_events(self, rows):
|
|
|
|
return self.runInteraction(
|
|
|
|
"_parse_events", self._parse_events_txn, rows
|
|
|
|
)
|
|
|
|
|
|
|
|
def _parse_events_txn(self, txn, rows):
|
|
|
|
event_ids = [r["event_id"] for r in rows]
|
|
|
|
|
|
|
|
return self._get_events_txn(txn, event_ids)
|
|
|
|
|
2014-09-24 08:27:59 -06:00
|
|
|
def _has_been_redacted_txn(self, txn, event):
|
|
|
|
sql = "SELECT event_id FROM redactions WHERE redacts = ?"
|
2014-09-23 08:28:32 -06:00
|
|
|
txn.execute(sql, (event.event_id,))
|
2014-09-24 06:29:20 -06:00
|
|
|
result = txn.fetchone()
|
|
|
|
return result[0] if result else None
|
2014-09-23 08:28:32 -06:00
|
|
|
|
|
|
|
|
2014-08-12 08:10:52 -06:00
|
|
|
class Table(object):
    """A base class used to store information about a particular table.

    Subclasses set `table_name`, `fields` and `EntryType`, and inherit
    helpers for building SQL statements and decoding query results.
    """

    # str: The name of the table
    table_name = None

    # list: The field names
    fields = None

    # Type: A tuple type used to decode the results
    EntryType = None

    _select_where_clause = "SELECT %s FROM %s WHERE %s"
    _select_clause = "SELECT %s FROM %s"
    _insert_clause = "INSERT OR REPLACE INTO %s (%s) VALUES (%s)"

    @classmethod
    def select_statement(cls, where_clause=None):
        """
        Args:
            where_clause (str): The WHERE clause to use.

        Returns:
            str: An SQL statement to select rows from the table with the
            given WHERE clause.
        """
        columns = ", ".join(cls.fields)
        if where_clause:
            return cls._select_where_clause % (
                columns,
                cls.table_name,
                where_clause,
            )
        return cls._select_clause % (columns, cls.table_name)

    @classmethod
    def insert_statement(cls):
        """Return an INSERT OR REPLACE statement with one "?"
        placeholder per field.
        """
        return cls._insert_clause % (
            cls.table_name,
            ", ".join(cls.fields),
            ", ".join("?" for _ in cls.fields),
        )

    @classmethod
    def decode_single_result(cls, results):
        """Given an iterable of tuples, return a single instance of
        `EntryType` or None if the iterable is empty.

        Args:
            results (list): The results list to convert to `EntryType`

        Returns:
            EntryType: An instance of `EntryType`
        """
        # Only the first row matters; bail out as soon as we have it.
        for row in results:
            return cls.EntryType(*row)
        return None

    @classmethod
    def decode_results(cls, results):
        """Given an iterable of tuples, return a list of `EntryType`.

        Args:
            results (list): The results list to convert to `EntryType`

        Returns:
            list: A list of `EntryType`
        """
        return [cls.EntryType(*row) for row in results]

    @classmethod
    def get_fields_string(cls, prefix=None):
        """Return the field names joined with ", ", each optionally
        qualified as "prefix.field".
        """
        if not prefix:
            return ", ".join(cls.fields)
        return ", ".join("%s.%s" % (prefix, field) for field in cls.fields)
|
|
|
|
|
|
|
|
|
|
|
|
class JoinHelper(object):
    """Used to help do joins on tables by looking at the tables' fields
    and creating a list of unique fields to use with SELECTs and a
    namedtuple to dump the results into.

    Attributes:
        tables (list): List of `Table` classes
        EntryType (type)
    """

    def __init__(self, *tables):
        self.tables = tables

        # Union of all tables' fields, de-duplicated while preserving
        # first-seen order.
        unique_fields = []
        for table in tables:
            for field in table.fields:
                if field not in unique_fields:
                    unique_fields.append(field)

        self.EntryType = namedtuple("JoinHelperEntry", unique_fields)

    def get_fields(self, **prefixes):
        """Get a string representing a list of fields for use in SELECT
        statements with the given prefixes applied to each.

        For example::

            JoinHelper(PdusTable, StateTable).get_fields(
                PdusTable="pdus",
                StateTable="state"
            )
        """
        qualified = []
        for field in self.EntryType._fields:
            # Attribute each field to the first table declaring it,
            # matching the de-duplication order used in __init__.
            for table in self.tables:
                if field in table.fields:
                    qualified.append(
                        "%s.%s" % (prefixes[table.__name__], field)
                    )
                    break

        return ", ".join(qualified)

    def decode_results(self, rows):
        """Decode each row in `rows` into an `EntryType` instance."""
        return [self.EntryType(*row) for row in rows]
|