# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

|
import os

import synapse.metrics

from lrucache import LruCache

# Global multiplier applied when sizing caches.  Tunable at deploy time via
# the SYNAPSE_CACHE_FACTOR environment variable; defaults to 0.1.
CACHE_SIZE_FACTOR = float(os.environ.get("SYNAPSE_CACHE_FACTOR", 0.1))

# Switch for expensive cache-consistency checking used elsewhere.
DEBUG_CACHES = False

# Metrics group under which all cache metrics in this module are reported.
metrics = synapse.metrics.get_metrics_for("synapse.util.caches")

# Registry of every cache created via register_cache(), keyed by cache name.
caches_by_name = {}

# cache_counter = metrics.register_cache(
#     "cache",
#     lambda: {(name,): len(caches_by_name[name]) for name in caches_by_name.keys()},
#     labels=["name"],
# )
def register_cache(name, cache):
    """Record *cache* in the module-wide registry and wire it into metrics.

    Args:
        name: human-readable identifier for the cache.
        cache: the cache object; must support len().

    Returns:
        The object returned by metrics.register_cache, which callers use to
        report hits and misses for this cache.
    """
    caches_by_name[name] = cache
    size_callback = lambda: len(cache)
    return metrics.register_cache("cache", size_callback, name)
# LRU cache backing intern_string(); capacity scales with CACHE_SIZE_FACTOR.
_string_cache_capacity = int(100000 * CACHE_SIZE_FACTOR)
_string_cache = LruCache(_string_cache_capacity)
# NOTE(review): "stirng" is a historical misspelling, kept because the name
# is referenced elsewhere in this module.
_stirng_cache_metrics = register_cache("string_cache", _string_cache)
# Event-dict keys that appear in virtually every event.
_EVENT_KEY_NAMES = (
    "auth_events",
    "content",
    "depth",
    "event_id",
    "hashes",
    "origin",
    "origin_server_ts",
    "prev_events",
    "room_id",
    "sender",
    "signatures",
    "state_key",
    "type",
    "unsigned",
    "user_id",
)

# Map each well-known key to itself, so that KNOWN_KEYS.get(key, key) yields
# a single shared string object for these keys instead of per-event copies.
KNOWN_KEYS = dict(zip(_EVENT_KEY_NAMES, _EVENT_KEY_NAMES))
def intern_string(string):
    """Deduplicate a (potentially unicode) string via the module LRU cache.

    Returns the previously cached equal string if one exists; otherwise the
    argument itself is cached and returned.
    """
    interned = _string_cache.setdefault(string, string)
    # NOTE(review): identity with the argument holds both on first insertion
    # and when the caller already holds the cached object, so the hit/miss
    # accounting below may not match intuition — confirm the intended metric.
    if interned is string:
        _stirng_cache_metrics.inc_hits()
    else:
        _stirng_cache_metrics.inc_misses()
    return interned
def intern_dict(dictionary):
    """Return a copy of *dictionary* with well-known keys and their values
    replaced by shared (interned) objects where possible.
    """
    result = {}
    for key, value in dictionary.items():
        result[KNOWN_KEYS.get(key, key)] = _intern_known_values(key, value)
    return result
def _intern_known_values(key, value):
|
2016-03-23 10:13:05 -06:00
|
|
|
intern_str_keys = ("event_id", "room_id")
|
|
|
|
intern_unicode_keys = ("sender", "user_id", "type", "state_key")
|
|
|
|
|
2016-03-23 10:34:59 -06:00
|
|
|
if key in intern_str_keys:
|
|
|
|
return intern(value.encode('ascii'))
|
2016-03-23 10:13:05 -06:00
|
|
|
|
2016-03-23 10:34:59 -06:00
|
|
|
if key in intern_unicode_keys:
|
|
|
|
return intern_string(value)
|
2016-03-23 10:13:05 -06:00
|
|
|
|
2016-03-23 10:34:59 -06:00
|
|
|
return value