# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
|
|
import logging
import random
import time
from typing import Callable, Dict, List

import attr

from twisted.internet.error import ConnectError
from twisted.names import client, dns
from twisted.names.error import DNSNameError, DomainError

from synapse.logging.context import make_deferred_yieldable

logger = logging.getLogger(__name__)


# Process-wide cache of SRV lookup results, keyed by the (bytes) service name.
# Shared by every SrvResolver constructed with the default `cache` argument.
SERVER_CACHE: Dict[bytes, List["Server"]] = {}


@attr.s(auto_attribs=True, slots=True, frozen=True)
class Server:
    """
    Our record of an individual server which can be tried to reach a destination.

    Attributes:
        host: target hostname
        port: target port
        priority: SRV record priority; records with lower values are tried
            first (see _sort_server_list)
        weight: SRV record weight, used to pick between servers of equal
            priority in proportion to their weights
        expires: when the cache should expire this record - in *seconds* since
            the epoch
    """

    host: bytes
    port: int
    priority: int = 0
    weight: int = 0
    expires: int = 0
|
2019-01-22 03:59:27 -07:00
|
|
|
|
|
|
|
|
2021-08-18 11:53:20 -06:00
|
|
|
def _sort_server_list(server_list: List[Server]) -> List[Server]:
|
2019-08-08 08:30:04 -06:00
|
|
|
"""Given a list of SRV records sort them into priority order and shuffle
|
|
|
|
each priority with the given weight.
|
|
|
|
"""
|
2021-08-18 11:53:20 -06:00
|
|
|
priority_map: Dict[int, List[Server]] = {}
|
2019-08-08 08:30:04 -06:00
|
|
|
|
|
|
|
for server in server_list:
|
|
|
|
priority_map.setdefault(server.priority, []).append(server)
|
|
|
|
|
|
|
|
results = []
|
|
|
|
for priority in sorted(priority_map):
|
2019-08-20 04:49:44 -06:00
|
|
|
servers = priority_map[priority]
|
2019-08-08 08:30:04 -06:00
|
|
|
|
2019-08-27 06:56:42 -06:00
|
|
|
# This algorithms roughly follows the algorithm described in RFC2782,
|
|
|
|
# changed to remove an off-by-one error.
|
2019-08-20 04:49:44 -06:00
|
|
|
#
|
2019-08-27 06:56:42 -06:00
|
|
|
# N.B. Weights can be zero, which means that they should be picked
|
|
|
|
# rarely.
|
2019-08-20 04:49:44 -06:00
|
|
|
|
|
|
|
total_weight = sum(s.weight for s in servers)
|
2019-08-27 06:56:42 -06:00
|
|
|
|
|
|
|
# Total weight can become zero if there are only zero weight servers
|
|
|
|
# left, which we handle by just shuffling and appending to the results.
|
|
|
|
while servers and total_weight:
|
|
|
|
target_weight = random.randint(1, total_weight)
|
2019-08-08 08:30:04 -06:00
|
|
|
|
|
|
|
for s in servers:
|
|
|
|
target_weight -= s.weight
|
|
|
|
|
|
|
|
if target_weight <= 0:
|
|
|
|
break
|
|
|
|
|
|
|
|
results.append(s)
|
|
|
|
servers.remove(s)
|
2019-08-20 04:49:44 -06:00
|
|
|
total_weight -= s.weight
|
2019-08-08 08:30:04 -06:00
|
|
|
|
2019-08-27 06:56:42 -06:00
|
|
|
if servers:
|
|
|
|
random.shuffle(servers)
|
|
|
|
results.extend(servers)
|
|
|
|
|
2019-08-08 08:30:04 -06:00
|
|
|
return results


class SrvResolver:
    """Interface to the dns client to do SRV lookups, with result caching.

    The default resolver in twisted.names doesn't do any caching (it has a CacheResolver,
    but the cache never gets populated), so we add our own caching layer here.

    Args:
        dns_client (twisted.internet.interfaces.IResolver): twisted resolver impl
        cache: cache object; maps service name (bytes) to the list of Servers
            most recently resolved for it
        get_time: clock implementation. Should return seconds since the epoch
    """

    def __init__(
        self,
        dns_client=client,
        cache: Dict[bytes, List[Server]] = SERVER_CACHE,
        get_time: Callable[[], float] = time.time,
    ):
        self._dns_client = dns_client
        self._cache = cache
        self._get_time = get_time

    async def resolve_service(self, service_name: bytes) -> List[Server]:
        """Look up a SRV record

        Args:
            service_name: record to look up

        Returns:
            a list of the SRV records, or an empty list if none found

        Raises:
            TypeError: if service_name is not a byte string
            ConnectError: if the domain explicitly advertises that the service
                is not available (a single SRV record targeting ".")
        """
        # Whole seconds since the epoch, compared against Server.expires.
        now = int(self._get_time())

        if not isinstance(service_name, bytes):
            raise TypeError("%r is not a byte string" % (service_name,))

        cache_entry = self._cache.get(service_name, None)
        if cache_entry:
            # Only serve from the cache if *every* record is still fresh; a
            # partially-expired entry triggers a full re-lookup instead.
            if all(s.expires > now for s in cache_entry):
                servers = list(cache_entry)
                return _sort_server_list(servers)

        try:
            answers, _, _ = await make_deferred_yieldable(
                self._dns_client.lookupService(service_name)
            )
        except DNSNameError:
            # TODO: cache this. We can get the SOA out of the exception, and use
            # the negative-TTL value.
            return []
        except DomainError as e:
            # We failed to resolve the name (other than a NameError).
            # Fall back to a (possibly stale) cached result if we have one,
            # else re-raise.
            cache_entry = self._cache.get(service_name, None)
            if cache_entry:
                logger.warning(
                    "Failed to resolve %r, falling back to cache. %r", service_name, e
                )
                return list(cache_entry)
            else:
                raise e

        # Per RFC 2782, a single SRV record whose target is "." means the
        # service is emphatically not available at this domain.
        if (
            len(answers) == 1
            and answers[0].type == dns.SRV
            and answers[0].payload
            and answers[0].payload.target == dns.Name(b".")
        ):
            raise ConnectError(f"Service {service_name!r} unavailable")

        servers = []

        for answer in answers:
            # Skip non-SRV answers (e.g. CNAMEs mixed into the response).
            if answer.type != dns.SRV or not answer.payload:
                continue

            payload = answer.payload

            servers.append(
                Server(
                    host=payload.target.name,
                    port=payload.port,
                    priority=payload.priority,
                    weight=payload.weight,
                    expires=now + answer.ttl,
                )
            )

        # Cache a copy so that callers holding the returned list cannot
        # mutate the cached entry.
        self._cache[service_name] = list(servers)
        return _sort_server_list(servers)