# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2019 - 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
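
"""Handler code for authentication: user-interactive auth (UIA) sessions,
login validation, and access/refresh/login token management."""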

import logging
import time
import unicodedata
import urllib.parse
from binascii import crc32
from http import HTTPStatus
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    List,
    Mapping,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
)

import attr
import bcrypt
import unpaddedbase64
from prometheus_client import Counter

from twisted.internet.defer import CancelledError
from twisted.web.server import Request

from synapse.api.constants import LoginType
from synapse.api.errors import (
    AuthError,
    Codes,
    InteractiveAuthIncompleteError,
    LoginError,
    NotFoundError,
    StoreError,
    SynapseError,
)
from synapse.api.ratelimiting import Ratelimiter
from synapse.handlers.ui_auth import (
    INTERACTIVE_AUTH_CHECKERS,
    UIAuthSessionDataConstants,
)
from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
from synapse.http import get_request_user_agent
from synapse.http.server import finish_request, respond_with_html
from synapse.http.site import SynapseRequest
from synapse.logging.context import defer_to_thread
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.databases.main.registration import (
    LoginTokenExpired,
    LoginTokenLookupResult,
    LoginTokenReused,
)
from synapse.types import JsonDict, Requester, UserID
from synapse.util import stringutils as stringutils
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable
from synapse.util.msisdn import phone_number_to_msisdn
from synapse.util.stringutils import base62_encode
from synapse.util.threepids import canonicalise_email

if TYPE_CHECKING:
    from synapse.module_api import ModuleApi
    from synapse.rest.client.login import LoginResponse
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

INVALID_USERNAME_OR_PASSWORD = "Invalid username or password"

invalid_login_token_counter = Counter(
    "synapse_user_login_invalid_login_tokens",
    "Counts the number of rejected m.login.token on /login",
    ["reason"],
)


def convert_client_dict_legacy_fields_to_identifier(
    submission: JsonDict,
) -> Dict[str, str]:
    """
    Convert a legacy-formatted login submission to an identifier dict.

    Legacy login submissions (used in both login and user-interactive authentication)
    provide user-identifying information at the top-level instead.

    These are now deprecated and replaced with identifiers:
    https://matrix.org/docs/spec/client_server/r0.6.1#identifier-types

    Args:
        submission: The client dict to convert

    Returns:
        The matching identifier dict

    Raises:
        SynapseError: If the format of the client dict is invalid
    """
    identifier = submission.get("identifier", {})

    # Generate an m.id.user identifier if "user" parameter is present
    user = submission.get("user")
    if user:
        identifier = {"type": "m.id.user", "user": user}

    # Generate an m.id.thirdparty identifier if "medium" and "address" parameters are present
    medium = submission.get("medium")
    address = submission.get("address")
    if medium and address:
        identifier = {
            "type": "m.id.thirdparty",
            "medium": medium,
            "address": address,
        }

    # We've converted valid, legacy login submissions to an identifier. If the
    # submission still doesn't have an identifier, it's invalid
    if not identifier:
        raise SynapseError(400, "Invalid login submission", Codes.INVALID_PARAM)

    # Ensure the identifier has a type
    if "type" not in identifier:
        raise SynapseError(
            400,
            "'identifier' dict has no key 'type'",
            errcode=Codes.MISSING_PARAM,
        )

    return identifier


def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
    """
    Convert a phone login identifier type to a generic threepid identifier.

    Args:
        identifier: Login identifier dict of type 'm.id.phone'

    Returns:
        An equivalent m.id.thirdparty identifier dict
    """
    if "country" not in identifier or (
        # The specification requires a "phone" field, while Synapse used to require a "number"
        # field. Accept both for backwards compatibility.
        "phone" not in identifier
        and "number" not in identifier
    ):
        raise SynapseError(
            400, "Invalid phone-type identifier", errcode=Codes.INVALID_PARAM
        )

    # Accept both "phone" and "number" as valid keys in m.id.phone
    phone_number = identifier.get("phone", identifier["number"])

    # Convert user-provided phone number to a consistent representation
    msisdn = phone_number_to_msisdn(identifier["country"], phone_number)

    return {
        "type": "m.id.thirdparty",
        "medium": "msisdn",
        "address": msisdn,
    }


@attr.s(slots=True, auto_attribs=True)
class SsoLoginExtraAttributes:
    """Data we track about SAML2 sessions"""

    # time the session was created, in milliseconds
    creation_time: int
    extra_attributes: JsonDict


class AuthHandler:
    SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000

    def __init__(self, hs: "HomeServer"):
        self.store = hs.get_datastores().main
        self.auth = hs.get_auth()
        self.auth_blocking = hs.get_auth_blocking()
        self.clock = hs.get_clock()
        self.checkers: Dict[str, UserInteractiveAuthChecker] = {}
        for auth_checker_class in INTERACTIVE_AUTH_CHECKERS:
            inst = auth_checker_class(hs)
            if inst.is_enabled():
                self.checkers[inst.AUTH_TYPE] = inst

        self.bcrypt_rounds = hs.config.registration.bcrypt_rounds

        self.password_auth_provider = hs.get_password_auth_provider()

        self.hs = hs  # FIXME better possibility to access registrationHandler later?
        self.macaroon_gen = hs.get_macaroon_generator()
        self._password_enabled_for_login = hs.config.auth.password_enabled_for_login
        self._password_enabled_for_reauth = hs.config.auth.password_enabled_for_reauth
        self._password_localdb_enabled = hs.config.auth.password_localdb_enabled
        self._third_party_rules = hs.get_module_api_callbacks().third_party_event_rules
        self._account_validity_handler = hs.get_account_validity_handler()

        # Ratelimiter for failed auth during UIA. Uses same ratelimit config
        # as per `rc_login.failed_attempts`.
        self._failed_uia_attempts_ratelimiter = Ratelimiter(
            store=self.store,
            clock=self.clock,
            cfg=self.hs.config.ratelimiting.rc_login_failed_attempts,
        )

        # The number of seconds to keep a UI auth session active.
        self._ui_auth_session_timeout = hs.config.auth.ui_auth_session_timeout

        # Ratelimiter for failed /login attempts
        self._failed_login_attempts_ratelimiter = Ratelimiter(
            store=self.store,
            clock=hs.get_clock(),
            cfg=self.hs.config.ratelimiting.rc_login_failed_attempts,
        )

        self._clock = self.hs.get_clock()

        # Expire old UI auth sessions after a period of time.
        if hs.config.worker.run_background_tasks:
            self._clock.looping_call(
                run_as_background_process,
                5 * 60 * 1000,
                "expire_old_sessions",
                self._expire_old_sessions,
            )

        # Load the SSO HTML templates.

        # The following template is shown to the user during a client login via SSO,
        # after the SSO completes and before redirecting them back to their client.
        # It notifies the user they are about to give access to their matrix account
        # to the client.
        self._sso_redirect_confirm_template = (
            hs.config.sso.sso_redirect_confirm_template
        )

        # The following template is shown during user interactive authentication
        # in the fallback auth scenario. It notifies the user that they are
        # authenticating for an operation to occur on their account.
        self._sso_auth_confirm_template = hs.config.sso.sso_auth_confirm_template

        # The following template is shown during the SSO authentication process if
        # the account is deactivated.
        self._sso_account_deactivated_template = (
            hs.config.sso.sso_account_deactivated_template
        )

        self._server_name = hs.config.server.server_name

        # cast to tuple for use with str.startswith
        self._whitelisted_sso_clients = tuple(hs.config.sso.sso_client_whitelist)

        # A mapping of user ID to extra attributes to include in the login
        # response.
        self._extra_attributes: Dict[str, SsoLoginExtraAttributes] = {}

        self.msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled

    async def validate_user_via_ui_auth(
        self,
        requester: Requester,
        request: SynapseRequest,
        request_body: Dict[str, Any],
        description: str,
        can_skip_ui_auth: bool = False,
    ) -> Tuple[dict, Optional[str]]:
        """
        Checks that the user is who they claim to be, via a UI auth.

        This is used for things like device deletion and password reset where
        the user already has a valid access token, but we want to double-check
        that it isn't stolen by re-authenticating them.

        Args:
            requester: The user making the request, according to the access token.

            request: The request sent by the client.

            request_body: The body of the request sent by the client

            description: A human readable string to be displayed to the user that
                describes the operation happening on their account.

            can_skip_ui_auth: True if the UI auth session timeout applies to this
                action. Should be set to False for any "dangerous"
                actions (e.g. deactivating an account).

        Returns:
            A tuple of (params, session_id).

                'params' contains the parameters for this request (which may
                have been given only in a previous call).

                'session_id' is the ID of this session, either passed in by the
                client or assigned by this call. This is None if UI auth was
                skipped (by re-using a previous validation).

        Raises:
            InteractiveAuthIncompleteError if the client has not yet completed
                any of the permitted login flows

            AuthError if the client has completed a login flow, and it gives
                a different user to `requester`

            LimitExceededError if the ratelimiter's failed request count for this
                user is too high to proceed
        """
        if self.msc3861_oauth_delegation_enabled:
            raise SynapseError(
                HTTPStatus.INTERNAL_SERVER_ERROR, "UIA shouldn't be used with MSC3861"
            )

        if not requester.access_token_id:
            raise ValueError("Cannot validate a user without an access token")
        if can_skip_ui_auth and self._ui_auth_session_timeout:
            last_validated = await self.store.get_access_token_last_validated(
                requester.access_token_id
            )
            if self.clock.time_msec() - last_validated < self._ui_auth_session_timeout:
                # Return the input parameters, minus the auth key, which matches
                # the logic in check_ui_auth.
                request_body.pop("auth", None)
                return request_body, None

        requester_user_id = requester.user.to_string()

        # Check if we should be ratelimited due to too many previous failed attempts
        await self._failed_uia_attempts_ratelimiter.ratelimit(requester, update=False)

        # build a list of supported flows
        supported_ui_auth_types = await self._get_available_ui_auth_types(
            requester.user
        )
        flows = [[login_type] for login_type in supported_ui_auth_types]

        def get_new_session_data() -> JsonDict:
            return {UIAuthSessionDataConstants.REQUEST_USER_ID: requester_user_id}

        try:
            result, params, session_id = await self.check_ui_auth(
                flows,
                request,
                request_body,
                description,
                get_new_session_data,
            )
        except LoginError:
            # Update the ratelimiter to say we failed (`can_do_action` doesn't raise).
            await self._failed_uia_attempts_ratelimiter.can_do_action(
                requester,
            )
            raise

        # find the completed login type
        for login_type in supported_ui_auth_types:
            if login_type not in result:
                continue

            validated_user_id = result[login_type]
            break
        else:
            # this can't happen
            raise Exception("check_auth returned True but no successful login type")

        # check that the UI auth matched the access token
        if validated_user_id != requester_user_id:
            raise AuthError(403, "Invalid auth")

        # Note that the access token has been validated.
        await self.store.update_access_token_last_validated(requester.access_token_id)

        return params, session_id

    async def _get_available_ui_auth_types(self, user: UserID) -> Iterable[str]:
        """Get a list of the user-interactive authentication types this user can use."""

        ui_auth_types = set()

        # if the HS supports password auth, and the user has a non-null password, we
        # support password auth
        if self._password_localdb_enabled and self._password_enabled_for_reauth:
            lookupres = await self._find_user_id_and_pwd_hash(user.to_string())
            if lookupres:
                _, password_hash = lookupres
                if password_hash:
                    ui_auth_types.add(LoginType.PASSWORD)

        # also allow auth from password providers
        for t in self.password_auth_provider.get_supported_login_types().keys():
            if t == LoginType.PASSWORD and not self._password_enabled_for_reauth:
                continue
            ui_auth_types.add(t)

        # if sso is enabled, allow the user to log in via SSO iff they have a mapping
        # from sso to mxid.
        if await self.hs.get_sso_handler().get_identity_providers_for_user(
            user.to_string()
        ):
            ui_auth_types.add(LoginType.SSO)

        return ui_auth_types

    def get_enabled_auth_types(self) -> Iterable[str]:
        """Return the enabled user-interactive authentication types

        Returns the UI-Auth types which are supported by the homeserver's current
        config.
        """
        return self.checkers.keys()

    async def check_ui_auth(
        self,
        flows: List[List[str]],
        request: SynapseRequest,
        clientdict: Dict[str, Any],
        description: str,
        get_new_session_data: Optional[Callable[[], JsonDict]] = None,
    ) -> Tuple[dict, dict, str]:
        """
        Takes a dictionary sent by the client in the login / registration
        protocol and handles the User-Interactive Auth flow.

        If no auth flows have been completed successfully, raises an
        InteractiveAuthIncompleteError. To handle this, you can use
        synapse.rest.client._base.interactive_auth_handler as a
        decorator.

        Args:
            flows: A list of login flows. Each flow is an ordered list of
                   strings representing auth-types. At least one full
                   flow must be completed in order for auth to be successful.

            request: The request sent by the client.

            clientdict: The dictionary from the client root level, not the
                        'auth' key: this method prompts for auth if none is sent.

            description: A human readable string to be displayed to the user that
                         describes the operation happening on their account.

            get_new_session_data:
                an optional callback which will be called when starting a new session.
                It should return data to be stored as part of the session.

                The keys of the returned data should be entries in
                UIAuthSessionDataConstants.

        Returns:
            A tuple of (creds, params, session_id).

                'creds' contains the authenticated credentials of each stage.

                'params' contains the parameters for this request (which may
                have been given only in a previous call).

                'session_id' is the ID of this session, either passed in by the
                client or assigned by this call

        Raises:
            InteractiveAuthIncompleteError if the client has not yet completed
                all the stages in any of the permitted flows.
        """

        sid: Optional[str] = None
        authdict = clientdict.pop("auth", {})
        if "session" in authdict:
            sid = authdict["session"]

        # Convert the URI and method to strings.
        uri = request.uri.decode("utf-8")
        method = request.method.decode("utf-8")

        # If there's no session ID, create a new session.
        if not sid:
            new_session_data = get_new_session_data() if get_new_session_data else {}

            session = await self.store.create_ui_auth_session(
                clientdict, uri, method, description
            )

            for k, v in new_session_data.items():
                await self.set_session_data(session.session_id, k, v)

        else:
            try:
                session = await self.store.get_ui_auth_session(sid)
            except StoreError:
                raise SynapseError(400, "Unknown session ID: %s" % (sid,))

            # If the client provides parameters, update what is persisted,
            # otherwise use whatever was last provided.
            #
            # This was designed to allow the client to omit the parameters
            # and just supply the session in subsequent calls so it split
            # auth between devices by just sharing the session, (eg. so you
            # could continue registration from your phone having clicked the
            # email auth link on there). It's probably too open to abuse
            # because it lets unauthenticated clients store arbitrary objects
            # on a homeserver.
            #
            # Revisit: Assuming the REST APIs do sensible validation, the data
            # isn't arbitrary.
            #
            # Note that the registration endpoint explicitly removes the
            # "initial_device_display_name" parameter if it is provided
            # without a "password" parameter. See the changes to
            # synapse.rest.client.register.RegisterRestServlet.on_POST
            # in commit 544722bad23fc31056b9240189c3cbbbf0ffd3f9.
            if not clientdict:
                clientdict = session.clientdict

            # Ensure that the queried operation does not vary between stages of
            # the UI authentication session. This is done by generating a stable
            # comparator and storing it during the initial query. Subsequent
            # queries ensure that this comparator has not changed.
            #
            # The comparator is based on the requested URI and HTTP method. The
            # client dict (minus the auth dict) should also be checked, but some
            # clients are not spec compliant, just warn for now if the client
            # dict changes.
            if (session.uri, session.method) != (uri, method):
                raise SynapseError(
                    403,
                    "Requested operation has changed during the UI authentication session.",
                )

            if session.clientdict != clientdict:
                logger.warning(
                    "Requested operation has changed during the UI "
                    "authentication session. A future version of Synapse "
                    "will remove this capability."
                )

            # For backwards compatibility, changes to the client dict are
            # persisted as clients modify them throughout their user interactive
            # authentication flow.
            await self.store.set_ui_auth_clientdict(sid, clientdict)

        user_agent = get_request_user_agent(request)
        clientip = request.getClientAddress().host

        await self.store.add_user_agent_ip_to_ui_auth_session(
            session.session_id, user_agent, clientip
        )

        if not authdict:
            raise InteractiveAuthIncompleteError(
                session.session_id, self._auth_dict_for_flows(flows, session.session_id)
            )

        # check auth type currently being presented
        errordict: Dict[str, Any] = {}
        if "type" in authdict:
            login_type: str = authdict["type"]
            try:
                result = await self._check_auth_dict(authdict, clientip)
                if result:
                    await self.store.mark_ui_auth_stage_complete(
                        session.session_id, login_type, result
                    )
            except LoginError as e:
                # this step failed. Merge the error dict into the response
                # so that the client can have another go.
                errordict = e.error_dict(self.hs.config)

        creds = await self.store.get_completed_ui_auth_stages(session.session_id)
        for f in flows:
            # If all the required credentials have been supplied, the user has
            # successfully completed the UI auth process!
            if len(set(f) - set(creds)) == 0:
                # it's very useful to know what args are stored, but this can
                # include the password in the case of registering, so only log
                # the keys (confusingly, clientdict may contain a password
                # param, creds is just what the user authed as for UI auth
                # and is not sensitive).
                logger.info(
                    "Auth completed with creds: %r. Client dict has keys: %r",
                    creds,
                    list(clientdict),
                )

                return creds, clientdict, session.session_id

        ret = self._auth_dict_for_flows(flows, session.session_id)
        ret["completed"] = list(creds)
        ret.update(errordict)
        raise InteractiveAuthIncompleteError(session.session_id, ret)

    async def add_oob_auth(
        self, stagetype: str, authdict: Dict[str, Any], clientip: str
    ) -> None:
        """
        Adds the result of out-of-band authentication into an existing auth
        session. Currently used for adding the result of fallback auth.

        Raises:
            LoginError if the stagetype is unknown or the session is missing.
            LoginError is raised by check_auth if authentication fails.
        """
        if stagetype not in self.checkers:
            raise LoginError(
                400, f"Unknown UIA stage type: {stagetype}", Codes.INVALID_PARAM
            )
        if "session" not in authdict:
            raise LoginError(400, "Missing session ID", Codes.MISSING_PARAM)

        # If authentication fails a LoginError is raised. Otherwise, store
        # the successful result.
        result = await self.checkers[stagetype].check_auth(authdict, clientip)
        await self.store.mark_ui_auth_stage_complete(
            authdict["session"], stagetype, result
        )

    def get_session_id(self, clientdict: Dict[str, Any]) -> Optional[str]:
        """
        Gets the session ID for a client given the client dictionary

        Args:
            clientdict: The dictionary sent by the client in the request

        Returns:
            The string session ID the client sent. If the client did
                not send a session ID, returns None.
        """
        sid = None
        if clientdict and "auth" in clientdict:
            authdict = clientdict["auth"]
            if "session" in authdict:
                sid = authdict["session"]
        return sid

    async def set_session_data(self, session_id: str, key: str, value: Any) -> None:
        """
        Store a key-value pair into the session's data associated with this
        request. This data is stored server-side and cannot be modified by
        the client.

        Args:
            session_id: The ID of this session as returned from check_auth
            key: The key to store the data under. An entry from
                UIAuthSessionDataConstants.
            value: The data to store
        """
        try:
            await self.store.set_ui_auth_session_data(session_id, key, value)
        except StoreError:
            raise SynapseError(400, "Unknown session ID: %s" % (session_id,))

    async def get_session_data(
        self, session_id: str, key: str, default: Optional[Any] = None
    ) -> Any:
        """
        Retrieve data stored with set_session_data

        Args:
            session_id: The ID of this session as returned from check_auth
            key: The key the data was stored under. An entry from
                UIAuthSessionDataConstants.
            default: Value to return if the key has not been set
        """
        try:
            return await self.store.get_ui_auth_session_data(session_id, key, default)
        except StoreError:
            raise SynapseError(400, "Unknown session ID: %s" % (session_id,))

    async def _expire_old_sessions(self) -> None:
        """
        Invalidate any user interactive authentication sessions that have expired.
        """
        now = self._clock.time_msec()
        expiration_time = now - self.SESSION_EXPIRE_MS
        await self.store.delete_old_ui_auth_sessions(expiration_time)

    async def _check_auth_dict(
        self, authdict: Dict[str, Any], clientip: str
    ) -> Union[Dict[str, Any], str]:
        """Attempt to validate the auth dict provided by a client

        Args:
            authdict: auth dict provided by the client
            clientip: IP address of the client

        Returns:
            Result of the stage verification.

        Raises:
            StoreError if there was a problem accessing the database
            SynapseError if there was a problem with the request
            LoginError if there was an authentication problem.
        """
        login_type = authdict["type"]
        checker = self.checkers.get(login_type)
        if checker is not None:
            res = await checker.check_auth(authdict, clientip=clientip)
            return res

        # fall back to the v1 login flow
        canonical_id, _ = await self.validate_login(authdict, is_reauth=True)
        return canonical_id

    def _get_params_recaptcha(self) -> dict:
        return {"public_key": self.hs.config.captcha.recaptcha_public_key}

    def _get_params_terms(self) -> dict:
        return {
            "policies": {
                "privacy_policy": {
                    "version": self.hs.config.consent.user_consent_version,
                    "en": {
                        "name": self.hs.config.consent.user_consent_policy_name,
                        "url": "%s_matrix/consent?v=%s"
                        % (
                            self.hs.config.server.public_baseurl,
                            self.hs.config.consent.user_consent_version,
                        ),
                    },
                }
            }
        }

    def _auth_dict_for_flows(
        self,
        flows: List[List[str]],
        session_id: str,
    ) -> Dict[str, Any]:
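        # Note: every flow is currently considered "public" and is returned to
        # the client as-is.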
        public_flows = []
        for f in flows:
            public_flows.append(f)

        get_params = {
            LoginType.RECAPTCHA: self._get_params_recaptcha,
            LoginType.TERMS: self._get_params_terms,
        }

        params: Dict[str, Any] = {}

        for f in public_flows:
            for stage in f:
                if stage in get_params and stage not in params:
                    params[stage] = get_params[stage]()

        return {
            "session": session_id,
            "flows": [{"stages": f} for f in public_flows],
            "params": params,
        }

    async def refresh_token(
        self,
        refresh_token: str,
        access_token_valid_until_ms: Optional[int],
        refresh_token_valid_until_ms: Optional[int],
    ) -> Tuple[str, str, Optional[int]]:
        """
        Consumes a refresh token and generates both a new access token and a new
        refresh token from it.

        The consumed refresh token is considered invalid after the first use of the
        new access token or the new refresh token.

        The lifetime of both the access token and refresh token will be capped so that
        they do not exceed the session's ultimate expiry time, if applicable.

        Args:
            refresh_token: The token to consume.
            access_token_valid_until_ms: The expiration timestamp of the new access token.
                None if the access token does not expire.
            refresh_token_valid_until_ms: The expiration timestamp of the new refresh token.
                None if the refresh token does not expire.

        Returns:
            A tuple containing:
              - the new access token
              - the new refresh token
              - the actual expiry time of the access token, which may be earlier than
                `access_token_valid_until_ms`.
        """

        # Verify the token signature first before looking up the token
        if not self._verify_refresh_token(refresh_token):
            raise SynapseError(
                HTTPStatus.UNAUTHORIZED, "invalid refresh token", Codes.UNKNOWN_TOKEN
            )

        existing_token = await self.store.lookup_refresh_token(refresh_token)
        if existing_token is None:
            raise SynapseError(
                HTTPStatus.UNAUTHORIZED,
                "refresh token does not exist",
                Codes.UNKNOWN_TOKEN,
            )

        if (
            existing_token.has_next_access_token_been_used
            or existing_token.has_next_refresh_token_been_refreshed
        ):
            raise SynapseError(
                HTTPStatus.FORBIDDEN,
                "refresh token isn't valid anymore",
                Codes.FORBIDDEN,
            )

        now_ms = self._clock.time_msec()

        if existing_token.expiry_ts is not None and existing_token.expiry_ts < now_ms:
            raise SynapseError(
                HTTPStatus.FORBIDDEN,
                "The supplied refresh token has expired",
                Codes.FORBIDDEN,
            )

        if existing_token.ultimate_session_expiry_ts is not None:
            # This session has a bounded lifetime, even across refreshes.

            if access_token_valid_until_ms is not None:
                access_token_valid_until_ms = min(
                    access_token_valid_until_ms,
                    existing_token.ultimate_session_expiry_ts,
                )
            else:
                access_token_valid_until_ms = existing_token.ultimate_session_expiry_ts

            if refresh_token_valid_until_ms is not None:
                refresh_token_valid_until_ms = min(
                    refresh_token_valid_until_ms,
                    existing_token.ultimate_session_expiry_ts,
                )
            else:
                refresh_token_valid_until_ms = existing_token.ultimate_session_expiry_ts
            if existing_token.ultimate_session_expiry_ts < now_ms:
                raise SynapseError(
                    HTTPStatus.FORBIDDEN,
                    "The session has expired and can no longer be refreshed",
                    Codes.FORBIDDEN,
                )
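
        # All checks have passed: rotate the tokens by minting a new refresh
        # token and a new access token, then mark the old refresh token as
        # replaced.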
        (
            new_refresh_token,
            new_refresh_token_id,
        ) = await self.create_refresh_token_for_user_id(
            user_id=existing_token.user_id,
            device_id=existing_token.device_id,
            expiry_ts=refresh_token_valid_until_ms,
            ultimate_session_expiry_ts=existing_token.ultimate_session_expiry_ts,
        )
        access_token = await self.create_access_token_for_user_id(
            user_id=existing_token.user_id,
            device_id=existing_token.device_id,
            valid_until_ms=access_token_valid_until_ms,
            refresh_token_id=new_refresh_token_id,
        )
        await self.store.replace_refresh_token(
            existing_token.token_id, new_refresh_token_id
        )
        return access_token, new_refresh_token, access_token_valid_until_ms

    def _verify_refresh_token(self, token: str) -> bool:
        """
        Verifies the shape of a refresh token.

        Args:
            token: The refresh token to verify

        Returns:
            Whether the token has the right shape
        """
        parts = token.split("_", maxsplit=4)
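        # A well-formed refresh token has four underscore-separated parts:
        # the "syr" prefix, a localpart, a random component, and a CRC checksum.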
        if len(parts) != 4:
            return False

        type, localpart, rand, crc = parts

        # Refresh tokens are prefixed by "syr_", let's check that
        if type != "syr":
            return False

        # Check the CRC
        base = f"{type}_{localpart}_{rand}"
        expected_crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
        if crc != expected_crc:
            return False

        return True

    async def create_login_token_for_user_id(
        self,
        user_id: str,
        duration_ms: int = (2 * 60 * 1000),
        auth_provider_id: Optional[str] = None,
        auth_provider_session_id: Optional[str] = None,
    ) -> str:
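        """
        Creates a short-lived login token for the given user and stores it.

        Args:
            user_id: canonical user ID
            duration_ms: how long (in milliseconds) the token remains valid
            auth_provider_id: optional ID of the SSO identity provider that
                authenticated the user, recorded alongside the token
            auth_provider_session_id: optional session ID from that provider

        Returns:
            The newly created login token
        """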
        login_token = self.generate_login_token()
        now = self._clock.time_msec()
        expiry_ts = now + duration_ms
        await self.store.add_login_token_to_user(
            user_id=user_id,
            token=login_token,
            expiry_ts=expiry_ts,
            auth_provider_id=auth_provider_id,
            auth_provider_session_id=auth_provider_session_id,
        )
        return login_token

    async def create_refresh_token_for_user_id(
        self,
        user_id: str,
        device_id: str,
        expiry_ts: Optional[int],
        ultimate_session_expiry_ts: Optional[int],
    ) -> Tuple[str, int]:
        """
        Creates a new refresh token for the user with the given user ID.

        Args:
            user_id: canonical user ID
            device_id: the device ID to associate with the token.
            expiry_ts (milliseconds since the epoch): Time after which the
                refresh token cannot be used.
                If None, the refresh token never expires until it has been used.
            ultimate_session_expiry_ts (milliseconds since the epoch):
                Time at which the session will end and can not be extended any
                further.
                If None, the session can be refreshed indefinitely.

        Returns:
            The newly created refresh token and its ID in the database
        """
        refresh_token = self.generate_refresh_token(UserID.from_string(user_id))
        refresh_token_id = await self.store.add_refresh_token_to_user(
            user_id=user_id,
            token=refresh_token,
            device_id=device_id,
            expiry_ts=expiry_ts,
            ultimate_session_expiry_ts=ultimate_session_expiry_ts,
        )
        return refresh_token, refresh_token_id

    async def create_access_token_for_user_id(
        self,
        user_id: str,
        device_id: Optional[str],
        valid_until_ms: Optional[int],
        puppets_user_id: Optional[str] = None,
        is_appservice_ghost: bool = False,
        refresh_token_id: Optional[int] = None,
    ) -> str:
        """
        Creates a new access token for the user with the given user ID.

        The user is assumed to have been authenticated by some other
        mechanism (e.g. CAS), and the user_id converted to the canonical case.

        The device will be recorded in the table if it is not there already.

        Args:
            user_id: canonical User ID
            device_id: the device ID to associate with the tokens.
                None to leave the tokens unassociated with a device (deprecated:
                we should always have a device ID)
            valid_until_ms: when the token is valid until. None for
                no expiry.
            is_appservice_ghost: Whether the user is an application service ghost user
            refresh_token_id: the refresh token ID that will be associated with
                this access token.

        Returns:
            The access token for the user's session.

        Raises:
            StoreError if there was a problem storing the token.
        """
        fmt_expiry = ""
        if valid_until_ms is not None:
            fmt_expiry = time.strftime(
                " until %Y-%m-%d %H:%M:%S", time.localtime(valid_until_ms / 1000.0)
            )

        if puppets_user_id:
            logger.info(
                "Logging in user %s as %s%s", user_id, puppets_user_id, fmt_expiry
            )
            target_user_id_obj = UserID.from_string(puppets_user_id)
        else:
            logger.info(
                "Logging in user %s on device %s%s", user_id, device_id, fmt_expiry
            )
            target_user_id_obj = UserID.from_string(user_id)
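
        # Check whether this user is blocked from authenticating (see
        # `check_auth_blocking`); appservice ghost users are exempt unless
        # their IPs are being tracked.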
|
2019-07-12 10:26:02 -06:00
|
|
|
|
2020-12-17 05:55:21 -07:00
|
|
|
if (
|
|
|
|
not is_appservice_ghost
|
|
|
|
or self.hs.config.appservice.track_appservice_user_ips
|
|
|
|
):
|
2022-06-14 02:51:15 -06:00
|
|
|
await self.auth_blocking.check_auth_blocking(user_id)
|
2016-07-22 07:52:53 -06:00
|
|
|
|
2021-05-12 08:04:51 -06:00
|
|
|
access_token = self.generate_access_token(target_user_id_obj)
|
2020-04-15 10:40:18 -06:00
|
|
|
await self.store.add_access_token_to_user(
|
2020-11-17 03:51:25 -07:00
|
|
|
user_id=user_id,
|
|
|
|
token=access_token,
|
|
|
|
device_id=device_id,
|
|
|
|
valid_until_ms=valid_until_ms,
|
|
|
|
puppets_user_id=puppets_user_id,
|
2021-06-24 07:33:20 -06:00
|
|
|
refresh_token_id=refresh_token_id,
|
2019-07-12 10:26:02 -06:00
|
|
|
)
|
2019-07-10 12:10:07 -06:00
|
|
|
|
2016-07-22 07:52:53 -06:00
|
|
|
# the device *should* have been registered before we got here; however,
|
|
|
|
# it's possible we raced against a DELETE operation. The thing we
|
|
|
|
# really don't want is active access_tokens without a record of the
|
|
|
|
# device, so we double-check it here.
|
|
|
|
if device_id is not None:
|
2021-12-13 08:39:43 -07:00
|
|
|
if await self.store.get_device(user_id, device_id) is None:
|
2020-04-15 10:40:18 -06:00
|
|
|
await self.store.delete_access_token(access_token)
|
2017-11-01 04:23:21 -06:00
|
|
|
raise StoreError(400, "Login raced against device deletion")
|
2016-07-22 07:52:53 -06:00
|
|
|
|
2019-07-23 07:00:55 -06:00
|
|
|
return access_token
|
2015-10-07 07:45:57 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def check_user_exists(self, user_id: str) -> Optional[str]:
|
2016-07-15 05:34:23 -06:00
|
|
|
"""
|
|
|
|
Checks to see if a user with the given id exists. Will check case
|
|
|
|
insensitively, but return None if there are multiple inexact matches.
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
user_id: complete @user:id
|
2016-07-15 05:34:23 -06:00
|
|
|
|
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
The canonical_user_id, or None if zero or multiple matches
|
2016-07-15 05:34:23 -06:00
|
|
|
"""
|
2020-04-15 10:40:18 -06:00
|
|
|
res = await self._find_user_id_and_pwd_hash(user_id)
|
2016-10-06 11:20:27 -06:00
|
|
|
if res is not None:
|
2019-07-23 07:00:55 -06:00
|
|
|
return res[0]
|
|
|
|
return None
|
2015-10-07 07:45:57 -06:00
|
|
|
|
2022-09-29 07:23:24 -06:00
|
|
|
async def is_user_approved(self, user_id: str) -> bool:
|
|
|
|
"""Checks if a user is approved and therefore can be allowed to log in.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id: the user to check the approval status of.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A boolean that is True if the user is approved, False otherwise.
|
|
|
|
"""
|
|
|
|
return await self.store.is_user_approved(user_id)
|
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def _find_user_id_and_pwd_hash(
|
|
|
|
self, user_id: str
|
|
|
|
) -> Optional[Tuple[str, str]]:
|
2015-08-26 06:42:45 -06:00
|
|
|
"""Checks to see if a user with the given id exists. Will check case
|
2016-10-06 11:20:27 -06:00
|
|
|
insensitively, but will return None if there are multiple inexact
|
|
|
|
matches.
|
2015-08-26 06:42:45 -06:00
|
|
|
|
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
A 2-tuple of `(canonical_user_id, password_hash)` or `None`
|
|
|
|
if there is not exactly one match
|
2015-08-26 06:42:45 -06:00
|
|
|
"""
|
2020-04-15 10:40:18 -06:00
|
|
|
user_infos = await self.store.get_users_by_id_case_insensitive(user_id)
|
2016-10-06 11:20:27 -06:00
|
|
|
|
|
|
|
result = None
|
2015-08-26 06:42:45 -06:00
|
|
|
if not user_infos:
|
2019-10-31 04:23:24 -06:00
|
|
|
logger.warning("Attempted to login as %s but they do not exist", user_id)
|
2016-10-06 11:20:27 -06:00
|
|
|
elif len(user_infos) == 1:
|
|
|
|
# a single match (possibly not exact)
|
|
|
|
result = user_infos.popitem()
|
|
|
|
elif user_id in user_infos:
|
|
|
|
# multiple matches, but one is exact
|
|
|
|
result = (user_id, user_infos[user_id])
|
2015-08-26 06:42:45 -06:00
|
|
|
else:
|
2016-10-06 11:20:27 -06:00
|
|
|
# multiple matches, none of them exact
|
2019-10-31 04:23:24 -06:00
|
|
|
logger.warning(
|
2016-10-06 11:20:27 -06:00
|
|
|
"Attempted to login as %s but it matches more than one user "
|
|
|
|
"inexactly: %r",
|
|
|
|
user_id,
|
|
|
|
user_infos.keys(),
|
|
|
|
)
|
2019-07-23 07:00:55 -06:00
|
|
|
return result
|
2015-08-26 06:42:45 -06:00
|
|
|
|
2021-03-16 09:44:25 -06:00
|
|
|
def can_change_password(self) -> bool:
|
|
|
|
"""Get whether users on this server are allowed to change or set a password.
|
|
|
|
|
2021-09-23 05:13:34 -06:00
|
|
|
Both `config.auth.password_enabled` and `config.auth.password_localdb_enabled` must be true.
|
2021-03-16 09:44:25 -06:00
|
|
|
|
|
|
|
Note that any account (even SSO accounts) is allowed to add passwords if the above
|
|
|
|
is true.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Whether users on this server are allowed to change or set a password
|
|
|
|
"""
|
2022-05-27 03:44:51 -06:00
|
|
|
return self._password_enabled_for_login and self._password_localdb_enabled
|
2021-03-16 09:44:25 -06:00
|
|
|
|
2020-03-12 09:36:27 -06:00
|
|
|
def get_supported_login_types(self) -> Iterable[str]:
|
2017-10-31 04:38:40 -06:00
|
|
|
"""Get a the login types supported for the /login API
|
|
|
|
|
|
|
|
By default this is just 'm.login.password' (unless password_enabled is
|
|
|
|
False in the config file), but password auth providers can provide
|
|
|
|
other login types.
|
|
|
|
|
|
|
|
Returns:
|
2020-03-12 09:36:27 -06:00
|
|
|
login types
|
2017-10-31 04:38:40 -06:00
|
|
|
"""
|
2021-10-13 05:21:52 -06:00
|
|
|
# Load any login types registered by modules
|
|
|
|
# This is stored in the password_auth_provider so this doesn't trigger
|
|
|
|
# any callbacks
|
|
|
|
types = list(self.password_auth_provider.get_supported_login_types().keys())
|
|
|
|
|
|
|
|
# This list should include PASSWORD if (either _password_localdb_enabled is
|
|
|
|
# true or if one of the modules registered it) AND _password_enabled is true
|
|
|
|
# Also:
|
|
|
|
# Some clients just pick the first type in the list. In this case, we want
|
|
|
|
# them to use PASSWORD (rather than token or whatever), so we want to make sure
|
|
|
|
# that comes first, where it's present.
|
|
|
|
if LoginType.PASSWORD in types:
|
|
|
|
types.remove(LoginType.PASSWORD)
|
2022-05-27 03:44:51 -06:00
|
|
|
if self._password_enabled_for_login:
|
2021-10-13 05:21:52 -06:00
|
|
|
types.insert(0, LoginType.PASSWORD)
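# If no module registered PASSWORD, the built-in password database may still
# provide password login (handled by the elif below).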
|
2022-05-27 03:44:51 -06:00
|
|
|
elif self._password_localdb_enabled and self._password_enabled_for_login:
|
2021-10-13 05:21:52 -06:00
|
|
|
types.insert(0, LoginType.PASSWORD)
|
|
|
|
|
|
|
|
return types
|
2017-10-31 04:38:40 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def validate_login(
|
2020-12-01 10:42:26 -07:00
|
|
|
self,
|
|
|
|
login_submission: Dict[str, Any],
|
|
|
|
ratelimit: bool = False,
|
2022-05-27 03:44:51 -06:00
|
|
|
is_reauth: bool = False,
|
2021-06-24 07:33:20 -06:00
|
|
|
) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
|
2017-10-31 04:38:40 -06:00
|
|
|
"""Authenticates the user for the /login API
|
2016-07-15 05:34:23 -06:00
|
|
|
|
2020-12-01 10:42:26 -07:00
|
|
|
Also used by the user-interactive auth flow to validate auth types which don't
|
|
|
|
have an explicit UIA handler, including m.login.password auth.
|
2016-07-15 05:34:23 -06:00
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
login_submission: the whole of the login submission
|
2017-10-31 04:38:40 -06:00
|
|
|
(including 'type' and other relevant fields)
|
2020-12-01 10:42:26 -07:00
|
|
|
ratelimit: whether to apply the failed_login_attempt ratelimiter
|
2022-05-27 03:44:51 -06:00
|
|
|
is_reauth: whether this is part of a User-Interactive Authorisation
|
|
|
|
flow to reauthenticate for a privileged action (rather than a
|
|
|
|
new login)
|
2016-04-14 12:00:21 -06:00
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
A tuple of the canonical user id, and optional callback
|
2017-10-31 09:15:51 -06:00
|
|
|
to be called once the access token and device id are issued
|
2016-07-15 05:34:23 -06:00
|
|
|
Raises:
|
2017-10-31 04:38:40 -06:00
|
|
|
StoreError if there was a problem accessing the database
|
|
|
|
SynapseError if there was a problem with the request
|
|
|
|
LoginError if there was an authentication problem.
|
2016-04-14 12:00:21 -06:00
|
|
|
"""
|
2017-10-31 04:38:40 -06:00
|
|
|
login_type = login_submission.get("type")
|
2020-12-02 03:38:50 -07:00
|
|
|
if not isinstance(login_type, str):
|
|
|
|
raise SynapseError(400, "Bad parameter: type", Codes.INVALID_PARAM)
|
2020-12-01 10:42:26 -07:00
|
|
|
|
|
|
|
# ideally, we wouldn't be checking the identifier unless we know we have a login
|
|
|
|
# method which uses it (https://github.com/matrix-org/synapse/issues/8836)
|
|
|
|
#
|
|
|
|
# But the auth providers' check_auth interface requires a username, so in
|
|
|
|
# practice we can only support login methods which we can map to a username
|
|
|
|
# anyway.
|
2017-10-31 04:38:40 -06:00
|
|
|
|
2017-10-31 04:43:57 -06:00
|
|
|
# special case to check for "password" for the check_password interface
|
|
|
|
# for the auth providers
|
|
|
|
password = login_submission.get("password")
|
2022-05-27 03:44:51 -06:00
|
|
|
|
2017-10-31 04:43:57 -06:00
|
|
|
if login_type == LoginType.PASSWORD:
|
2022-05-27 03:44:51 -06:00
|
|
|
if is_reauth:
|
|
|
|
passwords_allowed_here = self._password_enabled_for_reauth
|
|
|
|
else:
|
|
|
|
passwords_allowed_here = self._password_enabled_for_login
|
|
|
|
|
|
|
|
if not passwords_allowed_here:
|
2017-10-31 04:43:57 -06:00
|
|
|
raise SynapseError(400, "Password login has been disabled.")
|
2020-12-01 10:42:26 -07:00
|
|
|
if not isinstance(password, str):
|
|
|
|
raise SynapseError(400, "Bad parameter: password", Codes.INVALID_PARAM)
|
|
|
|
|
|
|
|
# map old-school login fields into new-school "identifier" fields.
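# e.g. (illustrative) a legacy submission of {"user": "alice", ...} becomes
# {"type": "m.id.user", "user": "alice"}, and a legacy {"medium": "email",
# "address": "alice@example.com", ...} becomes an m.id.thirdparty identifier.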
|
|
|
|
identifier_dict = convert_client_dict_legacy_fields_to_identifier(
|
|
|
|
login_submission
|
|
|
|
)
|
|
|
|
|
|
|
|
# convert phone type identifiers to generic threepids
|
|
|
|
if identifier_dict["type"] == "m.id.phone":
|
|
|
|
identifier_dict = login_id_phone_to_thirdparty(identifier_dict)
|
|
|
|
|
|
|
|
# convert threepid identifiers to user IDs
|
|
|
|
if identifier_dict["type"] == "m.id.thirdparty":
|
|
|
|
address = identifier_dict.get("address")
|
|
|
|
medium = identifier_dict.get("medium")
|
|
|
|
|
|
|
|
if medium is None or address is None:
|
|
|
|
raise SynapseError(400, "Invalid thirdparty identifier")
|
|
|
|
|
|
|
|
# For emails, canonicalise the address.
|
|
|
|
# We store all email addresses canonicalised in the DB.
|
|
|
|
# (See add_threepid in synapse/handlers/auth.py)
|
|
|
|
if medium == "email":
|
|
|
|
try:
|
|
|
|
address = canonicalise_email(address)
|
|
|
|
except ValueError as e:
|
|
|
|
raise SynapseError(400, str(e))
|
|
|
|
|
|
|
|
# We also apply account rate limiting using the 3PID as a key, as
|
|
|
|
# otherwise using 3PID bypasses the ratelimiting based on user ID.
|
|
|
|
if ratelimit:
|
2021-03-30 05:06:09 -06:00
|
|
|
await self._failed_login_attempts_ratelimiter.ratelimit(
|
|
|
|
None, (medium, address), update=False
|
2020-12-01 10:42:26 -07:00
|
|
|
)
|
|
|
|
|
|
|
|
# Check for login providers that support 3pid login types
|
|
|
|
if login_type == LoginType.PASSWORD:
|
|
|
|
# we've already checked that there is a (valid) password field
|
|
|
|
assert isinstance(password, str)
|
|
|
|
(
|
|
|
|
canonical_user_id,
|
|
|
|
callback_3pid,
|
|
|
|
) = await self.check_password_provider_3pid(medium, address, password)
|
|
|
|
if canonical_user_id:
|
|
|
|
# Authentication through password provider and 3pid succeeded
|
|
|
|
return canonical_user_id, callback_3pid
|
|
|
|
|
|
|
|
# No password providers were able to handle this 3pid
|
|
|
|
# Check local store
|
2022-02-23 04:04:02 -07:00
|
|
|
user_id = await self.hs.get_datastores().main.get_user_id_by_threepid(
|
2020-12-01 10:42:26 -07:00
|
|
|
medium, address
|
|
|
|
)
|
|
|
|
if not user_id:
|
|
|
|
logger.warning(
|
|
|
|
"unknown 3pid identifier medium %s, address %r", medium, address
|
|
|
|
)
|
|
|
|
# We mark that we've failed to log in here, as
|
|
|
|
# `check_password_provider_3pid` might have returned `None` due
|
|
|
|
# to an incorrect password, rather than the account not
|
|
|
|
# existing.
|
|
|
|
#
|
|
|
|
# If it returned None but the 3PID was bound then we won't hit
|
|
|
|
# this code path, which is fine as then the per-user ratelimit
|
|
|
|
# will kick in below.
|
|
|
|
if ratelimit:
|
2021-03-30 05:06:09 -06:00
|
|
|
await self._failed_login_attempts_ratelimiter.can_do_action(
|
|
|
|
None, (medium, address)
|
2020-12-01 10:42:26 -07:00
|
|
|
)
|
2022-06-07 08:58:48 -06:00
|
|
|
raise LoginError(
|
|
|
|
403, msg=INVALID_USERNAME_OR_PASSWORD, errcode=Codes.FORBIDDEN
|
|
|
|
)
|
2020-12-01 10:42:26 -07:00
|
|
|
|
|
|
|
identifier_dict = {"type": "m.id.user", "user": user_id}
|
|
|
|
|
|
|
|
# by this point, the identifier should be an m.id.user: if it's anything
|
|
|
|
# else, we haven't understood it.
|
|
|
|
if identifier_dict["type"] != "m.id.user":
|
|
|
|
raise SynapseError(400, "Unknown login identifier type")
|
|
|
|
|
|
|
|
username = identifier_dict.get("user")
|
|
|
|
if not username:
|
|
|
|
raise SynapseError(400, "User identifier is missing 'user' key")
|
|
|
|
|
|
|
|
if username.startswith("@"):
|
|
|
|
qualified_user_id = username
|
|
|
|
else:
|
|
|
|
qualified_user_id = UserID(username, self.hs.hostname).to_string()
|
|
|
|
|
|
|
|
# Check if we've hit the failed ratelimit (but don't update it)
|
|
|
|
if ratelimit:
|
2021-03-30 05:06:09 -06:00
|
|
|
await self._failed_login_attempts_ratelimiter.ratelimit(
|
|
|
|
None, qualified_user_id.lower(), update=False
|
2020-12-01 10:42:26 -07:00
|
|
|
)
|
|
|
|
|
|
|
|
try:
|
|
|
|
return await self._validate_userid_login(username, login_submission)
|
|
|
|
except LoginError:
|
|
|
|
# The user has failed to log in, so we need to update the rate
|
|
|
|
# limiter. Using `can_do_action` avoids us raising a ratelimit
|
|
|
|
# exception and masking the LoginError. The actual ratelimiting
|
|
|
|
# should have happened above.
|
|
|
|
if ratelimit:
|
2021-03-30 05:06:09 -06:00
|
|
|
await self._failed_login_attempts_ratelimiter.can_do_action(
|
|
|
|
None, qualified_user_id.lower()
|
2020-12-01 10:42:26 -07:00
|
|
|
)
|
|
|
|
raise
|
|
|
|
|
|
|
|
async def _validate_userid_login(
|
|
|
|
self,
|
|
|
|
username: str,
|
|
|
|
login_submission: Dict[str, Any],
|
2021-06-24 07:33:20 -06:00
|
|
|
) -> Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
|
2020-12-01 10:42:26 -07:00
|
|
|
"""Helper for validate_login
|
|
|
|
|
|
|
|
Handles login, once we've mapped 3pids onto userids
|
|
|
|
|
|
|
|
Args:
|
|
|
|
username: the username, from the identifier dict
|
|
|
|
login_submission: the whole of the login submission
|
|
|
|
(including 'type' and other relevant fields)
|
|
|
|
Returns:
|
|
|
|
A tuple of the canonical user id, and optional callback
|
|
|
|
to be called once the access token and device id are issued
|
|
|
|
Raises:
|
|
|
|
StoreError if there was a problem accessing the database
|
|
|
|
SynapseError if there was a problem with the request
|
|
|
|
LoginError if there was an authentication problem.
|
|
|
|
"""
|
|
|
|
if username.startswith("@"):
|
|
|
|
qualified_user_id = username
|
|
|
|
else:
|
|
|
|
qualified_user_id = UserID(username, self.hs.hostname).to_string()
|
|
|
|
|
|
|
|
login_type = login_submission.get("type")
|
2020-12-02 03:38:50 -07:00
|
|
|
# we already checked that we have a valid login type
|
|
|
|
assert isinstance(login_type, str)
|
|
|
|
|
2020-12-01 10:42:26 -07:00
|
|
|
known_login_type = False
|
2017-10-31 04:38:40 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# Check if login_type matches a type registered by one of the modules
|
|
|
|
# We don't need to remove LoginType.PASSWORD from the list if password login is
|
|
|
|
# disabled, since if that were the case then by this point we know that the
|
|
|
|
# login_type is not LoginType.PASSWORD
|
|
|
|
supported_login_types = self.password_auth_provider.get_supported_login_types()
|
|
|
|
# check if the login type being used is supported by a module
|
|
|
|
if login_type in supported_login_types:
|
|
|
|
# Make a note that this login type is supported by the server
|
2017-10-31 04:43:57 -06:00
|
|
|
known_login_type = True
|
2021-10-13 05:21:52 -06:00
|
|
|
# Get all the fields expected for this login type
|
2017-10-31 04:43:57 -06:00
|
|
|
login_fields = supported_login_types[login_type]
|
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# go through the login submission and keep track of which required fields are
|
|
|
|
# provided/not provided
|
2017-10-31 04:43:57 -06:00
|
|
|
missing_fields = []
|
|
|
|
login_dict = {}
|
|
|
|
for f in login_fields:
|
|
|
|
if f not in login_submission:
|
|
|
|
missing_fields.append(f)
|
|
|
|
else:
|
|
|
|
login_dict[f] = login_submission[f]
|
2021-10-13 05:21:52 -06:00
|
|
|
# raise an error if any of the expected fields for that login type weren't provided
|
2017-10-31 04:43:57 -06:00
|
|
|
if missing_fields:
|
|
|
|
raise SynapseError(
|
|
|
|
400,
|
|
|
|
"Missing parameters for login type %s: %s"
|
|
|
|
% (login_type, missing_fields),
|
|
|
|
)
|
2016-04-15 04:17:18 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# call all of the check_auth hooks for that login_type
|
|
|
|
# it will return a result once the first success is found (or None otherwise)
|
|
|
|
result = await self.password_auth_provider.check_auth(
|
|
|
|
username, login_type, login_dict
|
|
|
|
)
|
2017-10-31 09:15:51 -06:00
|
|
|
if result:
|
2019-07-23 07:00:55 -06:00
|
|
|
return result
|
2017-10-31 04:43:57 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# if no module managed to authenticate the user, then fall back to the built-in password-based auth
|
2020-11-30 17:15:36 -07:00
|
|
|
if login_type == LoginType.PASSWORD and self._password_localdb_enabled:
|
2017-10-31 04:43:57 -06:00
|
|
|
known_login_type = True
|
|
|
|
|
2020-12-01 10:42:26 -07:00
|
|
|
# we've already checked that there is a (valid) password field
|
|
|
|
password = login_submission["password"]
|
|
|
|
assert isinstance(password, str)
|
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
canonical_user_id = await self._check_local_password(
|
2020-12-01 10:42:26 -07:00
|
|
|
qualified_user_id, password
|
2017-10-31 04:43:57 -06:00
|
|
|
)
|
2016-10-06 11:20:27 -06:00
|
|
|
|
2017-10-31 04:43:57 -06:00
|
|
|
if canonical_user_id:
|
2019-08-30 09:28:26 -06:00
|
|
|
return canonical_user_id, None
|
2017-10-31 04:43:57 -06:00
|
|
|
|
|
|
|
if not known_login_type:
|
|
|
|
raise SynapseError(400, "Unknown login type %s" % login_type)
|
2016-10-06 11:20:27 -06:00
|
|
|
|
2019-03-18 06:57:20 -06:00
|
|
|
# We raise a 403 here, but note that if we're doing user-interactive
|
|
|
|
# login, it turns all LoginErrors into a 401 anyway.
|
2022-06-07 08:58:48 -06:00
|
|
|
raise LoginError(403, msg=INVALID_USERNAME_OR_PASSWORD, errcode=Codes.FORBIDDEN)
|
2015-08-12 08:49:37 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def check_password_provider_3pid(
|
|
|
|
self, medium: str, address: str, password: str
|
2021-06-24 07:33:20 -06:00
|
|
|
) -> Tuple[Optional[str], Optional[Callable[["LoginResponse"], Awaitable[None]]]]:
|
2019-03-26 11:48:30 -06:00
|
|
|
"""Check if a password provider is able to validate a thirdparty login
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
medium: The medium of the 3pid (ex. email).
|
|
|
|
address: The address of the 3pid (ex. jdoe@example.com).
|
|
|
|
password: The password of the user.
|
2019-03-26 11:48:30 -06:00
|
|
|
|
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
A tuple of `(user_id, callback)`. If authentication is successful,
|
|
|
|
`user_id` is the authenticated, canonical user ID. `callback` is
|
2019-03-26 11:48:30 -06:00
|
|
|
then either a function to be later run after the server has
|
|
|
|
completed login/registration, or `None`. If authentication was
|
|
|
|
unsuccessful, `user_id` and `callback` are both `None`.
|
|
|
|
"""
|
2021-10-13 05:21:52 -06:00
|
|
|
# call all of the check_3pid_auth callbacks
|
|
|
|
# Result will be from the first callback that returns something other than None
|
|
|
|
# If all the callbacks return None, then result is also set to None
|
|
|
|
result = await self.password_auth_provider.check_3pid_auth(
|
|
|
|
medium, address, password
|
|
|
|
)
|
|
|
|
if result:
|
|
|
|
return result
|
2019-03-26 11:48:30 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# if result is None then return (None, None)
|
2019-08-30 09:28:26 -06:00
|
|
|
return None, None
|
2019-03-26 11:48:30 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def _check_local_password(self, user_id: str, password: str) -> Optional[str]:
|
2016-07-15 05:34:23 -06:00
|
|
|
"""Authenticate a user against the local password database.
|
|
|
|
|
2016-10-06 11:20:27 -06:00
|
|
|
user_id is checked case insensitively, but will return None if there are
|
2016-07-15 05:34:23 -06:00
|
|
|
multiple inexact matches.
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
user_id: complete @user:id
|
|
|
|
password: the provided password
|
2016-07-15 05:34:23 -06:00
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
The canonical_user_id, or None if unknown user/bad password
|
2016-07-15 05:34:23 -06:00
|
|
|
"""
|
2020-04-15 10:40:18 -06:00
|
|
|
lookupres = await self._find_user_id_and_pwd_hash(user_id)
|
2016-10-06 11:20:27 -06:00
|
|
|
if not lookupres:
|
2019-07-23 07:00:55 -06:00
|
|
|
return None
|
2016-10-06 11:20:27 -06:00
|
|
|
(user_id, password_hash) = lookupres
|
2019-07-15 04:45:29 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
result = await self.validate_hash(password, password_hash)
|
2016-07-15 05:34:23 -06:00
|
|
|
if not result:
|
2019-10-31 04:23:24 -06:00
|
|
|
logger.warning("Failed password login for user %s", user_id)
|
2019-07-23 07:00:55 -06:00
|
|
|
return None
|
|
|
|
return user_id
|
2016-04-06 05:02:49 -06:00
|
|
|
|
2022-10-26 04:45:41 -06:00
|
|
|
def generate_login_token(self) -> str:
|
|
|
|
"""Generates an opaque string, for use as an short-term login token"""
|
|
|
|
|
|
|
|
# we use the following format for login tokens:
|
|
|
|
# syl_<random string>_<base62 crc check>
|
|
|
|
|
|
|
|
random_string = stringutils.random_string(20)
|
|
|
|
base = f"syl_{random_string}"
|
|
|
|
|
|
|
|
crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
|
|
|
|
return f"{base}_{crc}"
|
|
|
|
|
2021-05-12 08:04:51 -06:00
|
|
|
def generate_access_token(self, for_user: UserID) -> str:
|
|
|
|
"""Generates an opaque string, for use as an access token"""
|
|
|
|
|
|
|
|
# we use the following format for access tokens:
|
|
|
|
# syt_<base64 local part>_<random string>_<base62 crc check>
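# e.g. (purely illustrative): syt_YWxpY2U_<20 random chars>_<6 char crc>,
# where "YWxpY2U" is the unpadded base64 of the localpart "alice". The
# trailing CRC is a cheap sanity/identification check, not a security measure.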
|
|
|
|
|
|
|
|
b64local = unpaddedbase64.encode_base64(for_user.localpart.encode("utf-8"))
|
|
|
|
random_string = stringutils.random_string(20)
|
|
|
|
base = f"syt_{b64local}_{random_string}"
|
|
|
|
|
|
|
|
crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
|
|
|
|
return f"{base}_{crc}"
|
|
|
|
|
2021-06-24 07:33:20 -06:00
|
|
|
def generate_refresh_token(self, for_user: UserID) -> str:
|
|
|
|
"""Generates an opaque string, for use as a refresh token"""
|
|
|
|
|
|
|
|
# we use the following format for refresh tokens:
|
|
|
|
# syr_<base64 local part>_<random string>_<base62 crc check>
|
|
|
|
|
|
|
|
b64local = unpaddedbase64.encode_base64(for_user.localpart.encode("utf-8"))
|
|
|
|
random_string = stringutils.random_string(20)
|
|
|
|
base = f"syr_{b64local}_{random_string}"
|
|
|
|
|
|
|
|
crc = base62_encode(crc32(base.encode("ascii")), minwidth=6)
|
|
|
|
return f"{base}_{crc}"
|
|
|
|
|
2022-10-26 04:45:41 -06:00
|
|
|
async def consume_login_token(self, login_token: str) -> LoginTokenLookupResult:
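"""Consume a short-term login token: look it up, mark it as used and return
the resulting LoginTokenLookupResult.

If the token has expired, has already been used, or cannot be found, the
corresponding metric is incremented and a 403 AuthError is raised instead.
"""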
|
2015-11-11 04:12:35 -07:00
|
|
|
try:
|
2022-10-26 04:45:41 -06:00
|
|
|
return await self.store.consume_login_token(login_token)
|
|
|
|
except LoginTokenExpired:
|
|
|
|
invalid_login_token_counter.labels("expired").inc()
|
|
|
|
except LoginTokenReused:
|
|
|
|
invalid_login_token_counter.labels("reused").inc()
|
|
|
|
except NotFoundError:
|
|
|
|
invalid_login_token_counter.labels("not found").inc()
|
2019-11-05 10:39:16 -07:00
|
|
|
|
2022-10-26 04:45:41 -06:00
|
|
|
raise AuthError(403, "Invalid login token", errcode=Codes.FORBIDDEN)
|
2015-11-05 07:01:12 -07:00
|
|
|
|
2021-09-20 06:56:23 -06:00
|
|
|
async def delete_access_token(self, access_token: str) -> None:
|
2017-11-01 04:29:34 -06:00
|
|
|
"""Invalidate a single access token
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
access_token: access token to be deleted
|
2017-11-01 04:29:34 -06:00
|
|
|
|
|
|
|
"""
|
2022-08-22 07:17:59 -06:00
|
|
|
token = await self.store.get_user_by_access_token(access_token)
|
|
|
|
if not token:
|
|
|
|
# At this point, the token should already have been fetched once by
|
|
|
|
# the caller, so this should not happen, unless there was a race condition
|
|
|
|
# between two delete requests
|
|
|
|
raise SynapseError(HTTPStatus.UNAUTHORIZED, "Unrecognised access token")
|
2020-04-15 10:40:18 -06:00
|
|
|
await self.store.delete_access_token(access_token)
|
2017-11-01 09:42:38 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# see if any modules want to know about this
|
|
|
|
await self.password_auth_provider.on_logged_out(
|
2022-08-22 07:17:59 -06:00
|
|
|
user_id=token.user_id,
|
|
|
|
device_id=token.device_id,
|
2021-10-13 05:21:52 -06:00
|
|
|
access_token=access_token,
|
|
|
|
)
|
2017-11-01 04:29:34 -06:00
|
|
|
|
2017-11-29 07:33:05 -07:00
|
|
|
# delete pushers associated with this access token
|
2023-03-24 09:09:39 -06:00
|
|
|
# XXX(quenting): This is only needed until the 'set_device_id_for_pushers'
|
|
|
|
# background update completes.
|
2022-08-22 07:17:59 -06:00
|
|
|
if token.token_id is not None:
|
2023-03-24 09:09:39 -06:00
|
|
|
await self.hs.get_pusherpool().remove_pushers_by_access_tokens(
|
2022-08-22 07:17:59 -06:00
|
|
|
token.user_id, (token.token_id,)
|
2017-11-29 07:33:05 -07:00
|
|
|
)
|
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def delete_access_tokens_for_user(
|
2020-03-12 09:36:27 -06:00
|
|
|
self,
|
|
|
|
user_id: str,
|
2021-03-10 11:15:56 -07:00
|
|
|
except_token_id: Optional[int] = None,
|
2020-03-12 09:36:27 -06:00
|
|
|
device_id: Optional[str] = None,
|
2021-09-20 06:56:23 -06:00
|
|
|
) -> None:
|
2017-11-01 04:29:34 -06:00
|
|
|
"""Invalidate access tokens belonging to a user
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
user_id: ID of user the tokens belong to
|
|
|
|
except_token_id: access_token ID which should *not* be deleted
|
|
|
|
device_id: ID of device the tokens are associated with.
|
2017-11-01 04:29:34 -06:00
|
|
|
If None, tokens associated with any device (or no device) will
|
|
|
|
be deleted
|
|
|
|
"""
|
2020-04-15 10:40:18 -06:00
|
|
|
tokens_and_devices = await self.store.user_delete_access_tokens(
|
2017-11-01 04:29:34 -06:00
|
|
|
user_id, except_token_id=except_token_id, device_id=device_id
|
|
|
|
)
|
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# see if any modules want to know about this
|
|
|
|
for token, _, device_id in tokens_and_devices:
|
|
|
|
await self.password_auth_provider.on_logged_out(
|
|
|
|
user_id=user_id, device_id=device_id, access_token=token
|
|
|
|
)
|
2017-11-01 09:42:38 -06:00
|
|
|
|
2017-11-29 07:33:05 -07:00
|
|
|
# delete pushers associated with the access tokens
|
2023-03-24 09:09:39 -06:00
|
|
|
# XXX(quenting): This is only needed until the 'set_device_id_for_pushers'
|
|
|
|
# background update completes.
|
|
|
|
await self.hs.get_pusherpool().remove_pushers_by_access_tokens(
|
2017-11-29 07:33:05 -07:00
|
|
|
user_id, (token_id for _, token_id, _ in tokens_and_devices)
|
|
|
|
)
|
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def add_threepid(
|
|
|
|
self, user_id: str, medium: str, address: str, validated_at: int
|
2021-09-20 06:56:23 -06:00
|
|
|
) -> None:
|
2023-02-27 07:19:19 -07:00
|
|
|
"""
|
|
|
|
Adds an association between a user's Matrix ID and a third-party ID (email,
|
|
|
|
phone number).
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id: The ID of the user to associate.
|
|
|
|
medium: The medium of the third-party ID (email, msisdn).
|
|
|
|
address: The address of the third-party ID (i.e. an email address).
|
|
|
|
validated_at: The timestamp in ms of when the validation that the user owns
|
|
|
|
this third-party ID occurred.
|
|
|
|
"""
|
2020-02-07 03:29:36 -07:00
|
|
|
# check if medium has a valid value
|
|
|
|
if medium not in ["email", "msisdn"]:
|
|
|
|
raise SynapseError(
|
|
|
|
code=400,
|
|
|
|
msg=("'%s' is not a valid value for 'medium'" % (medium,)),
|
|
|
|
errcode=Codes.INVALID_PARAM,
|
|
|
|
)
|
|
|
|
|
2016-10-19 04:13:55 -06:00
|
|
|
# 'Canonicalise' email addresses down to lower case.
|
2019-11-12 06:08:12 -07:00
|
|
|
# We're now moving towards the homeserver being the entity that
|
2016-10-19 04:13:55 -06:00
|
|
|
# is responsible for validating threepids used for resetting passwords
|
|
|
|
# on accounts, so in future Synapse will gain knowledge of specific
|
|
|
|
# types (mediums) of threepid. For now, we still use the existing
|
|
|
|
# infrastructure, but this is the start of synapse gaining knowledge
|
|
|
|
# of specific types of threepid (and fixes the fact that checking
|
2016-12-21 02:44:03 -07:00
|
|
|
# for the presence of an email address during password reset was
|
2016-10-19 04:13:55 -06:00
|
|
|
# case sensitive).
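# e.g. (illustrative) "Alice@Example.COM" is stored as "alice@example.com".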
|
|
|
|
if medium == "email":
|
2020-07-03 07:03:13 -06:00
|
|
|
address = canonicalise_email(address)
|
2016-10-19 04:13:55 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
await self.store.user_add_threepid(
|
2015-08-12 08:49:37 -06:00
|
|
|
user_id, medium, address, validated_at, self.hs.get_clock().time_msec()
|
|
|
|
)
|
|
|
|
|
2023-02-27 07:19:19 -07:00
|
|
|
# Inform Synapse modules that a 3PID association has been created.
|
|
|
|
await self._third_party_rules.on_add_user_third_party_identifier(
|
|
|
|
user_id, medium, address
|
|
|
|
)
|
|
|
|
|
|
|
|
# Deprecated method for informing Synapse modules that a 3PID association
|
|
|
|
# has successfully been created.
|
2022-03-31 10:27:21 -06:00
|
|
|
await self._third_party_rules.on_threepid_bind(user_id, medium, address)
|
|
|
|
|
2023-02-27 07:19:19 -07:00
|
|
|
async def delete_local_threepid(
|
|
|
|
self, user_id: str, medium: str, address: str
|
|
|
|
) -> None:
|
|
|
|
"""Deletes an association between a third-party ID and a user ID from the local
|
|
|
|
database. This method does not unbind the association from any identity servers.
|
|
|
|
|
|
|
|
If `medium` is 'email' and a pusher is associated with this third-party ID, the
|
|
|
|
pusher will also be deleted.
|
2018-08-08 04:54:55 -06:00
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
user_id: ID of user to remove the 3pid from.
|
|
|
|
medium: The medium of the 3pid being removed: "email" or "msisdn".
|
|
|
|
address: The 3pid address to remove.
|
2018-08-08 04:54:55 -06:00
|
|
|
"""
|
2016-12-20 11:27:30 -07:00
|
|
|
# 'Canonicalise' email addresses as per above
|
|
|
|
if medium == "email":
|
2020-07-03 07:03:13 -06:00
|
|
|
address = canonicalise_email(address)
|
2016-12-20 11:27:30 -07:00
|
|
|
|
2023-02-27 07:19:19 -07:00
|
|
|
await self.store.user_delete_threepid(user_id, medium, address)
|
|
|
|
|
|
|
|
# Inform Synapse modules that a 3PID association has been deleted.
|
|
|
|
await self._third_party_rules.on_remove_user_third_party_identifier(
|
|
|
|
user_id, medium, address
|
2018-05-24 04:08:05 -06:00
|
|
|
)
|
|
|
|
|
2021-08-26 06:53:57 -06:00
|
|
|
if medium == "email":
|
|
|
|
await self.store.delete_pusher_by_app_id_pushkey_user_id(
|
|
|
|
app_id="m.email", pushkey=address, user_id=user_id
|
|
|
|
)
|
2016-12-20 11:27:30 -07:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def hash(self, password: str) -> str:
|
2015-08-26 08:59:32 -06:00
|
|
|
"""Computes a secure hash of password.
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
password: Password to hash.
|
2015-08-26 08:59:32 -06:00
|
|
|
|
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
Hashed password.
|
2015-08-26 08:59:32 -06:00
|
|
|
"""
|
2019-06-20 03:32:02 -06:00
|
|
|
|
2021-09-20 06:56:23 -06:00
|
|
|
def _do_hash() -> str:
|
2018-08-01 08:54:06 -06:00
|
|
|
# Normalise the Unicode in the password
|
|
|
|
pw = unicodedata.normalize("NFKC", password)
|
|
|
|
|
|
|
|
return bcrypt.hashpw(
|
2021-09-23 05:13:34 -06:00
|
|
|
pw.encode("utf8") + self.hs.config.auth.password_pepper.encode("utf8"),
|
2018-08-01 08:54:06 -06:00
|
|
|
bcrypt.gensalt(self.bcrypt_rounds),
|
|
|
|
).decode("ascii")
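# bcrypt is deliberately slow (CPU-bound), so run the hashing in a worker
# thread via defer_to_thread rather than blocking the reactor.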
|
2018-01-10 11:01:28 -07:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
return await defer_to_thread(self.hs.get_reactor(), _do_hash)
|
2015-08-26 08:59:32 -06:00
|
|
|
|
2020-04-15 10:40:18 -06:00
|
|
|
async def validate_hash(
|
|
|
|
self, password: str, stored_hash: Union[bytes, str]
|
|
|
|
) -> bool:
|
2015-08-26 08:59:32 -06:00
|
|
|
"""Validates that self.hash(password) == stored_hash.
|
|
|
|
|
|
|
|
Args:
|
2020-03-12 09:36:27 -06:00
|
|
|
password: Password to hash.
|
|
|
|
stored_hash: Expected hash value.
|
2015-08-26 08:59:32 -06:00
|
|
|
|
|
|
|
Returns:
|
2020-04-15 10:40:18 -06:00
|
|
|
Whether self.hash(password) == stored_hash.
|
2015-08-26 08:59:32 -06:00
|
|
|
"""
|
2019-06-20 03:32:02 -06:00
|
|
|
|
2021-09-20 06:56:23 -06:00
|
|
|
def _do_validate_hash(checked_hash: bytes) -> bool:
|
2018-08-01 08:54:06 -06:00
|
|
|
# Normalise the Unicode in the password
|
|
|
|
pw = unicodedata.normalize("NFKC", password)
|
|
|
|
|
2018-03-05 09:51:09 -07:00
|
|
|
return bcrypt.checkpw(
|
2021-09-23 05:13:34 -06:00
|
|
|
pw.encode("utf8") + self.hs.config.auth.password_pepper.encode("utf8"),
|
2020-10-19 11:32:24 -06:00
|
|
|
checked_hash,
|
2018-03-05 09:51:09 -07:00
|
|
|
)
|
2018-01-10 11:01:28 -07:00
|
|
|
|
|
|
|
if stored_hash:
|
2018-09-06 08:22:23 -06:00
|
|
|
if not isinstance(stored_hash, bytes):
|
|
|
|
stored_hash = stored_hash.encode("ascii")
|
|
|
|
|
2020-10-19 11:32:24 -06:00
|
|
|
return await defer_to_thread(
|
|
|
|
self.hs.get_reactor(), _do_validate_hash, stored_hash
|
|
|
|
)
|
2016-05-11 05:06:02 -06:00
|
|
|
else:
|
2020-04-15 10:40:18 -06:00
|
|
|
return False
|
2016-10-03 03:27:10 -06:00
|
|
|
|
2021-01-12 10:38:03 -07:00
|
|
|
async def start_sso_ui_auth(self, request: SynapseRequest, session_id: str) -> str:
|
2020-04-01 06:48:00 -06:00
|
|
|
"""
|
|
|
|
Get the HTML for the SSO redirect confirmation page.
|
|
|
|
|
|
|
|
Args:
|
2021-01-12 10:38:03 -07:00
|
|
|
request: The incoming HTTP request
|
2020-04-01 06:48:00 -06:00
|
|
|
session_id: The user interactive authentication session ID.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
The HTML to render.
|
|
|
|
"""
|
2020-04-30 11:47:49 -06:00
|
|
|
try:
|
|
|
|
session = await self.store.get_ui_auth_session(session_id)
|
|
|
|
except StoreError:
|
|
|
|
raise SynapseError(400, "Unknown session ID: %s" % (session_id,))
|
2021-01-12 10:38:03 -07:00
|
|
|
|
2021-07-16 11:22:36 -06:00
|
|
|
user_id_to_verify: str = await self.get_session_data(
|
2021-01-12 10:38:03 -07:00
|
|
|
session_id, UIAuthSessionDataConstants.REQUEST_USER_ID
|
2021-07-16 11:22:36 -06:00
|
|
|
)
|
2021-01-12 10:38:03 -07:00
|
|
|
|
|
|
|
idps = await self.hs.get_sso_handler().get_identity_providers_for_user(
|
|
|
|
user_id_to_verify
|
|
|
|
)
|
|
|
|
|
|
|
|
if not idps:
|
|
|
|
# we checked that the user had some remote identities before offering an SSO
|
|
|
|
# flow, so either it's been deleted or the client has requested SSO despite
|
|
|
|
# it not being offered.
|
|
|
|
raise SynapseError(400, "User has no SSO identities")
|
|
|
|
|
|
|
|
# for now, just pick one
|
|
|
|
idp_id, sso_auth_provider = next(iter(idps.items()))
|
|
|
|
if len(idps) > 1:
|
|
|
|
logger.warning(
|
|
|
|
"User %r has previously logged in with multiple SSO IdPs; arbitrarily "
|
|
|
|
"picking %r",
|
|
|
|
user_id_to_verify,
|
|
|
|
idp_id,
|
|
|
|
)
|
|
|
|
|
|
|
|
redirect_url = await sso_auth_provider.handle_redirect_request(
|
|
|
|
request, None, session_id
|
|
|
|
)
|
|
|
|
|
2020-04-01 06:48:00 -06:00
|
|
|
return self._sso_auth_confirm_template.render(
|
2021-02-01 11:36:04 -07:00
|
|
|
description=session.description,
|
|
|
|
redirect_url=redirect_url,
|
|
|
|
idp=sso_auth_provider,
|
2020-04-01 06:48:00 -06:00
|
|
|
)
|
|
|
|
|
2020-04-09 11:28:13 -06:00
|
|
|
async def complete_sso_login(
|
2020-03-03 03:54:44 -07:00
|
|
|
self,
|
|
|
|
registered_user_id: str,
|
2021-03-04 07:44:22 -07:00
|
|
|
auth_provider_id: str,
|
2020-12-08 07:03:38 -07:00
|
|
|
request: Request,
|
2020-03-03 03:54:44 -07:00
|
|
|
client_redirect_url: str,
|
2020-09-30 11:02:43 -06:00
|
|
|
extra_attributes: Optional[JsonDict] = None,
|
2021-02-01 08:50:56 -07:00
|
|
|
new_user: bool = False,
|
2021-12-06 10:43:06 -07:00
|
|
|
auth_provider_session_id: Optional[str] = None,
|
2021-09-20 06:56:23 -06:00
|
|
|
) -> None:
|
2020-03-03 03:54:44 -07:00
|
|
|
"""Having figured out a mxid for this user, complete the HTTP request
|
|
|
|
|
|
|
|
Args:
|
|
|
|
registered_user_id: The registered user ID to complete SSO login for.
|
2021-03-04 07:44:22 -07:00
|
|
|
auth_provider_id: The id of the SSO Identity provider that was used for
|
|
|
|
login. This will be stored in the login token for future tracking in
|
|
|
|
prometheus metrics.
|
2020-03-03 03:54:44 -07:00
|
|
|
request: The request to complete.
|
|
|
|
client_redirect_url: The URL to which to redirect the user at the end of the
|
|
|
|
process.
|
2020-09-30 11:02:43 -06:00
|
|
|
extra_attributes: Extra attributes which will be passed to the client
|
|
|
|
during successful login. Must be JSON serializable.
|
2021-02-01 08:50:56 -07:00
|
|
|
new_user: True if we should use wording appropriate to a user who has just
|
|
|
|
registered.
|
2021-12-06 10:43:06 -07:00
|
|
|
auth_provider_session_id: The session ID from the SSO IdP received during login.
|
2020-03-03 03:54:44 -07:00
|
|
|
"""
|
2023-05-23 08:35:43 -06:00
|
|
|
# If the account has been deactivated, do not proceed with the login.
|
|
|
|
#
|
|
|
|
# This gets checked again when the token is submitted but this lets us
|
|
|
|
# provide an HTML error page to the user (instead of issuing a token and
|
|
|
|
# having it error later).
|
2020-04-09 11:28:13 -06:00
|
|
|
deactivated = await self.store.get_user_deactivated_status(registered_user_id)
|
|
|
|
if deactivated:
|
2020-07-01 07:10:23 -06:00
|
|
|
respond_with_html(request, 403, self._sso_account_deactivated_template)
|
2020-04-09 11:28:13 -06:00
|
|
|
return
|
|
|
|
|
2022-09-20 07:18:07 -06:00
|
|
|
user_profile_data = await self.store.get_profileinfo(
|
2023-06-02 18:24:13 -06:00
|
|
|
UserID.from_string(registered_user_id)
|
2021-02-01 08:50:56 -07:00
|
|
|
)
|
|
|
|
|
2020-09-30 11:02:43 -06:00
|
|
|
# Store any extra attributes which will be passed in the login response.
|
|
|
|
# Note that this is per-user, so it may overwrite a previous value; this
|
|
|
|
# is considered OK since the newest SSO attributes should be most valid.
|
|
|
|
if extra_attributes:
|
|
|
|
self._extra_attributes[registered_user_id] = SsoLoginExtraAttributes(
|
|
|
|
self._clock.time_msec(),
|
|
|
|
extra_attributes,
|
|
|
|
)
|
|
|
|
|
2020-03-03 03:54:44 -07:00
|
|
|
# Create a login token
|
2022-10-26 04:45:41 -06:00
|
|
|
login_token = await self.create_login_token_for_user_id(
|
2021-12-06 10:43:06 -07:00
|
|
|
registered_user_id,
|
|
|
|
auth_provider_id=auth_provider_id,
|
|
|
|
auth_provider_session_id=auth_provider_session_id,
|
2020-03-03 03:54:44 -07:00
|
|
|
)
|
|
|
|
|
|
|
|
# Append the login token to the original redirect URL (i.e. with its query
|
|
|
|
# parameters kept intact) to build the URL to which the template needs to
|
|
|
|
# redirect the users once they have clicked on the confirmation link.
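# e.g. (illustrative) "https://client.example/?state=abc" becomes
# "https://client.example/?state=abc&loginToken=<login token>".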
|
|
|
|
redirect_url = self.add_query_param_to_url(
|
|
|
|
client_redirect_url, "loginToken", login_token
|
|
|
|
)
|
|
|
|
|
2023-12-01 07:31:50 -07:00
|
|
|
# Run post-login module callback handlers
|
|
|
|
await self._account_validity_handler.on_user_login(
|
|
|
|
user_id=registered_user_id,
|
|
|
|
auth_provider_type=LoginType.SSO,
|
|
|
|
auth_provider_id=auth_provider_id,
|
|
|
|
)
|
|
|
|
|
2020-03-03 03:54:44 -07:00
|
|
|
# if the client is whitelisted, we can redirect straight to it
|
|
|
|
if client_redirect_url.startswith(self._whitelisted_sso_clients):
|
|
|
|
request.redirect(redirect_url)
|
|
|
|
finish_request(request)
|
|
|
|
return
|
|
|
|
|
|
|
|
# Otherwise, serve the redirect confirmation page.
|
|
|
|
|
|
|
|
# Remove the query parameters from the redirect URL to get a shorter version of
|
|
|
|
# it. This is only to display a human-readable URL in the template, but not the
|
|
|
|
# URL we redirect users to.
|
2021-02-03 13:31:23 -07:00
|
|
|
url_parts = urllib.parse.urlsplit(client_redirect_url)
|
|
|
|
|
|
|
|
if url_parts.scheme == "https":
|
|
|
|
# for an https uri, just show the netloc (ie, the hostname. Specifically,
|
|
|
|
# the bit between "//" and "/"; this includes any potential
|
|
|
|
# "username:password@" prefix.)
|
|
|
|
display_url = url_parts.netloc
|
|
|
|
else:
|
|
|
|
# for other uris, strip the query-params (including the login token) and
|
|
|
|
# fragment.
|
|
|
|
display_url = urllib.parse.urlunsplit(
|
|
|
|
(url_parts.scheme, url_parts.netloc, url_parts.path, "", "")
|
|
|
|
)
|
2020-03-03 03:54:44 -07:00
|
|
|
|
2020-07-01 07:10:23 -06:00
|
|
|
html = self._sso_redirect_confirm_template.render(
|
2021-02-03 13:31:23 -07:00
|
|
|
display_url=display_url,
|
2020-03-03 03:54:44 -07:00
|
|
|
redirect_url=redirect_url,
|
|
|
|
server_name=self._server_name,
|
2021-02-01 08:50:56 -07:00
|
|
|
new_user=new_user,
|
|
|
|
user_id=registered_user_id,
|
|
|
|
user_profile=user_profile_data,
|
2020-07-01 07:10:23 -06:00
|
|
|
)
|
|
|
|
respond_with_html(request, 200, html)
|
2020-03-03 03:54:44 -07:00
|
|
|
|
2021-06-24 07:33:20 -06:00
|
|
|
async def _sso_login_callback(self, login_result: "LoginResponse") -> None:
|
2020-09-30 11:02:43 -06:00
|
|
|
"""
|
|
|
|
A login callback which might add additional attributes to the login response.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
login_result: The data to be sent to the client. Includes the user
|
|
|
|
ID and access token.
|
|
|
|
"""
|
|
|
|
# Expire attributes before processing. Note that there shouldn't be any
|
|
|
|
# valid logins that still have extra attributes.
|
|
|
|
self._expire_sso_extra_attributes()
|
|
|
|
|
|
|
|
extra_attributes = self._extra_attributes.get(login_result["user_id"])
|
|
|
|
if extra_attributes:
|
2021-06-24 07:33:20 -06:00
|
|
|
login_result_dict = cast(Dict[str, Any], login_result)
|
|
|
|
login_result_dict.update(extra_attributes.extra_attributes)
|
2020-09-30 11:02:43 -06:00
|
|
|
|
|
|
|
def _expire_sso_extra_attributes(self) -> None:
|
|
|
|
"""
|
|
|
|
Iterate through the mapping of user IDs to extra attributes and remove any that are no longer valid.
|
|
|
|
"""
|
|
|
|
# TODO This should match the amount of time the macaroon is valid for.
|
|
|
|
LOGIN_TOKEN_EXPIRATION_TIME = 2 * 60 * 1000
|
|
|
|
expire_before = self._clock.time_msec() - LOGIN_TOKEN_EXPIRATION_TIME
|
|
|
|
to_expire = set()
|
|
|
|
for user_id, data in self._extra_attributes.items():
|
|
|
|
if data.creation_time < expire_before:
|
|
|
|
to_expire.add(user_id)
|
|
|
|
for user_id in to_expire:
|
|
|
|
logger.debug("Expiring extra attributes for user %s", user_id)
|
|
|
|
del self._extra_attributes[user_id]
|
|
|
|
|
2020-03-03 03:54:44 -07:00
|
|
|
@staticmethod
|
2021-09-20 06:56:23 -06:00
|
|
|
def add_query_param_to_url(url: str, param_name: str, param: Any) -> str:
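"""Return `url` with `param_name=param` appended to its query string,
preserving any existing query parameters (including blank values).
"""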
|
2020-03-03 03:54:44 -07:00
|
|
|
url_parts = list(urllib.parse.urlparse(url))
|
2021-01-18 07:52:49 -07:00
|
|
|
query = urllib.parse.parse_qsl(url_parts[4], keep_blank_values=True)
|
|
|
|
query.append((param_name, param))
|
2020-03-03 03:54:44 -07:00
|
|
|
url_parts[4] = urllib.parse.urlencode(query)
|
|
|
|
return urllib.parse.urlunparse(url_parts)
|
|
|
|
|
2016-10-03 03:27:10 -06:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
def load_legacy_password_auth_providers(hs: "HomeServer") -> None:
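"""Wrap each legacy password auth provider listed in the homeserver config
and register it with the module-callback system.
"""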
|
|
|
|
module_api = hs.get_module_api()
|
|
|
|
for module, config in hs.config.authproviders.password_providers:
|
|
|
|
load_single_legacy_password_auth_provider(
|
|
|
|
module=module, config=config, api=module_api
|
|
|
|
)
|
2020-12-02 03:38:50 -07:00
|
|
|
|
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
def load_single_legacy_password_auth_provider(
|
2021-10-25 10:45:19 -06:00
|
|
|
module: Type,
|
|
|
|
config: JsonDict,
|
|
|
|
api: "ModuleApi",
|
2021-10-13 05:21:52 -06:00
|
|
|
) -> None:
|
|
|
|
try:
|
|
|
|
provider = module(config=config, account_handler=api)
|
|
|
|
except Exception as e:
|
|
|
|
logger.error("Error while initializing %r: %s", module, e)
|
|
|
|
raise
|
|
|
|
|
|
|
|
# All methods that the module provides should be async, but this wasn't enforced
|
|
|
|
# in the old module system, so we wrap them if needed
|
|
|
|
def async_wrapper(f: Optional[Callable]) -> Optional[Callable[..., Awaitable]]:
|
|
|
|
# f might be None if the callback isn't implemented by the module. In this
|
|
|
|
# case we don't want to register a callback at all so we return None.
|
|
|
|
if f is None:
|
|
|
|
return None
|
|
|
|
|
|
|
|
# We need to wrap check_password because its old form would return a boolean
|
|
|
|
# but we now want it to behave just like check_auth() and return the matrix id of
|
|
|
|
# the user if authentication succeeded or None otherwise
|
|
|
|
if f.__name__ == "check_password":
|
|
|
|
|
|
|
|
async def wrapped_check_password(
|
|
|
|
username: str, login_type: str, login_dict: JsonDict
|
|
|
|
) -> Optional[Tuple[str, Optional[Callable]]]:
|
|
|
|
# We've already made sure f is not None above, but mypy doesn't do well
|
|
|
|
# across function boundaries so we need to tell it f is definitely not
|
|
|
|
# None.
|
|
|
|
assert f is not None
|
|
|
|
|
|
|
|
matrix_user_id = api.get_qualified_user_id(username)
|
|
|
|
password = login_dict["password"]
|
|
|
|
|
|
|
|
is_valid = await f(matrix_user_id, password)
|
|
|
|
|
|
|
|
if is_valid:
|
|
|
|
return matrix_user_id, None
|
|
|
|
|
|
|
|
return None
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
return wrapped_check_password
|
|
|
|
|
|
|
|
# We need to wrap check_auth as in the old form it could return
|
|
|
|
# just a str, but now it must return Optional[Tuple[str, Optional[Callable]]
|
|
|
|
if f.__name__ == "check_auth":
|
|
|
|
|
|
|
|
async def wrapped_check_auth(
|
|
|
|
username: str, login_type: str, login_dict: JsonDict
|
|
|
|
) -> Optional[Tuple[str, Optional[Callable]]]:
|
|
|
|
# We've already made sure f is not None above, but mypy doesn't do well
|
|
|
|
# across function boundaries so we need to tell it f is definitely not
|
|
|
|
# None.
|
|
|
|
assert f is not None
|
|
|
|
|
|
|
|
result = await f(username, login_type, login_dict)
|
|
|
|
|
|
|
|
if isinstance(result, str):
|
|
|
|
return result, None
|
|
|
|
|
|
|
|
return result
|
|
|
|
|
|
|
|
return wrapped_check_auth
|
|
|
|
|
|
|
|
# We need to wrap check_3pid_auth as in the old form it could return
|
|
|
|
# just a str, but now it must return Optional[Tuple[str, Optional[Callable]]
|
|
|
|
if f.__name__ == "check_3pid_auth":
|
|
|
|
|
|
|
|
async def wrapped_check_3pid_auth(
|
|
|
|
medium: str, address: str, password: str
|
|
|
|
) -> Optional[Tuple[str, Optional[Callable]]]:
|
|
|
|
# We've already made sure f is not None above, but mypy doesn't do well
|
|
|
|
# across function boundaries so we need to tell it f is definitely not
|
|
|
|
# None.
|
|
|
|
assert f is not None
|
|
|
|
|
|
|
|
result = await f(medium, address, password)
|
|
|
|
|
|
|
|
if isinstance(result, str):
|
|
|
|
return result, None
|
|
|
|
|
|
|
|
return result
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
return wrapped_check_3pid_auth
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
def run(*args: Tuple, **kwargs: Dict) -> Awaitable:
|
|
|
|
# mypy doesn't do well across function boundaries so we need to tell it
|
|
|
|
# f is definitely not None.
|
|
|
|
assert f is not None
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
return maybe_awaitable(f(*args, **kwargs))
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
return run
|
|
|
|
|
2021-11-16 05:53:31 -07:00
|
|
|
# If the module has these methods implemented, then we pull them out
|
|
|
|
# and register them as hooks.
|
|
|
|
check_3pid_auth_hook: Optional[CHECK_3PID_AUTH_CALLBACK] = async_wrapper(
|
|
|
|
getattr(provider, "check_3pid_auth", None)
|
|
|
|
)
|
|
|
|
on_logged_out_hook: Optional[ON_LOGGED_OUT_CALLBACK] = async_wrapper(
|
|
|
|
getattr(provider, "on_logged_out", None)
|
|
|
|
)
|
2021-10-13 05:21:52 -06:00
|
|
|
|
|
|
|
supported_login_types = {}
|
|
|
|
# call get_supported_login_types and add that to the dict
|
|
|
|
g = getattr(provider, "get_supported_login_types", None)
|
|
|
|
if g is not None:
|
|
|
|
# Note the old module style also called get_supported_login_types at loading time
|
|
|
|
# and it is synchronous
|
|
|
|
supported_login_types.update(g())
|
|
|
|
|
|
|
|
auth_checkers = {}
|
|
|
|
# Legacy modules have a check_auth method which expects to be called with one of
|
|
|
|
# the keys returned by get_supported_login_types. New style modules register a
|
|
|
|
# dictionary of login_type->check_auth_method mappings
|
|
|
|
check_auth = async_wrapper(getattr(provider, "check_auth", None))
|
|
|
|
if check_auth is not None:
|
|
|
|
for login_type, fields in supported_login_types.items():
|
|
|
|
# need tuple(fields) since fields can be any Iterable type (so may not be hashable)
|
|
|
|
auth_checkers[(login_type, tuple(fields))] = check_auth
|
|
|
|
|
|
|
|
# if it has a "check_password" method then it should handle all auth checks
|
|
|
|
# with login type of LoginType.PASSWORD
|
|
|
|
check_password = async_wrapper(getattr(provider, "check_password", None))
|
|
|
|
if check_password is not None:
|
|
|
|
# need to use a tuple here for ("password",) not a list since lists aren't hashable
|
|
|
|
auth_checkers[(LoginType.PASSWORD, ("password",))] = check_password
|
|
|
|
|
2021-11-16 05:53:31 -07:00
|
|
|
api.register_password_auth_provider_callbacks(
|
|
|
|
check_3pid_auth=check_3pid_auth_hook,
|
|
|
|
on_logged_out=on_logged_out_hook,
|
|
|
|
auth_checkers=auth_checkers,
|
|
|
|
)
|
2021-10-13 05:21:52 -06:00
|
|
|
|
|
|
|
|
|
|
|
CHECK_3PID_AUTH_CALLBACK = Callable[
|
|
|
|
[str, str, str],
|
|
|
|
Awaitable[
|
|
|
|
Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]
|
|
|
|
],
|
|
|
|
]
|
|
|
|
ON_LOGGED_OUT_CALLBACK = Callable[[str, Optional[str], str], Awaitable]
|
|
|
|
CHECK_AUTH_CALLBACK = Callable[
|
|
|
|
[str, str, JsonDict],
|
|
|
|
Awaitable[
|
|
|
|
Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]
|
|
|
|
],
|
|
|
|
]
|
2022-01-26 07:21:13 -07:00
|
|
|
GET_USERNAME_FOR_REGISTRATION_CALLBACK = Callable[
|
|
|
|
[JsonDict, JsonDict],
|
|
|
|
Awaitable[Optional[str]],
|
|
|
|
]
|
2022-02-17 09:54:16 -07:00
|
|
|
GET_DISPLAYNAME_FOR_REGISTRATION_CALLBACK = Callable[
|
|
|
|
[JsonDict, JsonDict],
|
|
|
|
Awaitable[Optional[str]],
|
|
|
|
]
|
2022-02-08 03:20:32 -07:00
|
|
|
IS_3PID_ALLOWED_CALLBACK = Callable[[str, str, bool], Awaitable[bool]]
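# Illustrative sketch only (not part of Synapse): a module-provided auth
# checker matching CHECK_AUTH_CALLBACK. The names `check` and `_verify` are
# hypothetical; a real module would register it via
# register_password_auth_provider_callbacks(
#     auth_checkers={("m.login.password", ("password",)): check},
# )
#
#     async def check(
#         username: str, login_type: str, login_dict: JsonDict
#     ) -> Optional[Tuple[str, Optional[Callable]]]:
#         # assuming `username` is already a fully-qualified mxid
#         if await _verify(username, login_dict["password"]):
#             return username, None  # success: mxid plus no post-login callback
#         return None  # authentication failed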
|
2021-10-13 05:21:52 -06:00
|
|
|
|
|
|
|
|
|
|
|
class PasswordAuthProvider:
|
|
|
|
"""
|
|
|
|
A class that the AuthHandler calls when authenticating users.
|
|
|
|
It allows modules to provide alternative methods for authentication.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self) -> None:
|
|
|
|
# lists of callbacks
|
|
|
|
self.check_3pid_auth_callbacks: List[CHECK_3PID_AUTH_CALLBACK] = []
|
|
|
|
self.on_logged_out_callbacks: List[ON_LOGGED_OUT_CALLBACK] = []
|
2022-01-26 07:21:13 -07:00
|
|
|
self.get_username_for_registration_callbacks: List[
|
|
|
|
GET_USERNAME_FOR_REGISTRATION_CALLBACK
|
|
|
|
] = []
|
2022-02-17 09:54:16 -07:00
|
|
|
self.get_displayname_for_registration_callbacks: List[
|
|
|
|
GET_DISPLAYNAME_FOR_REGISTRATION_CALLBACK
|
|
|
|
] = []
|
2022-02-08 03:20:32 -07:00
|
|
|
self.is_3pid_allowed_callbacks: List[IS_3PID_ALLOWED_CALLBACK] = []
|
2021-10-13 05:21:52 -06:00
|
|
|
|
|
|
|
# Mapping from login type to login parameters
|
2022-12-16 04:53:01 -07:00
|
|
|
self._supported_login_types: Dict[str, Tuple[str, ...]] = {}
|
2021-10-13 05:21:52 -06:00
|
|
|
|
|
|
|
# Mapping from login type to auth checker callbacks
|
|
|
|
self.auth_checker_callbacks: Dict[str, List[CHECK_AUTH_CALLBACK]] = {}
|
|
|
|
|
|
|
|
def register_password_auth_provider_callbacks(
|
|
|
|
self,
|
|
|
|
check_3pid_auth: Optional[CHECK_3PID_AUTH_CALLBACK] = None,
|
|
|
|
on_logged_out: Optional[ON_LOGGED_OUT_CALLBACK] = None,
|
2022-02-08 03:20:32 -07:00
|
|
|
is_3pid_allowed: Optional[IS_3PID_ALLOWED_CALLBACK] = None,
|
2021-11-04 11:10:11 -06:00
|
|
|
auth_checkers: Optional[
|
|
|
|
Dict[Tuple[str, Tuple[str, ...]], CHECK_AUTH_CALLBACK]
|
|
|
|
] = None,
|
2022-01-26 07:21:13 -07:00
|
|
|
get_username_for_registration: Optional[
|
|
|
|
GET_USERNAME_FOR_REGISTRATION_CALLBACK
|
|
|
|
] = None,
|
2022-02-17 09:54:16 -07:00
|
|
|
get_displayname_for_registration: Optional[
|
|
|
|
GET_DISPLAYNAME_FOR_REGISTRATION_CALLBACK
|
|
|
|
] = None,
|
2021-10-13 05:21:52 -06:00
|
|
|
) -> None:
|
|
|
|
# Register check_3pid_auth callback
|
|
|
|
if check_3pid_auth is not None:
|
|
|
|
self.check_3pid_auth_callbacks.append(check_3pid_auth)
|
|
|
|
|
|
|
|
# register on_logged_out callback
|
|
|
|
if on_logged_out is not None:
|
|
|
|
self.on_logged_out_callbacks.append(on_logged_out)
|
|
|
|
|
|
|
|
if auth_checkers is not None:
|
|
|
|
# register a new supported login_type
|
|
|
|
# Iterate through all of the types being registered
|
|
|
|
for (login_type, fields), callback in auth_checkers.items():
|
|
|
|
# Note: fields may be empty here. This would allow a module's auth checker to
|
|
|
|
# be called with just 'login_type' and no password or other secrets
|
|
|
|
|
|
|
|
# Need to check that all the field names are strings, or we may get nasty errors later
|
|
|
|
for f in fields:
|
|
|
|
if not isinstance(f, str):
|
|
|
|
raise RuntimeError(
|
|
|
|
"A module tried to register support for login type: %s with parameters %s"
|
|
|
|
" but all parameter names must be strings"
|
|
|
|
% (login_type, fields)
|
|
|
|
)
|
|
|
|
|
|
|
|
# 2 modules supporting the same login type must expect the same fields
|
|
|
|
# e.g. 1 can't expect "pass" if the other expects "password"
|
|
|
|
# so throw an exception if that happens
|
|
|
|
if login_type not in self._supported_login_types:
|
|
|
|
self._supported_login_types[login_type] = fields
|
|
|
|
else:
|
|
|
|
fields_currently_supported = self._supported_login_types.get(
|
|
|
|
login_type
|
|
|
|
)
|
|
|
|
if fields_currently_supported != fields:
|
|
|
|
raise RuntimeError(
|
|
|
|
"A module tried to register support for login type: %s with parameters %s"
|
|
|
|
" but another module had already registered support for that type with parameters %s"
|
|
|
|
% (login_type, fields, fields_currently_supported)
|
|
|
|
)
|
|
|
|
|
|
|
|
# Add the new method to the list of auth_checker_callbacks for this login type
|
|
|
|
self.auth_checker_callbacks.setdefault(login_type, []).append(callback)
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2022-01-26 07:21:13 -07:00
|
|
|
if get_username_for_registration is not None:
|
|
|
|
self.get_username_for_registration_callbacks.append(
|
|
|
|
get_username_for_registration,
|
|
|
|
)
|
|
|
|
|
2022-02-17 09:54:16 -07:00
|
|
|
if get_displayname_for_registration is not None:
|
|
|
|
self.get_displayname_for_registration_callbacks.append(
|
|
|
|
get_displayname_for_registration,
|
|
|
|
)
|
|
|
|
|
2022-02-08 03:20:32 -07:00
|
|
|
if is_3pid_allowed is not None:
|
|
|
|
self.is_3pid_allowed_callbacks.append(is_3pid_allowed)
|
|
|
|
|
2020-12-02 03:38:50 -07:00
|
|
|
def get_supported_login_types(self) -> Mapping[str, Iterable[str]]:
|
|
|
|
"""Get the login types supported by this password provider
|
|
|
|
|
|
|
|
Returns a map from a login type identifier (such as m.login.password) to an
|
|
|
|
iterable giving the fields which must be provided by the user in the submission
|
|
|
|
to the /login API.
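
e.g. (illustrative) `{"m.login.password": ("password",)}`.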
|
|
|
|
"""
|
2021-10-13 05:21:52 -06:00
|
|
|
|
2020-12-02 03:38:50 -07:00
|
|
|
return self._supported_login_types
|
|
|
|
|
|
|
|
async def check_auth(
|
|
|
|
self, username: str, login_type: str, login_dict: JsonDict
|
2021-10-13 05:21:52 -06:00
|
|
|
) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]:
|
2020-12-02 03:38:50 -07:00
|
|
|
"""Check if the user has presented valid login credentials
|
|
|
|
|
|
|
|
Args:
|
|
|
|
username: user id presented by the client. Either an MXID or an unqualified
|
|
|
|
username.
|
|
|
|
|
|
|
|
login_type: the login type being attempted - one of the types returned by
|
|
|
|
get_supported_login_types()
|
|
|
|
|
|
|
|
login_dict: the dictionary of login secrets passed by the client.
|
|
|
|
|
|
|
|
Returns: (user_id, callback) where `user_id` is the fully-qualified mxid of the
|
|
|
|
user, and `callback` is an optional callback which will be called with the
|
|
|
|
result from the /login call (including access_token, device_id, etc.)
|
|
|
|
"""
|
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# Go through all callbacks for the login type until one returns with a value
|
|
|
|
# other than None (i.e. until a callback returns a success)
|
|
|
|
for callback in self.auth_checker_callbacks[login_type]:
|
|
|
|
try:
|
2022-05-09 05:31:14 -06:00
|
|
|
result = await delay_cancellation(
|
|
|
|
callback(username, login_type, login_dict)
|
|
|
|
)
|
|
|
|
except CancelledError:
|
|
|
|
raise
|
2021-10-13 05:21:52 -06:00
|
|
|
except Exception as e:
|
|
|
|
logger.warning("Failed to run module API callback %s: %s", callback, e)
|
|
|
|
continue
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
if result is not None:
|
|
|
|
# Check that the callback returned a Tuple[str, Optional[Callable]]
|
|
|
|
# "type: ignore[unreachable]" is used after some isinstance checks because mypy thinks
|
|
|
|
# result is always the right type, but as it is 3rd party code it might not be
|
|
|
|
|
|
|
|
if not isinstance(result, tuple) or len(result) != 2:
|
|
|
|
logger.warning(
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
# pull out the two parts of the tuple so we can do type checking
|
|
|
|
str_result, callback_result = result
|
|
|
|
|
|
|
|
# the 1st item in the tuple should be a str
|
|
|
|
if not isinstance(str_result, str):
|
|
|
|
logger.warning( # type: ignore[unreachable]
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# the second should be Optional[Callable]
|
|
|
|
if callback_result is not None:
|
|
|
|
if not callable(callback_result):
|
|
|
|
logger.warning( # type: ignore[unreachable]
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# The result is a (str, Optional[callback]) tuple so return the successful result
|
|
|
|
return result
|
|
|
|
|
|
|
|
# If this point has been reached then none of the callbacks successfully authenticated
|
|
|
|
# the user so return None
|
|
|
|
return None
|
2020-12-02 03:38:50 -07:00
|
|
|
|
|
|
|
async def check_3pid_auth(
|
|
|
|
self, medium: str, address: str, password: str
|
2021-10-13 05:21:52 -06:00
|
|
|
) -> Optional[Tuple[str, Optional[Callable[["LoginResponse"], Awaitable[None]]]]]:
|
2020-12-02 03:38:50 -07:00
|
|
|
# Returns None, meaning authentication failure, or upon success a tuple of
# (user_id, callback_func), where callback_func (if not None) should be run
# after we've finished everything else
|
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
for callback in self.check_3pid_auth_callbacks:
|
|
|
|
try:
|
2022-05-09 05:31:14 -06:00
|
|
|
result = await delay_cancellation(callback(medium, address, password))
|
|
|
|
except CancelledError:
|
|
|
|
raise
|
2021-10-13 05:21:52 -06:00
|
|
|
except Exception as e:
|
|
|
|
logger.warning("Failed to run module API callback %s: %s", callback, e)
|
|
|
|
continue
|
2020-12-02 03:38:50 -07:00
|
|
|
|
2021-10-13 05:21:52 -06:00
|
|
|
if result is not None:
|
|
|
|
# Check that the callback returned a Tuple[str, Optional[Callable]]
|
|
|
|
# "type: ignore[unreachable]" is used after some isinstance checks because mypy thinks
|
|
|
|
# result is always the right type, but as it is 3rd party code it might not be
|
|
|
|
|
|
|
|
if not isinstance(result, tuple) or len(result) != 2:
|
|
|
|
logger.warning(
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# pull out the two parts of the tuple so we can do type checking
|
|
|
|
str_result, callback_result = result
|
|
|
|
|
|
|
|
# the 1st item in the tuple should be a str
|
|
|
|
if not isinstance(str_result, str):
|
|
|
|
logger.warning( # type: ignore[unreachable]
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# the second should be Optional[Callable]
|
|
|
|
if callback_result is not None:
|
|
|
|
if not callable(callback_result):
|
|
|
|
logger.warning( # type: ignore[unreachable]
|
|
|
|
"Wrong type returned by module API callback %s: %s, expected"
|
|
|
|
" Optional[Tuple[str, Optional[Callable]]]",
|
|
|
|
callback,
|
|
|
|
result,
|
|
|
|
)
|
|
|
|
continue
|
|
|
|
|
|
|
|
# The result is a (str, Optional[callback]) tuple so return the successful result
|
|
|
|
return result
|
|
|
|
|
|
|
|
# If this point has been reached then none of the callbacks successfully authenticated
|
|
|
|
# the user so return None
|
|
|
|
return None
|
2020-12-02 03:38:50 -07:00
|
|
|
|
|
|
|
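
    # Illustrative sketch (assumptions flagged inline): a check_3pid_auth callback has
    # the same return shape as the password checkers, but is keyed on a third-party
    # identifier. `lookup_user_in_external_service` is a hypothetical helper, not a
    # Synapse API.
    #
    #     async def check_3pid(
    #         medium: str, address: str, password: str
    #     ) -> Optional[Tuple[str, Optional[Callable]]]:
    #         if medium != "email":
    #             return None
    #         user_id = await lookup_user_in_external_service(address, password)
    #         return (user_id, None) if user_id else None
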
    async def on_logged_out(
        self, user_id: str, device_id: Optional[str], access_token: str
    ) -> None:
        # call all of the on_logged_out callbacks
        for callback in self.on_logged_out_callbacks:
            try:
                await callback(user_id, device_id, access_token)
            except Exception as e:
                logger.warning("Failed to run module API callback %s: %s", callback, e)
                continue
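
    # Illustrative sketch of an on_logged_out callback: its return value is ignored
    # and any exception is only logged, so modules typically use it for best-effort
    # clean-up. `revoke_external_session` is a hypothetical helper.
    #
    #     async def on_logged_out(
    #         user_id: str, device_id: Optional[str], access_token: str
    #     ) -> None:
    #         await revoke_external_session(user_id, device_id)
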
    async def get_username_for_registration(
        self,
        uia_results: JsonDict,
        params: JsonDict,
    ) -> Optional[str]:
        """Defines the username to use when registering the user, using the credentials
        and parameters provided during the UIA flow.

        Stops at the first callback that returns a string.

        Args:
            uia_results: The credentials provided during the UIA flow.
            params: The parameters provided by the registration request.

        Returns:
            The localpart to use when registering this user, or None if no module
            returned a localpart.
        """
        for callback in self.get_username_for_registration_callbacks:
            try:
                res = await delay_cancellation(callback(uia_results, params))

                if isinstance(res, str):
                    return res
                elif res is not None:
                    # mypy complains that this line is unreachable because it assumes the
                    # data returned by the module fits the expected type. We just want
                    # to make sure this is the case.
                    logger.warning(  # type: ignore[unreachable]
                        "Ignoring non-string value returned by"
                        " get_username_for_registration callback %s: %s",
                        callback,
                        res,
                    )
            except CancelledError:
                raise
            except Exception as e:
                logger.error(
                    "Module raised an exception in get_username_for_registration: %s",
                    e,
                )
                raise SynapseError(code=500, msg="Internal Server Error")

        return None
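
    # Illustrative sketch of a get_username_for_registration callback: it derives a
    # localpart from the UIA results and registration parameters, or returns None to
    # defer to the next callback. The "org.example.sso" UIA key is an assumption.
    #
    #     async def pick_username(uia_results: JsonDict, params: JsonDict) -> Optional[str]:
    #         sso = uia_results.get("org.example.sso")
    #         if not sso:
    #             return None
    #         return sso["preferred_username"].lower()
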
    async def get_displayname_for_registration(
        self,
        uia_results: JsonDict,
        params: JsonDict,
    ) -> Optional[str]:
        """Defines the display name to use when registering the user, using the
        credentials and parameters provided during the UIA flow.

        Stops at the first callback that returns a string.

        Args:
            uia_results: The credentials provided during the UIA flow.
            params: The parameters provided by the registration request.

        Returns:
            The display name to use when registering this user, or None if no module
            returned a display name.
        """
        for callback in self.get_displayname_for_registration_callbacks:
            try:
                res = await delay_cancellation(callback(uia_results, params))

                if isinstance(res, str):
                    return res
                elif res is not None:
                    # mypy complains that this line is unreachable because it assumes the
                    # data returned by the module fits the expected type. We just want
                    # to make sure this is the case.
                    logger.warning(  # type: ignore[unreachable]
                        "Ignoring non-string value returned by"
                        " get_displayname_for_registration callback %s: %s",
                        callback,
                        res,
                    )
            except CancelledError:
                raise
            except Exception as e:
                logger.error(
                    "Module raised an exception in get_displayname_for_registration: %s",
                    e,
                )
                raise SynapseError(code=500, msg="Internal Server Error")

        return None
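
    # A get_displayname_for_registration callback mirrors the username callback above,
    # returning the display name string or None. Purely illustrative:
    #
    #     async def pick_displayname(uia_results: JsonDict, params: JsonDict) -> Optional[str]:
    #         username = params.get("username")
    #         return f"{username} (imported)" if username else None
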
    async def is_3pid_allowed(
        self,
        medium: str,
        address: str,
        registration: bool,
    ) -> bool:
        """Check if the user can be allowed to bind a 3PID on this homeserver.

        Args:
            medium: The medium of the 3PID.
            address: The address of the 3PID.
            registration: Whether the 3PID is being bound when registering a new user.

        Returns:
            Whether the 3PID is allowed to be bound on this homeserver
        """
        for callback in self.is_3pid_allowed_callbacks:
            try:
                res = await delay_cancellation(callback(medium, address, registration))

                if res is False:
                    return res
                elif not isinstance(res, bool):
                    # mypy complains that this line is unreachable because it assumes the
                    # data returned by the module fits the expected type. We just want
                    # to make sure this is the case.
                    logger.warning(  # type: ignore[unreachable]
                        "Ignoring non-boolean value returned by"
                        " is_3pid_allowed callback %s: %s",
                        callback,
                        res,
                    )
            except CancelledError:
                raise
            except Exception as e:
                logger.error("Module raised an exception in is_3pid_allowed: %s", e)
                raise SynapseError(code=500, msg="Internal Server Error")

        return True
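
    # Illustrative sketch of an is_3pid_allowed callback: returning False vetoes the
    # bind immediately, while returning True defers to any remaining callbacks (and
    # ultimately to the default of allowing the 3PID). The domain check is an
    # assumption, not Synapse policy.
    #
    #     async def is_3pid_allowed(medium: str, address: str, registration: bool) -> bool:
    #         if medium == "email" and registration:
    #             return address.endswith("@example.com")
    #         return True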