Merge branch 'develop' into auth

commit ce2a7ed6e4

CHANGES.rst (74 lines changed)
@@ -1,3 +1,77 @@
+Changes in synapse v0.10.0-rc1 (2015-08-20)
+===========================================
+
+Also see v0.9.4-rc1 changelog, which has been amalgamated into this release.
+
+General:
+
+* Upgrade to Twisted 15 (PR #173)
+* Add support for serving and fetching encryption keys over federation.
+  (PR #208)
+* Add support for logging in with email address (PR #234)
+* Add support for new ``m.room.canonical_alias`` event. (PR #233)
+* Error if a user tries to register with an email already in use. (PR #211)
+* Add extra and improve existing caches (PR #212, #219, #226, #228)
+* Batch various storage requests (PR #226, #228)
+* Fix bug where we didn't correctly log the entity that triggered the request
+  if the request came in via an application service (PR #230)
+* Fix bug where we needlessly regenerated the full list of rooms an AS is
+  interested in. (PR #232)
+* Add support for ASes to use the v2_alpha registration API (PR #210)
+
+
+Configuration:
+
+* Add ``--generate-keys`` that will generate any missing cert and key files in
+  the configuration files. This is equivalent to running ``--generate-config``
+  on an existing configuration file. (PR #220)
+* ``--generate-config`` no longer requires a ``--server-name`` parameter when
+  used on existing configuration files. (PR #220)
+* Add ``--print-pidfile`` flag that controls the printing of the pid to stdout
+  of the daemonised process. (PR #213)
+
+Media Repository:
+
+* Fix bug where we picked a lower resolution image than requested. (PR #205)
+* Add support for specifying whether the media repository should dynamically
+  thumbnail images or not. (PR #206)
+
+Metrics:
+
+* Add statistics from the reactor to the metrics API. (PR #224, #225)
+
+Demo Homeservers:
+
+* Fix starting the demo homeservers without rate-limiting enabled. (PR #182)
+* Fix enabling registration on demo homeservers (PR #223)
+
+
+Changes in synapse v0.9.4-rc1 (2015-07-21)
+==========================================
+
+General:
+
+* Add basic implementation of receipts. (SPEC-99)
+* Add support for configuration presets in room creation API. (PR #203)
+* Add auth event that limits the visibility of history for new users.
+  (SPEC-134)
+* Add SAML2 login/registration support. (PR #201. Thanks Muthu Subramanian!)
+* Add client side key management APIs for end to end encryption. (PR #198)
+* Change power level semantics so that you cannot kick, ban or change power
+  levels of users that have equal or greater power level than you. (SYN-192)
+* Improve performance by bulk inserting events where possible. (PR #193)
+* Improve performance by bulk verifying signatures where possible. (PR #194)
+
+
+Configuration:
+
+* Add support for including TLS certificate chains.
+
+Media Repository:
+
+* Add Content-Disposition headers to content repository responses. (SYN-150)
+
+
 Changes in synapse v0.9.3 (2015-07-01)
 ======================================

README.rst (87 lines changed)
@@ -7,7 +7,7 @@ Matrix is an ambitious new ecosystem for open federated Instant Messaging and
 VoIP. The basics you need to know to get up and running are:
 
 - Everything in Matrix happens in a room. Rooms are distributed and do not
   exist on any single server. Rooms can be located using convenience aliases
   like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
 
 - Matrix user IDs look like ``@matthew:matrix.org`` (although in the future

@@ -23,7 +23,7 @@ The overall architecture is::
 accessed by the web client at http://matrix.org/beta or via an IRC bridge at
 irc://irc.freenode.net/matrix.
 
 Synapse is currently in rapid development, but as of version 0.5 we believe it
 is sufficiently stable to be run as an internet-facing service for real usage!
 
 About Matrix

@@ -104,7 +104,7 @@ Installing prerequisites on Ubuntu or Debian::
     sudo apt-get install build-essential python2.7-dev libffi-dev \
                          python-pip python-setuptools sqlite3 \
                          libssl-dev python-virtualenv libjpeg-dev
 
 Installing prerequisites on ArchLinux::
 
     sudo pacman -S base-devel python2 python-pip \

@@ -115,7 +115,7 @@ Installing prerequisites on Mac OS X::
     xcode-select --install
     sudo easy_install pip
     sudo pip install virtualenv
 
 To install the synapse homeserver run::
 
     virtualenv -p python2.7 ~/.synapse

@@ -174,13 +174,13 @@ traditionally used for convenience and simplicity.
 
 The advantages of Postgres include:
 
 * significant performance improvements due to the superior threading and
   caching model, smarter query optimiser
 * allowing the DB to be run on separate hardware
 * allowing basic active/backup high-availability with a "hot spare" synapse
   pointing at the same DB master, as well as enabling DB replication in
   synapse itself.
 
 The only disadvantage is that the code is relatively new as of April 2015 and
 may have a few regressions relative to SQLite.
 

@@ -190,8 +190,8 @@ For information on how to install and use PostgreSQL, please see
 Running Synapse
 ===============
 
-To actually run your new homeserver, pick a working directory for Synapse to run
-(e.g. ``~/.synapse``), and::
+To actually run your new homeserver, pick a working directory for Synapse to
+run (e.g. ``~/.synapse``), and::
 
     cd ~/.synapse
     source ./bin/activate

@@ -214,13 +214,13 @@ defaults to python 3, but synapse currently assumes python 2.7 by default:
 pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1)::
 
     sudo pip2.7 install --upgrade pip
 
 You also may need to explicitly specify python 2.7 again during the install
 request::
 
     pip2.7 install --process-dependency-links \
         https://github.com/matrix-org/synapse/tarball/master
 
 If you encounter an error with lib bcrypt causing a Wrong ELF Class:
 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
 compile it under the right architecture. (This should not be needed if

@@ -228,7 +228,7 @@ installing under virtualenv)::
 
     sudo pip2.7 uninstall py-bcrypt
     sudo pip2.7 install py-bcrypt
 
 During setup of Synapse you need to call python2.7 directly again::
 
     cd ~/.synapse

@@ -236,25 +236,27 @@ During setup of Synapse you need to call python2.7 directly again::
         --server-name machine.my.domain.name \
         --config-path homeserver.yaml \
         --generate-config
 
 ...substituting your host and domain name as appropriate.
 
 Windows Install
 ---------------
 Synapse can be installed on Cygwin. It requires the following Cygwin packages:
 
 - gcc
 - git
 - libffi-devel
 - openssl (and openssl-devel, python-openssl)
 - python
 - python-setuptools
 
 The content repository requires additional packages and will be unable to process
 uploads without them:
+
 - libjpeg8
 - libjpeg8-devel
 - zlib
+
 If you choose to install Synapse without these packages, you will need to reinstall
 ``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
 pillow --user``

@@ -276,8 +278,8 @@ Troubleshooting
 Troubleshooting Installation
 ----------------------------
 
 Synapse requires pip 1.7 or later, so if your OS provides too old a version and
 you get errors about ``error: no such option: --process-dependency-links`` you
 may need to manually upgrade it::
 
     sudo pip install --upgrade pip

@@ -288,9 +290,9 @@ created. To reset the installation::
 
     rm -rf /tmp/pip_install_matrix
 
 pip seems to leak *lots* of memory during installation. For instance, a Linux
 host with 512MB of RAM may run out of memory whilst installing Twisted. If this
 happens, you will have to individually install the dependencies which are
 failing, e.g.::
 
     pip install twisted

@@ -301,8 +303,8 @@ will need to export CFLAGS=-Qunused-arguments.
 Troubleshooting Running
 -----------------------
 
 If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
 to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
 encryption and digital signatures.
 Unfortunately PyNACL currently has a few issues
 (https://github.com/pyca/pynacl/issues/53) and

@@ -313,7 +315,7 @@ fix try re-installing from PyPI or directly from
 
     # Install from PyPI
     pip install --user --upgrade --force pynacl
 
     # Install from github
     pip install --user https://github.com/pyca/pynacl/tarball/master
 

@@ -324,7 +326,7 @@ If running `$ synctl start` fails with 'returned non-zero exit status 1',
 you will need to explicitly call Python2.7 - either running as::
 
     python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml
 
 ...or by editing synctl with the correct python executable.
 
 Synapse Development

@@ -362,14 +364,11 @@ This should end with a 'PASSED' result::
 Upgrading an existing Synapse
 =============================
 
-IMPORTANT: Before upgrading an existing synapse to a new version, please
-refer to UPGRADE.rst for any additional instructions.
-
-Otherwise, simply re-install the new codebase over the current one - e.g.
-by ``pip install --process-dependency-links
-https://github.com/matrix-org/synapse/tarball/master``
-if using pip, or by ``git pull`` if running off a git working copy.
+The instructions for upgrading synapse are in `UPGRADE.rst`_.
+Please check these instructions as upgrading may require extra steps for some
+versions of synapse.
 
+.. _UPGRADE.rst: UPGRADE.rst
 
 Setting up Federation
 =====================

@@ -431,7 +430,7 @@ private federation (``localhost:8080``, ``localhost:8081`` and
 http://localhost:8080. Simply run::
 
     demo/start.sh
 
 This is mainly useful just for development purposes.
 
 Running The Demo Web Client

@@ -494,7 +493,7 @@ time.
 Where's the spec?!
 ==================
 
 The source of the matrix spec lives at https://github.com/matrix-org/matrix-doc.
 A recent HTML snapshot of this lives at http://matrix.org/docs/spec
 
 
UPGRADE.rst (33 lines changed)
@@ -1,3 +1,36 @@
+Upgrading Synapse
+=================
+
+Before upgrading, check if any special steps are required to upgrade from
+what you currently have installed to the current version of synapse. The extra
+instructions that may be required are listed later in this document.
+
+If synapse was installed in a virtualenv then activate that virtualenv before
+upgrading. If synapse is installed in a virtualenv in ``~/.synapse/`` then run:
+
+.. code:: bash
+
+    source ~/.synapse/bin/activate
+
+If synapse was installed using pip then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+
+If synapse was installed using git then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    # Pull the latest version of the master branch.
+    git pull
+    # Update the versions of synapse's python dependencies.
+    python synapse/python_dependencies.py | xargs -n1 pip install
+
+
+
 Upgrading to v0.9.0
 ===================
 
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.9.3"
+__version__ = "0.10.0-rc1"
@@ -76,6 +76,8 @@ class EventTypes(object):
     Feedback = "m.room.message.feedback"
 
     RoomHistoryVisibility = "m.room.history_visibility"
+    CanonicalAlias = "m.room.canonical_alias"
+    RoomAvatar = "m.room.avatar"
 
     # These are used for validation
     Message = "m.room.message"
@@ -40,6 +40,7 @@ class Codes(object):
     TOO_LARGE = "M_TOO_LARGE"
     EXCLUSIVE = "M_EXCLUSIVE"
     THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
+    THREEPID_IN_USE = "THREEPID_IN_USE"
 
 
 class CodeMessageException(RuntimeError):
@@ -14,6 +14,39 @@
 # limitations under the License.
 
 from ._base import Config
+from collections import namedtuple
+
+
+ThumbnailRequirement = namedtuple(
+    "ThumbnailRequirement", ["width", "height", "method", "media_type"]
+)
+
+
+def parse_thumbnail_requirements(thumbnail_sizes):
+    """ Takes a list of dictionaries with "width", "height", and "method" keys
+    and creates a map from image media types to the thumbnail size, thumbnailing
+    method, and thumbnail media type to precalculate
+
+    Args:
+        thumbnail_sizes(list): List of dicts with "width", "height", and
+            "method" keys
+    Returns:
+        Dictionary mapping from media type string to list of
+        ThumbnailRequirement tuples.
+    """
+    requirements = {}
+    for size in thumbnail_sizes:
+        width = size["width"]
+        height = size["height"]
+        method = size["method"]
+        jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
+        png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
+        requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
+        requirements.setdefault("image/gif", []).append(png_thumbnail)
+        requirements.setdefault("image/png", []).append(png_thumbnail)
+    return {
+        media_type: tuple(thumbnails)
+        for media_type, thumbnails in requirements.items()
+    }
+
+
 class ContentRepositoryConfig(Config):

@@ -22,6 +55,10 @@ class ContentRepositoryConfig(Config):
         self.max_image_pixels = self.parse_size(config["max_image_pixels"])
         self.media_store_path = self.ensure_directory(config["media_store_path"])
         self.uploads_path = self.ensure_directory(config["uploads_path"])
+        self.dynamic_thumbnails = config["dynamic_thumbnails"]
+        self.thumbnail_requirements = parse_thumbnail_requirements(
+            config["thumbnail_sizes"]
+        )
 
     def default_config(self, config_dir_path, server_name):
         media_store = self.default_path("media_store")

@@ -38,4 +75,26 @@ class ContentRepositoryConfig(Config):
 
         # Maximum number of pixels that will be thumbnailed
         max_image_pixels: "32M"
+
+        # Whether to generate new thumbnails on the fly to precisely match
+        # the resolution requested by the client. If true then whenever
+        # a new resolution is requested by the client the server will
+        # generate a new thumbnail. If false the server will pick a thumbnail
+        # from a precalculated list.
+        dynamic_thumbnails: false
+
+        # List of thumbnails to precalculate when an image is uploaded.
+        thumbnail_sizes:
+        - width: 32
+          height: 32
+          method: crop
+        - width: 96
+          height: 96
+          method: crop
+        - width: 320
+          height: 240
+          method: scale
+        - width: 640
+          height: 480
+          method: scale
         """ % locals()
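
For reference, a minimal sketch (not part of the commit; it mirrors the ``parse_thumbnail_requirements`` function above) showing what the parsed requirements look like for the default ``thumbnail_sizes``::

    # Sketch only: demonstrates the mapping produced for the default config.
    from collections import namedtuple

    ThumbnailRequirement = namedtuple(
        "ThumbnailRequirement", ["width", "height", "method", "media_type"]
    )

    def parse_thumbnail_requirements(thumbnail_sizes):
        requirements = {}
        for size in thumbnail_sizes:
            w, h, m = size["width"], size["height"], size["method"]
            requirements.setdefault("image/jpeg", []).append(
                ThumbnailRequirement(w, h, m, "image/jpeg"))
            requirements.setdefault("image/gif", []).append(
                ThumbnailRequirement(w, h, m, "image/png"))
            requirements.setdefault("image/png", []).append(
                ThumbnailRequirement(w, h, m, "image/png"))
        return {t: tuple(reqs) for t, reqs in requirements.items()}

    defaults = [
        {"width": 32, "height": 32, "method": "crop"},
        {"width": 96, "height": 96, "method": "crop"},
        {"width": 320, "height": 240, "method": "scale"},
        {"width": 640, "height": 480, "method": "scale"},
    ]

    reqs = parse_thumbnail_requirements(defaults)
    # GIF uploads get PNG thumbnails; JPEG stays JPEG.
    print(reqs["image/gif"][0])     # ThumbnailRequirement(32, 32, 'crop', 'image/png')
    print(len(reqs["image/jpeg"]))  # 4 precalculated sizes
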
@@ -18,7 +18,7 @@ from twisted.internet import defer
 from synapse.api.errors import LimitExceededError, SynapseError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
-from synapse.types import UserID
+from synapse.types import UserID, RoomAlias
 
 from synapse.util.logcontext import PreserveLoggingContext
 

@@ -130,6 +130,22 @@ class BaseHandler(object):
                     returned_invite.signatures
                 )
 
+        if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is actually valid (at this time at least)
+            room_alias_str = event.content.get("alias", None)
+            if room_alias_str:
+                room_alias = RoomAlias.from_string(room_alias_str)
+                directory_handler = self.hs.get_handlers().directory_handler
+                mapping = yield directory_handler.get_association(room_alias)
+
+                if mapping["room_id"] != event.room_id:
+                    raise SynapseError(
+                        400,
+                        "Room alias %s does not point to the room" % (
+                            room_alias_str,
+                        )
+                    )
+
         destinations = set(extra_destinations)
         for k, s in context.current_state.items():
             try:
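
For context, a hypothetical example (the alias value is invented, not taken from the diff) of the state event content that the new check above validates::

    # Illustrative only: the shape of a canonical-alias event guarded by the
    # new check in BaseHandler.
    event_type = "m.room.canonical_alias"        # EventTypes.CanonicalAlias
    content = {"alias": "#my-room:example.com"}  # looked up via the directory handler

    # If the directory mapping for "#my-room:example.com" resolves to a
    # different room_id than the event's room, the handler rejects the event
    # with a 400 SynapseError ("Room alias ... does not point to the room").
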
@@ -162,7 +162,7 @@ class AuthHandler(BaseHandler):
         if not user_id.startswith('@'):
             user_id = UserID.create(user_id, self.hs.hostname).to_string()
 
-        self._check_password(user_id, password)
+        yield self._check_password(user_id, password)
         defer.returnValue(user_id)
 
     @defer.inlineCallbacks
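
The added ``yield`` matters because ``_check_password`` returns a Deferred; without it the result is dropped and a failed check cannot abort the login. A minimal sketch (assumed names, not synapse code) of the difference inside an ``@defer.inlineCallbacks`` generator::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def check(ok):
        # Stand-in for a password check that errbacks on failure.
        if not ok:
            raise ValueError("bad password")
        yield defer.succeed(None)

    @defer.inlineCallbacks
    def login_without_yield(ok):
        check(ok)                    # Deferred discarded; the failure is lost
        yield defer.succeed(None)
        defer.returnValue("token")   # still "succeeds" even when ok is False

    @defer.inlineCallbacks
    def login_with_yield(ok):
        yield check(ok)              # failure propagates and aborts the login
        defer.returnValue("token")
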
@@ -70,7 +70,15 @@ class EventStreamHandler(BaseHandler):
         self._streams_per_user[auth_user] += 1
 
         rm_handler = self.hs.get_handlers().room_member_handler
-        room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
+
+        app_service = yield self.store.get_app_service_by_user_id(
+            auth_user.to_string()
+        )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+            room_ids = set(r.room_id for r in rooms)
+        else:
+            room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
 
         if timeout:
             # If they've set a timeout set a minimum limit.
@@ -117,3 +117,28 @@ class IdentityHandler(BaseHandler):
         except CodeMessageException as e:
             data = json.loads(e.msg)
         defer.returnValue(data)
+
+    @defer.inlineCallbacks
+    def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs):
+        yield run_on_reactor()
+        http_client = SimpleHttpClient(self.hs)
+
+        params = {
+            'email': email,
+            'client_secret': client_secret,
+            'send_attempt': send_attempt,
+        }
+        params.update(kwargs)
+
+        try:
+            data = yield http_client.post_urlencoded_get_json(
+                "https://%s%s" % (
+                    id_server,
+                    "/_matrix/identity/api/v1/validate/email/requestToken"
+                ),
+                params
+            )
+            defer.returnValue(data)
+        except CodeMessageException as e:
+            logger.info("Proxied requestToken failed: %r", e)
+            raise e
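
A hedged usage sketch (identity server and values invented) of the new ``requestEmailToken`` helper, which simply proxies the parameters to the identity server's ``/_matrix/identity/api/v1/validate/email/requestToken`` endpoint::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def request_validation_token(identity_handler):
        # Values are made up for illustration.
        creds = yield identity_handler.requestEmailToken(
            id_server="matrix.org",
            email="someone@example.com",
            client_secret="d0nttellanyone",
            send_attempt=1,
        )
        defer.returnValue(creds)
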
@@ -247,9 +247,11 @@ class RoomCreationHandler(BaseHandler):
             },
             "users_default": 0,
             "events": {
-                EventTypes.Name: 100,
+                EventTypes.Name: 50,
                 EventTypes.PowerLevels: 100,
                 EventTypes.RoomHistoryVisibility: 100,
+                EventTypes.CanonicalAlias: 50,
+                EventTypes.RoomAvatar: 50,
             },
             "events_default": 0,
             "state_default": 50,

@@ -557,15 +559,9 @@ class RoomMemberHandler(BaseHandler):
         """Returns a list of roomids that the user has any of the given
         membership states in."""
 
-        app_service = yield self.store.get_app_service_by_user_id(
-            user.to_string()
-        )
-        if app_service:
-            rooms = yield self.store.get_app_service_rooms(app_service)
-        else:
-            rooms = yield self.store.get_rooms_for_user(
-                user.to_string(),
-            )
+        rooms = yield self.store.get_rooms_for_user(
+            user.to_string(),
+        )
 
         # For some reason the list of events contains duplicates
         # TODO(paul): work out why because I really don't think it should
@@ -96,9 +96,18 @@ class SyncHandler(BaseHandler):
             return self.current_sync_for_user(sync_config, since_token)
 
         rm_handler = self.hs.get_handlers().room_member_handler
-        room_ids = yield rm_handler.get_joined_rooms_for_user(
-            sync_config.user
+
+        app_service = yield self.store.get_app_service_by_user_id(
+            sync_config.user.to_string()
         )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+            room_ids = set(r.room_id for r in rooms)
+        else:
+            room_ids = yield rm_handler.get_joined_rooms_for_user(
+                sync_config.user
+            )
 
         result = yield self.notifier.wait_for_events(
             sync_config.user, room_ids,
             sync_config.filter, timeout, current_sync_callback

@@ -229,7 +238,16 @@ class SyncHandler(BaseHandler):
         logger.debug("Typing %r", typing_by_room)
 
         rm_handler = self.hs.get_handlers().room_member_handler
-        room_ids = yield rm_handler.get_joined_rooms_for_user(sync_config.user)
+        app_service = yield self.store.get_app_service_by_user_id(
+            sync_config.user.to_string()
+        )
+        if app_service:
+            rooms = yield self.store.get_app_service_rooms(app_service)
+            room_ids = set(r.room_id for r in rooms)
+        else:
+            room_ids = yield rm_handler.get_joined_rooms_for_user(
+                sync_config.user
+            )
 
         # TODO (mjark): Does public mean "published"?
         published_rooms = yield self.store.get_rooms(is_public=True)
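
The same application-service branch now appears in both the event-stream and sync handlers: an AS user sees the rooms its application service is interested in, everyone else sees the rooms they have joined. A possible shared helper (a sketch, not part of this commit) capturing that pattern::

    from twisted.internet import defer

    @defer.inlineCallbacks
    def get_rooms_for_stream(store, rm_handler, user):
        # Pick the room set the caller should stream/sync over.
        app_service = yield store.get_app_service_by_user_id(user.to_string())
        if app_service:
            rooms = yield store.get_app_service_rooms(app_service)
            room_ids = set(r.room_id for r in rooms)
        else:
            room_ids = yield rm_handler.get_joined_rooms_for_user(user)
        defer.returnValue(room_ids)
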
@@ -294,6 +294,12 @@ class Pusher(object):
             if not single_event:
                 self.last_token = chunk['end']
                 logger.debug("Event stream timeout for pushkey %s", self.pushkey)
+                yield self.store.update_pusher_last_token(
+                    self.app_id,
+                    self.pushkey,
+                    self.user_name,
+                    self.last_token
+                )
                 return
 
             if not self.alive:

@@ -345,7 +351,7 @@ class Pusher(object):
             if processed:
                 self.backoff_delay = Pusher.INITIAL_BACKOFF
                 self.last_token = chunk['end']
-                self.store.update_pusher_last_token_and_success(
+                yield self.store.update_pusher_last_token_and_success(
                     self.app_id,
                     self.pushkey,
                     self.user_name,

@@ -354,7 +360,7 @@ class Pusher(object):
                 )
                 if self.failing_since:
                     self.failing_since = None
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -362,7 +368,7 @@ class Pusher(object):
             else:
                 if not self.failing_since:
                     self.failing_since = self.clock.time_msec()
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -380,7 +386,7 @@ class Pusher(object):
                         self.user_name, self.pushkey)
                     self.backoff_delay = Pusher.INITIAL_BACKOFF
                     self.last_token = chunk['end']
-                    self.store.update_pusher_last_token(
+                    yield self.store.update_pusher_last_token(
                         self.app_id,
                         self.pushkey,
                         self.user_name,

@@ -388,7 +394,7 @@ class Pusher(object):
                     )
 
                     self.failing_since = None
-                    self.store.update_pusher_failing_since(
+                    yield self.store.update_pusher_failing_since(
                         self.app_id,
                         self.pushkey,
                         self.user_name,
@@ -74,16 +74,23 @@ class LoginRestServlet(ClientV1RestServlet):
 
     @defer.inlineCallbacks
     def do_password_login(self, login_submission):
-        if not login_submission["user"].startswith('@'):
-            login_submission["user"] = UserID.create(
-                login_submission["user"], self.hs.hostname).to_string()
+        if 'medium' in login_submission and 'address' in login_submission:
+            user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
+                login_submission['medium'], login_submission['address']
+            )
+        else:
+            user_id = login_submission['user']
+
+        if not user_id.startswith('@'):
+            user_id = UserID.create(
+                user_id, self.hs.hostname).to_string()
 
         token = yield self.handlers.auth_handler.login_with_password(
-            user_id=login_submission["user"],
+            user_id=user_id,
             password=login_submission["password"])
 
         result = {
-            "user_id": login_submission["user"],  # may have changed
+            "user_id": user_id,  # may have changed
             "access_token": token,
             "home_server": self.hs.hostname,
         }
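
For illustration, the two login submission shapes (values made up) that the updated ``do_password_login`` accepts: the original user/user-ID form, and the new third-party identifier form, which is resolved to a user ID via ``get_user_id_by_threepid``::

    # Illustrative request bodies only; values are invented.
    login_by_user_id = {
        "user": "@alice:example.com",
        "password": "s3cret",
    }

    login_by_email = {
        "medium": "email",
        "address": "alice@example.com",
        "password": "s3cret",
    }
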
@@ -79,7 +79,7 @@ class PasswordRestServlet(RestServlet):
         new_password = params['new_password']
 
         yield self.auth_handler.set_password(
-            user_id, new_password, None
+            user_id, new_password
         )
 
         defer.returnValue((200, {}))
@@ -54,6 +54,11 @@ class RegisterRestServlet(RestServlet):
     @defer.inlineCallbacks
     def on_POST(self, request):
         yield run_on_reactor()
 
+        if '/register/email/requestToken' in request.path:
+            ret = yield self.onEmailTokenRequest(request)
+            defer.returnValue(ret)
+
         body = parse_json_dict_from_request(request)
 
         # we do basic sanity checks here because the auth layer will store these

@@ -208,6 +213,29 @@ class RegisterRestServlet(RestServlet):
             "home_server": self.hs.hostname,
         }
 
+    @defer.inlineCallbacks
+    def onEmailTokenRequest(self, request):
+        body = parse_json_dict_from_request(request)
+
+        required = ['id_server', 'client_secret', 'email', 'send_attempt']
+        absent = []
+        for k in required:
+            if k not in body:
+                absent.append(k)
+
+        if len(absent) > 0:
+            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)
+
+        existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
+            'email', body['email']
+        )
+
+        if existingUid is not None:
+            raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)
+
+        ret = yield self.identity_handler.requestEmailToken(**body)
+        defer.returnValue((200, ret))
+
+
 def register_servlets(hs, http_server):
     RegisterRestServlet(hs).register(http_server)
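
An illustrative request body (values made up) for the new ``/register/email/requestToken`` path handled by ``onEmailTokenRequest``; all four keys are required, and an already-registered email is rejected with ``THREEPID_IN_USE``::

    # Illustrative only; values are invented.
    request_token_body = {
        "id_server": "matrix.org",
        "client_secret": "d0nttellanyone",
        "email": "someone@example.com",
        "send_attempt": 1,
    }
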
@@ -69,6 +69,8 @@ class BaseMediaResource(Resource):
         self.filepaths = filepaths
         self.version_string = hs.version_string
         self.downloads = {}
+        self.dynamic_thumbnails = hs.config.dynamic_thumbnails
+        self.thumbnail_requirements = hs.config.thumbnail_requirements
 
     def _respond_404(self, request):
         respond_with_json(

@@ -208,22 +210,74 @@ class BaseMediaResource(Resource):
             self._respond_404(request)
 
     def _get_thumbnail_requirements(self, media_type):
-        if media_type == "image/jpeg":
-            return (
-                (32, 32, "crop", "image/jpeg"),
-                (96, 96, "crop", "image/jpeg"),
-                (320, 240, "scale", "image/jpeg"),
-                (640, 480, "scale", "image/jpeg"),
-            )
-        elif (media_type == "image/png") or (media_type == "image/gif"):
-            return (
-                (32, 32, "crop", "image/png"),
-                (96, 96, "crop", "image/png"),
-                (320, 240, "scale", "image/png"),
-                (640, 480, "scale", "image/png"),
-            )
-        else:
-            return ()
+        return self.thumbnail_requirements.get(media_type, ())
+
+    def _generate_thumbnail(self, input_path, t_path, t_width, t_height,
+                            t_method, t_type):
+        thumbnailer = Thumbnailer(input_path)
+        m_width = thumbnailer.width
+        m_height = thumbnailer.height
+
+        if m_width * m_height >= self.max_image_pixels:
+            logger.info(
+                "Image too large to thumbnail %r x %r > %r",
+                m_width, m_height, self.max_image_pixels
+            )
+            return
+
+        if t_method == "crop":
+            t_len = thumbnailer.crop(t_path, t_width, t_height, t_type)
+        elif t_method == "scale":
+            t_len = thumbnailer.scale(t_path, t_width, t_height, t_type)
+        else:
+            t_len = None
+
+        return t_len
+
+    @defer.inlineCallbacks
+    def _generate_local_exact_thumbnail(self, media_id, t_width, t_height,
+                                        t_method, t_type):
+        input_path = self.filepaths.local_media_filepath(media_id)
+
+        t_path = self.filepaths.local_media_thumbnail(
+            media_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_local_thumbnail(
+                media_id, t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)
+
+    @defer.inlineCallbacks
+    def _generate_remote_exact_thumbnail(self, server_name, file_id, media_id,
+                                         t_width, t_height, t_method, t_type):
+        input_path = self.filepaths.remote_media_filepath(server_name, file_id)
+
+        t_path = self.filepaths.remote_media_thumbnail(
+            server_name, file_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_remote_media_thumbnail(
+                server_name, media_id, file_id,
+                t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)
+
     @defer.inlineCallbacks
     def _generate_local_thumbnails(self, media_id, media_info):
@@ -43,14 +43,25 @@ class ThumbnailResource(BaseMediaResource):
         m_type = parse_string(request, "type", "image/png")
 
         if server_name == self.server_name:
-            yield self._respond_local_thumbnail(
-                request, media_id, width, height, method, m_type
-            )
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_local_thumbnail(
+                    request, media_id, width, height, method, m_type
+                )
+            else:
+                yield self._respond_local_thumbnail(
+                    request, media_id, width, height, method, m_type
+                )
         else:
-            yield self._respond_remote_thumbnail(
-                request, server_name, media_id,
-                width, height, method, m_type
-            )
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_remote_thumbnail(
+                    request, server_name, media_id,
+                    width, height, method, m_type
+                )
+            else:
+                yield self._respond_remote_thumbnail(
+                    request, server_name, media_id,
+                    width, height, method, m_type
+                )
 
     @defer.inlineCallbacks
     def _respond_local_thumbnail(self, request, media_id, width, height,

@@ -82,6 +93,87 @@ class ThumbnailResource(BaseMediaResource):
             request, media_info, width, height, method, m_type,
         )
 
+    @defer.inlineCallbacks
+    def _select_or_generate_local_thumbnail(self, request, media_id, desired_width,
+                                            desired_height, desired_method,
+                                            desired_type):
+        media_info = yield self.store.get_local_media(media_id)
+
+        if not media_info:
+            self._respond_404(request)
+            return
+
+        thumbnail_infos = yield self.store.get_local_media_thumbnails(media_id)
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.local_media_thumbnail(
+                    media_id, desired_width, desired_height, desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_local_exact_thumbnail(
+            media_id, desired_width, desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )
+
+    @defer.inlineCallbacks
+    def _select_or_generate_remote_thumbnail(self, request, server_name, media_id,
+                                             desired_width, desired_height,
+                                             desired_method, desired_type):
+        media_info = yield self._get_remote_media(server_name, media_id)
+
+        thumbnail_infos = yield self.store.get_remote_media_thumbnails(
+            server_name, media_id,
+        )
+
+        file_id = media_info["filesystem_id"]
+
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.remote_media_thumbnail(
+                    server_name, file_id, desired_width, desired_height,
+                    desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_remote_exact_thumbnail(
+            server_name, file_id, media_id, desired_width,
+            desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )
+
     @defer.inlineCallbacks
     def _respond_remote_thumbnail(self, request, server_name, media_id, width,
                                   height, method, m_type):
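
The exact-match test is repeated in both ``_select_or_generate_*`` methods above; as a sketch (not part of the commit), it boils down to reusing a stored thumbnail only when width, height, method and media type all match what the client asked for::

    # Sketch of the predicate used before falling back to generating a new
    # thumbnail of the exact requested size.
    def thumbnail_matches(info, width, height, method, media_type):
        return (
            info["thumbnail_width"] == width
            and info["thumbnail_height"] == height
            and info["thumbnail_method"] == method
            and info["thumbnail_type"] == media_type
        )
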
@@ -0,0 +1,19 @@
+CREATE TABLE IF NOT EXISTS user_threepids2 (
+    user_id TEXT NOT NULL,
+    medium TEXT NOT NULL,
+    address TEXT NOT NULL,
+    validated_at BIGINT NOT NULL,
+    added_at BIGINT NOT NULL,
+    CONSTRAINT medium_address UNIQUE (medium, address)
+);
+
+INSERT INTO user_threepids2
+    SELECT * FROM user_threepids WHERE added_at IN (
+        SELECT max(added_at) FROM user_threepids GROUP BY medium, address
+    )
+;
+
+DROP TABLE user_threepids;
+ALTER TABLE user_threepids2 RENAME TO user_threepids;
+
+CREATE INDEX user_threepids_user_id ON user_threepids(user_id);
@@ -13,6 +13,7 @@ class RegisterRestServletTestCase(unittest.TestCase):
         self.request_data = ""
         self.request = Mock(
             content=Mock(read=Mock(side_effect=lambda: self.request_data)),
+            path='/_matrix/api/v2_alpha/register'
         )
         self.request.args = {}
 

@@ -131,4 +132,4 @@ class RegisterRestServletTestCase(unittest.TestCase):
         })
         self.registration_handler.register = Mock(return_value=("@user:id", "t"))
         d = self.servlet.on_POST(self.request)
         return self.assertFailure(d, SynapseError)