Skip to content

Commit 48283b6

Browse files
authored
Merge branch 'develop' into config-allow-underscore-prefixed-registration
2 parents f94b083 + 3c18823 commit 48283b6

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

41 files changed

+729
-102
lines changed

CHANGES.md

+14
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,17 @@
1+
# Synapse 1.127.1 (2025-03-26)
2+
3+
## Security
4+
- Fix [CVE-2025-30355](https://www.cve.org/CVERecord?id=CVE-2025-30355) / [GHSA-v56r-hwv5-mxg6](https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6). **High severity vulnerability affecting federation. The vulnerability has been exploited in the wild.**
5+
6+
7+
8+
# Synapse 1.127.0 (2025-03-25)
9+
10+
No significant changes since 1.127.0rc1.
11+
12+
13+
14+
115
# Synapse 1.127.0rc1 (2025-03-18)
216

317
### Features

Cargo.lock

+2-2
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

changelog.d/18225.doc

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Fix how to obtain an access token and change naming from Riot to Element

changelog.d/18271.docker

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with ":{arch}".

changelog.d/18272.docker

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Always specify base image debian versions with a build argument.

changelog.d/18273.docker

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Allow passing arguments to start_for_complement.sh (to be sent to configure_workers_and_start.py).

changelog.d/18276.doc

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Correct a small typo in the SSO mapping providers documentation.

changelog.d/18277.feature

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash.

changelog.d/18283.doc

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add docs for how to clear out the Poetry wheel cache.

changelog.d/18284.misc

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add DB delta to remove the old state group deletion job.

debian/changelog

+13-1
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,21 @@
1-
matrix-synapse-py3 (1.127.0~rc1+nmu1) UNRELEASED; urgency=medium
1+
matrix-synapse-py3 (1.128.0~rc1+nmu1) UNRELEASED; urgency=medium
22

33
* Update Poetry to 2.1.1.
44

55
-- Synapse Packaging team <[email protected]> Wed, 19 Mar 2025 17:38:49 +0000
66

7+
matrix-synapse-py3 (1.127.1) stable; urgency=medium
8+
9+
* New Synapse release 1.127.1.
10+
11+
-- Synapse Packaging team <[email protected]> Wed, 26 Mar 2025 21:07:31 +0000
12+
13+
matrix-synapse-py3 (1.127.0) stable; urgency=medium
14+
15+
* New Synapse release 1.127.0.
16+
17+
-- Synapse Packaging team <[email protected]> Tue, 25 Mar 2025 12:04:15 +0000
18+
719
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
820

921
* New Synapse release 1.127.0rc1.

docker/Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ RUN \
148148
for arch in arm64 amd64; do \
149149
mkdir -p /tmp/debs-${arch} && \
150150
cd /tmp/debs-${arch} && \
151-
apt-get download $(sed "s/$/:${arch}/" /tmp/pkg-list); \
151+
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
152152
done
153153

154154
# Extract the debs for each architecture

docker/Dockerfile-workers

+3-2
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,13 @@
22

33
ARG SYNAPSE_VERSION=latest
44
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
5+
ARG DEBIAN_VERSION=bookworm
56

67
# first of all, we create a base image with an nginx which we can copy into the
78
# target image. For repeated rebuilds, this is much faster than apt installing
89
# each time.
910

10-
FROM docker.io/library/debian:bookworm-slim AS deps_base
11+
FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base
1112
RUN \
1213
--mount=type=cache,target=/var/cache/apt,sharing=locked \
1314
--mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -21,7 +22,7 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base
2122
# which makes it much easier to copy (but we need to make sure we use an image
2223
# based on the same debian version as the synapse image, to make sure we get
2324
# the expected version of libc.
24-
FROM docker.io/library/redis:7-bookworm AS redis_base
25+
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
2526

2627
# now build the final image, based on the the regular Synapse docker image
2728
FROM $FROM

docker/complement/Dockerfile

+5-2
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,9 @@
99
ARG SYNAPSE_VERSION=latest
1010
# This is an intermediate image, to be built locally (not pulled from a registry).
1111
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
12+
ARG DEBIAN_VERSION=bookworm
13+
14+
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
1215

1316
FROM $FROM
1417
# First of all, we copy postgres server from the official postgres image,
@@ -20,8 +23,8 @@ FROM $FROM
2023
# the same debian version as Synapse's docker image (so the versions of the
2124
# shared libraries match).
2225
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
23-
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
24-
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
26+
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
27+
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
2528
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
2629
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
2730
ENV PGDATA=/var/lib/postgresql/data

docker/complement/conf/start_for_complement.sh

+3-3
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,12 @@
55
set -e
66

77
echo "Complement Synapse launcher"
8-
echo " Args: $@"
8+
echo " Args: $*"
99
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
1010

1111
function log {
1212
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
13-
echo "$d $@"
13+
echo "$d $*"
1414
}
1515

1616
# Set the server name of the homeserver
@@ -131,4 +131,4 @@ export SYNAPSE_TLS_KEY=/conf/server.tls.key
131131

132132
# Run the script that writes the necessary config files and starts supervisord, which in turn
133133
# starts everything else
134-
exec /configure_workers_and_start.py
134+
exec /configure_workers_and_start.py "$@"

docs/development/dependencies.md

+22
Original file line numberDiff line numberDiff line change
@@ -150,6 +150,28 @@ $ poetry shell
150150
$ poetry install --extras all
151151
```
152152

153+
If you want to go even further and remove the Poetry caches:
154+
155+
```shell
156+
# Find your Poetry cache directory
157+
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
158+
$ poetry config cache-dir
159+
160+
# Remove packages from all cached repositories
161+
$ poetry cache clear --all .
162+
163+
# Go completely nuclear and clear out everything Poetry cache related
164+
# including the wheel artifacts which is not covered by the above command
165+
# (see https://github.com/python-poetry/poetry/issues/10304)
166+
#
167+
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
168+
# the `icu` library on your system, you will need to rebuild the PyICU Python package
169+
# in order to incorporate the correct dynamically linked library locations otherwise you
170+
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
171+
$ rm -rf $(poetry config cache-dir)
172+
```
173+
174+
153175
## ...run a command in the `poetry` virtualenv?
154176

155177
Use `poetry run cmd args` when you need the python virtualenv context.

docs/sso_mapping_providers.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
1010
to turn that into a displayname when creating a Matrix user for this individual.
1111
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
1212
variations. As each Synapse configuration may want something different, this is
13-
where SAML mapping providers come into play.
13+
where SSO mapping providers come into play.
1414

1515
SSO mapping providers are currently supported for OpenID and SAML SSO
1616
configurations. Please see the details below for how to implement your own.

docs/usage/administration/admin_faq.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ Using the following curl command:
160160
```console
161161
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
162162
```
163-
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
163+
`<access-token>` - can be obtained in Element by opening All settings, clicking Help & About; at the bottom is:
164164
Access Token:\<click to reveal\>
165165

166166
`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
9797

9898
[tool.poetry]
9999
name = "matrix-synapse"
100-
version = "1.127.0rc1"
100+
version = "1.127.1"
101101
description = "Homeserver for the Matrix decentralised comms protocol"
102102
authors = ["Matrix.org Team and Contributors <[email protected]>"]
103103
license = "AGPL-3.0-or-later"

synapse/api/constants.py

+7-2
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,13 @@
2929
# the max size of a (canonical-json-encoded) event
3030
MAX_PDU_SIZE = 65536
3131

32-
# the "depth" field on events is limited to 2**63 - 1
33-
MAX_DEPTH = 2**63 - 1
32+
# Max/min size of ints in canonical JSON
33+
CANONICALJSON_MAX_INT = (2**53) - 1
34+
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
35+
36+
# the "depth" field on events is limited to the same as what
37+
# canonicaljson accepts
38+
MAX_DEPTH = CANONICALJSON_MAX_INT
3439

3540
# the maximum length for a room alias is 255 characters
3641
MAX_ALIAS_LENGTH = 255

synapse/events/utils.py

+2-3
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,8 @@
4040
from canonicaljson import encode_canonical_json
4141

4242
from synapse.api.constants import (
43+
CANONICALJSON_MAX_INT,
44+
CANONICALJSON_MIN_INT,
4345
MAX_PDU_SIZE,
4446
EventContentFields,
4547
EventTypes,
@@ -61,9 +63,6 @@
6163
# Find escaped characters, e.g. those with a \ in front of them.
6264
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")
6365

64-
CANONICALJSON_MAX_INT = (2**53) - 1
65-
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
66-
6766

6867
# Module API callback that allows adding fields to the unsigned section of
6968
# events that are sent to clients.

synapse/events/validator.py

+1-3
Original file line numberDiff line numberDiff line change
@@ -86,9 +86,7 @@ def validate_new(self, event: EventBase, config: HomeServerConfig) -> None:
8686

8787
# Depending on the room version, ensure the data is spec compliant JSON.
8888
if event.room_version.strict_canonicaljson:
89-
# Note that only the client controlled portion of the event is
90-
# checked, since we trust the portions of the event we created.
91-
validate_canonicaljson(event.content)
89+
validate_canonicaljson(event.get_pdu_json())
9290

9391
if event.type == EventTypes.Aliases:
9492
if "aliases" in event.content:

synapse/federation/federation_base.py

+11-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
#
2121
#
2222
import logging
23-
from typing import TYPE_CHECKING, Awaitable, Callable, Optional
23+
from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence
2424

2525
from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership
2626
from synapse.api.errors import Codes, SynapseError
@@ -29,6 +29,7 @@
2929
from synapse.crypto.keyring import Keyring
3030
from synapse.events import EventBase, make_event_from_dict
3131
from synapse.events.utils import prune_event, validate_canonicaljson
32+
from synapse.federation.units import filter_pdus_for_valid_depth
3233
from synapse.http.servlet import assert_params_in_dict
3334
from synapse.logging.opentracing import log_kv, trace
3435
from synapse.types import JsonDict, get_domain_from_id
@@ -267,6 +268,15 @@ def _is_invite_via_3pid(event: EventBase) -> bool:
267268
)
268269

269270

271+
def parse_events_from_pdu_json(
272+
pdus_json: Sequence[JsonDict], room_version: RoomVersion
273+
) -> List[EventBase]:
274+
return [
275+
event_from_pdu_json(pdu_json, room_version)
276+
for pdu_json in filter_pdus_for_valid_depth(pdus_json)
277+
]
278+
279+
270280
def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase:
271281
"""Construct an EventBase from an event json received over federation
272282

synapse/federation/federation_client.py

+5-8
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,7 @@
6868
FederationBase,
6969
InvalidEventSignatureError,
7070
event_from_pdu_json,
71+
parse_events_from_pdu_json,
7172
)
7273
from synapse.federation.transport.client import SendJoinResponse
7374
from synapse.http.client import is_unknown_endpoint
@@ -349,7 +350,7 @@ async def backfill(
349350

350351
room_version = await self.store.get_room_version(room_id)
351352

352-
pdus = [event_from_pdu_json(p, room_version) for p in transaction_data_pdus]
353+
pdus = parse_events_from_pdu_json(transaction_data_pdus, room_version)
353354

354355
# Check signatures and hash of pdus, removing any from the list that fail checks
355356
pdus[:] = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
@@ -393,9 +394,7 @@ async def get_pdu_from_destination_raw(
393394
transaction_data,
394395
)
395396

396-
pdu_list: List[EventBase] = [
397-
event_from_pdu_json(p, room_version) for p in transaction_data["pdus"]
398-
]
397+
pdu_list = parse_events_from_pdu_json(transaction_data["pdus"], room_version)
399398

400399
if pdu_list and pdu_list[0]:
401400
pdu = pdu_list[0]
@@ -809,7 +808,7 @@ async def get_event_auth(
809808

810809
room_version = await self.store.get_room_version(room_id)
811810

812-
auth_chain = [event_from_pdu_json(p, room_version) for p in res["auth_chain"]]
811+
auth_chain = parse_events_from_pdu_json(res["auth_chain"], room_version)
813812

814813
signed_auth = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
815814
destination, auth_chain, room_version=room_version
@@ -1529,9 +1528,7 @@ async def get_missing_events(
15291528

15301529
room_version = await self.store.get_room_version(room_id)
15311530

1532-
events = [
1533-
event_from_pdu_json(e, room_version) for e in content.get("events", [])
1534-
]
1531+
events = parse_events_from_pdu_json(content.get("events", []), room_version)
15351532

15361533
signed_events = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
15371534
destination, events, room_version=room_version

synapse/federation/federation_server.py

+13-8
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@
6666
event_from_pdu_json,
6767
)
6868
from synapse.federation.persistence import TransactionActions
69-
from synapse.federation.units import Edu, Transaction
69+
from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus
7070
from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME
7171
from synapse.http.servlet import assert_params_in_dict
7272
from synapse.logging.context import (
@@ -469,7 +469,12 @@ async def _handle_pdus_in_txn(
469469
logger.info("Ignoring PDU: %s", e)
470470
continue
471471

472-
event = event_from_pdu_json(p, room_version)
472+
try:
473+
event = event_from_pdu_json(p, room_version)
474+
except SynapseError as e:
475+
logger.info("Ignoring PDU for failing to deserialize: %s", e)
476+
continue
477+
473478
pdus_by_room.setdefault(room_id, []).append(event)
474479

475480
if event.origin_server_ts > newest_pdu_ts:
@@ -636,8 +641,8 @@ async def _on_context_state_request_compute(
636641
)
637642

638643
return {
639-
"pdus": [pdu.get_pdu_json() for pdu in pdus],
640-
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
644+
"pdus": serialize_and_filter_pdus(pdus),
645+
"auth_chain": serialize_and_filter_pdus(auth_chain),
641646
}
642647

643648
async def on_pdu_request(
@@ -761,8 +766,8 @@ async def on_send_join_request(
761766
event_json = event.get_pdu_json(time_now)
762767
resp = {
763768
"event": event_json,
764-
"state": [p.get_pdu_json(time_now) for p in state_events],
765-
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events],
769+
"state": serialize_and_filter_pdus(state_events, time_now),
770+
"auth_chain": serialize_and_filter_pdus(auth_chain_events, time_now),
766771
"members_omitted": caller_supports_partial_state,
767772
}
768773

@@ -1005,7 +1010,7 @@ async def on_event_auth(
10051010

10061011
time_now = self._clock.time_msec()
10071012
auth_pdus = await self.handler.on_event_auth(event_id)
1008-
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
1013+
res = {"auth_chain": serialize_and_filter_pdus(auth_pdus, time_now)}
10091014
return 200, res
10101015

10111016
async def on_query_client_keys(
@@ -1090,7 +1095,7 @@ async def on_get_missing_events(
10901095

10911096
time_now = self._clock.time_msec()
10921097

1093-
return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
1098+
return {"events": serialize_and_filter_pdus(missing_events, time_now)}
10941099

10951100
async def on_openid_userinfo(self, token: str) -> Optional[str]:
10961101
ts_now_ms = self._clock.time_msec()

0 commit comments

Comments
 (0)