
Commit 1e9ec2a

Merge commit '3c01724b3' into anoa/dinsic_release_1_21_x

* commit '3c01724b3':
  Fix the return type of send_nonmember_events. (#8112)
  Remove : from allowed client_secret chars (#8101)
  Rename changelog from bugfix to misc.
  Iteratively encode JSON responses to avoid blocking the reactor. (#8013)
  Return the previous stream token if a non-member event is a duplicate. (#8093)

2 parents: bbc77d3 + 3c01724

File tree: 13 files changed (142 additions, 33 deletions)

CHANGES.md

Lines changed: 14 additions & 0 deletions

```diff
@@ -1,3 +1,17 @@
+For the next release
+====================
+
+Removal warning
+---------------
+
+Some older clients used a
+[disallowed character](https://matrix.org/docs/spec/client_server/r0.6.1#post-matrix-client-r0-register-email-requesttoken)
+(`:`) in the `client_secret` parameter of various endpoints. The incorrect
+behaviour was allowed for backwards compatibility, but is now being removed
+from Synapse as most users have updated their client. Further context can be
+found at [\#6766](https://github.com/matrix-org/synapse/issues/6766).
+
 Synapse 1.19.0 (2020-08-17)
 ===========================
```
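For reference, the client-server spec restricts `client_secret` to the characters `0-9`, `a-z`, `A-Z`, `.`, `=`, `_` and `-`, with a non-empty value of at most 255 characters. The snippet below is a minimal illustrative check in that spirit; the regex and helper name are assumptions for this sketch, not Synapse's actual implementation.

```python
import re

# Characters the client-server spec allows in client_secret; the legacy ":" is
# no longer tolerated.
CLIENT_SECRET_RE = re.compile(r"^[0-9a-zA-Z.=_-]+$")


def assert_valid_client_secret(client_secret: str) -> None:
    """Raise if client_secret is empty, too long, or uses disallowed characters."""
    if not (0 < len(client_secret) <= 255) or not CLIENT_SECRET_RE.match(client_secret):
        raise ValueError("Invalid client_secret parameter")
```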

changelog.d/8013.feature

Lines changed: 1 addition & 0 deletions

+ Iteratively encode JSON to avoid blocking the reactor.

changelog.d/8093.misc

Lines changed: 1 addition & 0 deletions

+ Return the previous stream token if a non-member event is a duplicate.

changelog.d/8101.bugfix

Lines changed: 1 addition & 0 deletions

+ Synapse now correctly enforces the valid characters in the `client_secret` parameter used in various endpoints.

changelog.d/8112.misc

Lines changed: 1 addition & 0 deletions

+ Return the previous stream token if a non-member event is a duplicate.

synapse/handlers/message.py

Lines changed: 15 additions & 10 deletions

```diff
@@ -670,42 +670,47 @@ async def send_nonmember_event(
         assert self.hs.is_mine(user), "User must be our own: %s" % (user,)
 
         if event.is_state():
-            prev_state = await self.deduplicate_state_event(event, context)
-            if prev_state is not None:
+            prev_event = await self.deduplicate_state_event(event, context)
+            if prev_event is not None:
                 logger.info(
                     "Not bothering to persist state event %s duplicated by %s",
                     event.event_id,
-                    prev_state.event_id,
+                    prev_event.event_id,
                 )
-                return prev_state
+                return await self.store.get_stream_id_for_event(prev_event.event_id)
 
         return await self.handle_new_client_event(
             requester=requester, event=event, context=context, ratelimit=ratelimit
         )
 
     async def deduplicate_state_event(
         self, event: EventBase, context: EventContext
-    ) -> None:
+    ) -> Optional[EventBase]:
         """
         Checks whether event is in the latest resolved state in context.
 
-        If so, returns the version of the event in context.
-        Otherwise, returns None.
+        Args:
+            event: The event to check for duplication.
+            context: The event context.
+
+        Returns:
+            The previous version of the event is returned, if it is found in the
+            event context. Otherwise, None is returned.
         """
         prev_state_ids = await context.get_prev_state_ids()
         prev_event_id = prev_state_ids.get((event.type, event.state_key))
         if not prev_event_id:
-            return
+            return None
         prev_event = await self.store.get_event(prev_event_id, allow_none=True)
         if not prev_event:
-            return
+            return None
 
         if prev_event and event.user_id == prev_event.user_id:
             prev_content = encode_canonical_json(prev_event.content)
             next_content = encode_canonical_json(event.content)
             if prev_content == next_content:
                 return prev_event
-        return
+        return None
 
     async def create_and_send_nonmember_event(
         self,
```
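The duplicate check in `deduplicate_state_event` compares canonical-JSON encodings rather than the content dicts themselves. A small standalone sketch of why that works, using the real `canonicaljson` package with made-up event content:

```python
from canonicaljson import encode_canonical_json

# Two contents that differ only in key order.
prev_content = {"membership": "join", "displayname": "Alice"}
next_content = {"displayname": "Alice", "membership": "join"}

# Canonical JSON sorts keys and strips whitespace, so semantically equal
# content always encodes to identical bytes.
assert encode_canonical_json(prev_content) == encode_canonical_json(next_content)
print(encode_canonical_json(prev_content))
# b'{"displayname":"Alice","membership":"join"}'
```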

synapse/http/server.py

Lines changed: 89 additions & 8 deletions

```diff
@@ -22,12 +22,13 @@
 import urllib
 from http import HTTPStatus
 from io import BytesIO
-from typing import Any, Callable, Dict, Tuple, Union
+from typing import Any, Callable, Dict, Iterator, List, Tuple, Union
 
 import jinja2
-from canonicaljson import encode_canonical_json, encode_pretty_printed_json
+from canonicaljson import iterencode_canonical_json, iterencode_pretty_printed_json
+from zope.interface import implementer
 
-from twisted.internet import defer
+from twisted.internet import defer, interfaces
 from twisted.python import failure
 from twisted.web import resource
 from twisted.web.server import NOT_DONE_YET, Request
@@ -499,6 +500,78 @@ class RootOptionsRedirectResource(OptionsResource, RootRedirect):
     pass
 
 
+@implementer(interfaces.IPullProducer)
+class _ByteProducer:
+    """
+    Iteratively write bytes to the request.
+    """
+
+    # The minimum number of bytes for each chunk. Note that the last chunk will
+    # usually be smaller than this.
+    min_chunk_size = 1024
+
+    def __init__(
+        self, request: Request, iterator: Iterator[bytes],
+    ):
+        self._request = request
+        self._iterator = iterator
+
+    def start(self) -> None:
+        self._request.registerProducer(self, False)
+
+    def _send_data(self, data: List[bytes]) -> None:
+        """
+        Send a list of strings as a response to the request.
+        """
+        if not data:
+            return
+        self._request.write(b"".join(data))
+
+    def resumeProducing(self) -> None:
+        # We've stopped producing in the meantime (note that this might be
+        # re-entrant after calling write).
+        if not self._request:
+            return
+
+        # Get the next chunk and write it to the request.
+        #
+        # The output of the JSON encoder is coalesced until min_chunk_size is
+        # reached. (This is because JSON encoders produce a very small output
+        # per iteration.)
+        #
+        # Note that buffer stores a list of bytes (instead of appending to
+        # bytes) to hopefully avoid many allocations.
+        buffer = []
+        buffered_bytes = 0
+        while buffered_bytes < self.min_chunk_size:
+            try:
+                data = next(self._iterator)
+                buffer.append(data)
+                buffered_bytes += len(data)
+            except StopIteration:
+                # The entire JSON object has been serialized, write any
+                # remaining data, finalize the producer and the request, and
+                # clean-up any references.
+                self._send_data(buffer)
+                self._request.unregisterProducer()
+                self._request.finish()
+                self.stopProducing()
+                return
+
+        self._send_data(buffer)
+
+    def stopProducing(self) -> None:
+        self._request = None
+
+
+def _encode_json_bytes(json_object: Any) -> Iterator[bytes]:
+    """
+    Encode an object into JSON. Returns an iterator of bytes.
+    """
+    for chunk in json_encoder.iterencode(json_object):
+        yield chunk.encode("utf-8")
+
+
 def respond_with_json(
     request: Request,
     code: int,
@@ -533,15 +606,23 @@ def respond_with_json(
         return None
 
     if pretty_print:
-        json_bytes = encode_pretty_printed_json(json_object) + b"\n"
+        encoder = iterencode_pretty_printed_json
     else:
         if canonical_json or synapse.events.USE_FROZEN_DICTS:
-            # canonicaljson already encodes to bytes
-            json_bytes = encode_canonical_json(json_object)
+            encoder = iterencode_canonical_json
         else:
-            json_bytes = json_encoder.encode(json_object).encode("utf-8")
+            encoder = _encode_json_bytes
+
+    request.setResponseCode(code)
+    request.setHeader(b"Content-Type", b"application/json")
+    request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
 
-    return respond_with_json_bytes(request, code, json_bytes, send_cors=send_cors)
+    if send_cors:
+        set_cors_headers(request)
+
+    producer = _ByteProducer(request, encoder(json_object))
+    producer.start()
+    return NOT_DONE_YET
 
 
 def respond_with_json_bytes(
```
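The coalescing loop in `resumeProducing` exists because `iterencode`-style encoders yield many tiny fragments. Below is a standalone sketch of the same batching idea using only the standard-library `json` module (no Twisted); `coalesce` and `MIN_CHUNK_SIZE` are illustrative names for this sketch, not part of Synapse.

```python
import json
from typing import Iterable, Iterator

MIN_CHUNK_SIZE = 1024  # mirrors _ByteProducer.min_chunk_size


def coalesce(chunks: Iterable[bytes], min_size: int = MIN_CHUNK_SIZE) -> Iterator[bytes]:
    """Batch many small encoder fragments into writes of at least min_size bytes."""
    buffer, buffered = [], 0
    for chunk in chunks:
        buffer.append(chunk)
        buffered += len(chunk)
        if buffered >= min_size:
            yield b"".join(buffer)
            buffer, buffered = [], 0
    if buffer:
        yield b"".join(buffer)  # the final write is usually smaller than min_size


obj = {"rooms": ["!room%d:example.org" % i for i in range(1000)]}
fragments = [f.encode("utf-8") for f in json.JSONEncoder().iterencode(obj)]
writes = list(coalesce(fragments))
print(len(fragments), "encoder fragments coalesced into", len(writes), "writes")
```

In the real producer, Twisted calls `resumeProducing` between writes, so the reactor gets a chance to service other work while a large response is being serialized.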

synapse/python_dependencies.py

Lines changed: 1 addition & 1 deletion

```diff
@@ -43,7 +43,7 @@
     "jsonschema>=2.5.1",
     "frozendict>=1",
     "unpaddedbase64>=1.1.0",
-    "canonicaljson>=1.2.0",
+    "canonicaljson>=1.3.0",
     # we use the type definitions added in signedjson 1.1.
     "signedjson>=1.1.0",
     "pynacl>=1.2.1",
```

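The minimum version bump lines up with the new `iterencode_canonical_json` and `iterencode_pretty_printed_json` imports in synapse/http/server.py above, which appear to require canonicaljson 1.3.0. A tiny usage sketch; the exact chunk boundaries are an implementation detail, only the joined output is guaranteed:

```python
from canonicaljson import iterencode_canonical_json

# Streams the canonical (sorted-key, compact) encoding as bytes chunks rather
# than building the whole byte string up front.
chunks = list(iterencode_canonical_json({"b": 2, "a": 1}))
assert b"".join(chunks) == b'{"a":1,"b":2}'
```
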
synapse/rest/key/v2/remote_key_resource.py

Lines changed: 3 additions & 3 deletions

```diff
@@ -15,12 +15,12 @@
 import logging
 from typing import Dict, Set
 
-from canonicaljson import encode_canonical_json, json
+from canonicaljson import json
 from signedjson.sign import sign_json
 
 from synapse.api.errors import Codes, SynapseError
 from synapse.crypto.keyring import ServerKeyFetcher
-from synapse.http.server import DirectServeJsonResource, respond_with_json_bytes
+from synapse.http.server import DirectServeJsonResource, respond_with_json
 from synapse.http.servlet import parse_integer, parse_json_object_from_request
 
 logger = logging.getLogger(__name__)
@@ -223,4 +223,4 @@ async def query_keys(self, request, query, query_remote_on_cache_miss=False):
 
         results = {"server_keys": signed_keys}
 
-        respond_with_json_bytes(request, 200, encode_canonical_json(results))
+        respond_with_json(request, 200, results, canonical_json=True)
```

synapse/storage/databases/main/stream.py

Lines changed: 15 additions & 4 deletions

```diff
@@ -582,6 +582,19 @@ async def get_room_events_max_id(self, room_id: Optional[str] = None) -> str:
         )
         return "t%d-%d" % (topo, token)
 
+    async def get_stream_id_for_event(self, event_id: str) -> int:
+        """The stream ID for an event
+        Args:
+            event_id: The id of the event to look up a stream token for.
+        Raises:
+            StoreError if the event wasn't in the database.
+        Returns:
+            A stream ID.
+        """
+        return await self.db_pool.simple_select_one_onecol(
+            table="events", keyvalues={"event_id": event_id}, retcol="stream_ordering"
+        )
+
     async def get_stream_token_for_event(self, event_id: str) -> str:
         """The stream token for an event
         Args:
@@ -591,10 +604,8 @@ async def get_stream_token_for_event(self, event_id: str) -> str:
         Returns:
             A "s%d" stream token.
         """
-        row = await self.db_pool.simple_select_one_onecol(
-            table="events", keyvalues={"event_id": event_id}, retcol="stream_ordering"
-        )
-        return "s%d" % (row,)
+        stream_id = await self.get_stream_id_for_event(event_id)
+        return "s%d" % (stream_id,)
 
     async def get_topological_token_for_event(self, event_id: str) -> str:
         """The stream token for an event
```

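For context on the storage change: the refactor simply splits "look up the stream ordering" from "format it as a token". A rough illustration with made-up values, not Synapse's actual database layer:

```python
# Roughly the query behind get_stream_id_for_event (via simple_select_one_onecol):
#   SELECT stream_ordering FROM events WHERE event_id = ?
stream_id = 123456                    # hypothetical stream_ordering for an event
stream_token = "s%d" % (stream_id,)   # what get_stream_token_for_event returns
print(stream_token)                   # -> s123456
```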