Skip to content
This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit d7fd0de

Browse files
committed
Merge commit '112266eaf' into anoa/dinsic_release_1_21_x
* commit '112266eaf': Add StreamStore to mypy (#8232) Re-implement unread counts (again) (#8059)
2 parents 56722c6 + 112266e commit d7fd0de

File tree

16 files changed

+523
-142
lines changed

16 files changed

+523
-142
lines changed

changelog.d/8059.feature

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add unread messages count to sync responses, as specified in [MSC2654](https://github.com/matrix-org/matrix-doc/pull/2654).

changelog.d/8232.misc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Add type hints to `StreamStore`.

mypy.ini

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ files =
4343
synapse/server_notices,
4444
synapse/spam_checker_api,
4545
synapse/state,
46+
synapse/storage/databases/main/stream.py,
4647
synapse/storage/databases/main/ui_auth.py,
4748
synapse/storage/database.py,
4849
synapse/storage/engines,

synapse/events/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
import abc
1919
import os
2020
from distutils.util import strtobool
21-
from typing import Dict, Optional, Type
21+
from typing import Dict, Optional, Tuple, Type
2222

2323
from unpaddedbase64 import encode_base64
2424

@@ -120,7 +120,7 @@ def __init__(self, internal_metadata_dict: JsonDict):
120120
# be here
121121
before = DictProperty("before") # type: str
122122
after = DictProperty("after") # type: str
123-
order = DictProperty("order") # type: int
123+
order = DictProperty("order") # type: Tuple[int, int]
124124

125125
def get_dict(self) -> JsonDict:
126126
return dict(self._dict)

synapse/handlers/sync.py

Lines changed: 18 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,12 @@ def __nonzero__(self) -> bool:
9595
__bool__ = __nonzero__ # python3
9696

9797

98-
@attr.s(slots=True, frozen=True)
98+
# We can't freeze this class, because we need to update it after it's instantiated to
99+
# update its unread count. This is because we calculate the unread count for a room only
100+
# if there are updates for it, which we check after the instance has been created.
101+
# This should not be a big deal because we update the notification counts afterwards as
102+
# well anyway.
103+
@attr.s(slots=True)
99104
class JoinedSyncResult:
100105
room_id = attr.ib(type=str)
101106
timeline = attr.ib(type=TimelineBatch)
@@ -104,6 +109,7 @@ class JoinedSyncResult:
104109
account_data = attr.ib(type=List[JsonDict])
105110
unread_notifications = attr.ib(type=JsonDict)
106111
summary = attr.ib(type=Optional[JsonDict])
112+
unread_count = attr.ib(type=int)
107113

108114
def __nonzero__(self) -> bool:
109115
"""Make the result appear empty if there are no updates. This is used
@@ -931,23 +937,18 @@ async def compute_state_delta(
931937

932938
async def unread_notifs_for_room_id(
933939
self, room_id: str, sync_config: SyncConfig
934-
) -> Optional[Dict[str, str]]:
940+
) -> Dict[str, int]:
935941
with Measure(self.clock, "unread_notifs_for_room_id"):
936942
last_unread_event_id = await self.store.get_last_receipt_event_id_for_user(
937943
user_id=sync_config.user.to_string(),
938944
room_id=room_id,
939945
receipt_type="m.read",
940946
)
941947

942-
if last_unread_event_id:
943-
notifs = await self.store.get_unread_event_push_actions_by_room_for_user(
944-
room_id, sync_config.user.to_string(), last_unread_event_id
945-
)
946-
return notifs
947-
948-
# There is no new information in this period, so your notification
949-
# count is whatever it was last time.
950-
return None
948+
notifs = await self.store.get_unread_event_push_actions_by_room_for_user(
949+
room_id, sync_config.user.to_string(), last_unread_event_id
950+
)
951+
return notifs
951952

952953
async def generate_sync_result(
953954
self,
@@ -1886,7 +1887,7 @@ async def _generate_room_entry(
18861887
)
18871888

18881889
if room_builder.rtype == "joined":
1889-
unread_notifications = {} # type: Dict[str, str]
1890+
unread_notifications = {} # type: Dict[str, int]
18901891
room_sync = JoinedSyncResult(
18911892
room_id=room_id,
18921893
timeline=batch,
@@ -1895,14 +1896,16 @@ async def _generate_room_entry(
18951896
account_data=account_data_events,
18961897
unread_notifications=unread_notifications,
18971898
summary=summary,
1899+
unread_count=0,
18981900
)
18991901

19001902
if room_sync or always_include:
19011903
notifs = await self.unread_notifs_for_room_id(room_id, sync_config)
19021904

1903-
if notifs is not None:
1904-
unread_notifications["notification_count"] = notifs["notify_count"]
1905-
unread_notifications["highlight_count"] = notifs["highlight_count"]
1905+
unread_notifications["notification_count"] = notifs["notify_count"]
1906+
unread_notifications["highlight_count"] = notifs["highlight_count"]
1907+
1908+
room_sync.unread_count = notifs["unread_count"]
19061909

19071910
sync_result_builder.joined.append(room_sync)
19081911

synapse/push/bulk_push_rule_evaluator.py

Lines changed: 64 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,10 @@
1919

2020
from prometheus_client import Counter
2121

22-
from synapse.api.constants import EventTypes, Membership
22+
from synapse.api.constants import EventTypes, Membership, RelationTypes
2323
from synapse.event_auth import get_user_power_level
24+
from synapse.events import EventBase
25+
from synapse.events.snapshot import EventContext
2426
from synapse.state import POWER_KEY
2527
from synapse.util.async_helpers import Linearizer
2628
from synapse.util.caches import register_cache
@@ -51,6 +53,48 @@
5153
)
5254

5355

56+
STATE_EVENT_TYPES_TO_MARK_UNREAD = {
57+
EventTypes.Topic,
58+
EventTypes.Name,
59+
EventTypes.RoomAvatar,
60+
EventTypes.Tombstone,
61+
}
62+
63+
64+
def _should_count_as_unread(event: EventBase, context: EventContext) -> bool:
65+
# Exclude rejected and soft-failed events.
66+
if context.rejected or event.internal_metadata.is_soft_failed():
67+
return False
68+
69+
# Exclude notices.
70+
if (
71+
not event.is_state()
72+
and event.type == EventTypes.Message
73+
and event.content.get("msgtype") == "m.notice"
74+
):
75+
return False
76+
77+
# Exclude edits.
78+
relates_to = event.content.get("m.relates_to", {})
79+
if relates_to.get("rel_type") == RelationTypes.REPLACE:
80+
return False
81+
82+
# Mark events that have a non-empty string body as unread.
83+
body = event.content.get("body")
84+
if isinstance(body, str) and body:
85+
return True
86+
87+
# Mark some state events as unread.
88+
if event.is_state() and event.type in STATE_EVENT_TYPES_TO_MARK_UNREAD:
89+
return True
90+
91+
# Mark encrypted events as unread.
92+
if not event.is_state() and event.type == EventTypes.Encrypted:
93+
return True
94+
95+
return False
96+
97+
5498
class BulkPushRuleEvaluator(object):
5599
"""Calculates the outcome of push rules for an event for all users in the
56100
room at once.
@@ -133,9 +177,12 @@ async def _get_power_levels_and_sender_level(self, event, context):
133177
return pl_event.content if pl_event else {}, sender_level
134178

135179
async def action_for_event_by_user(self, event, context) -> None:
136-
"""Given an event and context, evaluate the push rules and insert the
137-
results into the event_push_actions_staging table.
180+
"""Given an event and context, evaluate the push rules, check if the message
181+
should increment the unread count, and insert the results into the
182+
event_push_actions_staging table.
138183
"""
184+
count_as_unread = _should_count_as_unread(event, context)
185+
139186
rules_by_user = await self._get_rules_for_event(event, context)
140187
actions_by_user = {}
141188

@@ -172,6 +219,8 @@ async def action_for_event_by_user(self, event, context) -> None:
172219
if event.type == EventTypes.Member and event.state_key == uid:
173220
display_name = event.content.get("displayname", None)
174221

222+
actions_by_user[uid] = []
223+
175224
for rule in rules:
176225
if "enabled" in rule and not rule["enabled"]:
177226
continue
@@ -189,7 +238,9 @@ async def action_for_event_by_user(self, event, context) -> None:
189238
# Mark in the DB staging area the push actions for users who should be
190239
# notified for this event. (This will then get handled when we persist
191240
# the event)
192-
await self.store.add_push_actions_to_staging(event.event_id, actions_by_user)
241+
await self.store.add_push_actions_to_staging(
242+
event.event_id, actions_by_user, count_as_unread,
243+
)
193244

194245

195246
def _condition_checker(evaluator, conditions, uid, display_name, cache):
@@ -369,8 +420,8 @@ async def _update_rules_with_member_event_ids(
369420
Args:
370421
ret_rules_by_user (dict): Partially filled dict of push rules. Gets
371422
updated with any new rules.
372-
member_event_ids (list): List of event ids for membership events that
373-
have happened since the last time we filled rules_by_user
423+
member_event_ids (dict): Dict of user id to event id for membership events
424+
that have happened since the last time we filled rules_by_user
374425
state_group: The state group we are currently computing push rules
375426
for. Used when updating the cache.
376427
"""
@@ -390,34 +441,19 @@ async def _update_rules_with_member_event_ids(
390441
if logger.isEnabledFor(logging.DEBUG):
391442
logger.debug("Found members %r: %r", self.room_id, members.values())
392443

393-
interested_in_user_ids = {
444+
user_ids = {
394445
user_id
395446
for user_id, membership in members.values()
396447
if membership == Membership.JOIN
397448
}
398449

399-
logger.debug("Joined: %r", interested_in_user_ids)
400-
401-
if_users_with_pushers = await self.store.get_if_users_have_pushers(
402-
interested_in_user_ids, on_invalidate=self.invalidate_all_cb
403-
)
404-
405-
user_ids = {
406-
uid for uid, have_pusher in if_users_with_pushers.items() if have_pusher
407-
}
408-
409-
logger.debug("With pushers: %r", user_ids)
410-
411-
users_with_receipts = await self.store.get_users_with_read_receipts_in_room(
412-
self.room_id, on_invalidate=self.invalidate_all_cb
413-
)
414-
415-
logger.debug("With receipts: %r", users_with_receipts)
450+
logger.debug("Joined: %r", user_ids)
416451

417-
# any users with pushers must be ours: they have pushers
418-
for uid in users_with_receipts:
419-
if uid in interested_in_user_ids:
420-
user_ids.add(uid)
452+
# Previously we only considered users with pushers or read receipts in that
453+
# room. We can't do this anymore because we use push actions to calculate unread
454+
counts, which don't rely on the user having pushers or having sent a read receipt into
455+
# the room. Therefore we just need to filter for local users here.
456+
user_ids = list(filter(self.is_mine_id, user_ids))
421457

422458
rules_by_user = await self.store.bulk_get_push_rules(
423459
user_ids, on_invalidate=self.invalidate_all_cb

synapse/push/push_tools.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ async def get_badge_count(store, user_id):
3636
)
3737
# return one badge count per conversation, as count per
3838
# message is so noisy as to be almost useless
39-
badge += 1 if notifs["notify_count"] else 0
39+
badge += 1 if notifs["unread_count"] else 0
4040
return badge
4141

4242

synapse/rest/client/v2_alpha/sync.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -425,6 +425,7 @@ def serialize(events):
425425
result["ephemeral"] = {"events": ephemeral_events}
426426
result["unread_notifications"] = room.unread_notifications
427427
result["summary"] = room.summary
428+
result["org.matrix.msc2654.unread_count"] = room.unread_count
428429

429430
return result
430431

synapse/storage/database.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -604,6 +604,18 @@ def cursor_to_dict(cursor: Cursor) -> List[Dict[str, Any]]:
604604
results = [dict(zip(col_headers, row)) for row in cursor]
605605
return results
606606

607+
@overload
608+
async def execute(
609+
self, desc: str, decoder: Literal[None], query: str, *args: Any
610+
) -> List[Tuple[Any, ...]]:
611+
...
612+
613+
@overload
614+
async def execute(
615+
self, desc: str, decoder: Callable[[Cursor], R], query: str, *args: Any
616+
) -> R:
617+
...
618+
607619
async def execute(
608620
self,
609621
desc: str,
@@ -1088,6 +1100,28 @@ async def simple_select_one(
10881100
desc, self.simple_select_one_txn, table, keyvalues, retcols, allow_none
10891101
)
10901102

1103+
@overload
1104+
async def simple_select_one_onecol(
1105+
self,
1106+
table: str,
1107+
keyvalues: Dict[str, Any],
1108+
retcol: Iterable[str],
1109+
allow_none: Literal[False] = False,
1110+
desc: str = "simple_select_one_onecol",
1111+
) -> Any:
1112+
...
1113+
1114+
@overload
1115+
async def simple_select_one_onecol(
1116+
self,
1117+
table: str,
1118+
keyvalues: Dict[str, Any],
1119+
retcol: Iterable[str],
1120+
allow_none: Literal[True] = True,
1121+
desc: str = "simple_select_one_onecol",
1122+
) -> Optional[Any]:
1123+
...
1124+
10911125
async def simple_select_one_onecol(
10921126
self,
10931127
table: str,

0 commit comments

Comments
 (0)