Do not try to serialize raw aggregations dict. #11791

Merged (7 commits) on Jan 21, 2022
1 change: 1 addition & 0 deletions changelog.d/11612.bugfix
@@ -0,0 +1 @@
Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
1 change: 0 additions & 1 deletion changelog.d/11612.misc

This file was deleted.

1 change: 1 addition & 0 deletions changelog.d/11791.bugfix
@@ -0,0 +1 @@
Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
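
For orientation, the data being bundled ends up under each serialized event's `unsigned["m.relations"]` key, which is exactly what the updated tests below assert against. A minimal sketch of a serialized event carrying bundled aggregations, with made-up event IDs and a trimmed `latest_event` (illustrative only, not copied from the diff):

# Illustrative sketch only: the shape of a serialized event with bundled
# aggregations, as exercised by the tests in this PR. IDs are made up.
serialized_event = {
    "event_id": "$parent",
    "type": "m.room.test",
    "content": {},
    "unsigned": {
        "m.relations": {
            "m.annotation": {
                "chunk": [
                    {"type": "m.reaction", "key": "a", "count": 2},
                    {"type": "m.reaction", "key": "b", "count": 1},
                ]
            },
            "m.reference": {
                "chunk": [{"event_id": "$reply_1"}, {"event_id": "$reply_2"}]
            },
            "m.thread": {
                "count": 2,
                "current_user_participated": True,
                # The latest thread event is a full serialized event; trimmed here.
                "latest_event": {"event_id": "$thread_2", "type": "m.room.test"},
            },
        }
    },
}
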
4 changes: 2 additions & 2 deletions synapse/events/utils.py
@@ -402,7 +402,7 @@ def serialize_event(
if bundle_aggregations:
event_aggregations = bundle_aggregations.get(event.event_id)
if event_aggregations:
self._injected_bundled_aggregations(
self._inject_bundled_aggregations(
event,
time_now,
bundle_aggregations[event.event_id],
@@ -411,7 +411,7 @@ def serialize_event(

return serialized_event

def _injected_bundled_aggregations(
def _inject_bundled_aggregations(
self,
event: EventBase,
time_now: int,
13 changes: 4 additions & 9 deletions synapse/rest/admin/rooms.py
@@ -744,20 +744,15 @@ async def on_GET(
)

time_now = self.clock.time_msec()
aggregations = results.pop("aggregations", None)
results["events_before"] = self._event_serializer.serialize_events(
results["events_before"],
time_now,
bundle_aggregations=results["aggregations"],
results["events_before"], time_now, bundle_aggregations=aggregations
)
results["event"] = self._event_serializer.serialize_event(
results["event"],
time_now,
bundle_aggregations=results["aggregations"],
results["event"], time_now, bundle_aggregations=aggregations
)
results["events_after"] = self._event_serializer.serialize_events(
results["events_after"],
time_now,
bundle_aggregations=results["aggregations"],
results["events_after"], time_now, bundle_aggregations=aggregations
)
results["state"] = self._event_serializer.serialize_events(
results["state"], time_now
11 changes: 4 additions & 7 deletions synapse/rest/client/room.py
@@ -714,18 +714,15 @@ async def on_GET(
raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)

time_now = self.clock.time_msec()
aggregations = results.pop("aggregations", None)
results["events_before"] = self._event_serializer.serialize_events(
results["events_before"],
time_now,
bundle_aggregations=results["aggregations"],
results["events_before"], time_now, bundle_aggregations=aggregations
)
results["event"] = self._event_serializer.serialize_event(
results["event"], time_now, bundle_aggregations=results["aggregations"]
results["event"], time_now, bundle_aggregations=aggregations
)
results["events_after"] = self._event_serializer.serialize_events(
results["events_after"],
time_now,
bundle_aggregations=results["aggregations"],
results["events_after"], time_now, bundle_aggregations=aggregations
)
results["state"] = self._event_serializer.serialize_events(
results["state"], time_now
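
Both context handlers above get the same treatment: the raw per-event aggregations map is popped out of `results`, the dict that becomes the JSON response body, and handed to the event serializer, which injects the data into each event's `unsigned` instead of the raw map being left in the response (which is what the PR title is about avoiding). A condensed sketch of the shared pattern; the helper name here is hypothetical, and the real code is the two handlers shown above:

# Condensed sketch of the pattern both handlers now follow (hypothetical
# helper; the real code lives in synapse/rest/admin/rooms.py and
# synapse/rest/client/room.py).
def _serialize_context(results, serializer, time_now):
    # Pull the raw per-event aggregations map out of the response dict so
    # it is never serialized verbatim into the response body.
    aggregations = results.pop("aggregations", None)

    # The serializer injects each event's aggregations into its
    # unsigned["m.relations"] field.
    results["events_before"] = serializer.serialize_events(
        results["events_before"], time_now, bundle_aggregations=aggregations
    )
    results["event"] = serializer.serialize_event(
        results["event"], time_now, bundle_aggregations=aggregations
    )
    results["events_after"] = serializer.serialize_events(
        results["events_after"], time_now, bundle_aggregations=aggregations
    )
    return results
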
108 changes: 73 additions & 35 deletions tests/rest/client/test_relations.py
@@ -21,6 +21,7 @@
from synapse.api.constants import EventTypes, RelationTypes
from synapse.rest import admin
from synapse.rest.client import login, register, relations, room, sync
from synapse.types import JsonDict

from tests import unittest
from tests.server import FakeChannel
@@ -454,7 +455,14 @@ def test_aggregation_must_be_annotation(self):

@unittest.override_config({"experimental_features": {"msc3440_enabled": True}})
def test_bundled_aggregations(self):
"""Test that annotations, references, and threads get correctly bundled."""
"""
Test that annotations, references, and threads get correctly bundled.

Note that this doesn't test against /relations since only thread relations
get bundled via that API. See test_aggregation_get_event_for_thread.

See test_edit for a similar test for edits.
"""
# Setup by sending a variety of relations.
channel = self._send_relation(RelationTypes.ANNOTATION, "m.reaction", "a")
self.assertEquals(200, channel.code, channel.json_body)
Expand Down Expand Up @@ -482,12 +490,13 @@ def test_bundled_aggregations(self):
self.assertEquals(200, channel.code, channel.json_body)
thread_2 = channel.json_body["event_id"]

def assert_bundle(actual):
def assert_bundle(event_json: JsonDict) -> None:
"""Assert the expected values of the bundled aggregations."""
relations_dict = event_json["unsigned"].get("m.relations")

# Ensure the fields are as expected.
self.assertCountEqual(
Review comment (Contributor): this assertion function always gets me with the misleading name 😵‍💫

Reply (Member Author): Definitely a confusing name. 🤷

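The assertion in question is `assertCountEqual`, whose name trips people up: it does not compare lengths, it asserts that the two sequences contain the same elements with the same multiplicities, in any order (roughly `assertEqual(sorted(a), sorted(b))`, but it also works with unhashable items). A quick, self-contained illustration:

import unittest

class AssertCountEqualDemo(unittest.TestCase):
    def test_name_is_about_element_counts_not_length(self):
        # Passes: same elements with the same multiplicities, different order.
        self.assertCountEqual([1, 2, 2, 3], [3, 2, 1, 2])

        # Fails even though both sequences have the same length, because the
        # elements differ.
        with self.assertRaises(AssertionError):
            self.assertCountEqual([1, 1], [1, 2])

if __name__ == "__main__":
    unittest.main()
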
actual.keys(),
relations_dict.keys(),
(
RelationTypes.ANNOTATION,
RelationTypes.REFERENCE,
@@ -503,20 +512,20 @@ def assert_bundle(actual):
{"type": "m.reaction", "key": "b", "count": 1},
]
},
actual[RelationTypes.ANNOTATION],
relations_dict[RelationTypes.ANNOTATION],
)

self.assertEquals(
{"chunk": [{"event_id": reply_1}, {"event_id": reply_2}]},
actual[RelationTypes.REFERENCE],
relations_dict[RelationTypes.REFERENCE],
)

self.assertEquals(
2,
actual[RelationTypes.THREAD].get("count"),
relations_dict[RelationTypes.THREAD].get("count"),
)
self.assertTrue(
actual[RelationTypes.THREAD].get("current_user_participated")
relations_dict[RelationTypes.THREAD].get("current_user_participated")
)
# The latest thread event has some fields that don't matter.
self.assert_dict(
@@ -533,28 +542,17 @@ def assert_bundle(actual):
"type": "m.room.test",
"user_id": self.user_id,
},
actual[RelationTypes.THREAD].get("latest_event"),
relations_dict[RelationTypes.THREAD].get("latest_event"),
)

def _find_and_assert_event(events):
"""
Find the parent event in a chunk of events and assert that it has the proper bundled aggregations.
"""
for event in events:
if event["event_id"] == self.parent_id:
break
else:
raise AssertionError(f"Event {self.parent_id} not found in chunk")
assert_bundle(event["unsigned"].get("m.relations"))

# Request the event directly.
channel = self.make_request(
"GET",
f"/rooms/{self.room}/event/{self.parent_id}",
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
assert_bundle(channel.json_body["unsigned"].get("m.relations"))
assert_bundle(channel.json_body)

# Request the room messages.
channel = self.make_request(
@@ -563,7 +561,7 @@ def _find_and_assert_event(events):
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
_find_and_assert_event(channel.json_body["chunk"])
assert_bundle(self._find_event_in_chunk(channel.json_body["chunk"]))

# Request the room context.
channel = self.make_request(
@@ -572,17 +570,14 @@ def _find_and_assert_event(events):
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
assert_bundle(channel.json_body["event"]["unsigned"].get("m.relations"))
assert_bundle(channel.json_body["event"])

# Request sync.
channel = self.make_request("GET", "/sync", access_token=self.user_token)
self.assertEquals(200, channel.code, channel.json_body)
room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"]
self.assertTrue(room_timeline["limited"])
_find_and_assert_event(room_timeline["events"])

# Note that /relations is tested separately in test_aggregation_get_event_for_thread
# since it needs different data configured.
self._find_event_in_chunk(room_timeline["events"])

def test_aggregation_get_event_for_annotation(self):
"""Test that annotations do not get bundled aggregations included
@@ -777,25 +772,58 @@ def test_edit(self):

edit_event_id = channel.json_body["event_id"]

def assert_bundle(event_json: JsonDict) -> None:
"""Assert the expected values of the bundled aggregations."""
relations_dict = event_json["unsigned"].get("m.relations")
self.assertIn(RelationTypes.REPLACE, relations_dict)

m_replace_dict = relations_dict[RelationTypes.REPLACE]
for key in ["event_id", "sender", "origin_server_ts"]:
self.assertIn(key, m_replace_dict)

self.assert_dict(
{"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
)

channel = self.make_request(
"GET",
"/rooms/%s/event/%s" % (self.room, self.parent_id),
f"/rooms/{self.room}/event/{self.parent_id}",
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)

self.assertEquals(channel.json_body["content"], new_body)
assert_bundle(channel.json_body)

relations_dict = channel.json_body["unsigned"].get("m.relations")
self.assertIn(RelationTypes.REPLACE, relations_dict)
# Request the room messages.
channel = self.make_request(
"GET",
f"/rooms/{self.room}/messages?dir=b",
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
assert_bundle(self._find_event_in_chunk(channel.json_body["chunk"]))

m_replace_dict = relations_dict[RelationTypes.REPLACE]
for key in ["event_id", "sender", "origin_server_ts"]:
self.assertIn(key, m_replace_dict)
# Request the room context.
channel = self.make_request(
"GET",
f"/rooms/{self.room}/context/{self.parent_id}",
access_token=self.user_token,
)
self.assertEquals(200, channel.code, channel.json_body)
assert_bundle(channel.json_body["event"])

self.assert_dict(
{"event_id": edit_event_id, "sender": self.user_id}, m_replace_dict
# Request sync, but limit the timeline so it becomes limited (and includes
# bundled aggregations).
filter = urllib.parse.quote_plus(
'{"room": {"timeline": {"limit": 2}}}'.encode()
)
channel = self.make_request(
"GET", f"/sync?filter={filter}", access_token=self.user_token
)
self.assertEquals(200, channel.code, channel.json_body)
room_timeline = channel.json_body["rooms"]["join"][self.room]["timeline"]
self.assertTrue(room_timeline["limited"])
assert_bundle(self._find_event_in_chunk(room_timeline["events"]))

def test_multi_edit(self):
"""Test that multiple edits, including attempts by people who
@@ -1102,6 +1130,16 @@ def test_unknown_relations(self):
self.assertEquals(200, channel.code, channel.json_body)
self.assertEquals(channel.json_body["chunk"], [])

def _find_event_in_chunk(self, events: List[JsonDict]) -> JsonDict:
"""
Find the parent event in a chunk of events and assert that it has the proper bundled aggregations.
"""
for event in events:
if event["event_id"] == self.parent_id:
return event

raise AssertionError(f"Event {self.parent_id} not found in chunk")

def _send_relation(
self,
relation_type: str,