
Commit
Some more debug logging
MadLittleMods committed Oct 21, 2021
1 parent 886071b commit 477c15d
Showing 4 changed files with 21 additions and 5 deletions.
2 changes: 2 additions & 0 deletions synapse/federation/federation_server.py
@@ -196,6 +196,8 @@ async def on_backfill_request(
             origin, room_id, versions, limit
         )
 
+        logger.info("on_backfill_request pdus(%d)=%s", len(pdus), pdus)
+
         res = self._transaction_dict_from_pdus(pdus)
 
         return 200, res
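The added call uses the stdlib logging module's lazy %-style formatting, so the PDU list is only rendered into the message when INFO logging is actually enabled. A minimal standalone sketch of that pattern, with a hypothetical stand-in for `pdus`:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("synapse.federation.federation_server")

# Hypothetical stand-in for the PDUs returned by the backfill handler.
pdus = ["$event_a:example.org", "$event_b:example.org"]

# Same shape as the new debug line: the %d/%s arguments are interpolated
# lazily by the logging framework, not eagerly at the call site.
logger.info("on_backfill_request pdus(%d)=%s", len(pdus), pdus)
```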
3 changes: 2 additions & 1 deletion synapse/handlers/federation.py
@@ -250,11 +250,12 @@ async def _maybe_backfill_inner(
         ]
 
         logger.info(
-            "room_id: %s, backfill: current_depth: %s, limit: %s, max_depth: %s, extrems: %s filtered_sorted_extremeties_tuple: %s",
+            "room_id: %s, backfill: current_depth: %s, limit: %s, max_depth: %s, extrems (%d): %s filtered_sorted_extremeties_tuple: %s",
             room_id,
             current_depth,
             limit,
             max_depth,
+            len(sorted_extremeties_tuple),
             sorted_extremeties_tuple,
             filtered_sorted_extremeties_tuple,
         )
8 changes: 4 additions & 4 deletions synapse/handlers/room_batch.py
@@ -184,7 +184,7 @@ async def persist_state_events_at_start(
 
         # Make the state events float off on their own so we don't have a
         # bunch of `@mxid joined the room` noise between each batch
-        prev_event_id_for_state_chain = generate_fake_event_id()
+        prev_event_ids_for_state_chain = [generate_fake_event_id()]
 
         for state_event in state_events_at_start:
             assert_params_in_dict(
@@ -221,7 +221,7 @@ async def persist_state_events_at_start(
                     action=membership,
                     content=event_dict["content"],
                     outlier=True,
-                    prev_event_ids=[prev_event_id_for_state_chain],
+                    prev_event_ids=prev_event_ids_for_state_chain,
                     # Make sure to use a copy of this list because we modify it
                     # later in the loop here. Otherwise it will be the same
                     # reference and also update in the event when we append later.
@@ -240,7 +240,7 @@ async def persist_state_events_at_start(
                     ),
                     event_dict,
                     outlier=True,
-                    prev_event_ids=[prev_event_id_for_state_chain],
+                    prev_event_ids=prev_event_ids_for_state_chain,
                     # Make sure to use a copy of this list because we modify it
                     # later in the loop here. Otherwise it will be the same
                     # reference and also update in the event when we append later.
@@ -251,7 +251,7 @@ async def persist_state_events_at_start(
             state_event_ids_at_start.append(event_id)
             auth_event_ids.append(event_id)
             # Connect all the state in a floating chain
-            prev_event_id_for_state_chain = event_id
+            prev_event_ids_for_state_chain = [event_id]
 
         return state_event_ids_at_start
 
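Across this file the change is mechanical: the state-chain cursor becomes a one-element list so it can be passed directly as `prev_event_ids`, while the chaining behaviour stays the same — each historical state event still points at the one created just before it. A toy sketch of that pattern, using a hypothetical helper and made-up event IDs rather than the real Synapse APIs:

```python
from typing import List


def chain_floating_state(created_event_ids: List[str]) -> List[List[str]]:
    """Illustrate the floating-chain pattern: every event is created with the
    previous event's ID as its only prev_event."""
    # Stand-in for generate_fake_event_id(), so the first event floats freely.
    prev_event_ids_for_state_chain = ["$fake-start:example.org"]
    prev_events_used = []
    for event_id in created_event_ids:
        # Each event would be created with prev_event_ids=prev_event_ids_for_state_chain.
        prev_events_used.append(list(prev_event_ids_for_state_chain))
        # Connect all the state in a floating chain.
        prev_event_ids_for_state_chain = [event_id]
    return prev_events_used


print(chain_floating_state(["$a", "$b", "$c"]))
# [['$fake-start:example.org'], ['$a'], ['$b']]
```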
13 changes: 13 additions & 0 deletions synapse/storage/databases/main/event_federation.py
@@ -1057,6 +1057,11 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
         # we process the newest-in-time messages first going backwards in time.
         queue = PriorityQueue()
 
+        logger.info(
+            "_get_backfill_events: seeding backfill with event_list(%d)=%s",
+            len(event_list),
+            event_list,
+        )
         for event_id in event_list:
             event_lookup_result = self.db_pool.simple_select_one_txn(
                 txn,
@@ -1070,6 +1075,14 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
                 allow_none=True,
             )
 
+            logger.info(
+                "_get_backfill_events: seeding backfill with event_id=%s type=%s depth=%s stream_ordering=%s",
+                event_id,
+                event_lookup_result["type"],
+                event_lookup_result["depth"],
+                event_lookup_result["stream_ordering"],
+            )
+
             if event_lookup_result["depth"]:
                 queue.put(
                     (
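The new log lines surface each seed event's depth and stream_ordering just before it is pushed onto the PriorityQueue, where the surrounding comment says the walk handles newest-in-time messages first. One common way to get that ordering out of Python's min-heap PriorityQueue is to negate the depth, sketched here with made-up event IDs and depths (an illustration only, not the repository's actual queue entries):

```python
from queue import PriorityQueue

# Made-up (event_id, depth) seed values.
seed_events = [("$a:example.org", 3), ("$b:example.org", 7), ("$c:example.org", 5)]

queue: PriorityQueue = PriorityQueue()
for event_id, depth in seed_events:
    # Negating the depth makes the deepest (newest-in-time) event the
    # smallest priority value, so it is popped first.
    queue.put((-depth, event_id))

while not queue.empty():
    neg_depth, event_id = queue.get()
    print(event_id, -neg_depth)
# $b:example.org 7
# $c:example.org 5
# $a:example.org 3
```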
