Add logging on startup/shutdown #8448
Changes from all commits
New file (changelog entry):

```diff
@@ -0,0 +1 @@
+Add SQL logging on queries that happen during startup.
```
`synapse/storage/database.py`:
```diff
@@ -32,6 +32,7 @@
     overload,
 )

+import attr
 from prometheus_client import Histogram
 from typing_extensions import Literal
```
```diff
@@ -90,13 +91,17 @@ def make_pool(
     return adbapi.ConnectionPool(
         db_config.config["name"],
         cp_reactor=reactor,
-        cp_openfun=engine.on_new_connection,
+        cp_openfun=lambda conn: engine.on_new_connection(
+            LoggingDatabaseConnection(conn, engine, "on_new_connection")
+        ),
         **db_config.config.get("args", {})
     )


 def make_conn(
-    db_config: DatabaseConnectionConfig, engine: BaseDatabaseEngine
+    db_config: DatabaseConnectionConfig,
+    engine: BaseDatabaseEngine,
+    default_txn_name: str,
 ) -> Connection:
     """Make a new connection to the database and return it.
```
```diff
@@ -109,11 +114,60 @@ def make_conn(
         for k, v in db_config.config.get("args", {}).items()
         if not k.startswith("cp_")
     }
-    db_conn = engine.module.connect(**db_params)
+    native_db_conn = engine.module.connect(**db_params)
+    db_conn = LoggingDatabaseConnection(native_db_conn, engine, default_txn_name)
+
     engine.on_new_connection(db_conn)
     return db_conn


+@attr.s(slots=True)
+class LoggingDatabaseConnection:
+    """A wrapper around a database connection that returns `LoggingTransaction`
+    as its cursor class.
+
+    This is mainly used on startup to ensure that queries get logged correctly
+    """
+
+    conn = attr.ib(type=Connection)
+    engine = attr.ib(type=BaseDatabaseEngine)
+    default_txn_name = attr.ib(type=str)
+
+    def cursor(
+        self, *, txn_name=None, after_callbacks=None, exception_callbacks=None
+    ) -> "LoggingTransaction":
+        if not txn_name:
+            txn_name = self.default_txn_name
+
+        return LoggingTransaction(
+            self.conn.cursor(),
+            name=txn_name,
+            database_engine=self.engine,
+            after_callbacks=after_callbacks,
+            exception_callbacks=exception_callbacks,
+        )
+
+    def close(self) -> None:
+        self.conn.close()
+
+    def commit(self) -> None:
+        self.conn.commit()
+
+    def rollback(self, *args, **kwargs) -> None:
+        self.conn.rollback(*args, **kwargs)
+
+    def __enter__(self) -> "Connection":
+        self.conn.__enter__()
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback) -> bool:
+        return self.conn.__exit__(exc_type, exc_value, traceback)
+
+    # Proxy through any unknown lookups to the DB conn class.
+    def __getattr__(self, name):
+        return getattr(self.conn, name)
+
+
 # The type of entry which goes on our after_callbacks and exception_callbacks lists.
 #
 # Python 3.5.2 doesn't support Callable with an ellipsis, so we wrap it in quotes so
```
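To illustrate the intent described in the docstring, here is a rough usage sketch for startup code; the transaction names and the query are illustrative, not taken from this diff:

```python
# Sketch: open a startup connection whose cursors log their queries.
db_conn = make_conn(db_config, engine, default_txn_name="startup")

# cursor() hands back a LoggingTransaction, so this query is logged
# under the "check_schema" transaction name instead of the default.
txn = db_conn.cursor(txn_name="check_schema")
txn.execute("SELECT version FROM schema_version")
for row in txn:
    print("schema version:", row[0])
txn.close()

db_conn.commit()
db_conn.close()
```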
```diff
@@ -247,6 +301,12 @@ def _do_execute(self, func, sql: str, *args: Any) -> None:
     def close(self) -> None:
         self.txn.close()

+    def __enter__(self) -> "LoggingTransaction":
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+

 class PerformanceCounters:
     def __init__(self):
```
```diff
@@ -395,7 +455,7 @@ def loop():

     def new_transaction(
         self,
-        conn: Connection,
+        conn: LoggingDatabaseConnection,
         desc: str,
         after_callbacks: List[_CallbackListEntry],
         exception_callbacks: List[_CallbackListEntry],
```
```diff
@@ -418,12 +478,10 @@ def new_transaction(
         i = 0
         N = 5
         while True:
-            cursor = LoggingTransaction(
-                conn.cursor(),
-                name,
-                self.engine,
-                after_callbacks,
-                exception_callbacks,
+            cursor = conn.cursor(
+                txn_name=name,
+                after_callbacks=after_callbacks,
+                exception_callbacks=exception_callbacks,
             )
             try:
                 r = func(cursor, *args, **kwargs)
```
```diff
@@ -584,7 +642,10 @@ def inner_func(conn, *args, **kwargs):
                     logger.debug("Reconnecting closed database connection")
                     conn.reconnect()

-                return func(conn, *args, **kwargs)
+                db_conn = LoggingDatabaseConnection(
+                    conn, self.engine, "runWithConnection"
+                )
+                return func(db_conn, *args, **kwargs)

         return await make_deferred_yieldable(
             self._db_pool.runWithConnection(inner_func, *args, **kwargs)
```

Comment on lines +645 to +648:

> Is this ensuring another location uses `LoggingDatabaseConnection`?

This just ensures that whenever we use a connection after a call to `runWithConnection`, it is wrapped in a `LoggingDatabaseConnection` so its queries get logged.
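As a rough illustration of the reply above (the method and table names here are made up for the example), any callable passed to `runWithConnection` now receives the wrapper rather than the raw adbapi connection:

```python
async def count_users(database_pool):
    def _count(conn):
        # `conn` is a LoggingDatabaseConnection, so the cursor below is a
        # LoggingTransaction and the query is logged under the default
        # "runWithConnection" transaction name.
        txn = conn.cursor()
        txn.execute("SELECT COUNT(*) FROM users")
        rows = list(txn)
        txn.close()
        return rows[0][0]

    return await database_pool.runWithConnection(_count)
```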
```diff
@@ -1621,7 +1682,7 @@ def simple_delete_many_txn(

     def get_cache_dict(
         self,
-        db_conn: Connection,
+        db_conn: LoggingDatabaseConnection,
         table: str,
         entity_column: str,
         stream_column: str,
```
```diff
@@ -1642,9 +1703,7 @@ def get_cache_dict(
             "limit": limit,
         }

-        sql = self.engine.convert_param_style(sql)
-
-        txn = db_conn.cursor()
+        txn = db_conn.cursor(txn_name="get_cache_dict")
         txn.execute(sql, (int(max_value),))

         cache = {row[0]: int(row[1]) for row in txn}
```
One of the schema delta scripts:
```diff
@@ -1,6 +1,8 @@
 import logging
+from io import StringIO

 from synapse.storage.engines import PostgresEngine
+from synapse.storage.prepare_database import execute_statements_from_stream

 logger = logging.getLogger(__name__)
```
```diff
@@ -46,7 +48,4 @@ def run_create(cur, database_engine, *args, **kwargs):
         select_clause,
     )

-    if isinstance(database_engine, PostgresEngine):
-        cur.execute(sql)
-    else:
-        cur.executescript(sql)
+    execute_statements_from_stream(cur, StringIO(sql))
```
> Why this change? It seems potentially inefficient?

This is a delta so it doesn't really matter if it's inefficient. We do this because we don't expose an `executescript` method on `LoggingTransaction`.
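For context, `execute_statements_from_stream` (from `synapse.storage.prepare_database`) runs each statement individually through the supplied cursor. The following is a simplified approximation of that behaviour, not the actual implementation:

```python
from io import StringIO

def execute_statements_from_stream_sketch(cur, stream):
    # Simplified approximation: split the script into individual statements
    # and run each one through the (logging) cursor, so every statement is
    # logged, instead of relying on sqlite's cursor.executescript().
    for statement in stream.read().split(";"):
        statement = statement.strip()
        if statement:
            cur.execute(statement)

# e.g. execute_statements_from_stream_sketch(cur, StringIO(sql))
```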
> Did we never previously use `LoggingTransaction` as a context manager, even though `Transaction` can be used that way?

Exactly.
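A sketch of what the new context-manager support on `LoggingTransaction` allows, assuming `db_conn` is a `LoggingDatabaseConnection` as in the earlier sketch (the transaction name and query are illustrative):

```python
# __exit__ simply calls close(), so the cursor is always cleaned up even
# if the block raises.
with db_conn.cursor(txn_name="count_rooms") as txn:
    txn.execute("SELECT COUNT(*) FROM rooms")
    rows = list(txn)
```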