Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Permalink
Add stricter mypy options (#15694)
Browse files Browse the repository at this point in the history
Enable warn_unused_configs, strict_concatenate, disallow_subclassing_any,
and disallow_incomplete_defs.
  • Loading branch information
clokep authored May 31, 2023
1 parent 6fc3deb commit c01343d
Show file tree
Hide file tree
Showing 11 changed files with 40 additions and 24 deletions.
1 change: 1 addition & 0 deletions changelog.d/15694.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Improve type hints.
23 changes: 20 additions & 3 deletions mypy.ini
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,29 @@
namespace_packages = True
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal
check_untyped_defs = True
show_error_codes = True
show_traceback = True
mypy_path = stubs
warn_unreachable = True
warn_unused_ignores = True
local_partial_types = True
no_implicit_optional = True

# Strict checks, see mypy --help
warn_unused_configs = True
# disallow_any_generics = True
disallow_subclassing_any = True
# disallow_untyped_calls = True
disallow_untyped_defs = True
strict_equality = True
disallow_incomplete_defs = True
# check_untyped_defs = True
# disallow_untyped_decorators = True
warn_redundant_casts = True
warn_unused_ignores = True
# warn_return_any = True
# no_implicit_reexport = True
strict_equality = True
strict_concatenate = True

# Run mypy type checking with the minimum supported Python version to catch new usage
# that isn't backwards-compatible (types, overloads, etc).
python_version = 3.8
Expand All @@ -31,6 +43,7 @@ warn_unused_ignores = False

[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False
disallow_incomplete_defs = False

;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
Expand All @@ -40,6 +53,7 @@ disallow_untyped_defs = False
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
;; `[tool.poetry.dev-dependencies]` list.

# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*]
ignore_missing_imports = True

Expand All @@ -49,9 +63,11 @@ ignore_missing_imports = True
[mypy-lxml]
ignore_missing_imports = True

# https://github.com/msgpack/msgpack-python/issues/448
[mypy-msgpack]
ignore_missing_imports = True

# https://github.com/wolever/parameterized/issues/143
[mypy-parameterized.*]
ignore_missing_imports = True

Expand All @@ -73,6 +89,7 @@ ignore_missing_imports = True
[mypy-srvlookup.*]
ignore_missing_imports = True

# https://github.com/twisted/treq/pull/366
[mypy-treq.*]
ignore_missing_imports = True

Expand Down
2 changes: 1 addition & 1 deletion synapse/api/auth/msc3861_delegated.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def scope_to_list(scope: str) -> List[str]:
return scope.strip().split(" ")


class PrivateKeyJWTWithKid(PrivateKeyJWT):
class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
"""An implementation of the private_key_jwt client auth method that includes a kid header.
This is needed because some providers (Keycloak) require the kid header to figure
Expand Down
4 changes: 2 additions & 2 deletions synapse/federation/federation_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -515,7 +515,7 @@ async def process_pdu(pdu: EventBase) -> JsonDict:
logger.error(
"Failed to handle PDU %s",
event_id,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
exc_info=(f.type, f.value, f.getTracebackObject()),
)
return {"error": str(e)}

Expand Down Expand Up @@ -1247,7 +1247,7 @@ async def _process_incoming_pdus_in_room_inner(
logger.error(
"Failed to handle PDU %s",
event.event_id,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
exc_info=(f.type, f.value, f.getTracebackObject()),
)

received_ts = await self.store.remove_received_event_from_staging(
Expand Down
2 changes: 1 addition & 1 deletion synapse/handlers/oidc.py
Original file line number Diff line number Diff line change
Expand Up @@ -1354,7 +1354,7 @@ async def handle_backchannel_logout(
finish_request(request)


class LogoutToken(JWTClaims):
class LogoutToken(JWTClaims): # type: ignore[misc]
"""
Holds and verifies claims of a logout token, as per
https://openid.net/specs/openid-connect-backchannel-1_0.html#LogoutToken
Expand Down
4 changes: 2 additions & 2 deletions synapse/handlers/pagination.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,7 +360,7 @@ async def _purge_history(
except Exception:
f = Failure()
logger.error(
"[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject()) # type: ignore
"[purge] failed", exc_info=(f.type, f.value, f.getTracebackObject())
)
self._purges_by_id[purge_id].status = PurgeStatus.STATUS_FAILED
self._purges_by_id[purge_id].error = f.getErrorMessage()
Expand Down Expand Up @@ -689,7 +689,7 @@ async def _shutdown_and_purge_room(
f = Failure()
logger.error(
"failed",
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore
exc_info=(f.type, f.value, f.getTracebackObject()),
)
self._delete_by_id[delete_id].status = DeleteStatus.STATUS_FAILED
self._delete_by_id[delete_id].error = f.getErrorMessage()
Expand Down
14 changes: 7 additions & 7 deletions synapse/http/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def return_json_error(

if f.check(SynapseError):
# mypy doesn't understand that f.check asserts the type.
exc: SynapseError = f.value # type: ignore
exc: SynapseError = f.value
error_code = exc.code
error_dict = exc.error_dict(config)
if exc.headers is not None:
Expand All @@ -124,7 +124,7 @@ def return_json_error(
"Got cancellation before client disconnection from %r: %r",
request.request_metrics.name,
request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type]
exc_info=(f.type, f.value, f.getTracebackObject()),
)
else:
error_code = 500
Expand All @@ -134,7 +134,7 @@ def return_json_error(
"Failed handle request via %r: %r",
request.request_metrics.name,
request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type]
exc_info=(f.type, f.value, f.getTracebackObject()),
)

# Only respond with an error response if we haven't already started writing,
Expand Down Expand Up @@ -172,7 +172,7 @@ def return_html_error(
"""
if f.check(CodeMessageException):
# mypy doesn't understand that f.check asserts the type.
cme: CodeMessageException = f.value # type: ignore
cme: CodeMessageException = f.value
code = cme.code
msg = cme.msg
if cme.headers is not None:
Expand All @@ -189,7 +189,7 @@ def return_html_error(
logger.error(
"Failed handle request %r",
request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type]
exc_info=(f.type, f.value, f.getTracebackObject()),
)
elif f.check(CancelledError):
code = HTTP_STATUS_REQUEST_CANCELLED
Expand All @@ -199,7 +199,7 @@ def return_html_error(
logger.error(
"Got cancellation before client disconnection when handling request %r",
request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type]
exc_info=(f.type, f.value, f.getTracebackObject()),
)
else:
code = HTTPStatus.INTERNAL_SERVER_ERROR
Expand All @@ -208,7 +208,7 @@ def return_html_error(
logger.error(
"Failed handle request %r",
request,
exc_info=(f.type, f.value, f.getTracebackObject()), # type: ignore[arg-type]
exc_info=(f.type, f.value, f.getTracebackObject()),
)

if isinstance(error_template, str):
Expand Down
4 changes: 2 additions & 2 deletions synapse/util/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def unwrapFirstError(failure: Failure) -> Failure:
# the subFailure's value, which will do a better job of preserving stacktraces.
# (actually, you probably want to use yieldable_gather_results anyway)
failure.trap(defer.FirstError)
return failure.value.subFailure # type: ignore[union-attr] # Issue in Twisted's annotations
return failure.value.subFailure


P = ParamSpec("P")
Expand Down Expand Up @@ -178,7 +178,7 @@ def log_failure(
"""

logger.error(
msg, exc_info=(failure.type, failure.value, failure.getTracebackObject()) # type: ignore[arg-type]
msg, exc_info=(failure.type, failure.value, failure.getTracebackObject())
)

if not consumeErrors:
Expand Down
2 changes: 1 addition & 1 deletion synapse/util/async_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ def errback(f: Failure) -> Optional[Failure]:
for observer in observers:
# This is a little bit of magic to correctly propagate stack
# traces when we `await` on one of the observer deferreds.
f.value.__failure__ = f # type: ignore[union-attr]
f.value.__failure__ = f
try:
observer.errback(f)
except Exception as e:
Expand Down
6 changes: 2 additions & 4 deletions synapse/util/caches/lrucache.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,10 +93,8 @@ def _get_size_of(val: Any, *, recurse: bool = True) -> int:
# a general type var, distinct from either KT or VT
T = TypeVar("T")

P = TypeVar("P")


class _TimedListNode(ListNode[P]):
class _TimedListNode(ListNode[T]):
"""A `ListNode` that tracks last access time."""

__slots__ = ["last_access_ts_secs"]
Expand Down Expand Up @@ -821,7 +819,7 @@ class AsyncLruCache(Generic[KT, VT]):
utilize external cache systems that require await behaviour to be created.
"""

def __init__(self, *args, **kwargs): # type: ignore
def __init__(self, *args: Any, **kwargs: Any):
self._lru_cache: LruCache[KT, VT] = LruCache(*args, **kwargs)

async def get(
Expand Down
2 changes: 1 addition & 1 deletion tests/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -642,7 +642,7 @@ def runInteraction(
pool.runWithConnection = runWithConnection # type: ignore[assignment]
pool.runInteraction = runInteraction # type: ignore[assignment]
# Replace the thread pool with a threadless 'thread' pool
pool.threadpool = ThreadPool(clock._reactor) # type: ignore[assignment]
pool.threadpool = ThreadPool(clock._reactor)
pool.running = True

# We've just changed the Databases to run DB transactions on the same
Expand Down

0 comments on commit c01343d

Please sign in to comment.