Skip to content
This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Permalink
Merge commit '31acc5c30' into anoa/dinsic_release_1_21_x
Browse files Browse the repository at this point in the history
* commit '31acc5c30':
  Escape the error description on the sso_error template. (#8405)
  Fix occasional "Re-starting finished log context" from keyring (#8398)
  Allow existing users to login via OpenID Connect. (#8345)
  Fix schema delta for servers that have not backfilled (#8396)
  Fix MultiWriteIdGenerator's handling of restarts. (#8374)
  s/URLs/variables in changelog
  s/accidentally/incorrectly in changelog
  Update changelog wording
  Add type annotations to SimpleHttpClient (#8372)
  Add new sequences to port DB script (#8387)
  Add EventStreamPosition type (#8388)
  Mark the shadow_banned column as boolean in synapse_port_db. (#8386)
  • Loading branch information
anoadragon453 committed Oct 21, 2020
2 parents d70f909 + 31acc5c commit 5b0b103
Show file tree
Hide file tree
Showing 40 changed files with 731 additions and 264 deletions.
2 changes: 1 addition & 1 deletion CHANGES.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ Bugfixes
--------

- Fix a bug introduced in v1.20.0 which caused the `synapse_port_db` script to fail. ([\#8386](https://github.com/matrix-org/synapse/issues/8386))
- Fix URLs being accidentally escaped in Jinja2 templates. Broke in v1.20.0. ([\#8394](https://github.com/matrix-org/synapse/issues/8394))
- Fix a bug introduced in v1.20.0 which caused variables to be incorrectly escaped in Jinja2 templates. ([\#8394](https://github.com/matrix-org/synapse/issues/8394))


Synapse 1.20.0 (2020-09-22)
Expand Down
1 change: 1 addition & 0 deletions changelog.d/8345.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add a configuration option that allows existing users to log in with OpenID Connect. Contributed by @BBBSnowball and @OmmyZhang.
1 change: 1 addition & 0 deletions changelog.d/8372.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add type annotations to `SimpleHttpClient`.
1 change: 1 addition & 0 deletions changelog.d/8374.bugfix
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix theoretical race condition where events are not sent down `/sync` if the synchrotron worker is restarted without restarting other workers.
1 change: 1 addition & 0 deletions changelog.d/8386.bugfix
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix a bug introduced in v1.20.0 which caused the `synapse_port_db` script to fail.
1 change: 1 addition & 0 deletions changelog.d/8387.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add experimental support for sharding event persister.
1 change: 1 addition & 0 deletions changelog.d/8388.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add `EventStreamPosition` type.
1 change: 1 addition & 0 deletions changelog.d/8396.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add experimental support for sharding event persister.
1 change: 1 addition & 0 deletions changelog.d/8398.bugfix
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fix "Re-starting finished log context" warning when receiving an event we already had over federation.
1 change: 1 addition & 0 deletions changelog.d/8405.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Consolidate the SSO error template across all configurations.
5 changes: 5 additions & 0 deletions docs/sample_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -1864,6 +1864,11 @@ oidc_config:
#
#skip_verification: true

# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
#
#allow_existing_users: true

# An external module can be provided here as a custom solution to mapping
# attributes returned from an OIDC provider onto a matrix user.
#
Expand Down
24 changes: 24 additions & 0 deletions scripts/synapse_port_db
Original file line number Diff line number Diff line change
Expand Up @@ -630,6 +630,7 @@ class Porter(object):
self.progress.set_state("Setting up sequence generators")
await self._setup_state_group_id_seq()
await self._setup_user_id_seq()
await self._setup_events_stream_seqs()

self.progress.done()
except Exception as e:
Expand Down Expand Up @@ -806,6 +807,29 @@ class Porter(object):

return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)

def _setup_events_stream_seqs(self):
def r(txn):
txn.execute("SELECT MAX(stream_ordering) FROM events")
curr_id = txn.fetchone()[0]
if curr_id:
next_id = curr_id + 1
txn.execute(
"ALTER SEQUENCE events_stream_seq RESTART WITH %s", (next_id,)
)

txn.execute("SELECT -MIN(stream_ordering) FROM events")
curr_id = txn.fetchone()[0]
if curr_id:
next_id = curr_id + 1
txn.execute(
"ALTER SEQUENCE events_backfill_stream_seq RESTART WITH %s",
(next_id,),
)

return self.postgres_store.db_pool.runInteraction(
"_setup_events_stream_seqs", r
)


##############################################
# The following is simply UI stuff
Expand Down
2 changes: 1 addition & 1 deletion synapse/appservice/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ async def _get() -> Optional[JsonDict]:
urllib.parse.quote(protocol),
)
try:
info = await self.get_json(uri, {})
info = await self.get_json(uri)

if not _is_valid_3pe_metadata(info):
logger.warning(
Expand Down
6 changes: 6 additions & 0 deletions synapse/config/oidc_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def read_config(self, config, **kwargs):
self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
self.oidc_jwks_uri = oidc_config.get("jwks_uri")
self.oidc_skip_verification = oidc_config.get("skip_verification", False)
self.oidc_allow_existing_users = oidc_config.get("allow_existing_users", False)

ump_config = oidc_config.get("user_mapping_provider", {})
ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
Expand Down Expand Up @@ -158,6 +159,11 @@ def generate_config_section(self, config_dir_path, server_name, **kwargs):
#
#skip_verification: true
# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
#
#allow_existing_users: true
# An external module can be provided here as a custom solution to mapping
# attributes returned from an OIDC provider onto a matrix user.
#
Expand Down
6 changes: 0 additions & 6 deletions synapse/config/saml2_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,12 +169,6 @@ def read_config(self, config, **kwargs):
saml2_config.get("saml_session_lifetime", "15m")
)

# We enable autoescape here as the message may potentially come from a
# remote resource
self.saml2_error_html_template = self.read_templates(
["saml_error.html"], saml2_config.get("template_dir"), autoescape=True
)[0]

def _default_saml_config_dict(
self, required_attributes: set, optional_attributes: set
):
Expand Down
70 changes: 44 additions & 26 deletions synapse/crypto/keyring.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@
)
from synapse.logging.context import (
PreserveLoggingContext,
current_context,
make_deferred_yieldable,
preserve_fn,
run_in_background,
Expand Down Expand Up @@ -233,8 +232,6 @@ async def _start_key_lookups(self, verify_requests):
"""

try:
ctx = current_context()

# map from server name to a set of outstanding request ids
server_to_request_ids = {}

Expand Down Expand Up @@ -265,12 +262,8 @@ def lookup_done(res, verify_request):

# if there are no more requests for this server, we can drop the lock.
if not server_requests:
with PreserveLoggingContext(ctx):
logger.debug("Releasing key lookup lock on %s", server_name)

# ... but not immediately, as that can cause stack explosions if
# we get a long queue of lookups.
self.clock.call_later(0, drop_server_lock, server_name)
logger.debug("Releasing key lookup lock on %s", server_name)
drop_server_lock(server_name)

return res

Expand Down Expand Up @@ -335,20 +328,32 @@ async def do_iterations():
)

# look for any requests which weren't satisfied
with PreserveLoggingContext():
for verify_request in remaining_requests:
verify_request.key_ready.errback(
SynapseError(
401,
"No key for %s with ids in %s (min_validity %i)"
% (
verify_request.server_name,
verify_request.key_ids,
verify_request.minimum_valid_until_ts,
),
Codes.UNAUTHORIZED,
)
while remaining_requests:
verify_request = remaining_requests.pop()
rq_str = (
"VerifyJsonRequest(server=%s, key_ids=%s, min_valid=%i)"
% (
verify_request.server_name,
verify_request.key_ids,
verify_request.minimum_valid_until_ts,
)
)

# If we run the errback immediately, it may cancel our
# loggingcontext while we are still in it, so instead we
# schedule it for the next time round the reactor.
#
# (this also ensures that we don't get a stack overflow if we
# has a massive queue of lookups waiting for this server).
self.clock.call_later(
0,
verify_request.key_ready.errback,
SynapseError(
401,
"Failed to find any key to satisfy %s" % (rq_str,),
Codes.UNAUTHORIZED,
),
)
except Exception as err:
# we don't really expect to get here, because any errors should already
# have been caught and logged. But if we do, let's log the error and make
Expand Down Expand Up @@ -410,10 +415,23 @@ async def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):
# key was not valid at this point
continue

with PreserveLoggingContext():
verify_request.key_ready.callback(
(server_name, key_id, fetch_key_result.verify_key)
)
# we have a valid key for this request. If we run the callback
# immediately, it may cancel our loggingcontext while we are still in
# it, so instead we schedule it for the next time round the reactor.
#
# (this also ensures that we don't get a stack overflow if we had
# a massive queue of lookups waiting for this server).
logger.debug(
"Found key %s:%s for %s",
server_name,
key_id,
verify_request.request_name,
)
self.clock.call_later(
0,
verify_request.key_ready.callback,
(server_name, key_id, fetch_key_result.verify_key),
)
completed.append(verify_request)
break

Expand Down
16 changes: 10 additions & 6 deletions synapse/handlers/federation.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@
from synapse.types import (
JsonDict,
MutableStateMap,
PersistedEventPosition,
RoomStreamToken,
StateMap,
UserID,
get_domain_from_id,
Expand Down Expand Up @@ -2966,7 +2968,7 @@ async def persist_events_and_notify(
)
return result["max_stream_id"]
else:
max_stream_id = await self.storage.persistence.persist_events(
max_stream_token = await self.storage.persistence.persist_events(
event_and_contexts, backfilled=backfilled
)

Expand All @@ -2977,12 +2979,12 @@ async def persist_events_and_notify(

if not backfilled: # Never notify for backfilled events
for event, _ in event_and_contexts:
await self._notify_persisted_event(event, max_stream_id)
await self._notify_persisted_event(event, max_stream_token)

return max_stream_id
return max_stream_token.stream

async def _notify_persisted_event(
self, event: EventBase, max_stream_id: int
self, event: EventBase, max_stream_token: RoomStreamToken
) -> None:
"""Checks to see if notifier/pushers should be notified about the
event or not.
Expand All @@ -3008,9 +3010,11 @@ async def _notify_persisted_event(
elif event.internal_metadata.is_outlier():
return

event_stream_id = event.internal_metadata.stream_ordering
event_pos = PersistedEventPosition(
self._instance_name, event.internal_metadata.stream_ordering
)
self.notifier.on_new_room_event(
event, event_stream_id, max_stream_id, extra_users=extra_users
event, event_pos, max_stream_token, extra_users=extra_users
)

async def _clean_room_for_join(self, room_id: str) -> None:
Expand Down
6 changes: 3 additions & 3 deletions synapse/handlers/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -1141,7 +1141,7 @@ def is_inviter_member_event(e):
if prev_state_ids:
raise AuthError(403, "Changing the room create event is forbidden")

event_stream_id, max_stream_id = await self.storage.persistence.persist_event(
event_pos, max_stream_token = await self.storage.persistence.persist_event(
event, context=context
)

Expand All @@ -1152,7 +1152,7 @@ def is_inviter_member_event(e):
def _notify():
try:
self.notifier.on_new_room_event(
event, event_stream_id, max_stream_id, extra_users=extra_users
event, event_pos, max_stream_token, extra_users=extra_users
)
except Exception:
logger.exception("Error notifying about new room event")
Expand All @@ -1164,7 +1164,7 @@ def _notify():
# matters as sometimes presence code can take a while.
run_in_background(self._bump_active_time, requester.user)

return event_stream_id
return event_pos.stream

async def _bump_active_time(self, user: UserID) -> None:
try:
Expand Down
42 changes: 27 additions & 15 deletions synapse/handlers/oidc_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ def __init__(self, hs: "HomeServer"):
hs.config.oidc_user_mapping_provider_config
) # type: OidcMappingProvider
self._skip_verification = hs.config.oidc_skip_verification # type: bool
self._allow_existing_users = hs.config.oidc_allow_existing_users # type: bool

self._http_client = hs.get_proxied_http_client()
self._auth_handler = hs.get_auth_handler()
Expand Down Expand Up @@ -849,7 +850,8 @@ async def _map_userinfo_to_user(
If we don't find the user that way, we should register the user,
mapping the localpart and the display name from the UserInfo.
If a user already exists with the mxid we've mapped, raise an exception.
If a user already exists with the mxid we've mapped and allow_existing_users
is disabled, raise an exception.
Args:
userinfo: an object representing the user
Expand Down Expand Up @@ -905,21 +907,31 @@ async def _map_userinfo_to_user(

localpart = map_username_to_mxid_localpart(attributes["localpart"])

user_id = UserID(localpart, self._hostname)
if await self._datastore.get_users_by_id_case_insensitive(user_id.to_string()):
# This mxid is taken
raise MappingException(
"mxid '{}' is already taken".format(user_id.to_string())
user_id = UserID(localpart, self._hostname).to_string()
users = await self._datastore.get_users_by_id_case_insensitive(user_id)
if users:
if self._allow_existing_users:
if len(users) == 1:
registered_user_id = next(iter(users))
elif user_id in users:
registered_user_id = user_id
else:
raise MappingException(
"Attempted to login as '{}' but it matches more than one user inexactly: {}".format(
user_id, list(users.keys())
)
)
else:
# This mxid is taken
raise MappingException("mxid '{}' is already taken".format(user_id))
else:
# It's the first time this user is logging in and the mapped mxid was
# not taken, register the user
registered_user_id = await self._registration_handler.register_user(
localpart=localpart,
default_display_name=attributes["display_name"],
user_agent_ips=(user_agent, ip_address),
)

# It's the first time this user is logging in and the mapped mxid was
# not taken, register the user
registered_user_id = await self._registration_handler.register_user(
localpart=localpart,
default_display_name=attributes["display_name"],
user_agent_ips=(user_agent, ip_address),
)

await self._datastore.record_user_external_id(
self._auth_provider_id, remote_user_id, registered_user_id,
)
Expand Down
Loading

0 comments on commit 5b0b103

Please sign in to comment.