Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Report reason for shutdown in collection.rs #2429

Merged
merged 5 commits into from
Jun 27, 2022
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 40 additions & 10 deletions src/libp2p/collection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
//! the calls to [`Network::inject_connection_message`].
//!

use super::connection::{established, NoiseKey};
use super::connection::{established, handshake, NoiseKey};
use alloc::{
collections::{BTreeMap, BTreeSet, VecDeque},
string::String,
Expand All @@ -65,6 +65,7 @@ use rand_chacha::{rand_core::SeedableRng as _, ChaCha20Rng};
pub use super::peer_id::PeerId;
pub use super::read_write::ReadWrite;
pub use established::{ConfigRequestResponse, ConfigRequestResponseIn, InboundError};
pub use handshake::HandshakeError;

pub use multi_stream::MultiStreamConnectionTask;
pub use single_stream::SingleStreamConnectionTask;
Expand Down Expand Up @@ -972,7 +973,14 @@ where
let connection = &mut self.connections.get_mut(&connection_id).unwrap();

break Some(match message {
ConnectionToCoordinatorInner::StartShutdown => {
ConnectionToCoordinatorInner::StartShutdown(reason) => {
// The `ConnectionToCoordinator` message contains a shutdown reason if
// and only if it sends `StartShutdown` as a response to a shutdown
// initiated by the remote. If the shutdown was initiated locally
// (`api_initiated` is `true`), then it can contain `None`, but it can also
// contain `Some` in case the shutdown was initiated by the remote at the same
// time as it was initiated locally.

let report_event = match &mut connection.state {
InnerConnectionState::ShuttingDown {
api_initiated: true,
Expand Down Expand Up @@ -1007,7 +1015,10 @@ where
// confirmation.
continue;
} else {
Event::StartShutdown { id: connection_id }
Event::StartShutdown {
id: connection_id,
reason: reason.unwrap(), // See comment above.
}
}
}
ConnectionToCoordinatorInner::ShutdownFinished => {
Expand Down Expand Up @@ -1434,10 +1445,11 @@ enum ConnectionToCoordinatorInner {
/// See the corresponding event in [`established::Event`].
PingOutFailed,

/// Sent either in response to [`ConnectionToCoordinatorInner::StartShutdown`] or if the
/// remote has initiated the shutdown. After this, no more [`ConnectionToCoordinatorInner`]
/// will be sent anymore except for [`ConnectionToCoordinatorInner::ShutdownFinished`].
StartShutdown,
/// Sent either in response to [`CoordinatorToConnectionInner::StartShutdown`] (in which case
/// the content is `None`) or if the remote has initiated the shutdown (in which case the
/// content is `Some`). After this, no more [`ConnectionToCoordinatorInner`] will be sent
/// anymore except for [`ConnectionToCoordinatorInner::ShutdownFinished`].
StartShutdown(Option<ShutdownCause>),

/// Shutdown has now finished. Always sent after
/// [`ConnectionToCoordinatorInner::StartShutdown`]. No message is sent by the connection
Expand Down Expand Up @@ -1535,13 +1547,16 @@ pub enum Event<TConn> {
/// handshake.
///
/// This event is **not** generated when [`Network::start_shutdown`] is called.
// TODO: add reason for shutdown?
StartShutdown { id: ConnectionId },
StartShutdown {
id: ConnectionId,
/// Reason why the connection is starting its shutdown. Because this event is not generated
/// when the shutdown is initiated locally, the reason is always caused by the remote.
reason: ShutdownCause,
},

/// A transport-level connection (e.g. a TCP socket) has been shut down.
///
/// This [`ConnectionId`] is no longer valid, and using it will result in panics.
// TODO: add reason for shutdown?
Shutdown {
id: ConnectionId,
was_established: bool,
Expand Down Expand Up @@ -1650,6 +1665,21 @@ pub enum Event<TConn> {
PingOutFailed { id: ConnectionId },
}

/// Reason why a connection is shutting down. See [`Event::StartShutdown`].
///
/// Carried in [`Event::StartShutdown`], which is only generated for shutdowns that were not
/// initiated locally — so every cause here is attributable to the remote, to a protocol
/// error, or to a handshake timeout.
#[derive(Debug, derive_more::Display)]
pub enum ShutdownCause {
    /// Shutdown was demanded by the remote and performed cleanly.
    CleanShutdown,
    /// Remote has abruptly reset the connection.
    RemoteReset,
    /// Error in the connection protocol of a fully established connection.
    ProtocolError(established::Error),
    /// Error in the protocol of the handshake.
    HandshakeError(HandshakeError),
    /// Handshake phase took too long.
    HandshakeTimeout,
}

#[derive(Debug, derive_more::Display, Clone)]
pub enum RequestError {
/// Request has been canceled because the connection as a whole is being shut down.
Expand Down
27 changes: 14 additions & 13 deletions src/libp2p/collection/multi_stream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ use super::{
super::{connection::established, read_write::ReadWrite},
ConfigRequestResponse, ConnectionToCoordinator, ConnectionToCoordinatorInner,
CoordinatorToConnection, CoordinatorToConnectionInner, NotificationsOutErr, OverlayNetwork,
SubstreamId,
ShutdownCause, SubstreamId,
};

use alloc::{string::ToString as _, sync::Arc};
Expand Down Expand Up @@ -58,9 +58,9 @@ enum MultiStreamConnectionTaskInner<TNow, TSubId> {
/// of the behavior but is used to make sure that the API is used correctly.
was_api_reset: bool,

/// `true` if the [`ConnectionToCoordinatorInner::StartShutdown`] message has already
/// been sent to the coordinator.
start_shutdown_message_sent: bool,
/// `None` if the [`ConnectionToCoordinatorInner::StartShutdown`] message has already
/// been sent to the coordinator. `Some` if the message hasn't been sent yet.
start_shutdown_message_to_send: Option<Option<ShutdownCause>>,

/// `true` if the [`ConnectionToCoordinatorInner::ShutdownFinished`] message has already
/// been sent to the coordinator.
Expand Down Expand Up @@ -251,21 +251,20 @@ where
)
}
MultiStreamConnectionTaskInner::ShutdownWaitingAck {
start_shutdown_message_sent,
start_shutdown_message_to_send,
shutdown_finish_message_sent,
..
} => {
if !*start_shutdown_message_sent {
if let Some(reason) = start_shutdown_message_to_send.take() {
debug_assert!(!*shutdown_finish_message_sent);
*start_shutdown_message_sent = true;
(
Some(self),
Some(ConnectionToCoordinator {
inner: ConnectionToCoordinatorInner::StartShutdown,
inner: ConnectionToCoordinatorInner::StartShutdown(reason),
}),
)
} else if !*shutdown_finish_message_sent {
debug_assert!(*start_shutdown_message_sent);
debug_assert!(start_shutdown_message_to_send.is_none());
*shutdown_finish_message_sent = true;
(
Some(self),
Expand Down Expand Up @@ -410,7 +409,7 @@ where
) => {
// TODO: implement proper shutdown
self.connection = MultiStreamConnectionTaskInner::ShutdownWaitingAck {
start_shutdown_message_sent: false,
start_shutdown_message_to_send: Some(None),
shutdown_finish_message_sent: false,
was_api_reset: false,
};
Expand Down Expand Up @@ -450,12 +449,14 @@ where
(
CoordinatorToConnectionInner::ShutdownFinishedAck,
MultiStreamConnectionTaskInner::ShutdownWaitingAck {
start_shutdown_message_sent,
start_shutdown_message_to_send: start_shutdown_message_sent,
shutdown_finish_message_sent,
was_api_reset: was_reset,
},
) => {
debug_assert!(*start_shutdown_message_sent && *shutdown_finish_message_sent);
debug_assert!(
start_shutdown_message_sent.is_none() && *shutdown_finish_message_sent
);
self.connection = MultiStreamConnectionTaskInner::ShutdownAcked {
was_api_reset: *was_reset,
};
Expand Down Expand Up @@ -568,7 +569,7 @@ where
self.connection = MultiStreamConnectionTaskInner::ShutdownWaitingAck {
was_api_reset: true,
shutdown_finish_message_sent: false,
start_shutdown_message_sent: false,
start_shutdown_message_to_send: Some(Some(ShutdownCause::RemoteReset)),
};
}

Expand Down
38 changes: 23 additions & 15 deletions src/libp2p/collection/single_stream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use super::{
read_write::ReadWrite,
},
ConnectionToCoordinator, ConnectionToCoordinatorInner, CoordinatorToConnection,
CoordinatorToConnectionInner, NotificationsOutErr, OverlayNetwork, SubstreamId,
CoordinatorToConnectionInner, NotificationsOutErr, OverlayNetwork, ShutdownCause, SubstreamId,
};

use alloc::{collections::VecDeque, string::ToString as _, sync::Arc};
Expand Down Expand Up @@ -328,7 +328,7 @@ where
) => {
// TODO: implement proper shutdown
self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
.push_back(ConnectionToCoordinatorInner::StartShutdown(None));
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand Down Expand Up @@ -416,7 +416,9 @@ where
}

self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
.push_back(ConnectionToCoordinatorInner::StartShutdown(Some(
ShutdownCause::RemoteReset,
)));
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand Down Expand Up @@ -455,9 +457,11 @@ where
} => match established.read_write(read_write) {
Ok((connection, event)) => {
if read_write.is_dead() && event.is_none() {
// TODO: provide error
self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
self.pending_messages.push_back(
ConnectionToCoordinatorInner::StartShutdown(Some(
ShutdownCause::CleanShutdown,
)),
);
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand Down Expand Up @@ -574,10 +578,11 @@ where
outbound_substreams_reverse,
};
}
Err(_err) => {
// TODO: provide error
Err(err) => {
self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
.push_back(ConnectionToCoordinatorInner::StartShutdown(Some(
ShutdownCause::ProtocolError(err),
)));
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand Down Expand Up @@ -607,9 +612,10 @@ where
// guarantees that no horrendously slow connections can accidentally make their
// way through.
if timeout < read_write.now {
// TODO: provide error: ConnectionError::Handshake(HandshakeError::Timeout)
self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
.push_back(ConnectionToCoordinatorInner::StartShutdown(Some(
ShutdownCause::HandshakeTimeout,
)));
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand All @@ -628,10 +634,12 @@ where

let result = match handshake.read_write(read_write) {
Ok(rw) => rw,
Err(_err) => {
// TODO: provide error: ConnectionError::Handshake(HandshakeError::Protocol(err))
self.pending_messages
.push_back(ConnectionToCoordinatorInner::StartShutdown);
Err(err) => {
self.pending_messages.push_back(
ConnectionToCoordinatorInner::StartShutdown(Some(
ShutdownCause::HandshakeError(err),
)),
);
self.pending_messages
.push_back(ConnectionToCoordinatorInner::ShutdownFinished);
self.connection = SingleStreamConnectionTaskInner::ShutdownWaitingAck {
Expand Down
3 changes: 2 additions & 1 deletion src/libp2p/peers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -406,7 +406,8 @@ where
});
}

collection::Event::StartShutdown { id } => {
collection::Event::StartShutdown { id, .. } => {
// TODO: report the shutdown reason in the API; should be done after https://github.com/paritytech/smoldot/issues/2370
// TODO: this is O(n)
for (_, item) in &mut self.desired_out_notifications {
if let Some((_, connection_id, _)) = item.as_ref() {
Expand Down