Skip to content

Commit

Permalink
Better logging for subscriptions (linera-io#2735)
Browse files (browse the repository at this point in the history)
* increase logging for subscriptions

* Update linera-rpc/src/grpc/client.rs

Co-authored-by: Janito Vaqueiro Ferreira Filho <[email protected]>
Signed-off-by: Mathieu Baudet <[email protected]>
Loading branch information…
ma2bd and jvff committed Oct 30, 2024
1 parent 87ae4cd commit a374a43
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 8 deletions.
4 changes: 3 additions & 1 deletion linera-core/src/client/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3210,7 +3210,9 @@ where
})
.filter_map(move |result| async move {
if let Err(error) = &result {
info!(?error, "Could not connect to validator {name}");
warn!(?error, "Could not connect to validator {name}");
} else {
info!("Connected to validator {name}");
}
result.ok()
})
Expand Down
19 changes: 12 additions & 7 deletions linera-rpc/src/grpc/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,18 +67,18 @@ impl GrpcClient {

/// Returns whether this gRPC status means the server stream should be reconnected to, or not.
/// Logs a warning on unexpected status codes.
fn is_retryable(status: &Status) -> bool {
fn is_retryable(status: &Status, address: &str) -> bool {
match status.code() {
Code::DeadlineExceeded | Code::Aborted | Code::Unavailable | Code::Unknown => {
info!("gRPC request interrupted: {}; retrying", status);
info!("gRPC request to {address} interrupted: {status}; retrying");
true
}
Code::Ok
| Code::Cancelled
| Code::NotFound
| Code::AlreadyExists
| Code::ResourceExhausted => {
error!("Unexpected gRPC status: {}; retrying", status);
error!("gRPC request to {address} interrupted: {status}; retrying");
true
}
Code::InvalidArgument
Expand All @@ -89,7 +89,7 @@ impl GrpcClient {
| Code::Internal
| Code::DataLoss
| Code::Unauthenticated => {
error!("Unexpected gRPC status: {}", status);
error!("Unexpected gRPC status received from {address}: {status}");
false
}
}
Expand All @@ -100,6 +100,7 @@ impl GrpcClient {
f: F,
request: impl TryInto<R> + fmt::Debug + Clone,
handler: &str,
address: &str,
) -> Result<S, NodeError>
where
F: Fn(ValidatorNodeClient<transport::Channel>, Request<R>) -> Fut,
Expand All @@ -113,15 +114,17 @@ impl GrpcClient {
})?;
loop {
match f(self.client.clone(), Request::new(request_inner.clone())).await {
Err(s) if Self::is_retryable(&s) && retry_count < self.max_retries => {
Err(s) if Self::is_retryable(&s, address) && retry_count < self.max_retries => {
let delay = self.retry_delay.saturating_mul(retry_count);
retry_count += 1;
linera_base::time::timer::sleep(delay).await;
continue;
}
Err(s) => {
return Err(NodeError::GrpcError {
error: format!("remote request [{handler}] failed with status: {s:?}",),
error: format!(
"remote request [{handler}] to {address} failed with status: {s:?}",
),
});
}
Ok(result) => return Ok(result.into_inner()),
Expand Down Expand Up @@ -160,6 +163,7 @@ macro_rules! client_delegate {
|mut client, req| async move { client.$handler(req).await },
$req,
stringify!($handler),
&$self.address,
)
.await
}};
Expand Down Expand Up @@ -257,6 +261,7 @@ impl ValidatorNode for GrpcClient {

// The stream of `Notification`s that inserts increasing delays after retriable errors, and
// terminates after unexpected or fatal errors.
let address = self.address.clone();
let notification_stream = endlessly_retrying_notification_stream
.map(|result| {
Option::<Notification>::try_from(result?).map_err(|err| {
Expand All @@ -269,7 +274,7 @@ impl ValidatorNode for GrpcClient {
retry_count = 0;
return future::Either::Left(future::ready(true));
};
if !Self::is_retryable(status) || retry_count >= max_retries {
if !Self::is_retryable(status, &address) || retry_count >= max_retries {
return future::Either::Left(future::ready(false));
}
let delay = retry_delay.saturating_mul(retry_count);
Expand Down

0 comments on commit a374a43

Please sign in to comment.