From 11cae619480f481a1adf05d0555873425f3990c7 Mon Sep 17 00:00:00 2001 From: Lorenzo Delgado Date: Tue, 13 Feb 2024 17:31:08 +0100 Subject: [PATCH] feat(gateway-framework): add streamingfast blockmeta service client --- Cargo.lock | 221 ++++- gateway-framework/Cargo.toml | 13 +- gateway-framework/build.rs | 69 ++ .../proto/sf/blockmeta/v2/blockmeta.proto | 46 + gateway-framework/src/chains/ethereum.rs | 5 +- .../chains/ethereum/sf_blockmeta_client.rs | 181 ++++ .../sf_blockmeta_client/sf.blockmeta.v2.rs | 817 ++++++++++++++++++ .../tests/it_chains_ethereum_sf_blockmeta.rs | 157 ++++ 8 files changed, 1497 insertions(+), 12 deletions(-) create mode 100644 gateway-framework/build.rs create mode 100644 gateway-framework/proto/sf/blockmeta/v2/blockmeta.proto create mode 100644 gateway-framework/src/chains/ethereum/sf_blockmeta_client.rs create mode 100644 gateway-framework/src/chains/ethereum/sf_blockmeta_client/sf.blockmeta.v2.rs create mode 100644 gateway-framework/tests/it_chains_ethereum_sf_blockmeta.rs diff --git a/Cargo.lock b/Cargo.lock index 82b5e93e5..34ed8102c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1908,6 +1908,8 @@ dependencies = [ "ordered-float", "primitive-types", "prometheus", + "prost", + "prost-types", "rand", "receipts", "reqwest", @@ -1917,9 +1919,12 @@ dependencies = [ "serde_with", "siphasher 1.0.0", "tap_core", + "test-with", "thegraph", "thiserror", "tokio", + "tonic", + "tonic-build", "toolshed", "tracing", "tracing-subscriber", @@ -2361,9 +2366,21 @@ dependencies = [ "futures-util", "http 0.2.11", "hyper", - "rustls", + "rustls 0.21.10", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", "tokio", - "tokio-rustls", + "tokio-io-timeout", ] [[package]] @@ -2877,6 +2894,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + [[package]] name = "native-tls" version = "0.2.11" @@ -3617,6 +3640,28 @@ dependencies = [ "prost-derive", ] +[[package]] +name = "prost-build" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" +dependencies = [ + "bytes", + "heck", + "itertools 0.11.0", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 2.0.48", + "tempfile", + "which", +] + [[package]] name = "prost-derive" version = "0.12.3" @@ -3630,6 +3675,15 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "prost-types" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +dependencies = [ + "prost", +] + [[package]] name = "protobuf" version = "2.28.0" @@ -3858,8 +3912,8 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls", - "rustls-pemfile", + "rustls 0.21.10", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", @@ -3867,7 +3921,7 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -4065,10 +4119,37 @@ checksum = 
"f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", "ring 0.17.7", - "rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +dependencies = [ + "log", + "ring 0.17.7", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.0.0", + "rustls-pki-types", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pemfile" version = "1.0.4" @@ -4078,6 +4159,22 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4" +dependencies = [ + "base64 0.21.7", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a716eb65e3158e90e17cd93d855216e27bde02745ab842f2cab4a39dba1bacf" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -4088,6 +4185,17 @@ dependencies = [ "untrusted 0.9.0", ] +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring 0.17.7", + "rustls-pki-types", + "untrusted 0.9.0", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -4838,6 +4946,18 @@ dependencies = [ "winapi", ] +[[package]] +name = "test-with" +version = "0.12.4" +source = "git+https://github.com/LNSD/test-with?rev=8c7ce82#8c7ce82ac9b882741b9015f7e12414d37f403449" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "regex", + "syn 2.0.48", +] + [[package]] name = "thegraph" version = "0.5.0" @@ -4969,6 +5089,16 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-macros" version = "2.2.0" @@ -4996,7 +5126,18 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.10", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.2", + "rustls-pki-types", "tokio", ] @@ -5032,9 +5173,9 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls", + "rustls 0.21.10", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tungstenite", "webpki-roots", ] @@ -5109,6 +5250,50 @@ dependencies = [ "winnow", ] +[[package]] +name = "tonic" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "h2", + "http 0.2.11", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "rustls-native-certs", + "rustls-pemfile 2.0.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-build" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4ef6dd70a610078cb4e338a0f79d06bc759ff1b22d2120c2ff02ae264ba9c2" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build", + "quote", + "syn 2.0.48", +] + [[package]] name = "toolshed" version = "0.5.0" @@ -5126,9 +5311,13 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", + "indexmap 1.9.3", "pin-project", "pin-project-lite", + "rand", + "slab", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -5283,7 +5472,7 @@ dependencies = [ "httparse", "log", "rand", - "rustls", + "rustls 0.21.10", "sha1", "thiserror", "url", @@ -5550,6 +5739,18 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "widestring" version = "1.0.2" diff --git a/gateway-framework/Cargo.toml b/gateway-framework/Cargo.toml index 3828d0d36..f1bead9dc 100644 --- a/gateway-framework/Cargo.toml +++ b/gateway-framework/Cargo.toml @@ -3,6 +3,10 @@ edition = "2021" name = "gateway-framework" version = "0.0.1" +[features] +# Enable to run protobuf messages code generation at compile time (requires protoc, etc.). +proto-gen = [] + [dependencies] alloy-primitives.workspace = true alloy-sol-types = "0.6.2" @@ -21,6 +25,8 @@ maxminddb = "0.24" ordered-float = "4.2.0" primitive-types.workspace = true prometheus = "0.13.3" +prost = "0.12.3" +prost-types = "0.12.3" rand.workspace = true receipts.workspace = true reqwest.workspace = true @@ -30,12 +36,17 @@ serde_json = "1.0.113" serde_with = "3.6.1" siphasher.workspace = true tap_core = { git = "https://github.com/semiotic-ai/timeline-aggregation-protocol.git", rev = "aa973d1" } -thegraph.workspace = true +thegraph = { workspace = true, features = ["subgraph-client"] } thiserror.workspace = true tokio.workspace = true +tonic = { version = "0.11.0", features = ["tls", "tls-roots"] } toolshed.workspace = true tracing.workspace = true tracing-subscriber.workspace = true [dev-dependencies] assert_matches = "1.5.0" +test-with = { git = "https://github.com/LNSD/test-with", rev = "8c7ce82", default-features = false } + +[build-dependencies] +tonic-build = "0.11.0" diff --git a/gateway-framework/build.rs b/gateway-framework/build.rs new file mode 100644 index 000000000..bed38ccc6 --- /dev/null +++ b/gateway-framework/build.rs @@ -0,0 +1,69 @@ +use std::env; +use std::path::PathBuf; +use std::process::Command; + +/// Return the path to root of the crate being built. 
+/// +/// The `CARGO_MANIFEST_DIR` env variable contains the path to the directory containing the +/// manifest for the package being built (the package containing the build script). Also note that +/// this is the value of the current working directory of the build script when it starts. +/// +/// https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts +fn root_dir() -> PathBuf { + PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()) +} + +/// Check if all the build requirements are met. +/// +/// This function checks if the following tools are installed: +/// - protoc (required by `prost-build`, see: https://github.com/tokio-rs/prost#protoc) +fn check_build_requirements() -> Result<(), String> { + let mut errors = vec![]; + + // Check if protoc is installed. + let protoc = Command::new("protoc").arg("--version").status().unwrap(); + if !protoc.success() { + errors.push( + "protoc not found. Please install protoc: https://grpc.io/docs/protoc-installation/", + ); + } + + if !errors.is_empty() { + return Err(format!( + "Build requirements not met:\n - {}", + errors.join("\n - ") + )); + } + + Ok(()) +} + +fn main() { + // Run code generation only if 'proto-gen' feature is enabled. + if env::var("CARGO_FEATURE_PROTO_GEN").is_ok() { + // Check if all the build requirements are met. + if let Err(err) = check_build_requirements() { + panic!("{}", err); + } + + let src_dir = root_dir().join("src"); + let proto_dir = root_dir().join("proto"); + + // Streamingfast Blockmeta service gRPC proto files + let sf_blockmeta_proto_dir = proto_dir.join("sf/blockmeta/v2"); + let sf_blockmeta_src_dir = src_dir.join("chains/ethereum/sf_blockmeta_client"); + + let status = tonic_build::configure() + .build_client(true) + .out_dir(sf_blockmeta_src_dir) + .emit_rerun_if_changed(true) + .compile( + &[sf_blockmeta_proto_dir.join("blockmeta.proto")], + &[sf_blockmeta_proto_dir], + ); + + if let Err(err) = status { + panic!("Protobuf code generation failed: {}", err); + } + } +} diff --git a/gateway-framework/proto/sf/blockmeta/v2/blockmeta.proto b/gateway-framework/proto/sf/blockmeta/v2/blockmeta.proto new file mode 100644 index 000000000..96a595d2f --- /dev/null +++ b/gateway-framework/proto/sf/blockmeta/v2/blockmeta.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; +package sf.blockmeta.v2; +import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/streamingfast/blockmeta-service;pbbmsrv"; + +service Block { + rpc NumToID (NumToIDReq) returns (BlockResp); + rpc IDToNum(IDToNumReq) returns (BlockResp); + rpc Head(Empty) returns (BlockResp); +} + +message Empty {} + +service BlockByTime { + rpc At (TimeReq) returns (BlockResp); + rpc After (RelativeTimeReq) returns (BlockResp); + rpc Before (RelativeTimeReq) returns (BlockResp); +} + +// Block Requests +message NumToIDReq { + uint64 blockNum = 1; +} + +message IDToNumReq { + string blockID = 1; +} + +// Block & BlockByTime Responses +message BlockResp { + string id = 1; + uint64 num = 2; + google.protobuf.Timestamp time = 3; +} + +// BlockByTime Requests +message TimeReq { + google.protobuf.Timestamp time = 1; +} + +message RelativeTimeReq { + google.protobuf.Timestamp time = 1; + bool inclusive = 2; +} + diff --git a/gateway-framework/src/chains/ethereum.rs b/gateway-framework/src/chains/ethereum.rs index 06505967f..e3b123ed4 100644 --- a/gateway-framework/src/chains/ethereum.rs +++ b/gateway-framework/src/chains/ethereum.rs @@ -9,9 +9,12 @@ use tokio::time::interval; use 
toolshed::url::Url; use tracing::Instrument; -use super::{BlockHead, ClientMsg, UnresolvedBlock}; use crate::{config, metrics::METRICS}; +use super::{BlockHead, ClientMsg, UnresolvedBlock}; + +pub mod sf_blockmeta_client; + pub struct Client { chain: config::Chain, http_client: reqwest::Client, diff --git a/gateway-framework/src/chains/ethereum/sf_blockmeta_client.rs b/gateway-framework/src/chains/ethereum/sf_blockmeta_client.rs new file mode 100644 index 000000000..074e8c018 --- /dev/null +++ b/gateway-framework/src/chains/ethereum/sf_blockmeta_client.rs @@ -0,0 +1,181 @@ +//! StreamingFast Blockmeta gRPC client. + +use std::time::Duration; + +use alloy_primitives::bytes::Bytes; +use thegraph::types::{BlockHash, BlockNumber}; +use tonic::codegen::{Body, InterceptedService, StdError}; +use tonic::transport::{Channel, Uri}; + +pub use self::auth::AuthInterceptor; +use self::gen::block_client::BlockClient; +pub use self::gen::BlockResp as Block; +use self::gen::Empty; +use self::gen::IdToNumReq; +use self::gen::NumToIdReq; + +/// These files are **generated** by the `build.rs` when compiling the crate with the `proto-gen` +/// feature enabled. The `build.rs` script uses the `tonic-build` crate to generate the files. +/// +/// ```shell +/// cargo build -p gateway-framework --features proto-gen +/// ``` +mod gen { + include!("sf_blockmeta_client/sf.blockmeta.v2.rs"); +} + +mod auth { + use tonic::{Request, Status}; + + /// The `AuthInterceptor` is a gRPC interceptor that adds an `authorization` header to the request + /// metadata. + /// + /// This middleware inserts the `authorization` header into the request metadata. The header is + /// expected to be in the format `Bearer `. + /// + /// It is used to authenticate requests to the StreamingFast Blockmeta service. + pub struct AuthInterceptor { + header_value: String, + } + + impl AuthInterceptor { + /// Create a new `AuthInterceptor` with the given authorization token. + pub(super) fn with_token(token: &str) -> Self { + Self { + header_value: format!("bearer {}", token), + } + } + } + + impl tonic::service::Interceptor for AuthInterceptor { + fn call(&mut self, mut request: Request<()>) -> Result, Status> { + // The `authorization` header is expected to be in the format `Bearer ` + let auth = self.header_value.parse().map_err(|err| { + Status::new( + tonic::Code::Unauthenticated, + format!("invalid authorization token: {}", err), + ) + })?; + + // Insert the `authorization` header into the request metadata + request.metadata_mut().insert("authorization", auth); + Ok(request) + } + } +} + +/// StreamingFast Blockmeta gRPC client. +/// +/// The `SfBlockmetaClient` is a gRPC client for the StreamingFast Blockmeta service. It provides +/// methods to fetch blocks by hash, number, and the latest block. +#[derive(Debug, Clone)] +pub struct SfBlockmetaClient { + rpc_client: BlockClient, +} + +impl SfBlockmetaClient { + /// Create a new `SfBlockmetaClient` with the given gRPC endpoint. + /// + /// The service will connect once the first request is made. It will attempt to connect for + /// 5 seconds before timing out. + pub fn new(endpoint: Uri) -> Self { + let channel = Channel::builder(endpoint) + .tls_config(Default::default()) + .expect("failed to configure TLS") + .connect_timeout(Duration::from_secs(5)) + .connect_lazy(); + Self { + rpc_client: BlockClient::new(channel), + } + } +} + +impl SfBlockmetaClient> { + /// Create a new `SfBlockmetaClient` with the given gRPC endpoint and authorization token. 
+ /// + /// The client will connect to the given endpoint and authenticate requests with the given + /// authorization token inserted into the `authorization` header by the [`AuthInterceptor`]. + /// + /// The service will connect once the first request is made. It will attempt to connect for + /// 5 seconds before timing out. + pub fn new_with_auth(endpoint: Uri, auth: impl AsRef) -> Self { + let interceptor = AuthInterceptor::with_token(auth.as_ref()); + let channel = Channel::builder(endpoint) + .tls_config(Default::default()) + .expect("failed to configure TLS") + .connect_timeout(Duration::from_secs(5)) + .connect_lazy(); + + Self { + rpc_client: BlockClient::with_interceptor(channel, interceptor), + } + } +} + +impl SfBlockmetaClient +where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, +{ + /// Fetch the latest block from the StreamingFast Blockmeta service. + /// + /// Returns `None` if the block does not exist. + pub async fn get_latest_block(&mut self) -> anyhow::Result> { + let request = Empty {}; + + match self.rpc_client.head(request).await { + Ok(res) => Ok(Some(res.into_inner())), + Err(err) if err.code() == tonic::Code::NotFound => Ok(None), + Err(err) => Err(anyhow::anyhow!("request failed: {}", err.message())), + } + } + + /// Fetch the block with the given hash from the StreamingFast Blockmeta service. + /// + /// - `hash`: The block hash to fetch. + /// + /// Returns `None` if the block does not exist. + pub async fn get_block_by_hash(&mut self, hash: BlockHash) -> anyhow::Result> { + let request = IdToNumReq { + block_id: format!("{:x}", hash), // Convert the block hash to a non-0x-prefixed hex string + }; + + match self.rpc_client.id_to_num(request).await { + Ok(res) => Ok(Some(res.into_inner())), + Err(err) if err.code() == tonic::Code::NotFound => { + println!("request failed: {:?}", err); + Ok(None) + } + Err(err) => { + println!("request failed: {:?}", err); + Err(anyhow::anyhow!("request failed: {}", err.message())) + } + } + } + + /// Fetch the block with the given number from the StreamingFast Blockmeta service. + /// + /// - `number`: The block number to fetch. + /// + /// Returns `None` if the block does not exist.
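For orientation, a minimal usage sketch of the wrapper introduced above (not part of the patch): the endpoint URI and bearer token below are placeholders, and errors are simply propagated with `anyhow`.

```rust
use gateway_framework::chains::ethereum::sf_blockmeta_client::SfBlockmetaClient;
use tonic::transport::Uri;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Placeholder endpoint and token; substitute a real StreamingFast Blockmeta
    // deployment and API token.
    let endpoint: Uri = "https://blockmeta.example.com:443".parse()?;
    let mut client = SfBlockmetaClient::new_with_auth(endpoint, "my-api-token");

    // `Head`: the latest block known to the service.
    if let Some(head) = client.get_latest_block().await? {
        println!("chain head: #{} ({})", head.num, head.id);
    }

    // `NumToID`: resolve a block number; `Ok(None)` means the service reported `NotFound`.
    let block = client.get_block_by_number(15_537_394).await?;
    println!("block by number: {:?}", block);

    Ok(())
}
```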
+ pub async fn get_block_by_number( + &mut self, + number: BlockNumber, + ) -> anyhow::Result> { + let request = NumToIdReq { block_num: number }; + + match self.rpc_client.num_to_id(request).await { + Ok(res) => Ok(Some(res.into_inner())), + Err(err) if err.code() == tonic::Code::NotFound => { + println!("request failed: {:?}", err); + Ok(None) + } + Err(err) => { + println!("request failed: {:?}", err); + Err(anyhow::anyhow!("request failed: {}", err.message())) + } + } + } +} diff --git a/gateway-framework/src/chains/ethereum/sf_blockmeta_client/sf.blockmeta.v2.rs b/gateway-framework/src/chains/ethereum/sf_blockmeta_client/sf.blockmeta.v2.rs new file mode 100644 index 000000000..64c4c0cb5 --- /dev/null +++ b/gateway-framework/src/chains/ethereum/sf_blockmeta_client/sf.blockmeta.v2.rs @@ -0,0 +1,817 @@ +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Empty {} +/// Block Requests +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NumToIdReq { + #[prost(uint64, tag = "1")] + pub block_num: u64, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IdToNumReq { + #[prost(string, tag = "1")] + pub block_id: ::prost::alloc::string::String, +} +/// Block & BlockByTime Responses +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BlockResp { + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + #[prost(uint64, tag = "2")] + pub num: u64, + #[prost(message, optional, tag = "3")] + pub time: ::core::option::Option<::prost_types::Timestamp>, +} +/// BlockByTime Requests +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TimeReq { + #[prost(message, optional, tag = "1")] + pub time: ::core::option::Option<::prost_types::Timestamp>, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RelativeTimeReq { + #[prost(message, optional, tag = "1")] + pub time: ::core::option::Option<::prost_types::Timestamp>, + #[prost(bool, tag = "2")] + pub inclusive: bool, +} +/// Generated client implementations. +pub mod block_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + + use tonic::codegen::http::Uri; + use tonic::codegen::*; + + #[derive(Debug, Clone)] + pub struct BlockClient { + inner: tonic::client::Grpc, + } + impl BlockClient { + /// Attempt to create a new client by connecting to a given endpoint. 
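The wrapper is a thin layer over the generated `BlockClient` defined in this module. For context, the generated client can also be driven directly from inside the crate (the `gen` module is private, so this would sit next to `sf_blockmeta_client.rs`, e.g. in a unit test); a sketch with a placeholder endpoint and no auth interceptor:

```rust
// Sketch only: exercising the generated client without the hand-written wrapper.
use self::gen::block_client::BlockClient;
use self::gen::{Empty, NumToIdReq};

async fn raw_client_sketch() -> anyhow::Result<()> {
    // `connect` establishes the channel eagerly, unlike the lazy channel used by
    // `SfBlockmetaClient::new`. The endpoint is a placeholder.
    let mut client = BlockClient::connect("https://blockmeta.example.com:443").await?;

    // `Head`: latest block known to the service.
    let head = client.head(Empty {}).await?.into_inner();

    // `NumToID`: resolve the head's number back to its id.
    let block = client
        .num_to_id(NumToIdReq { block_num: head.num })
        .await?
        .into_inner();

    println!("head #{} has id {}", head.num, block.id);
    Ok(())
}
```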
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl BlockClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> BlockClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + >>::Error: + Into + Send + Sync, + { + BlockClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn num_to_id( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.Block/NumToID"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("sf.blockmeta.v2.Block", "NumToID")); + self.inner.unary(req, path, codec).await + } + pub async fn id_to_num( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.Block/IDToNum"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("sf.blockmeta.v2.Block", "IDToNum")); + self.inner.unary(req, path, codec).await + } + pub async fn head( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.Block/Head"); + let mut req = request.into_request(); + req.extensions_mut() + 
.insert(GrpcMethod::new("sf.blockmeta.v2.Block", "Head")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated client implementations. +pub mod block_by_time_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + + use tonic::codegen::http::Uri; + use tonic::codegen::*; + + #[derive(Debug, Clone)] + pub struct BlockByTimeClient { + inner: tonic::client::Grpc, + } + impl BlockByTimeClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl BlockByTimeClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> BlockByTimeClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + >>::Error: + Into + Send + Sync, + { + BlockByTimeClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
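Both generated clients expose the same per-client knobs shown here. A small, hypothetical tuning sketch (again crate-internal, since `gen` is private); note that `send_compressed`/`accept_compressed` would additionally require enabling tonic's `gzip` feature, which this patch does not do:

```rust
use tonic::transport::Channel;

use self::gen::block_client::BlockClient;

// Hypothetical: raise the 4MB decode limit for unusually large responses.
// (Compression via `accept_compressed`/`send_compressed` is also generated,
// but needs the `gzip` feature on the `tonic` dependency.)
fn tuned_client(channel: Channel) -> BlockClient<Channel> {
    BlockClient::new(channel)
        .max_decoding_message_size(16 * 1024 * 1024)
        .max_encoding_message_size(1024 * 1024)
}
```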
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn at( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.BlockByTime/At"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("sf.blockmeta.v2.BlockByTime", "At")); + self.inner.unary(req, path, codec).await + } + pub async fn after( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.BlockByTime/After"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("sf.blockmeta.v2.BlockByTime", "After")); + self.inner.unary(req, path, codec).await + } + pub async fn before( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static("/sf.blockmeta.v2.BlockByTime/Before"); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("sf.blockmeta.v2.BlockByTime", "Before")); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod block_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + + use tonic::codegen::*; + + /// Generated trait containing gRPC methods that should be implemented for use with BlockServer. + #[async_trait] + pub trait Block: Send + Sync + 'static { + async fn num_to_id( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn id_to_num( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn head( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct BlockServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + struct _Inner(Arc); + impl BlockServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. 
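The `BlockByTime` client generated above (`At`, `After`, `Before`) is not wrapped by `SfBlockmetaClient`, but it can be used in the same way. A hypothetical, crate-internal sketch that asks for the first block at or after a given timestamp; the timestamp value is arbitrary and the `inclusive` semantics are inferred from the field name:

```rust
use prost_types::Timestamp;

use self::gen::block_by_time_client::BlockByTimeClient;
use self::gen::RelativeTimeReq;

async fn first_block_at_or_after(
    mut client: BlockByTimeClient<tonic::transport::Channel>,
) -> anyhow::Result<()> {
    // Arbitrary example timestamp (seconds since the Unix epoch).
    let time = Timestamp { seconds: 1_700_000_000, nanos: 0 };

    // `After` with `inclusive: true` presumably also matches a block exactly at `time`.
    let block = client
        .after(RelativeTimeReq { time: Some(time), inclusive: true })
        .await?
        .into_inner();

    println!("block #{} ({}) at or after the requested time", block.num, block.id);
    Ok(())
}
```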
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for BlockServer + where + T: Block, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/sf.blockmeta.v2.Block/NumToID" => { + #[allow(non_camel_case_types)] + struct NumToIDSvc(pub Arc); + impl tonic::server::UnaryService for NumToIDSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { ::num_to_id(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = NumToIDSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/sf.blockmeta.v2.Block/IDToNum" => { + #[allow(non_camel_case_types)] + struct IDToNumSvc(pub Arc); + impl tonic::server::UnaryService for IDToNumSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { ::id_to_num(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = IDToNumSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + 
max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/sf.blockmeta.v2.Block/Head" => { + #[allow(non_camel_case_types)] + struct HeadSvc(pub Arc); + impl tonic::server::UnaryService for HeadSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call(&mut self, request: tonic::Request) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { ::head(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = HeadSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => Box::pin(async move { + Ok(http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap()) + }), + } + } + } + impl Clone for BlockServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for BlockServer { + const NAME: &'static str = "sf.blockmeta.v2.Block"; + } +} +/// Generated server implementations. +pub mod block_by_time_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + + use tonic::codegen::*; + + /// Generated trait containing gRPC methods that should be implemented for use with BlockByTimeServer. 
+ #[async_trait] + pub trait BlockByTime: Send + Sync + 'static { + async fn at( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn after( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn before( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct BlockByTimeServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + struct _Inner(Arc); + impl BlockByTimeServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for BlockByTimeServer + where + T: BlockByTime, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/sf.blockmeta.v2.BlockByTime/At" => { + #[allow(non_camel_case_types)] + struct AtSvc(pub Arc); + impl tonic::server::UnaryService for AtSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { ::at(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = AtSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/sf.blockmeta.v2.BlockByTime/After" => { + #[allow(non_camel_case_types)] + struct AfterSvc(pub Arc); + impl tonic::server::UnaryService for AfterSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = + async move { ::after(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = AfterSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/sf.blockmeta.v2.BlockByTime/Before" => { + #[allow(non_camel_case_types)] + struct BeforeSvc(pub Arc); + impl tonic::server::UnaryService for BeforeSvc { + type Response = super::BlockResp; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = + async move { ::before(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = 
self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = BeforeSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => Box::pin(async move { + Ok(http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap()) + }), + } + } + } + impl Clone for BlockByTimeServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for BlockByTimeServer { + const NAME: &'static str = "sf.blockmeta.v2.BlockByTime"; + } +} diff --git a/gateway-framework/tests/it_chains_ethereum_sf_blockmeta.rs b/gateway-framework/tests/it_chains_ethereum_sf_blockmeta.rs new file mode 100644 index 000000000..e6335bc0f --- /dev/null +++ b/gateway-framework/tests/it_chains_ethereum_sf_blockmeta.rs @@ -0,0 +1,157 @@ +//! Ethereum JSON-RPC API client integration tests + +use assert_matches::assert_matches; +use thegraph::types::{BlockHash, BlockNumber}; +use tonic::transport::Uri; + +use gateway_framework::chains::ethereum::sf_blockmeta_client::SfBlockmetaClient; + +/// Test helper to get the test URI as a [`Uri`]. +/// +/// The URI is expected to be set in the `IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI` environment +/// variable. +fn test_rpc_uri() -> Uri { + std::env::var("IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI") + .expect("Missing IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI") + .parse() + .expect("Invalid URI") +} + +/// Test helper to get the test authorization token. +/// +/// The token is expected to be set in the `IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN` +/// environment variable. +fn test_auth_token() -> String { + let token = std::env::var("IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN") + .expect("Missing IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN"); + if token.is_empty() { + panic!("IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN is empty"); + } + token +} + +/// It should be able to retrieve the latest block using the `Head` RPC method. 
+#[test_with::env( + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI, + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN +)] +#[tokio::test] +async fn fetch_latest_block() { + //* Given + let mut client = SfBlockmetaClient::new_with_auth(test_rpc_uri(), test_auth_token()); + + //* When + let resp = client.get_latest_block().await; + + //* Then + // Assert the block number and hash are present + assert_matches!(resp, Ok(Some(block)) => { + assert!(block.num > 0); + assert!(!block.id.is_empty()); + }); +} + +/// It should be able to retrieve a block associated with a certain hash using the +/// `IDToNum` RPC method. +#[test_with::env( + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI, + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN +)] +#[tokio::test] +async fn fetch_known_block_by_hash() { + //* Given + let mut client = SfBlockmetaClient::new_with_auth(test_rpc_uri(), test_auth_token()); + + // The Merge: https://etherscan.io/block/15537394 + let block_hash: BlockHash = + "0x56a9bb0302da44b8c0b3df540781424684c3af04d0b7a38d72842b762076a664" + .parse() + .expect("invalid hash"); + let expected_block_number: u64 = 15_537_394; + + //* When + let resp = client.get_block_by_hash(block_hash).await; + + //* Then + // Assert the block number and hash are present + assert_matches!(resp, Ok(Some(block)) => { + assert_eq!(block.num, expected_block_number); + assert_eq!(block.id, format!("{:x}", block_hash)); + }); +} + +/// It should be able to retrieve a block associated with a certain number using the +/// `NumToID` RPC method. +#[test_with::env( + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI, + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN +)] +#[tokio::test] +async fn fetch_known_block_by_number() { + //* Given + let mut client = SfBlockmetaClient::new_with_auth(test_rpc_uri(), test_auth_token()); + + // The Merge: https://etherscan.io/block/15537394 + let block_number: BlockNumber = 15_537_394; + let expected_block_hash: BlockHash = + "0x56a9bb0302da44b8c0b3df540781424684c3af04d0b7a38d72842b762076a664" + .parse() + .expect("invalid hash"); + + //* When + let resp = client.get_block_by_number(block_number).await; + + //* Then + // Assert the block number and hash are present + assert_matches!(resp, Ok(Some(block)) => { + assert_eq!(block.num, block_number); + assert_eq!(block.id, format!("{:x}", expected_block_hash)); + }); +} + +/// It should return `None` when trying to retrieve a block that does not exist using the +/// `IDToNum` RPC method. +#[test_with::env( + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI, + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN +)] +#[tokio::test] +async fn fetch_inexistent_block_by_hash() { + //* Given + let mut client = SfBlockmetaClient::new_with_auth(test_rpc_uri(), test_auth_token()); + + // Use a hash that does not exist + let block_hash: BlockHash = + "0x0000000000000000000000000000000000000000000000000000000000000000" + .parse() + .expect("invalid hash"); + + //* When + let resp = client.get_block_by_hash(block_hash).await; + + //* Then + // Assert that no block is returned + assert_matches!(resp, Ok(None)); +} + +/// It should return `None` when trying to retrieve a block that does not exist using the +/// `NumToID` RPC method.
+#[test_with::env( + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_URI, + IT_GATEWAY_FRAMEWORK_SFBLOCKMETA_AUTH_TOKEN +)] +#[tokio::test] +async fn fetch_inexistent_block_by_number() { + //* Given + let mut client = SfBlockmetaClient::new_with_auth(test_rpc_uri(), test_auth_token()); + + // Use a very large block number to ensure it does not exist + let block_number = i64::MAX as BlockNumber; + + //* When + let resp = client.get_block_by_number(block_number).await; + + //* Then + // Assert that no block is returned + assert_matches!(resp, Ok(None)); +}
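These integration tests only run against a live deployment (they are skipped unless both environment variables are set). For offline coverage, the generated `block_server` module could back an in-process mock. The sketch below is illustrative only: it assumes the generated types are reachable from test code, whereas the patch keeps the `gen` module private, and the `FixedBlock` mock with its canned values is invented for the example.

```rust
use std::net::SocketAddr;

use tonic::transport::Server;
use tonic::{Request, Response, Status};

// Assumed re-exports; in the patch these types live in the private `gen` module.
use sf_blockmeta_client::gen::block_server::{Block, BlockServer};
use sf_blockmeta_client::gen::{BlockResp, Empty, IdToNumReq, NumToIdReq};

/// A mock service that answers every request with a canned block.
struct FixedBlock;

#[tonic::async_trait]
impl Block for FixedBlock {
    async fn num_to_id(&self, req: Request<NumToIdReq>) -> Result<Response<BlockResp>, Status> {
        let num = req.into_inner().block_num;
        Ok(Response::new(BlockResp { id: "abc123".to_owned(), num, time: None }))
    }

    async fn id_to_num(&self, req: Request<IdToNumReq>) -> Result<Response<BlockResp>, Status> {
        let id = req.into_inner().block_id;
        Ok(Response::new(BlockResp { id, num: 42, time: None }))
    }

    async fn head(&self, _req: Request<Empty>) -> Result<Response<BlockResp>, Status> {
        Ok(Response::new(BlockResp { id: "abc123".to_owned(), num: 42, time: None }))
    }
}

/// Serve the mock; a test would spawn this task and point `SfBlockmetaClient::new`
/// at `http://{addr}` instead of the live StreamingFast endpoint.
async fn serve_mock(addr: SocketAddr) -> anyhow::Result<()> {
    Server::builder()
        .add_service(BlockServer::new(FixedBlock))
        .serve(addr)
        .await?;
    Ok(())
}
```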