From a455d5ea254c514ba05ff2d946a0084a1334c500 Mon Sep 17 00:00:00 2001 From: Lohann Paterno Coutinho Ferreira Date: Fri, 7 Jul 2023 05:59:46 -0300 Subject: [PATCH 1/4] Improve apple silicon development --- .dockerignore | 12 + Cargo.toml | 3 + build_connectors.sh | 54 +- chains/astar/Dockerfile | 3 +- chains/bitcoin/Dockerfile | 3 +- chains/ethereum/Dockerfile | 3 +- chains/polkadot/Dockerfile | 3 +- chains/polygon-pos/Dockerfile | 4 + chains/polygon-pos/config/Cargo.toml | 12 + chains/polygon-pos/config/src/lib.rs | 54 ++ chains/polygon-pos/server/Cargo.toml | 27 + chains/polygon-pos/server/README.md | 103 ++++ chains/polygon-pos/server/src/eth_types.rs | 93 ++++ chains/polygon-pos/server/src/lib.rs | 488 ++++++++++++++++++ chains/polygon-pos/server/src/main.rs | 6 + chains/polygon-pos/server/src/proof.rs | 194 +++++++ chains/polygon-pos/server/src/utils.rs | 452 ++++++++++++++++ chains/polygon-pos/tx/Cargo.toml | 16 + chains/polygon-pos/tx/README.md | 30 ++ chains/polygon-pos/tx/src/lib.rs | 105 ++++ ci/dockerfiles/base-ci-linux/Dockerfile | 136 +++++ .../base-ci-linux/base-ci-linux-config | 11 + ci/dockerfiles/builder/Dockerfile | 29 ++ docker-compose.yml | 26 +- rosetta-client/README.md | 17 +- 25 files changed, 1863 insertions(+), 21 deletions(-) create mode 100644 .dockerignore create mode 100644 chains/polygon-pos/Dockerfile create mode 100644 chains/polygon-pos/config/Cargo.toml create mode 100644 chains/polygon-pos/config/src/lib.rs create mode 100644 chains/polygon-pos/server/Cargo.toml create mode 100644 chains/polygon-pos/server/README.md create mode 100644 chains/polygon-pos/server/src/eth_types.rs create mode 100644 chains/polygon-pos/server/src/lib.rs create mode 100644 chains/polygon-pos/server/src/main.rs create mode 100644 chains/polygon-pos/server/src/proof.rs create mode 100644 chains/polygon-pos/server/src/utils.rs create mode 100644 chains/polygon-pos/tx/Cargo.toml create mode 100644 chains/polygon-pos/tx/README.md create mode 100644 chains/polygon-pos/tx/src/lib.rs create mode 100644 ci/dockerfiles/base-ci-linux/Dockerfile create mode 100644 ci/dockerfiles/base-ci-linux/base-ci-linux-config create mode 100644 ci/dockerfiles/builder/Dockerfile diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..91d823b5 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +# Generated by Cargo +**/target + +# Unneeded artifacts +**/.DS_Store + +# The cache for docker container dependency +.cargo +.github + +# Environment +.git diff --git a/Cargo.toml b/Cargo.toml index cc7ee958..20a50480 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,9 @@ members = [ "chains/polkadot/config", "chains/polkadot/server", "chains/polkadot/tx", + "chains/polygon-pos/config", + "chains/polygon-pos/server", + "chains/polygon-pos/tx", "rosetta-cli", "rosetta-client", "rosetta-core", diff --git a/build_connectors.sh b/build_connectors.sh index 99abbd1f..af259349 100755 --- a/build_connectors.sh +++ b/build_connectors.sh @@ -1,4 +1,56 @@ -#!/bin/sh +#!/usr/bin/env bash +set -e + +REPO=https://github.com/Analog-Labs/chain-connectors +REGISTRY_PATH=${REGISTRY_PATH:-analoglabs} +DOCKER_IMAGE_NAME=base-ci-linux +DOCKER_IMAGE_VERSION=latest +CONNECTOR_IMAGE_VERSION=0.4.0 + +# Check if docker is running +if ! docker info > /dev/null 2>&1; then + echo "This script uses docker - please start docker and try again!" 
+ exit 1 +fi + +# Build the base-ci-linux if necessary +if [[ "$(docker images -q "${REGISTRY_PATH}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_VERSION}" 2> /dev/null)" == "" ]]; then + docker build \ + -f ./ci/dockerfiles/base-ci-linux/Dockerfile \ + --build-arg VCS_REF=$(git rev-parse HEAD) \ + --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --no-cache \ + -t "${REGISTRY_PATH}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_VERSION}" \ + "./ci/dockerfiles/${DOCKER_IMAGE_NAME}" +fi + +docker build \ + -f ./ci/dockerfiles/builder/Dockerfile \ + --no-cache \ + -t "${REGISTRY_PATH}/builder:latest" \ + . + +docker build \ + -f ./chains/bitcoin/Dockerfile \ + -t analoglabs/connector-bitcoin:${CONNECTOR_IMAGE_VERSION} \ + ./chains/bitcoin + +docker build \ + -f ./chains/ethereum/Dockerfile \ + -t analoglabs/connector-ethereum:${CONNECTOR_IMAGE_VERSION} \ + ./chains/ethereum + +docker build \ + -f ./chains/polkadot/Dockerfile \ + -t analoglabs/connector-polkadot:${CONNECTOR_IMAGE_VERSION} \ + ./chains/polkadot + +docker build \ + -f ./chains/astar/Dockerfile \ + -t analoglabs/connector-astar:${CONNECTOR_IMAGE_VERSION} \ + ./chains/astar + +exit 0 cargo build -p rosetta-server-bitcoin --target x86_64-unknown-linux-musl --release mkdir -p target/release/bitcoin/bin cp target/x86_64-unknown-linux-musl/release/rosetta-server-bitcoin target/release/bitcoin/bin diff --git a/chains/astar/Dockerfile b/chains/astar/Dockerfile index b40a2902..ebf9d734 100644 --- a/chains/astar/Dockerfile +++ b/chains/astar/Dockerfile @@ -1,3 +1,4 @@ +FROM analoglabs/builder:latest AS builder FROM scratch -COPY bin/rosetta-server-astar rosetta-server-astar +COPY --from=builder /chain-connectors/bin/rosetta-server-astar rosetta-server-astar ENTRYPOINT ["/rosetta-server-astar"] diff --git a/chains/bitcoin/Dockerfile b/chains/bitcoin/Dockerfile index acfc2232..3fb848e0 100644 --- a/chains/bitcoin/Dockerfile +++ b/chains/bitcoin/Dockerfile @@ -1,3 +1,4 @@ +FROM analoglabs/builder:latest AS builder FROM scratch -COPY bin/rosetta-server-bitcoin rosetta-server-bitcoin +COPY --from=builder /chain-connectors/bin/rosetta-server-bitcoin /rosetta-server-bitcoin ENTRYPOINT ["/rosetta-server-bitcoin"] diff --git a/chains/ethereum/Dockerfile b/chains/ethereum/Dockerfile index 83ecc419..b17e6ec6 100644 --- a/chains/ethereum/Dockerfile +++ b/chains/ethereum/Dockerfile @@ -1,3 +1,4 @@ +FROM analoglabs/builder:latest AS builder FROM scratch -COPY bin/rosetta-server-ethereum rosetta-server-ethereum +COPY --from=builder /chain-connectors/bin/rosetta-server-ethereum rosetta-server-ethereum ENTRYPOINT ["/rosetta-server-ethereum"] diff --git a/chains/polkadot/Dockerfile b/chains/polkadot/Dockerfile index 88c1ab4e..c9347eda 100644 --- a/chains/polkadot/Dockerfile +++ b/chains/polkadot/Dockerfile @@ -1,3 +1,4 @@ +FROM analoglabs/builder:latest AS builder FROM scratch -COPY bin/rosetta-server-polkadot rosetta-server-polkadot +COPY --from=builder /chain-connectors/bin/rosetta-server-polkadot rosetta-server-polkadot ENTRYPOINT ["/rosetta-server-polkadot"] diff --git a/chains/polygon-pos/Dockerfile b/chains/polygon-pos/Dockerfile new file mode 100644 index 00000000..b13cd287 --- /dev/null +++ b/chains/polygon-pos/Dockerfile @@ -0,0 +1,4 @@ +FROM analoglabs/builder:latest AS builder +FROM scratch +COPY --from=builder /chain-connectors/bin/rosetta-server-polygon-pos rosetta-server-polygon-pos +ENTRYPOINT ["/rosetta-server-polygon-pos"] diff --git a/chains/polygon-pos/config/Cargo.toml b/chains/polygon-pos/config/Cargo.toml new file mode 100644 index 00000000..bff74222 --- 
/dev/null +++ b/chains/polygon-pos/config/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "rosetta-config-polygon-pos" +version = "0.4.0" +edition = "2021" +license = "MIT" +repository = "https://github.com/analog-labs/chain-connectors" +description = "Polygon configuration." + +[dependencies] +anyhow = "1.0.69" +rosetta-core = { version = "0.4.0", path = "../../../rosetta-core" } +serde = { version = "1.0.153", features = ["derive"] } diff --git a/chains/polygon-pos/config/src/lib.rs b/chains/polygon-pos/config/src/lib.rs new file mode 100644 index 00000000..7c270553 --- /dev/null +++ b/chains/polygon-pos/config/src/lib.rs @@ -0,0 +1,54 @@ +use anyhow::Result; +use rosetta_core::crypto::address::AddressFormat; +use rosetta_core::crypto::Algorithm; +use rosetta_core::BlockchainConfig; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +pub fn config(network: &str) -> Result { + anyhow::ensure!(network == "dev"); + Ok(BlockchainConfig { + blockchain: "polygon-pos", + network: "dev", + algorithm: Algorithm::EcdsaRecoverableSecp256k1, + address_format: AddressFormat::Eip55, + coin: 1, // TODO: What this coin field means? is it the BIP44 id? + bip44: true, + utxo: false, + currency_unit: "wei", + currency_symbol: "MATIC", + currency_decimals: 18, + node_port: 8545, + node_image: "ethereum/client-go:v1.10.26", // TODO: use polygon image + node_command: Arc::new(|_network, port| { + vec![ + "--dev".into(), + "--ipcdisable".into(), + "--http".into(), + "--http.addr=0.0.0.0".into(), + format!("--http.port={port}"), + "--http.vhosts=*".into(), + "--http.api=eth,debug,admin,txpool,web3".into(), + ] + }), + node_additional_ports: &[], + connector_port: 8081, + testnet: network == "dev", + }) +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct PolygonMetadataParams { + pub destination: Vec, + pub amount: [u64; 4], + pub data: Vec, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct PolygonMetadata { + pub chain_id: u64, + pub nonce: u64, + pub max_priority_fee_per_gas: [u64; 4], + pub max_fee_per_gas: [u64; 4], + pub gas_limit: [u64; 4], +} diff --git a/chains/polygon-pos/server/Cargo.toml b/chains/polygon-pos/server/Cargo.toml new file mode 100644 index 00000000..8686a281 --- /dev/null +++ b/chains/polygon-pos/server/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "rosetta-server-polygon-pos" +version = "0.4.0" +edition = "2021" +license = "MIT" +repository = "https://github.com/analog-labs/chain-connectors" +description = "Polygon rosetta server." 
+ +[dependencies] +anyhow = "1.0.69" +async-std = { version = "1.12.0", features = ["tokio1"] } +async-trait = "0.1.66" +ethabi = "18.0.0" +ethers = "2.0.2" +hex = "0.4.3" +rosetta-config-polygon-pos = { version = "0.4.0", path = "../config" } +rosetta-server = { version = "0.4.0", path = "../../../rosetta-server" } +serde = "1.0.153" +serde_json = "1.0.94" +tokio = { version = "1.26.0", features = ["rt-multi-thread", "macros"] } + +[dev-dependencies] +ethers-solc = "2.0.1" +rosetta-client = { version = "0.4.0", path = "../../../rosetta-client" } +rosetta-docker = { version = "0.4.0", path = "../../../rosetta-docker" } +rosetta-server = { version = "0.4.0", path = "../../../rosetta-server", features = ["tests"] } +sha3 = "0.10.6" diff --git a/chains/polygon-pos/server/README.md b/chains/polygon-pos/server/README.md new file mode 100644 index 00000000..86ae36a8 --- /dev/null +++ b/chains/polygon-pos/server/README.md @@ -0,0 +1,103 @@ +# **Rosetta Server for Ethereum Chains** + +This Project contains `BlockchainClient` implementation of ethereum chains. + +Methods implemented are: + +- `config` +- `genesis_block` +- `node_version` +- `current_block` +- `balance` +- `faucet` +- `metadata` +- `submit` +- `block` +- `block_transaction` +- `call` + +### **`config`**: + +This method returns `BlockchainConfig` which contains the configuration specific details for ethereum chain. + +### **`genesis_block`**: + +Returns genesis block identifier. + +### **`node_version`**: + +Returns node client version. + +### **`current_block`**: + +Fetches current block using RPC and returns its identifier. + +### **`balance`**: + +Fetches account balance from on chain and returns it. It takes two arguments: +`address`: Address of account we want to fetch balance of. +`block`: block identifier of block at which we want to fetch balance of account. + +### **`block`**: + +This function takes `PartialBlockIdentifier` which contains a block index or hash and returns block transaction and operations happened in that transaction. + +### **`block_transaction`**: + +This function takes: +`block`: Which is a block identifier of block from which we want to fetch transaction from. +`tx`: Transaction identifier of transaction we want to fetch. + +And returns a specific transaction and its operations within specified block. + +### **`faucet`**: + +This method is used to fund an account with some amount of tokens in testnet. It takes two arguments: +`address`: Address of account we want to fund. +`amount`: Amount of tokens we want to fund. + +### **`metadata`**: + +This call is used to fetch nonce of account, It takes two arguments: +`public_key`: This is the public key of sender. +`options`: This is Params needed to create metadata. For ethereum chain it takes + +- `destination`: Address of receivier. +- `amount`: Amount to be transfered to receiver. +- `data`: encoded input data for call + +It returns `EthereumMetadata` which includes `chain_id`, `nonce` and gas details for transaction. + +### **`submit`**: + +It takes transaction bytes which is signed transaction bytes and it Submits signed transaction to chain and return its transaction id. + +### **`call`**: + +This function takes `CallRequest` which contains `method` and `parameters` and returns value returned by function or value stored at specific position in storage or proof of value stored at specific position in storage. + +`method`: its a string containing 3 values separated by `-` (dash).
+ +1. `contract_address`: This is the address of the contract.
+2. `method_signature`: the method signature in case of a contract call, or the storage `position` in case of a storage call.
+3. `call_type`: This is the type of call. It can be `call`, `storage`, `storage_proof` or `transaction_receipt`.
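+ +For illustration only (hypothetical address and signature), a contract call could set `method` to `0x1111111111111111111111111111111111111111-function balanceOf(address) returns (uint256)-call`, while a storage call could set it to `0x1111111111111111111111111111111111111111-0x0000000000000000000000000000000000000000000000000000000000000000-storage`. The arguments for the call itself are passed separately through `parameters`, described next.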
+ +`parameters`: It takes additional parameter needed for call or storage call. In case of storage call or storage_proof call user can pass `block_number`. + +#### _**`contract_address`**_: + +As name suggest first part of the method parameter is contract address. + +#### _**`method_signature`**_: + +For contract call this is the method signature of the function we want to call. For storage call this is the position of storage we want to fetch value from. + +#### **`call_type`**: + +`call`: This is used to call a function in contract to get some value from it. Contract send calls are managed by universal wallet. + +`storage`: This call type can be used to fetch storage from given contract provided the position of storage. + +`storage_proof`: It returns proof of a value stored in contract storage at a given position. + +`transaction_receipt`: This call type can be used to fetch transaction receipt of specified transaction. diff --git a/chains/polygon-pos/server/src/eth_types.rs b/chains/polygon-pos/server/src/eth_types.rs new file mode 100644 index 00000000..a4d1976b --- /dev/null +++ b/chains/polygon-pos/server/src/eth_types.rs @@ -0,0 +1,93 @@ +use ethers::types::{Bytes, H160, U256, U64}; +use serde::{Deserialize, Serialize}; + +pub const FEE_OP_TYPE: &str = "FEE"; +pub const CALL_OP_TYPE: &str = "CALL"; +pub const MINING_REWARD_OP_TYPE: &str = "MINER_REWARD"; +pub const UNCLE_REWARD_OP_TYPE: &str = "UNCLE_REWARD"; +pub const _CALL_CODE_OP_TYPE: &str = "CALLCODE"; +pub const _DELEGATE_CALL_OP_TYPE: &str = "DELEGATECALL"; +pub const _STATIC_CALL_OP_TYPE: &str = "STATICCALL"; +pub const SELF_DESTRUCT_OP_TYPE: &str = "SELFDESTRUCT"; +pub const DESTRUCT_OP_TYPE: &str = "DESTRUCT"; + +pub const CREATE_OP_TYPE: &str = "CREATE"; +pub const CREATE2_OP_TYPE: &str = "CREATE2"; + +pub const SUCCESS_STATUS: &str = "SUCCESS"; +pub const FAILURE_STATUS: &str = "FAILURE"; + +pub const UNCLE_REWARD_MULTIPLIER: u64 = 32; +pub const MAX_UNCLE_DEPTH: u64 = 8; + +pub const _TRANSFER_GAS_LIMIT: u64 = 21000; + +pub const FRONTIER_BLOCK_REWARD: u64 = 5000000000000000000; +pub const BYZANTIUM_BLOCK_REWARD: u64 = 3000000000000000000; +pub const CONSTANTINOPLE_BLOCK_REWARD: u64 = 2000000000000000000; + +pub struct ChainConfig { + pub byzantium_block: u64, + pub constantinople_block: u64, +} + +pub const _MAINNET_CHAIN_CONFIG: ChainConfig = ChainConfig { + byzantium_block: 4370000, + constantinople_block: 7280000, +}; + +pub const TESTNET_CHAIN_CONFIG: ChainConfig = ChainConfig { + byzantium_block: 0, + constantinople_block: 0, +}; + +#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)] +pub struct Trace { + pub from: H160, + pub gas: U64, + #[serde(rename = "gasUsed")] + pub gas_used: U64, + pub input: Bytes, + pub output: Bytes, + pub to: H160, + #[serde(rename = "type")] + pub trace_type: String, + pub value: U256, + #[serde(default)] + pub revert: bool, + #[serde(rename = "error", default)] + pub error_message: String, + #[serde(default)] + pub calls: Vec, +} + +#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)] +pub struct FlattenTrace { + pub from: H160, + pub gas: U64, + pub gas_used: U64, + pub input: Bytes, + pub output: Bytes, + pub to: H160, + pub trace_type: String, + pub value: U256, + pub revert: bool, + pub error_message: String, +} + +impl From for FlattenTrace { + fn from(trace: Trace) -> Self { + Self { + from: trace.from, + gas: trace.gas, + gas_used: trace.gas_used, + input: trace.input, + output: trace.output, + to: trace.to, + trace_type: trace.trace_type, + value: 
trace.value, + revert: trace.revert, + error_message: trace.error_message, + } + } +} diff --git a/chains/polygon-pos/server/src/lib.rs b/chains/polygon-pos/server/src/lib.rs new file mode 100644 index 00000000..bb793ac3 --- /dev/null +++ b/chains/polygon-pos/server/src/lib.rs @@ -0,0 +1,488 @@ +use anyhow::{bail, Context, Result}; +use ethabi::token::{LenientTokenizer, Tokenizer}; +use ethers::abi::{Detokenize, HumanReadableParser, InvalidOutputType, Token}; +use ethers::prelude::*; +use ethers::utils::keccak256; +use ethers::utils::rlp::Encodable; +use proof::verify_proof; +use rosetta_config_polygon_pos::{PolygonMetadata, PolygonMetadataParams}; +use rosetta_server::crypto::address::Address; +use rosetta_server::crypto::PublicKey; +use rosetta_server::types::{ + Block, BlockIdentifier, CallRequest, Coin, PartialBlockIdentifier, Transaction, + TransactionIdentifier, +}; +use rosetta_server::{BlockchainClient, BlockchainConfig}; +use serde_json::{json, Value}; +use std::str::FromStr; +use std::sync::Arc; + +mod eth_types; +mod proof; +mod utils; + +pub struct PolygonPosClient { + config: BlockchainConfig, + client: Arc>, + genesis_block: BlockIdentifier, +} + +#[async_trait::async_trait] +impl BlockchainClient for PolygonPosClient { + type MetadataParams = PolygonMetadataParams; + type Metadata = PolygonMetadata; + + fn create_config(network: &str) -> Result { + rosetta_config_polygon_pos::config(network) + } + + async fn new(config: BlockchainConfig, addr: &str) -> Result { + let client = Arc::new(Provider::::try_from(format!("http://{addr}"))?); + let genesis = client + .get_block(0) + .await? + .context("Failed to get genesis block")?; + let genesis_block = BlockIdentifier { + index: 0, + hash: hex::encode(genesis.hash.as_ref().unwrap()), + }; + Ok(Self { + config, + client, + genesis_block, + }) + } + + fn config(&self) -> &BlockchainConfig { + &self.config + } + + fn genesis_block(&self) -> &BlockIdentifier { + &self.genesis_block + } + + async fn node_version(&self) -> Result { + Ok(self.client.client_version().await?) + } + + async fn current_block(&self) -> Result { + let index = self.client.get_block_number().await?.as_u64(); + let block = self + .client + .get_block(index) + .await? + .context("missing block")?; + Ok(BlockIdentifier { + index, + hash: hex::encode(block.hash.as_ref().unwrap()), + }) + } + + async fn balance(&self, address: &Address, block: &BlockIdentifier) -> Result { + let block = hex::decode(&block.hash)? + .try_into() + .map_err(|_| anyhow::anyhow!("invalid block hash"))?; + let address: H160 = address.address().parse()?; + Ok(self + .client + .get_balance(address, Some(BlockId::Hash(H256(block)))) + .await? + .as_u128()) + } + + async fn coins(&self, _address: &Address, _block: &BlockIdentifier) -> Result> { + anyhow::bail!("not a utxo chain"); + } + + async fn faucet(&self, address: &Address, param: u128) -> Result> { + // first account will be the coinbase account on a dev net + let coinbase = self.client.get_accounts().await?[0]; + let address: H160 = address.address().parse()?; + let tx = TransactionRequest::new() + .to(address) + .value(param) + .from(coinbase); + Ok(self + .client + .send_transaction(tx, None) + .await? + .await? 
+ .unwrap() + .transaction_hash + .0 + .to_vec()) + } + + async fn metadata( + &self, + public_key: &PublicKey, + options: &Self::MetadataParams, + ) -> Result { + let from: H160 = public_key + .to_address(self.config().address_format) + .address() + .parse()?; + let to: Option = if options.destination.len() >= 20 { + Some(H160::from_slice(&options.destination).into()) + } else { + None + }; + let chain_id = self.client.get_chainid().await?; + let nonce = self.client.get_transaction_count(from, None).await?; + let (max_fee_per_gas, max_priority_fee_per_gas) = + self.client.estimate_eip1559_fees(None).await?; + let tx = Eip1559TransactionRequest { + from: Some(from), + to, + value: Some(U256(options.amount)), + data: Some(options.data.clone().into()), + ..Default::default() + }; + let gas_limit = self.client.estimate_gas(&tx.into(), None).await?; + Ok(PolygonMetadata { + chain_id: chain_id.as_u64(), + nonce: nonce.as_u64(), + max_priority_fee_per_gas: max_priority_fee_per_gas.0, + max_fee_per_gas: max_fee_per_gas.0, + gas_limit: gas_limit.0, + }) + } + + async fn submit(&self, transaction: &[u8]) -> Result> { + let tx = transaction.to_vec().into(); + Ok(self + .client + .send_raw_transaction(Bytes(tx)) + .await? + .await? + .context("Failed to get transaction receipt")? + .transaction_hash + .0 + .to_vec()) + } + + async fn block(&self, block_identifier: &PartialBlockIdentifier) -> Result { + let block_id = if let Some(hash) = block_identifier.hash.as_ref() { + BlockId::Hash(H256::from_str(hash)?) + } else { + let index = if let Some(index) = block_identifier.index { + BlockNumber::Number(U64::from(index)) + } else { + BlockNumber::Latest + }; + BlockId::Number(index) + }; + let block = self + .client + .get_block_with_txs(block_id) + .await? + .context("block not found")?; + let block_number = block.number.context("Unable to fetch block number")?; + let block_hash = block.hash.context("Unable to fetch block hash")?; + let mut transactions = vec![]; + let block_reward_transaction = + crate::utils::block_reward_transaction(&self.client, self.config(), &block).await?; + transactions.push(block_reward_transaction); + for transaction in &block.transactions { + let transaction = + crate::utils::get_transaction(&self.client, self.config(), &block, transaction) + .await?; + transactions.push(transaction); + } + Ok(Block { + block_identifier: BlockIdentifier { + index: block_number.as_u64(), + hash: hex::encode(block_hash), + }, + parent_block_identifier: BlockIdentifier { + index: block_number.as_u64().saturating_sub(1), + hash: hex::encode(block.parent_hash), + }, + timestamp: block.timestamp.as_u64() as i64, + transactions, + metadata: None, + }) + } + + async fn block_transaction( + &self, + block: &BlockIdentifier, + tx: &TransactionIdentifier, + ) -> Result { + let tx_id = H256::from_str(&tx.hash)?; + let block = self + .client + .get_block(BlockId::Hash(H256::from_str(&block.hash)?)) + .await? + .context("block not found")?; + let transaction = self + .client + .get_transaction(tx_id) + .await? 
+ .context("transaction not found")?; + let transaction = + crate::utils::get_transaction(&self.client, self.config(), &block, &transaction) + .await?; + Ok(transaction) + } + + async fn call(&self, req: &CallRequest) -> Result { + let call_details = req.method.split('-').collect::>(); + if call_details.len() != 3 { + anyhow::bail!("Invalid length of call request params"); + } + let contract_address = call_details[0]; + let method_or_position = call_details[1]; + let call_type = call_details[2]; + + let params = &req.parameters; + match call_type.to_lowercase().as_str() { + "call" => { + //process constant call + let contract_address = H160::from_str(contract_address)?; + + let function = HumanReadableParser::parse_function(method_or_position)?; + let params: Vec = serde_json::from_value(params.clone())?; + let mut tokens = Vec::with_capacity(params.len()); + for (ty, arg) in function.inputs.iter().zip(params) { + tokens.push(LenientTokenizer::tokenize(&ty.kind, &arg)?); + } + let data = function.encode_input(&tokens)?; + + let tx = Eip1559TransactionRequest { + to: Some(contract_address.into()), + data: Some(data.into()), + ..Default::default() + }; + + let tx = &tx.into(); + let received_data = self.client.call(tx, None).await?; + + struct Detokenizer { + tokens: Vec, + } + impl Detokenize for Detokenizer { + fn from_tokens(tokens: Vec) -> Result { + Ok(Self { tokens }) + } + } + let detokenizer: Detokenizer = + decode_function_data(&function, received_data, false)?; + let mut result = Vec::with_capacity(tokens.len()); + for token in detokenizer.tokens { + result.push(token.to_string()); + } + return Ok(serde_json::to_value(result)?); + } + "storage" => { + //process storage call + let from = H160::from_str(contract_address)?; + + let location = H256::from_str(method_or_position)?; + + let block_num = params["block_number"] + .as_u64() + .map(|block_num| BlockId::Number(block_num.into())); + + let storage_check = self + .client + .get_storage_at(from, location, block_num) + .await?; + return Ok(Value::String(format!("{storage_check:#?}",))); + } + "storage_proof" => { + let from = H160::from_str(contract_address)?; + + let location = H256::from_str(method_or_position)?; + + let block_num = params["block_number"] + .as_u64() + .map(|block_num| BlockId::Number(block_num.into())); + + let proof_data = self + .client + .get_proof(from, vec![location], block_num) + .await?; + + //process verfiicatin of proof + let storage_hash = proof_data.storage_hash; + let storage_proof = proof_data.storage_proof.first().context("No proof found")?; + + let key = &storage_proof.key; + let key_hash = keccak256(key); + let encoded_val = storage_proof.value.rlp_bytes().to_vec(); + + let is_valid = verify_proof( + &storage_proof.proof, + storage_hash.as_bytes(), + &key_hash.to_vec(), + &encoded_val, + ); + + let result = serde_json::to_value(&proof_data)?; + + return Ok(json!({ + "proof": result, + "isValid": is_valid + })); + } + "transaction_receipt" => { + let tx_hash = H256::from_str(contract_address)?; + let receipt = self.client.get_transaction_receipt(tx_hash).await?; + let result = serde_json::to_value(&receipt)?; + return Ok(result); + } + _ => { + bail!("request type not supported") + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use ethers_solc::artifacts::Source; + use ethers_solc::{CompilerInput, EvmVersion, Solc}; + use rosetta_client::EthereumExt; + use rosetta_docker::Env; + use sha3::Digest; + use std::collections::BTreeMap; + use std::path::Path; + + #[tokio::test] + async fn 
test_network_list() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::network_list(config).await + } + + #[tokio::test] + async fn test_network_options() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::network_options::(config).await + } + + #[tokio::test] + async fn test_network_status() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::network_status::(config).await + } + + #[tokio::test] + async fn test_account() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::account(config).await + } + + #[tokio::test] + async fn test_construction() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::construction(config).await + } + + #[tokio::test] + async fn test_find_transaction() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::find_transaction(config).await + } + + #[tokio::test] + async fn test_list_transactions() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + rosetta_server::tests::list_transactions(config).await + } + + fn compile_snippet(source: &str) -> Result> { + let solc = Solc::default(); + let source = format!("contract Contract {{ {source} }}"); + let mut sources = BTreeMap::new(); + sources.insert(Path::new("contract.sol").into(), Source::new(source)); + let input = CompilerInput::with_sources(sources)[0] + .clone() + .evm_version(EvmVersion::Homestead); + let output = solc.compile_exact(&input)?; + let file = output.contracts.get("contract.sol").unwrap(); + let contract = file.get("Contract").unwrap(); + let bytecode = contract + .evm + .as_ref() + .unwrap() + .bytecode + .as_ref() + .unwrap() + .object + .as_bytes() + .unwrap() + .to_vec(); + Ok(bytecode) + } + + #[tokio::test] + async fn test_smart_contract() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + + let env = Env::new("smart-contract", config.clone()).await?; + + let faucet = 100 * u128::pow(10, config.currency_decimals); + let wallet = env.ephemeral_wallet()?; + wallet.faucet(faucet).await?; + + let bytes = compile_snippet( + r#" + event AnEvent(); + function emitEvent() public { + emit AnEvent(); + } + "#, + )?; + let response = wallet.eth_deploy_contract(bytes).await?; + + let receipt = wallet.eth_transaction_receipt(&response.hash).await?; + let contract_address = receipt.result["contractAddress"].as_str().unwrap(); + let response = wallet + .eth_send_call(contract_address, "function emitEvent()", &[], 0) + .await?; + let receipt = wallet.eth_transaction_receipt(&response.hash).await?; + let logs = receipt.result["logs"].as_array().unwrap(); + assert_eq!(logs.len(), 1); + let topic = logs[0]["topics"][0].as_str().unwrap(); + let expected = format!("0x{}", hex::encode(sha3::Keccak256::digest("AnEvent()"))); + assert_eq!(topic, expected); + Ok(()) + } + + #[tokio::test] + async fn test_smart_contract_view() -> Result<()> { + let config = rosetta_config_polygon_pos::config("dev")?; + + let env = Env::new("smart-contract-view", config.clone()).await?; + + let faucet = 100 * u128::pow(10, config.currency_decimals); + let wallet = env.ephemeral_wallet()?; + wallet.faucet(faucet).await?; + + let bytes = compile_snippet( + r#" + function identity(bool a) public view returns (bool) { + return a; + } + "#, + )?; + let response = wallet.eth_deploy_contract(bytes).await?; + 
let receipt = wallet.eth_transaction_receipt(&response.hash).await?; + let contract_address = receipt.result["contractAddress"].as_str().unwrap(); + + let response = wallet + .eth_view_call( + contract_address, + "function identity(bool a) returns (bool)", + &["true".into()], + ) + .await?; + println!("{:?}", response); + let result: Vec = serde_json::from_value(response.result)?; + assert_eq!(result[0], "true"); + + Ok(()) + } +} diff --git a/chains/polygon-pos/server/src/main.rs b/chains/polygon-pos/server/src/main.rs new file mode 100644 index 00000000..fd109f37 --- /dev/null +++ b/chains/polygon-pos/server/src/main.rs @@ -0,0 +1,6 @@ +use anyhow::Result; + +#[tokio::main] +async fn main() -> Result<()> { + rosetta_server::main::().await +} diff --git a/chains/polygon-pos/server/src/proof.rs b/chains/polygon-pos/server/src/proof.rs new file mode 100644 index 00000000..d111a540 --- /dev/null +++ b/chains/polygon-pos/server/src/proof.rs @@ -0,0 +1,194 @@ +use ethers::types::{Bytes, EIP1186ProofResponse}; +use ethers::utils::keccak256; +use ethers::utils::rlp::{decode_list, RlpStream}; + +pub fn verify_proof(proof: &Vec, root: &[u8], path: &Vec, value: &Vec) -> bool { + let mut expected_hash = root.to_vec(); + let mut path_offset = 0; + + for (i, node) in proof.iter().enumerate() { + if expected_hash != keccak256(node).to_vec() { + return false; + } + + let node_list: Vec> = decode_list(node); + + if node_list.len() == 17 { + if i == proof.len() - 1 { + // exclusion proof + let nibble = get_nibble(path, path_offset); + let node = &node_list[nibble as usize]; + + if node.is_empty() && is_empty_value(value) { + return true; + } + } else { + let nibble = get_nibble(path, path_offset); + expected_hash = node_list[nibble as usize].clone(); + + path_offset += 1; + } + } else if node_list.len() == 2 { + if i == proof.len() - 1 { + // exclusion proof + if !paths_match(&node_list[0], skip_length(&node_list[0]), path, path_offset) + && is_empty_value(value) + { + return true; + } + + // inclusion proof + if &node_list[1] == value { + return paths_match( + &node_list[0], + skip_length(&node_list[0]), + path, + path_offset, + ); + } + } else { + let node_path = &node_list[0]; + let prefix_length = shared_prefix_length(path, path_offset, node_path); + if prefix_length < node_path.len() * 2 - skip_length(node_path) { + // The proof shows a divergent path, but we're not + // at the end of the proof, so something's wrong. 
+ return false; + } + path_offset += prefix_length; + expected_hash = node_list[1].clone(); + } + } else { + return false; + } + } + + false +} + +fn paths_match(p1: &Vec, s1: usize, p2: &Vec, s2: usize) -> bool { + let len1 = p1.len() * 2 - s1; + let len2 = p2.len() * 2 - s2; + + if len1 != len2 { + return false; + } + + for offset in 0..len1 { + let n1 = get_nibble(p1, s1 + offset); + let n2 = get_nibble(p2, s2 + offset); + + if n1 != n2 { + return false; + } + } + + true +} + +#[allow(dead_code)] +fn get_rest_path(p: &Vec, s: usize) -> String { + let mut ret = String::new(); + for i in s..p.len() * 2 { + let n = get_nibble(p, i); + ret += &format!("{n:01x}"); + } + ret +} + +fn is_empty_value(value: &Vec) -> bool { + let mut stream = RlpStream::new(); + stream.begin_list(4); + stream.append_empty_data(); + stream.append_empty_data(); + let empty_storage_hash = "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"; + stream.append(&hex::decode(empty_storage_hash).unwrap()); + let empty_code_hash = "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"; + stream.append(&hex::decode(empty_code_hash).unwrap()); + let empty_account = stream.out(); + + let is_empty_slot = value.len() == 1 && value[0] == 0x80; + let is_empty_account = value == &empty_account; + is_empty_slot || is_empty_account +} + +fn shared_prefix_length(path: &Vec, path_offset: usize, node_path: &Vec) -> usize { + let skip_length = skip_length(node_path); + + let len = std::cmp::min( + node_path.len() * 2 - skip_length, + path.len() * 2 - path_offset, + ); + let mut prefix_len = 0; + + for i in 0..len { + let path_nibble = get_nibble(path, i + path_offset); + let node_path_nibble = get_nibble(node_path, i + skip_length); + + if path_nibble == node_path_nibble { + prefix_len += 1; + } else { + break; + } + } + + prefix_len +} + +fn skip_length(node: &Vec) -> usize { + if node.is_empty() { + return 0; + } + + let nibble = get_nibble(node, 0); + match nibble { + 0 => 2, + 1 => 1, + 2 => 2, + 3 => 1, + _ => 0, + } +} + +fn get_nibble(path: &[u8], offset: usize) -> u8 { + let byte = path[offset / 2]; + if offset % 2 == 0 { + byte >> 4 + } else { + byte & 0xF + } +} + +pub fn _encode_account(proof: &EIP1186ProofResponse) -> Vec { + let mut stream = RlpStream::new_list(4); + stream.append(&proof.nonce); + stream.append(&proof.balance); + stream.append(&proof.storage_hash); + stream.append(&proof.code_hash); + let encoded = stream.out(); + encoded.to_vec() +} + +#[cfg(test)] +mod tests { + use crate::proof::shared_prefix_length; + + #[tokio::test] + async fn test_shared_prefix_length() { + // We compare the path starting from the 6th nibble i.e. the 6 in 0x6f + let path: Vec = vec![0x12, 0x13, 0x14, 0x6f, 0x6c, 0x64, 0x21]; + let path_offset = 6; + // Our node path matches only the first 5 nibbles of the path + let node_path: Vec = vec![0x6f, 0x6c, 0x63, 0x21]; + let shared_len = shared_prefix_length(&path, path_offset, &node_path); + assert_eq!(shared_len, 5); + + // Now we compare the path starting from the 5th nibble i.e. 
the 4 in 0x14 + let path: Vec = vec![0x12, 0x13, 0x14, 0x6f, 0x6c, 0x64, 0x21]; + let path_offset = 5; + // Our node path matches only the first 7 nibbles of the path + // Note the first nibble is 1, so we skip 1 nibble + let node_path: Vec = vec![0x14, 0x6f, 0x6c, 0x64, 0x11]; + let shared_len = shared_prefix_length(&path, path_offset, &node_path); + assert_eq!(shared_len, 7); + } +} diff --git a/chains/polygon-pos/server/src/utils.rs b/chains/polygon-pos/server/src/utils.rs new file mode 100644 index 00000000..7367bca9 --- /dev/null +++ b/chains/polygon-pos/server/src/utils.rs @@ -0,0 +1,452 @@ +use crate::eth_types::{ + FlattenTrace, Trace, BYZANTIUM_BLOCK_REWARD, CALL_OP_TYPE, CONSTANTINOPLE_BLOCK_REWARD, + CREATE2_OP_TYPE, CREATE_OP_TYPE, DESTRUCT_OP_TYPE, FAILURE_STATUS, FEE_OP_TYPE, + FRONTIER_BLOCK_REWARD, MAX_UNCLE_DEPTH, MINING_REWARD_OP_TYPE, SELF_DESTRUCT_OP_TYPE, + SUCCESS_STATUS, TESTNET_CHAIN_CONFIG, UNCLE_REWARD_MULTIPLIER, UNCLE_REWARD_OP_TYPE, +}; +use anyhow::{bail, Context, Result}; +use ethers::{prelude::*, utils::to_checksum}; +use ethers::{ + providers::{Http, Middleware, Provider}, + types::{Block, Transaction, TransactionReceipt, H160, H256, U256, U64}, +}; +use rosetta_server::types as rosetta_types; +use rosetta_server::types::{ + AccountIdentifier, Amount, Currency, Operation, OperationIdentifier, TransactionIdentifier, +}; +use rosetta_server::BlockchainConfig; +use serde_json::json; +use std::collections::{HashMap, VecDeque}; +use std::str::FromStr; + +pub async fn get_transaction( + client: &Provider, + config: &BlockchainConfig, + block: &Block, + tx: &Transaction, +) -> Result { + let tx_receipt = client + .get_transaction_receipt(tx.hash) + .await? + .context("Transaction receipt not found")?; + + if tx_receipt + .block_hash + .context("Block hash not found in tx receipt")? 
+ != block.hash.unwrap() + { + bail!("Transaction receipt block hash does not match block hash"); + } + + let currency = config.currency(); + + let mut operations = vec![]; + let fee_ops = get_fee_operations(block, tx, &tx_receipt, ¤cy)?; + operations.extend(fee_ops); + + let tx_trace = if block.number.unwrap().as_u64() != 0 { + let trace = get_transaction_trace(&tx.hash, client).await?; + let trace_ops = get_trace_operations(trace.clone(), operations.len() as i64, ¤cy)?; + operations.extend(trace_ops); + Some(trace) + } else { + None + }; + + Ok(rosetta_types::Transaction { + transaction_identifier: TransactionIdentifier { + hash: hex::encode(tx.hash), + }, + operations, + related_transactions: None, + metadata: Some(json!({ + "gas_limit" : tx.gas, + "gas_price": tx.gas_price, + "receipt": tx_receipt, + "trace": tx_trace, + })), + }) +} + +fn get_fee_operations( + block: &Block, + tx: &Transaction, + receipt: &TransactionReceipt, + currency: &Currency, +) -> Result> { + let miner = block.author.context("block has no author")?; + let base_fee = block.base_fee_per_gas.context("block has no base fee")?; + let tx_type = tx + .transaction_type + .context("transaction type unavailable")?; + let tx_gas_price = tx.gas_price.context("gas price is not available")?; + let tx_max_priority_fee_per_gas = tx.max_priority_fee_per_gas.unwrap_or_default(); + let gas_used = receipt.gas_used.context("gas used is not available")?; + let gas_price = if tx_type.as_u64() == 2 { + base_fee + tx_max_priority_fee_per_gas + } else { + tx_gas_price + }; + let fee_amount = gas_used * gas_price; + let fee_burned = gas_used * base_fee; + let miner_earned_reward = fee_amount - fee_burned; + + let mut operations = vec![]; + + let first_op = Operation { + operation_identifier: OperationIdentifier { + index: 0, + network_index: None, + }, + related_operations: None, + r#type: FEE_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&tx.from, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("-{miner_earned_reward}"), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + let second_op = Operation { + operation_identifier: OperationIdentifier { + index: 1, + network_index: None, + }, + related_operations: Some(vec![OperationIdentifier { + index: 0, + network_index: None, + }]), + r#type: FEE_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&miner, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("{miner_earned_reward}"), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + operations.push(first_op); + operations.push(second_op); + + if fee_burned != U256::from(0) { + let burned_operation = Operation { + operation_identifier: OperationIdentifier { + index: 2, + network_index: None, + }, + related_operations: None, + r#type: FEE_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&tx.from, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("-{fee_burned}"), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + operations.push(burned_operation); + } + Ok(operations) +} + +async fn get_transaction_trace(hash: &H256, client: &Provider) -> Result { + let params = json!([ + hash, 
+ { + "tracer": "callTracer" + } + ]); + Ok(client.request("debug_traceTransaction", params).await?) +} + +fn get_trace_operations(trace: Trace, op_len: i64, currency: &Currency) -> Result> { + let mut traces = VecDeque::new(); + traces.push_back(trace); + let mut flatten_traces = vec![]; + while let Some(mut trace) = traces.pop_front() { + for mut child in std::mem::take(&mut trace.calls) { + if trace.revert { + child.revert = true; + if child.error_message.is_empty() { + child.error_message = trace.error_message.clone(); + } + } + traces.push_back(child); + } + flatten_traces.push(FlattenTrace::from(trace)); + } + let traces = flatten_traces; + + let mut operations: Vec = vec![]; + let mut destroyed_accs: HashMap = HashMap::new(); + + if traces.is_empty() { + return Ok(operations); + } + + for trace in traces { + let mut metadata: HashMap = HashMap::new(); + let mut operation_status = SUCCESS_STATUS; + if trace.revert { + operation_status = FAILURE_STATUS; + metadata.insert("error".into(), trace.error_message); + } + + let mut zero_value = false; + if trace.value == U256::from(0) { + zero_value = true; + } + + let mut should_add = true; + if zero_value && trace.trace_type == CALL_OP_TYPE { + should_add = false; + } + + let from = to_checksum(&trace.from, None); + let to = to_checksum(&trace.to, None); + + if should_add { + let mut from_operation = Operation { + operation_identifier: OperationIdentifier { + index: op_len + operations.len() as i64, + network_index: None, + }, + related_operations: None, + r#type: trace.trace_type.clone(), + status: Some(operation_status.into()), + account: Some(AccountIdentifier { + address: from.clone(), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("-{}", trace.value), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + if zero_value { + from_operation.amount = None; + } else if let Some(d_from) = destroyed_accs.get(&from) { + if operation_status == SUCCESS_STATUS { + let amount = d_from - trace.value.as_u64(); + destroyed_accs.insert(from.clone(), amount); + } + } + + operations.push(from_operation); + } + + if trace.trace_type == SELF_DESTRUCT_OP_TYPE { + //assigning destroyed from to an empty number + if from == to { + continue; + } + } + + if to.is_empty() { + continue; + } + + // If the account is resurrected, we remove it from + // the destroyed accounts map. 
+ if trace.trace_type == CREATE_OP_TYPE || trace.trace_type == CREATE2_OP_TYPE { + destroyed_accs.remove(&to); + } + + if should_add { + let last_op_index = operations[operations.len() - 1].operation_identifier.index; + let mut to_op = Operation { + operation_identifier: OperationIdentifier { + index: last_op_index + 1, + network_index: None, + }, + related_operations: Some(vec![OperationIdentifier { + index: last_op_index, + network_index: None, + }]), + r#type: trace.trace_type, + status: Some(operation_status.into()), + account: Some(AccountIdentifier { + address: to.clone(), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("{}", trace.value), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + if zero_value { + to_op.amount = None; + } else if let Some(d_to) = destroyed_accs.get(&to) { + if operation_status == SUCCESS_STATUS { + let amount = d_to + trace.value.as_u64(); + destroyed_accs.insert(to.clone(), amount); + } + } + + operations.push(to_op); + } + + for (k, v) in &destroyed_accs { + if v == &0 { + continue; + } + + if v < &0 { + //throw some error + } + + let operation = Operation { + operation_identifier: OperationIdentifier { + index: operations[operations.len() - 1].operation_identifier.index + 1, + network_index: None, + }, + related_operations: None, + r#type: DESTRUCT_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&H160::from_str(k)?, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: format!("-{v}"), + currency: currency.clone(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + + operations.push(operation); + } + } + + Ok(operations) +} + +pub async fn block_reward_transaction( + client: &Provider, + config: &BlockchainConfig, + block: &Block, +) -> Result { + let block_number = block.number.context("missing block number")?.as_u64(); + let block_hash = block.hash.context("missing block hash")?; + let block_id = BlockId::Hash(block_hash); + let miner = block.author.unwrap(); + + let mut uncles = vec![]; + for (i, _) in block.uncles.iter().enumerate() { + let uncle = client + .get_uncle(block_id, U64::from(i)) + .await? 
+ .context("Uncle block now found")?; + uncles.push(uncle); + } + + let chain_config = TESTNET_CHAIN_CONFIG; + let mut mining_reward = FRONTIER_BLOCK_REWARD; + if chain_config.byzantium_block <= block_number { + mining_reward = BYZANTIUM_BLOCK_REWARD; + } + if chain_config.constantinople_block <= block_number { + mining_reward = CONSTANTINOPLE_BLOCK_REWARD; + } + if !uncles.is_empty() { + mining_reward += (mining_reward / UNCLE_REWARD_MULTIPLIER) * mining_reward; + } + + let mut operations = vec![]; + let mining_reward_operation = Operation { + operation_identifier: OperationIdentifier { + index: 0, + network_index: None, + }, + related_operations: None, + r#type: MINING_REWARD_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&miner, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: mining_reward.to_string(), + currency: config.currency(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + operations.push(mining_reward_operation); + + for block in uncles { + let uncle_miner = block.author.context("Uncle block has no author")?; + let uncle_number = block.number.context("Uncle block has no number")?; + let uncle_block_reward = + (uncle_number + MAX_UNCLE_DEPTH - block_number) * (mining_reward / MAX_UNCLE_DEPTH); + + let operation = Operation { + operation_identifier: OperationIdentifier { + index: operations.len() as i64, + network_index: None, + }, + related_operations: None, + r#type: UNCLE_REWARD_OP_TYPE.into(), + status: Some(SUCCESS_STATUS.into()), + account: Some(AccountIdentifier { + address: to_checksum(&uncle_miner, None), + sub_account: None, + metadata: None, + }), + amount: Some(Amount { + value: uncle_block_reward.to_string(), + currency: config.currency(), + metadata: None, + }), + coin_change: None, + metadata: None, + }; + operations.push(operation); + } + + Ok(rosetta_types::Transaction { + transaction_identifier: TransactionIdentifier { + hash: hex::encode(block_hash), + }, + related_transactions: None, + operations, + metadata: None, + }) +} diff --git a/chains/polygon-pos/tx/Cargo.toml b/chains/polygon-pos/tx/Cargo.toml new file mode 100644 index 00000000..591b4df7 --- /dev/null +++ b/chains/polygon-pos/tx/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "rosetta-tx-polygon-pos" +version = "0.4.0" +edition = "2021" +license = "MIT" +repository = "https://github.com/analog-labs/chain-connectors" +description = "Polygon transaction builder." + +[dependencies] +anyhow = "1.0.69" +ethabi = "18.0.0" +ethers-core = "2.0.0" +rosetta-config-polygon-pos = { version = "0.4.0", path = "../config" } +rosetta-core = { version = "0.4.0", path = "../../../rosetta-core" } +serde_json = "1.0.94" +sha3 = "0.10.6" diff --git a/chains/polygon-pos/tx/README.md b/chains/polygon-pos/tx/README.md new file mode 100644 index 00000000..ba528654 --- /dev/null +++ b/chains/polygon-pos/tx/README.md @@ -0,0 +1,30 @@ +This project is used to build transactions for ethereum chains. + +## `EthereumTransactionBuilder`: + + Its implementation of `TransactionBuilder` and implements the following methods: + 1. `transfer` + 2. `method_call` + 3. `create_and_sign` + +### `transfer`: + + Creates `EthereumMetadataParams` for transfer call. + +### `method_call`: + + Creates `EthereumMetadataParams` for contract calls. It takes + `method`: this is a string contraining contract address and method signature with `-` seperation. 
+ `params`: array of json_value with params taken by contract methods in string format. + + It returns `EthereumMetadataParams` with `data` field set to the encoded contract call params. + +### `create_and_sign`: + + When `metadata` is created we use this call to create Ethereum Transaction and sign it. It takes following arguments. + `config`: chain sepecific config. + `metadata_params`: Metadata params which created metadata for this call. + `metadata`: Metadata required make transaction. + `secret_key`: wallet's secret key (used to sign the transaction). + + It creates the transaction and signs it and then returns its bytes. diff --git a/chains/polygon-pos/tx/src/lib.rs b/chains/polygon-pos/tx/src/lib.rs new file mode 100644 index 00000000..9382cf84 --- /dev/null +++ b/chains/polygon-pos/tx/src/lib.rs @@ -0,0 +1,105 @@ +use anyhow::Result; +use ethabi::token::{LenientTokenizer, Tokenizer}; +use ethers_core::abi::HumanReadableParser; +use ethers_core::types::{Eip1559TransactionRequest, NameOrAddress, Signature, H160}; +use rosetta_config_polygon_pos::{PolygonMetadata, PolygonMetadataParams}; +use rosetta_core::crypto::address::Address; +use rosetta_core::crypto::SecretKey; +use rosetta_core::{BlockchainConfig, TransactionBuilder}; +use sha3::{Digest, Keccak256}; + +pub use ethers_core::types::U256; + +#[derive(Default)] +pub struct PolygonPosTransactionBuilder; + +impl TransactionBuilder for PolygonPosTransactionBuilder { + type MetadataParams = PolygonMetadataParams; + type Metadata = PolygonMetadata; + + fn transfer(&self, address: &Address, amount: u128) -> Result { + let destination: H160 = address.address().parse()?; + let amount: U256 = amount.into(); + Ok(PolygonMetadataParams { + destination: destination.0.to_vec(), + amount: amount.0, + data: vec![], + }) + } + + fn method_call( + &self, + contract: &str, + method: &str, + params: &[String], + amount: u128, + ) -> Result { + let destination: H160 = contract.parse()?; + let amount: U256 = amount.into(); + let function = HumanReadableParser::parse_function(method)?; + let mut tokens = Vec::with_capacity(params.len()); + for (ty, arg) in function.inputs.iter().zip(params) { + tokens.push(LenientTokenizer::tokenize(&ty.kind, arg)?); + } + let bytes = function.encode_input(&tokens)?; + Ok(PolygonMetadataParams { + destination: destination.0.to_vec(), + amount: amount.0, + data: bytes, + }) + } + + fn deploy_contract(&self, contract_binary: Vec) -> Result { + Ok(PolygonMetadataParams { + destination: vec![], + amount: [0, 0, 0, 0], + data: contract_binary, + }) + } + + fn create_and_sign( + &self, + config: &BlockchainConfig, + metadata_params: &Self::MetadataParams, + metadata: &Self::Metadata, + secret_key: &SecretKey, + ) -> Vec { + let from = secret_key + .public_key() + .to_address(config.address_format) + .address() + .parse() + .unwrap(); + let to: Option = if metadata_params.destination.len() >= 20 { + Some(H160::from_slice(&metadata_params.destination).into()) + } else { + None + }; + let tx = Eip1559TransactionRequest { + from: Some(from), + to, + gas: Some(U256(metadata.gas_limit)), + value: Some(U256(metadata_params.amount)), + data: Some(metadata_params.data.clone().into()), + nonce: Some(metadata.nonce.into()), + access_list: Default::default(), + max_priority_fee_per_gas: Some(U256(metadata.max_priority_fee_per_gas)), + max_fee_per_gas: Some(U256(metadata.max_fee_per_gas)), + chain_id: Some(metadata.chain_id.into()), + }; + let mut hasher = Keccak256::new(); + hasher.update([0x02]); + hasher.update(tx.rlp()); + let hash 
= hasher.finalize(); + let signature = secret_key.sign_prehashed(&hash).unwrap().to_bytes(); + let rlp = tx.rlp_signed(&Signature { + r: U256::from_big_endian(&signature[..32]), + s: U256::from_big_endian(&signature[32..64]), + v: signature[64] as _, + }); + let mut tx = Vec::with_capacity(rlp.len() + 1); + tx.push(0x02); + tx.extend(rlp); + tx + } +} diff --git a/ci/dockerfiles/base-ci-linux/Dockerfile b/ci/dockerfiles/base-ci-linux/Dockerfile new file mode 100644 index 00000000..e786f190 --- /dev/null +++ b/ci/dockerfiles/base-ci-linux/Dockerfile @@ -0,0 +1,136 @@ +FROM docker.io/library/ubuntu:22.04 + +ARG VCS_REF=master +ARG BUILD_DATE="" +ARG REGISTRY_PATH=docker.io/analoglabs + +# metadata +LABEL summary="Layer 1 image with all dependencies for Rust compilation." \ + name="${REGISTRY_PATH}/base-ci-linux" \ + maintainer="lohann@analog.one" \ + version="1.0.0" \ + description="libssl-dev, clang, libclang-dev, libsasl2-dev, lld, cmake, make, git, pkg-config \ +curl, jq, time, lsof, rhash, rust stable, rust nightly, sccache, cargo-udeps, cargo-tarpaulin" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +ci/dockerfiles/base-ci-linux/Dockerfile" \ + one.analog.image.documentation="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +ci/dockerfiles/base-ci-linux/README.md" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +WORKDIR /builds + +# config for clang 15 +COPY ./base-ci-linux-config /root/.cargo/config + +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH \ + RUST_VERSION=1.70.0 \ + RUST_NIGHTLY=2023-05-23 \ + CC=clang-15 \ + CXX=clang-15 + +# install tools and dependencies +RUN set -eux; \ + apt-get -y update; \ + dpkgArch="$(dpkg --print-architecture)"; \ + apt-get install -y --no-install-recommends \ + libssl-dev libsasl2-dev make cmake graphviz \ + git pkg-config curl time rhash ca-certificates jq \ + python3 python3-pip lsof ruby ruby-bundler git-restore-mtime xz-utils unzip gnupg protobuf-compiler && \ +# add clang 15 repo + curl -s https://apt.llvm.org/llvm-snapshot.gpg.key | gpg --dearmor -o /usr/share/keyrings/llvm-snapshot.gpg; \ + echo "deb [arch=${dpkgArch} signed-by=/usr/share/keyrings/llvm-snapshot.gpg] http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main" >> /etc/apt/sources.list.d/llvm-toolchain-jammy-15.list; \ + apt-get -y update; \ + apt-get install -y --no-install-recommends \ + clang-15 lldb-15 lld-15 libclang-15-dev && \ +# add non-root user + groupadd -g 1000 nonroot && \ + useradd -u 1000 -g 1000 -s /bin/bash -m nonroot && \ +# set a link to clang + update-alternatives --install /usr/bin/cc cc /usr/bin/clang-15 100; \ +# set a link to ldd + update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld-15 100; \ +# install rustup, use minimum components + case "${dpkgArch##*-}" in \ + amd64) rustArch='x86_64-unknown-linux-gnu'; rustupSha256='0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db' ;; \ + armhf) rustArch='armv7-unknown-linux-gnueabihf'; rustupSha256='f21c44b01678c645d8fbba1e55e4180a01ac5af2d38bcbd14aa665e0d96ed69a' ;; \ + arm64) rustArch='aarch64-unknown-linux-gnu'; rustupSha256='673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800' ;; \ + i386) rustArch='i686-unknown-linux-gnu'; rustupSha256='e7b0f47557c1afcd86939b118cbcf7fb95a5d1d917bdd355157b63ca00fc4333' ;; \ + *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \ + 
esac; \ + url="https://static.rust-lang.org/rustup/archive/1.26.0/${rustArch}/rustup-init"; \ + curl -L "$url" -o rustup-init; \ + echo "${rustupSha256} *rustup-init" | sha256sum -c -; \ + chmod +x rustup-init; \ + ./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION --default-host ${rustArch}; \ + rm rustup-init; \ + chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \ + chown -R root:nonroot ${RUSTUP_HOME} ${CARGO_HOME}; \ + chmod -R g+w ${RUSTUP_HOME} ${CARGO_HOME}; \ +# versions + rustup show; \ + rustup --version; \ + cargo --version; \ + rustc --version; \ +# install sccache + cargo install sccache --features redis --version 0.3.3; \ +# cargo clean up +# removes compilation artifacts cargo install creates (>250M) + rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache; \ +# apt clean up + apt-get autoremove -y; \ + apt-get clean; \ + rm -rf /var/lib/apt/lists/* + +# cache handler +ENV RUSTC_WRAPPER=sccache \ + # show backtraces + RUST_BACKTRACE=1 + +# install tools and dependencies +RUN set -eux && \ + dpkgArch="$(dpkg --print-architecture)"; \ + case "${dpkgArch##*-}" in \ + amd64) rustArch='x86_64-unknown-linux-gnu'; rustTargetArch='x86_64-unknown-linux-musl' ;; \ + armhf) rustArch='armv7-unknown-linux-gnueabihf'; rustTargetArch='armv7-unknown-linux-musleabi' ;; \ + arm64) rustArch='aarch64-unknown-linux-gnu'; rustTargetArch='aarch64-unknown-linux-musl' ;; \ + i386) rustArch='i686-unknown-linux-gnu'; rustTargetArch='i686-unknown-linux-musl' ;; \ + *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \ + esac; \ +# install `rust-src` component for ui test + rustup component add rust-src rustfmt clippy && \ +# install wasm target into default (stable) toolchain + rustup target add wasm32-unknown-unknown && \ +# install linux-musl target into default (stable) toolchain + rustup target add ${rustTargetArch} && \ +# install specific Rust nightly, default is stable, use minimum components + rustup toolchain install nightly-${RUST_NIGHTLY} --profile minimal --component rustfmt clippy && \ +# install wasm target into nightly toolchain + rustup target add wasm32-unknown-unknown --toolchain "nightly-${RUST_NIGHTLY}" && \ +# install linux-musl target into nightly toolchain + rustup target add ${rustTargetArch} --toolchain "nightly-${RUST_NIGHTLY}" && \ +# "alias" pinned nightly toolchain as nightly + ln -s /usr/local/rustup/toolchains/nightly-${RUST_NIGHTLY}-${rustArch} /usr/local/rustup/toolchains/nightly-${rustArch} && \ +# install wasm-pack + cargo install wasm-pack --version 0.12.1 && \ +# install cargo deny + cargo install cargo-deny --version 0.13.9 && \ +# install dprint + cargo +nightly install dprint --version 0.38.3 && \ +# install udeps + cargo +nightly install cargo-udeps --version 0.1.40 && \ +# install tarpaulin + cargo +nightly install cargo-tarpaulin --version 0.26.1 && \ +# versions + rustup show && \ + cargo --version && \ +# apt clean up + apt-get autoremove -y && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* && \ +# cargo clean up +# removes compilation artifacts cargo install creates (>250M) + rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache diff --git a/ci/dockerfiles/base-ci-linux/base-ci-linux-config b/ci/dockerfiles/base-ci-linux/base-ci-linux-config new file mode 100644 index 00000000..4ab7ac34 --- /dev/null +++ b/ci/dockerfiles/base-ci-linux/base-ci-linux-config @@ -0,0 +1,11 @@ +[target.x86_64-unknown-linux-gnu] +# Enables the aes-ni instructions for RustCrypto dependency. 
+rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] +# setup clang as Linker +linker="clang-15" + +[target.x86_64-unknown-linux-musl] +# Enables the aes-ni instructions for RustCrypto dependency. +rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] +# setup clang as Linker +linker="clang-15" diff --git a/ci/dockerfiles/builder/Dockerfile b/ci/dockerfiles/builder/Dockerfile new file mode 100644 index 00000000..d359e1bb --- /dev/null +++ b/ci/dockerfiles/builder/Dockerfile @@ -0,0 +1,29 @@ +FROM analoglabs/base-ci-linux:latest + +WORKDIR /chain-connectors +COPY . /chain-connectors + +RUN set -eux && \ + dpkgArch="$(dpkg --print-architecture)"; \ + case "${dpkgArch##*-}" in \ + amd64) targetArch='x86_64-unknown-linux-musl' ;; \ + armhf) targetArch='armv7-unknown-linux-musleabi' ;; \ + arm64) targetArch='aarch64-unknown-linux-musl' ;; \ + i386) targetArch='i686-unknown-linux-musl' ;; \ + *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \ + esac; \ + cargo build \ + --locked \ + --release \ + --target ${targetArch} \ + -p rosetta-server-bitcoin \ + -p rosetta-server-ethereum \ + -p rosetta-server-polkadot \ + -p rosetta-server-astar && \ + mkdir -p ./bin && \ + mv target/${targetArch}/release/rosetta-server-bitcoin ./bin/rosetta-server-bitcoin && \ + mv target/${targetArch}/release/rosetta-server-ethereum ./bin/rosetta-server-ethereum && \ + mv target/${targetArch}/release/rosetta-server-polkadot ./bin/rosetta-server-polkadot && \ + mv target/${targetArch}/release/rosetta-server-astar ./bin/rosetta-server-astar + +ENTRYPOINT ["/chain-connectors/bin/rosetta-server-bitcoin"] diff --git a/docker-compose.yml b/docker-compose.yml index 42a93f7e..91a85e15 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,7 +29,7 @@ services: ethereum: image: "ethereum/client-go:v1.10.26" - command: "--dev --ipcdisable --http --http.addr 0.0.0.0 --http.vhosts * --http.api eth,debug,admin,txpool,web3" + command: "--dev --dev.period 5 --ipcdisable --http --http.addr 0.0.0.0 --http.vhosts * --http.api eth,debug,admin,txpool,web3" expose: - "8545" ulimits: @@ -44,6 +44,7 @@ services: - "ethereum-volume:/root" polkadot: + platform: linux/x86_64 image: "parity/polkadot:v0.9.37" command: "--chain dev --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" expose: @@ -61,6 +62,7 @@ services: - "polkadot-volume:/polkadot" astar: + platform: linux/x86_64 image: "staketechnologies/astar-collator:latest" command: "astar-collator --chain dev --ws-port 9994 --rpc-port 9995 --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" expose: @@ -80,7 +82,7 @@ services: connector-bitcoin: - image: "analoglabs/connector-bitcoin" + image: "analoglabs/connector-bitcoin:0.4.0" command: "--network regtest --addr 0.0.0.0:8080 --node-addr bitcoin:18443 --path /data" ports: - "8080:8080" @@ -90,7 +92,7 @@ services: - "bitcoin-connector-volume:/data" connector-ethereum: - image: "analoglabs/connector-ethereum" + image: "analoglabs/connector-ethereum:0.4.0" command: "--network dev --addr 0.0.0.0:8081 --node-addr ethereum:8545 --path /data" ports: - "8081:8081" @@ -100,7 +102,7 @@ services: - "ethereum-connector-volume:/data" connector-polkadot: - image: "analoglabs/connector-polkadot" + image: "analoglabs/connector-polkadot:0.4.0" command: "--network dev --addr 0.0.0.0:8082 --node-addr polkadot:9944 --path /data" ports: - "8082:8082" @@ -112,7 +114,7 @@ services: restart: always connector-astar: - image: 
"analoglabs/connector-astar" + image: "analoglabs/connector-astar:0.4.0" command: "--network dev --addr 0.0.0.0:8083 --node-addr astar:9994 --path /data" ports: - "8083:8083" @@ -123,13 +125,13 @@ services: # TODO: need to do a proper health check restart: always - explorer: - image: "analoglabs/rosetta-explorer" + platform: linux/x86_64 + image: "analoglabs/rosetta-explorer:latest" ports: - - "3000:3000" + - "3000:3000" depends_on: - - connector-bitcoin - - connector-ethereum - - connector-polkadot - - connector-astar + - connector-bitcoin + - connector-ethereum + - connector-polkadot + - connector-astar diff --git a/rosetta-client/README.md b/rosetta-client/README.md index 7c06c015..f71ef3fd 100644 --- a/rosetta-client/README.md +++ b/rosetta-client/README.md @@ -8,17 +8,26 @@ Ethereum: `./build_connectors.sh` if you are running on mac you might get gcc error. To solve it please do following. -2. Run `rustup target add x86_64-unknown-linux-musl` in mac. +2. Run `rustup target add x86_64-unknown-linux-musl` in intel macs or `rustup target add aarch64-unknown-linux-musl` for m1 macs. 3. In `~/.cargo/config` add following +Intel Macs + ``` [target.x86_64-unknown-linux-musl] linker = "x86_64-linux-musl-gcc" ``` -4. In `build_connectors.sh` replace `cargo build` with `TARGET_CC=x86_64-linux-musl-gcc cargo build` +Apple Silicon + +``` +[target.aarch64-unknown-linux-musl] +linker = "aarch64-linux-musl-gcc" +``` + +4. In `build_connectors.sh` replace `cargo build` with `TARGET_CC=x86_64-linux-musl-gcc cargo build` or `TARGET_CC=aarch64-linux-musl-gcc cargo build` 5. Run `./build_connectors.sh`. -6. After conenctors are build run `docker compose up`. +6. After connectors are build run `docker compose up`. **Compiling voting contract** @@ -28,7 +37,7 @@ linker = "x86_64-linux-musl-gcc" **Running voting_contract example** -1. This example demonstrate how to interact with smart contract using Aanlog's wallet. We will deploy a basic contracts storing yes or no votes and displays total votes on voting. +1. This example demonstrate how to interact with smart contract using Analog's wallet. We will deploy a basic contracts storing yes or no votes and displays total votes on voting. 2. Run `cargo run --example voting_contract faucet`. to get some funds to deploy contract. 3. To deploy contract run `cargo run --example voting_contract deploy`. You will get deployed contract address as output, make sure you copy it. 4. To vote for yes run From 35e15daf9def1f8ab2c0b79299c4c451f6251c98 Mon Sep 17 00:00:00 2001 From: Lohann Paterno Coutinho Ferreira Date: Wed, 19 Jul 2023 10:06:48 -0300 Subject: [PATCH 2/4] Update dockerfiles --- build_connectors.sh | 119 +++++++++++++++++------------- chains/astar/Dockerfile | 15 +++- chains/bitcoin/Dockerfile | 15 +++- chains/ethereum/Dockerfile | 15 +++- chains/polkadot/Dockerfile | 15 +++- chains/polygon-pos/Dockerfile | 15 +++- ci/dockerfiles/builder/Dockerfile | 29 -------- docker-compose.yml | 6 +- 8 files changed, 136 insertions(+), 93 deletions(-) delete mode 100644 ci/dockerfiles/builder/Dockerfile diff --git a/build_connectors.sh b/build_connectors.sh index af259349..303821d1 100755 --- a/build_connectors.sh +++ b/build_connectors.sh @@ -4,69 +4,86 @@ set -e REPO=https://github.com/Analog-Labs/chain-connectors REGISTRY_PATH=${REGISTRY_PATH:-analoglabs} DOCKER_IMAGE_NAME=base-ci-linux -DOCKER_IMAGE_VERSION=latest CONNECTOR_IMAGE_VERSION=0.4.0 -# Check if docker is running -if ! 
docker info > /dev/null 2>&1; then - echo "This script uses docker - please start docker and try again!" - exit 1 -fi - -# Build the base-ci-linux if necessary -if [[ "$(docker images -q "${REGISTRY_PATH}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_VERSION}" 2> /dev/null)" == "" ]]; then - docker build \ - -f ./ci/dockerfiles/base-ci-linux/Dockerfile \ - --build-arg VCS_REF=$(git rev-parse HEAD) \ - --build-arg BUILD_DATE=$(date +%Y%m%d) \ - --no-cache \ - -t "${REGISTRY_PATH}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_VERSION}" \ - "./ci/dockerfiles/${DOCKER_IMAGE_NAME}" -fi +# Check for 'uname' and abort if it is not available. +uname -v > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'uname' to identify the platform."; exit 1; } -docker build \ - -f ./ci/dockerfiles/builder/Dockerfile \ - --no-cache \ - -t "${REGISTRY_PATH}/builder:latest" \ - . +# Check for 'docker' and abort if it is not running. +docker info > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'docker', please start docker and try again."; exit 1; } -docker build \ - -f ./chains/bitcoin/Dockerfile \ - -t analoglabs/connector-bitcoin:${CONNECTOR_IMAGE_VERSION} \ - ./chains/bitcoin +# Check for 'rustup' and abort if it is not available. +rustup -V > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'rustup' for compile the binaries"; exit 1; } -docker build \ - -f ./chains/ethereum/Dockerfile \ - -t analoglabs/connector-ethereum:${CONNECTOR_IMAGE_VERSION} \ - ./chains/ethereum +# Detect host architecture +case "$(uname -m)" in + x86_64) + rustTarget='x86_64-unknown-linux-musl' + muslLinker='x86_64-linux-musl-gcc' + ;; + arm64|aarch64) + rustTarget='aarch64-unknown-linux-musl' + muslLinker='aarch64-linux-musl-gcc' + ;; + *) + echo >&2 "ERROR - unsupported architecture: $(uname -m)" + exit 1 + ;; +esac -docker build \ - -f ./chains/polkadot/Dockerfile \ - -t analoglabs/connector-polkadot:${CONNECTOR_IMAGE_VERSION} \ - ./chains/polkadot +# Check if the musl linker is installed +"$muslLinker" --version > /dev/null 2>&1 || { echo >&2 "ERROR - requires '$muslLinker' linker for compile"; exit 1; } -docker build \ - -f ./chains/astar/Dockerfile \ - -t analoglabs/connector-astar:${CONNECTOR_IMAGE_VERSION} \ - ./chains/astar +# Check if the rust target is installed +if ! 
rustup target list | grep -q "$rustTarget"; then + echo "Installing the musl target with rustup '$rustTarget'" + rustup target add "$rustTarget" +fi -exit 0 -cargo build -p rosetta-server-bitcoin --target x86_64-unknown-linux-musl --release +cargo build -p rosetta-server-bitcoin --target "$rustTarget" --release mkdir -p target/release/bitcoin/bin -cp target/x86_64-unknown-linux-musl/release/rosetta-server-bitcoin target/release/bitcoin/bin -docker build target/release/bitcoin -f chains/bitcoin/Dockerfile -t analoglabs/connector-bitcoin +cp "target/$rustTarget/release/rosetta-server-bitcoin" target/release/bitcoin/bin +docker build target/release/bitcoin \ + --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ + --build-arg VCS_REF=$(git rev-parse HEAD) \ + --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + -f chains/bitcoin/Dockerfile \ + -t "analoglabs/connector-bitcoin:$CONNECTOR_IMAGE_VERSION" \ + -t analoglabs/connector-bitcoin:latest \ -cargo build -p rosetta-server-ethereum --target x86_64-unknown-linux-musl --release +cargo build -p rosetta-server-ethereum --target "$rustTarget" --release mkdir -p target/release/ethereum/bin -cp target/x86_64-unknown-linux-musl/release/rosetta-server-ethereum target/release/ethereum/bin -docker build target/release/ethereum -f chains/ethereum/Dockerfile -t analoglabs/connector-ethereum +cp "target/$rustTarget/release/rosetta-server-ethereum" target/release/ethereum/bin +docker build target/release/ethereum \ + --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ + --build-arg VCS_REF=$(git rev-parse HEAD) \ + --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + -f chains/ethereum/Dockerfile \ + -t "analoglabs/connector-ethereum:$CONNECTOR_IMAGE_VERSION" \ + -t analoglabs/connector-ethereum -cargo build -p rosetta-server-polkadot --target x86_64-unknown-linux-musl --release +cargo build -p rosetta-server-polkadot --target "$rustTarget" --release mkdir -p target/release/polkadot/bin -cp target/x86_64-unknown-linux-musl/release/rosetta-server-polkadot target/release/polkadot/bin -docker build target/release/polkadot -f chains/polkadot/Dockerfile -t analoglabs/connector-polkadot +cp "target/$rustTarget/release/rosetta-server-polkadot" target/release/polkadot/bin +docker build target/release/polkadot \ + --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ + --build-arg VCS_REF=$(git rev-parse HEAD) \ + --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + -f chains/polkadot/Dockerfile \ + -t "analoglabs/connector-polkadot:$CONNECTOR_IMAGE_VERSION" \ + -t analoglabs/connector-polkadot -cargo build -p rosetta-server-astar --target x86_64-unknown-linux-musl --release +cargo build -p rosetta-server-astar --target "$rustTarget" --release mkdir -p target/release/astar/bin -cp target/x86_64-unknown-linux-musl/release/rosetta-server-astar target/release/astar/bin -docker build target/release/astar -f chains/astar/Dockerfile -t analoglabs/connector-astar +cp "target/$rustTarget/release/rosetta-server-astar" target/release/astar/bin +docker build target/release/astar \ + --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ + --build-arg VCS_REF=$(git rev-parse HEAD) \ + --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + -f chains/astar/Dockerfile \ + -t "analoglabs/connector-astar:$CONNECTOR_IMAGE_VERSION" \ + -t analoglabs/connector-astar diff --git a/chains/astar/Dockerfile b/chains/astar/Dockerfile index 
ebf9d734..013ff042 100644 --- a/chains/astar/Dockerfile +++ b/chains/astar/Dockerfile @@ -1,4 +1,15 @@ -FROM analoglabs/builder:latest AS builder FROM scratch -COPY --from=builder /chain-connectors/bin/rosetta-server-astar rosetta-server-astar + +# metadata +LABEL summary="Analog’s connectors for astar parachain" \ + name="${REGISTRY_PATH}/connector-astar" \ + version="${IMAGE_VERSION}" \ + description="Astar chain connector" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +chains/astar/Dockerfile" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +COPY bin/rosetta-server-astar rosetta-server-astar ENTRYPOINT ["/rosetta-server-astar"] diff --git a/chains/bitcoin/Dockerfile b/chains/bitcoin/Dockerfile index 3fb848e0..d460f014 100644 --- a/chains/bitcoin/Dockerfile +++ b/chains/bitcoin/Dockerfile @@ -1,4 +1,15 @@ -FROM analoglabs/builder:latest AS builder FROM scratch -COPY --from=builder /chain-connectors/bin/rosetta-server-bitcoin /rosetta-server-bitcoin + +# metadata +LABEL summary="Analog’s connectors for bitcoin network" \ + name="${REGISTRY_PATH}/connector-bitcoin" \ + version="${IMAGE_VERSION}" \ + description="Bitcoin chain connector" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +chains/bitcoin/Dockerfile" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +COPY bin/rosetta-server-bitcoin rosetta-server-bitcoin ENTRYPOINT ["/rosetta-server-bitcoin"] diff --git a/chains/ethereum/Dockerfile b/chains/ethereum/Dockerfile index b17e6ec6..5977cef2 100644 --- a/chains/ethereum/Dockerfile +++ b/chains/ethereum/Dockerfile @@ -1,4 +1,15 @@ -FROM analoglabs/builder:latest AS builder FROM scratch -COPY --from=builder /chain-connectors/bin/rosetta-server-ethereum rosetta-server-ethereum + +# metadata +LABEL summary="Analog’s connectors for ethereum network" \ + name="${REGISTRY_PATH}/connector-ethereum" \ + version="${IMAGE_VERSION}" \ + description="Ethereum chain connector" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +chains/ethereum/Dockerfile" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +COPY bin/rosetta-server-ethereum rosetta-server-ethereum ENTRYPOINT ["/rosetta-server-ethereum"] diff --git a/chains/polkadot/Dockerfile b/chains/polkadot/Dockerfile index c9347eda..3404e2a4 100644 --- a/chains/polkadot/Dockerfile +++ b/chains/polkadot/Dockerfile @@ -1,4 +1,15 @@ -FROM analoglabs/builder:latest AS builder FROM scratch -COPY --from=builder /chain-connectors/bin/rosetta-server-polkadot rosetta-server-polkadot + +# metadata +LABEL summary="Analog’s connectors for polkadot network" \ + name="${REGISTRY_PATH}/connector-polkadot" \ + version="${IMAGE_VERSION}" \ + description="Polkadot chain connector" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +chains/polkadot/Dockerfile" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +COPY bin/rosetta-server-polkadot rosetta-server-polkadot ENTRYPOINT ["/rosetta-server-polkadot"] diff --git a/chains/polygon-pos/Dockerfile b/chains/polygon-pos/Dockerfile index b13cd287..e96591c3 100644 --- a/chains/polygon-pos/Dockerfile +++ 
b/chains/polygon-pos/Dockerfile @@ -1,4 +1,15 @@ -FROM analoglabs/builder:latest AS builder FROM scratch -COPY --from=builder /chain-connectors/bin/rosetta-server-polygon-pos rosetta-server-polygon-pos + +# metadata +LABEL summary="Analog’s connectors for polygon network" \ + name="${REGISTRY_PATH}/connector-polygon" \ + version="${IMAGE_VERSION}" \ + description="Polygon PoS chain connector" \ + one.analog.image.vendor="Analog.one" \ + one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ +chains/polygon-pos/Dockerfile" \ + one.analog.image.revision="${VCS_REF}" \ + one.analog.image.created="${BUILD_DATE}" + +COPY bin/rosetta-server-polygon-pos rosetta-server-polygon-pos ENTRYPOINT ["/rosetta-server-polygon-pos"] diff --git a/ci/dockerfiles/builder/Dockerfile b/ci/dockerfiles/builder/Dockerfile deleted file mode 100644 index d359e1bb..00000000 --- a/ci/dockerfiles/builder/Dockerfile +++ /dev/null @@ -1,29 +0,0 @@ -FROM analoglabs/base-ci-linux:latest - -WORKDIR /chain-connectors -COPY . /chain-connectors - -RUN set -eux && \ - dpkgArch="$(dpkg --print-architecture)"; \ - case "${dpkgArch##*-}" in \ - amd64) targetArch='x86_64-unknown-linux-musl' ;; \ - armhf) targetArch='armv7-unknown-linux-musleabi' ;; \ - arm64) targetArch='aarch64-unknown-linux-musl' ;; \ - i386) targetArch='i686-unknown-linux-musl' ;; \ - *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \ - esac; \ - cargo build \ - --locked \ - --release \ - --target ${targetArch} \ - -p rosetta-server-bitcoin \ - -p rosetta-server-ethereum \ - -p rosetta-server-polkadot \ - -p rosetta-server-astar && \ - mkdir -p ./bin && \ - mv target/${targetArch}/release/rosetta-server-bitcoin ./bin/rosetta-server-bitcoin && \ - mv target/${targetArch}/release/rosetta-server-ethereum ./bin/rosetta-server-ethereum && \ - mv target/${targetArch}/release/rosetta-server-polkadot ./bin/rosetta-server-polkadot && \ - mv target/${targetArch}/release/rosetta-server-astar ./bin/rosetta-server-astar - -ENTRYPOINT ["/chain-connectors/bin/rosetta-server-bitcoin"] diff --git a/docker-compose.yml b/docker-compose.yml index 91a85e15..2eaa4252 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -44,7 +44,7 @@ services: - "ethereum-volume:/root" polkadot: - platform: linux/x86_64 + platform: linux/amd64 image: "parity/polkadot:v0.9.37" command: "--chain dev --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" expose: @@ -62,7 +62,7 @@ services: - "polkadot-volume:/polkadot" astar: - platform: linux/x86_64 + platform: linux/amd64 image: "staketechnologies/astar-collator:latest" command: "astar-collator --chain dev --ws-port 9994 --rpc-port 9995 --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" expose: @@ -126,7 +126,7 @@ services: restart: always explorer: - platform: linux/x86_64 + platform: linux/amd64 image: "analoglabs/rosetta-explorer:latest" ports: - "3000:3000" From d91c5f88a66c30ca7f82a580047ab19918a20613 Mon Sep 17 00:00:00 2001 From: Lohann Paterno Coutinho Ferreira Date: Wed, 19 Jul 2023 12:20:39 -0300 Subject: [PATCH 3/4] refactor build_connectors.sh --- build_connectors.sh | 57 ++++++++++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/build_connectors.sh b/build_connectors.sh index 303821d1..48c941ac 100755 --- a/build_connectors.sh +++ b/build_connectors.sh @@ -1,10 +1,9 @@ #!/usr/bin/env bash set -e 
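The refactor below collapses the four separate `cargo build` invocations into a single one and passes the musl linker on the command line through cargo's `--config` flag, so no `~/.cargo/config` edit is required. As a rough sketch of the same idea for one connector, assuming an Apple Silicon host with the `aarch64-linux-musl-gcc` toolchain from musl-cross on `PATH`:

```shell
# Sketch: cross-compile a single connector for a musl target without touching
# ~/.cargo/config (assumes aarch64-linux-musl-gcc is installed, e.g. via musl-cross).
rustTarget='aarch64-unknown-linux-musl'
muslLinker='aarch64-linux-musl-gcc'

rustup target add "$rustTarget"
cargo build \
    -p rosetta-server-ethereum \
    --target "$rustTarget" \
    --config "target.$rustTarget.linker='$muslLinker'" \
    --config "env.CC_$rustTarget='$muslLinker'" \
    --release
```

On an Intel Mac the same sketch applies with `x86_64-unknown-linux-musl` and `x86_64-linux-musl-gcc`.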
-REPO=https://github.com/Analog-Labs/chain-connectors -REGISTRY_PATH=${REGISTRY_PATH:-analoglabs} -DOCKER_IMAGE_NAME=base-ci-linux -CONNECTOR_IMAGE_VERSION=0.4.0 +REGISTRY_PATH="${REGISTRY_PATH:-analoglabs}" +VCS_REF="$(git rev-parse HEAD)" +CONNECTOR_IMAGE_VERSION='0.4.0' # Check for 'uname' and abort if it is not available. uname -v > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'uname' to identify the platform."; exit 1; } @@ -40,49 +39,59 @@ if ! rustup target list | grep -q "$rustTarget"; then rustup target add "$rustTarget" fi -cargo build -p rosetta-server-bitcoin --target "$rustTarget" --release -mkdir -p target/release/bitcoin/bin +# Build all Connectors +cargo build \ + -p rosetta-server-bitcoin \ + -p rosetta-server-polkadot \ + -p rosetta-server-ethereum \ + -p rosetta-server-astar \ + --target "$rustTarget" \ + --config "target.$rustTarget.linker='$muslLinker'" \ + --config "env.CC_$rustTarget='$muslLinker'" \ + --release + +# Move binaries +mkdir -p target/release/{bitcoin,ethereum,polkadot,astar}/bin cp "target/$rustTarget/release/rosetta-server-bitcoin" target/release/bitcoin/bin +cp "target/$rustTarget/release/rosetta-server-ethereum" target/release/ethereum/bin +cp "target/$rustTarget/release/rosetta-server-polkadot" target/release/polkadot/bin +cp "target/$rustTarget/release/rosetta-server-astar" target/release/astar/bin + +# Build Bitcoin Connector docker build target/release/bitcoin \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ - --build-arg VCS_REF=$(git rev-parse HEAD) \ - --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "VCS_REF=$VCS_REF" \ + --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ -f chains/bitcoin/Dockerfile \ -t "analoglabs/connector-bitcoin:$CONNECTOR_IMAGE_VERSION" \ - -t analoglabs/connector-bitcoin:latest \ + -t analoglabs/connector-bitcoin:latest -cargo build -p rosetta-server-ethereum --target "$rustTarget" --release -mkdir -p target/release/ethereum/bin -cp "target/$rustTarget/release/rosetta-server-ethereum" target/release/ethereum/bin +# Build Ethereum Connector docker build target/release/ethereum \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ - --build-arg VCS_REF=$(git rev-parse HEAD) \ - --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "VCS_REF=$VCS_REF" \ + --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ -f chains/ethereum/Dockerfile \ -t "analoglabs/connector-ethereum:$CONNECTOR_IMAGE_VERSION" \ -t analoglabs/connector-ethereum -cargo build -p rosetta-server-polkadot --target "$rustTarget" --release -mkdir -p target/release/polkadot/bin -cp "target/$rustTarget/release/rosetta-server-polkadot" target/release/polkadot/bin +# Build Polkadot Connector docker build target/release/polkadot \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ - --build-arg VCS_REF=$(git rev-parse HEAD) \ - --build-arg BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "VCS_REF=$VCS_REF" \ + --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ -f chains/polkadot/Dockerfile \ -t "analoglabs/connector-polkadot:$CONNECTOR_IMAGE_VERSION" \ -t analoglabs/connector-polkadot -cargo build -p rosetta-server-astar --target "$rustTarget" --release -mkdir -p target/release/astar/bin -cp "target/$rustTarget/release/rosetta-server-astar" target/release/astar/bin +# Build Astar Connector docker build target/release/astar \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ - --build-arg VCS_REF=$(git rev-parse HEAD) \ - --build-arg 
BUILD_DATE=$(date +%Y%m%d) \ + --build-arg "VCS_REF=$VCS_REF" \ + --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ -f chains/astar/Dockerfile \ -t "analoglabs/connector-astar:$CONNECTOR_IMAGE_VERSION" \ From 786b8c6182cb1acd00d8e81f7ce36e2b2c26722c Mon Sep 17 00:00:00 2001 From: Lohann Paterno Coutinho Ferreira Date: Wed, 19 Jul 2023 21:40:01 -0300 Subject: [PATCH 4/4] Update documentation --- README.md | 53 +++++++------ build_connectors.sh | 23 +++--- chains/astar/Dockerfile | 2 +- chains/bitcoin/Dockerfile | 2 +- chains/ethereum/Dockerfile | 2 +- chains/polkadot/Dockerfile | 2 +- chains/polygon-pos/Dockerfile | 2 +- ci/dockerfiles/base-ci-linux/Dockerfile | 2 +- .../base-ci-linux/base-ci-linux-config | 4 +- docker-compose.yml | 79 +++++++++---------- pull_nodes.sh | 3 + rosetta-client/README.md | 40 ++++------ 12 files changed, 110 insertions(+), 104 deletions(-) diff --git a/README.md b/README.md index a45e96b1..af6fd44b 100644 --- a/README.md +++ b/README.md @@ -18,34 +18,13 @@ This repo contains the following modules: ## Getting started - - -To get started with the Rosetta SDK, you must ensure you have [installed the latest version of Docker](https://www.docker.com/get-started/).\ -Run the following command to download chain-connectors: - -``` -$ git clone https://github.com/Analog-Labs/chain-connectors.git -``` - -You can also download the latest pre-built Docker image release from GitHub by running the following command: - -``` -curl -sSfL https://raw.githubusercontent.com/Analog-Labs/chain-connectors/master/install.sh | sh -s -``` - -After cloning this repo, simply run the following command: - -``` -make build-local -``` +To get started with the Rosetta SDK, ensure you have the latest version of [Rust](https://rustup.rs/) and [Docker](https://www.docker.com/get-started/) installed. ### Connector deployment -### Install CLI tools - Install the CLI tools by running the commands below: ``` @@ -138,6 +117,36 @@ You can override the default URL in rosetta-cli and rosetta-wallet with the “ Create a new tag, push to master and use it to create a new github release. +## Building Docker Images + +### 1. Install dependencies + +The docker images requires musl for cross-compilation, so you need to install the following dependencies: + +#### 1.1 MacOS + +if you're on macos, install [musl-cross](https://github.com/FiloSottile/homebrew-musl-cross) for enable musl-target cross-compilation: + +```shell +brew install filosottile/musl-cross/musl-cross --with-aarch64 +``` + +#### 1.2 Debian/Ubuntu + +if you're on debian/ubuntu, install [musl-tools](https://packages.debian.org/sid/musl-tools) for enable musl-target cross-compilation: + +```shell +apt-get install musl-tools +``` + +#### 2. Build Docker Images + +After install the dependencies, simply run the following command: + +```shell +./build_connectors.sh +``` + ## Contributing You can contribute to this repo in a number of ways, including: diff --git a/build_connectors.sh b/build_connectors.sh index 48c941ac..ff10e551 100755 --- a/build_connectors.sh +++ b/build_connectors.sh @@ -1,9 +1,12 @@ #!/usr/bin/env bash set -e -REGISTRY_PATH="${REGISTRY_PATH:-analoglabs}" +# Check for 'git' and abort if it is not available. 
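Every external dependency is guarded with the same `cmd > /dev/null 2>&1 || { echo ...; exit 1; }` pattern (git, uname, docker, rustup and the musl linker). A hypothetical manual pre-flight that mirrors those checks on an Apple Silicon host, before invoking `./build_connectors.sh`, might look like this:

```shell
# Hypothetical pre-flight; mirrors the checks build_connectors.sh performs itself.
docker info > /dev/null 2>&1 \
    || echo "docker is not running"
rustup target list --installed | grep -q aarch64-unknown-linux-musl \
    || echo "missing target: run 'rustup target add aarch64-unknown-linux-musl'"
aarch64-linux-musl-gcc --version > /dev/null 2>&1 \
    || echo "missing linker: brew install filosottile/musl-cross/musl-cross --with-aarch64"
```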
+git --version > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'git' to get commit hash and tag."; exit 1; } + +REGISTRY_PATH="${REGISTRY_PATH:-docker.io/analoglabs}" VCS_REF="$(git rev-parse HEAD)" -CONNECTOR_IMAGE_VERSION='0.4.0' +IMAGE_TAG="$(git describe --tags | sed 's/^v//')" # Check for 'uname' and abort if it is not available. uname -v > /dev/null 2>&1 || { echo >&2 "ERROR - requires 'uname' to identify the platform."; exit 1; } @@ -62,9 +65,9 @@ docker build target/release/bitcoin \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ --build-arg "VCS_REF=$VCS_REF" \ --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ - --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + --build-arg "IMAGE_VERSION=$IMAGE_TAG" \ -f chains/bitcoin/Dockerfile \ - -t "analoglabs/connector-bitcoin:$CONNECTOR_IMAGE_VERSION" \ + -t "analoglabs/connector-bitcoin:$IMAGE_TAG" \ -t analoglabs/connector-bitcoin:latest # Build Ethereum Connector @@ -72,9 +75,9 @@ docker build target/release/ethereum \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ --build-arg "VCS_REF=$VCS_REF" \ --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ - --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + --build-arg "IMAGE_VERSION=$IMAGE_TAG" \ -f chains/ethereum/Dockerfile \ - -t "analoglabs/connector-ethereum:$CONNECTOR_IMAGE_VERSION" \ + -t "analoglabs/connector-ethereum:$IMAGE_TAG" \ -t analoglabs/connector-ethereum # Build Polkadot Connector @@ -82,9 +85,9 @@ docker build target/release/polkadot \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ --build-arg "VCS_REF=$VCS_REF" \ --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ - --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + --build-arg "IMAGE_VERSION=$IMAGE_TAG" \ -f chains/polkadot/Dockerfile \ - -t "analoglabs/connector-polkadot:$CONNECTOR_IMAGE_VERSION" \ + -t "analoglabs/connector-polkadot:$IMAGE_TAG" \ -t analoglabs/connector-polkadot # Build Astar Connector @@ -92,7 +95,7 @@ docker build target/release/astar \ --build-arg "REGISTRY_PATH=$REGISTRY_PATH" \ --build-arg "VCS_REF=$VCS_REF" \ --build-arg "BUILD_DATE=$(date +%Y%m%d)" \ - --build-arg "IMAGE_VERSION=$CONNECTOR_IMAGE_VERSION" \ + --build-arg "IMAGE_VERSION=$IMAGE_TAG" \ -f chains/astar/Dockerfile \ - -t "analoglabs/connector-astar:$CONNECTOR_IMAGE_VERSION" \ + -t "analoglabs/connector-astar:$IMAGE_TAG" \ -t analoglabs/connector-astar diff --git a/chains/astar/Dockerfile b/chains/astar/Dockerfile index 013ff042..462bb9b2 100644 --- a/chains/astar/Dockerfile +++ b/chains/astar/Dockerfile @@ -5,7 +5,7 @@ LABEL summary="Analog’s connectors for astar parachain" \ name="${REGISTRY_PATH}/connector-astar" \ version="${IMAGE_VERSION}" \ description="Astar chain connector" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ chains/astar/Dockerfile" \ one.analog.image.revision="${VCS_REF}" \ diff --git a/chains/bitcoin/Dockerfile b/chains/bitcoin/Dockerfile index d460f014..b82101bd 100644 --- a/chains/bitcoin/Dockerfile +++ b/chains/bitcoin/Dockerfile @@ -5,7 +5,7 @@ LABEL summary="Analog’s connectors for bitcoin network" \ name="${REGISTRY_PATH}/connector-bitcoin" \ version="${IMAGE_VERSION}" \ description="Bitcoin chain connector" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ chains/bitcoin/Dockerfile" \ one.analog.image.revision="${VCS_REF}" \ diff --git 
a/chains/ethereum/Dockerfile b/chains/ethereum/Dockerfile index 5977cef2..f5b6564b 100644 --- a/chains/ethereum/Dockerfile +++ b/chains/ethereum/Dockerfile @@ -5,7 +5,7 @@ LABEL summary="Analog’s connectors for ethereum network" \ name="${REGISTRY_PATH}/connector-ethereum" \ version="${IMAGE_VERSION}" \ description="Ethereum chain connector" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ chains/ethereum/Dockerfile" \ one.analog.image.revision="${VCS_REF}" \ diff --git a/chains/polkadot/Dockerfile b/chains/polkadot/Dockerfile index 3404e2a4..38b15f3e 100644 --- a/chains/polkadot/Dockerfile +++ b/chains/polkadot/Dockerfile @@ -5,7 +5,7 @@ LABEL summary="Analog’s connectors for polkadot network" \ name="${REGISTRY_PATH}/connector-polkadot" \ version="${IMAGE_VERSION}" \ description="Polkadot chain connector" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ chains/polkadot/Dockerfile" \ one.analog.image.revision="${VCS_REF}" \ diff --git a/chains/polygon-pos/Dockerfile b/chains/polygon-pos/Dockerfile index e96591c3..bd5558d6 100644 --- a/chains/polygon-pos/Dockerfile +++ b/chains/polygon-pos/Dockerfile @@ -5,7 +5,7 @@ LABEL summary="Analog’s connectors for polygon network" \ name="${REGISTRY_PATH}/connector-polygon" \ version="${IMAGE_VERSION}" \ description="Polygon PoS chain connector" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ chains/polygon-pos/Dockerfile" \ one.analog.image.revision="${VCS_REF}" \ diff --git a/ci/dockerfiles/base-ci-linux/Dockerfile b/ci/dockerfiles/base-ci-linux/Dockerfile index e786f190..a1d7f284 100644 --- a/ci/dockerfiles/base-ci-linux/Dockerfile +++ b/ci/dockerfiles/base-ci-linux/Dockerfile @@ -11,7 +11,7 @@ LABEL summary="Layer 1 image with all dependencies for Rust compilation." \ version="1.0.0" \ description="libssl-dev, clang, libclang-dev, libsasl2-dev, lld, cmake, make, git, pkg-config \ curl, jq, time, lsof, rhash, rust stable, rust nightly, sccache, cargo-udeps, cargo-tarpaulin" \ - one.analog.image.vendor="Analog.one" \ + one.analog.image.vendor="Analog One Foundation" \ one.analog.image.source="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ ci/dockerfiles/base-ci-linux/Dockerfile" \ one.analog.image.documentation="https://github.com/Analog-Labs/chain-connectors/blob/${VCS_REF}/\ diff --git a/ci/dockerfiles/base-ci-linux/base-ci-linux-config b/ci/dockerfiles/base-ci-linux/base-ci-linux-config index 4ab7ac34..18991bac 100644 --- a/ci/dockerfiles/base-ci-linux/base-ci-linux-config +++ b/ci/dockerfiles/base-ci-linux/base-ci-linux-config @@ -4,8 +4,6 @@ rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] # setup clang as Linker linker="clang-15" -[target.x86_64-unknown-linux-musl] -# Enables the aes-ni instructions for RustCrypto dependency. 
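build_connectors.sh feeds `REGISTRY_PATH`, `VCS_REF`, `BUILD_DATE` and `IMAGE_VERSION` into these labels via `--build-arg` (assuming the Dockerfiles declare the corresponding `ARG`s). After a build, the resulting metadata can be read back with a standard inspect; the tag below is just the `latest` alias the script applies:

```shell
# Read the OCI-style labels baked into a freshly built connector image.
docker inspect analoglabs/connector-ethereum:latest \
    --format '{{ json .Config.Labels }}' | jq
```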
-rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] +[target.aarch64-unknown-linux-gnu] # setup clang as Linker linker="clang-15" diff --git a/docker-compose.yml b/docker-compose.yml index 2eaa4252..92974835 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,9 +27,19 @@ services: volumes: - "bitcoin-volume:/home/bitcoin/.bitcoin" + connector-bitcoin: + image: "analoglabs/connector-bitcoin:${IMAGE_TAG:-latest}" + command: "--network regtest --addr 0.0.0.0:8080 --node-addr bitcoin:18443 --path /data" + ports: + - "8080:8080" + depends_on: + - bitcoin + volumes: + - "bitcoin-connector-volume:/data" + ethereum: image: "ethereum/client-go:v1.10.26" - command: "--dev --dev.period 5 --ipcdisable --http --http.addr 0.0.0.0 --http.vhosts * --http.api eth,debug,admin,txpool,web3" + command: "--dev --ipcdisable --dev.period 14 --http --http.addr 0.0.0.0 --http.vhosts * --http.api eth,debug,admin,txpool,web3" expose: - "8545" ulimits: @@ -43,10 +53,20 @@ services: volumes: - "ethereum-volume:/root" + connector-ethereum: + image: "analoglabs/connector-ethereum:${IMAGE_TAG:-latest}" + command: "--network dev --addr 0.0.0.0:8081 --node-addr ethereum:8545 --path /data" + ports: + - "8081:8081" + depends_on: + - ethereum + volumes: + - "ethereum-connector-volume:/data" + polkadot: platform: linux/amd64 image: "parity/polkadot:v0.9.37" - command: "--chain dev --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" + command: "--chain dev --rpc-cors all --ws-port 9944 --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" expose: - "9944" user: root @@ -61,10 +81,22 @@ services: volumes: - "polkadot-volume:/polkadot" + connector-polkadot: + image: "analoglabs/connector-polkadot:${IMAGE_TAG:-latest}" + command: "--network dev --addr 0.0.0.0:8082 --node-addr polkadot:9944 --path /data" + ports: + - "8082:8082" + depends_on: + - polkadot + volumes: + - "polkadot-connector-volume:/data" + # TODO: need to do a proper health check + restart: always + astar: platform: linux/amd64 image: "staketechnologies/astar-collator:latest" - command: "astar-collator --chain dev --ws-port 9994 --rpc-port 9995 --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /polkadot" + command: "astar-collator --chain dev --ws-port 9994 --rpc-port 9995 --rpc-cors all --ws-external --alice --blocks-pruning archive --state-pruning archive --base-path /astar" expose: - "9994" - "9995" @@ -80,48 +112,15 @@ services: volumes: - "astar-volume:/astar" - - connector-bitcoin: - image: "analoglabs/connector-bitcoin:0.4.0" - command: "--network regtest --addr 0.0.0.0:8080 --node-addr bitcoin:18443 --path /data" - ports: - - "8080:8080" - depends_on: - - bitcoin - volumes: - - "bitcoin-connector-volume:/data" - - connector-ethereum: - image: "analoglabs/connector-ethereum:0.4.0" - command: "--network dev --addr 0.0.0.0:8081 --node-addr ethereum:8545 --path /data" - ports: - - "8081:8081" - depends_on: - - ethereum - volumes: - - "ethereum-connector-volume:/data" - - connector-polkadot: - image: "analoglabs/connector-polkadot:0.4.0" - command: "--network dev --addr 0.0.0.0:8082 --node-addr polkadot:9944 --path /data" - ports: - - "8082:8082" - depends_on: - - polkadot - volumes: - - "polkadot-connector-volume:/data" - # TODO: need to do a proper health check - restart: always - connector-astar: - image: "analoglabs/connector-astar:0.4.0" + image: "analoglabs/connector-astar:${IMAGE_TAG:-latest}" command: 
"--network dev --addr 0.0.0.0:8083 --node-addr astar:9994 --path /data" ports: - - "8083:8083" + - "8083:8083" depends_on: - - astar + - astar volumes: - - "astar-connector-volume:/data" + - "astar-connector-volume:/data" # TODO: need to do a proper health check restart: always diff --git a/pull_nodes.sh b/pull_nodes.sh index dc7956e7..363de04e 100755 --- a/pull_nodes.sh +++ b/pull_nodes.sh @@ -1,3 +1,6 @@ +#!/usr/bin/env bash +set -e + docker image pull ruimarinho/bitcoin-core:23 docker image pull ethereum/client-go:v1.10.26 docker image pull parity/polkadot:v0.9.37 diff --git a/rosetta-client/README.md b/rosetta-client/README.md index f71ef3fd..5e4b4f92 100644 --- a/rosetta-client/README.md +++ b/rosetta-client/README.md @@ -1,41 +1,35 @@ This get starter to run example available in rosetta-client. -Ethereum: +## Ethereum Setting up nodes -**Setting up nodes** +### MacOS -1. First you need to build connectors for that you can run - `./build_connectors.sh` - if you are running on mac you might get gcc error. To solve it please do following. +if you're on macos, install [musl-cross](https://github.com/FiloSottile/homebrew-musl-cross) for enable musl-target cross-compilation: -2. Run `rustup target add x86_64-unknown-linux-musl` in intel macs or `rustup target add aarch64-unknown-linux-musl` for m1 macs. -3. In `~/.cargo/config` add following - -Intel Macs - -``` -[target.x86_64-unknown-linux-musl] -linker = "x86_64-linux-musl-gcc" +```shell +brew install filosottile/musl-cross/musl-cross --with-aarch64 ``` -Apple Silicon +### Debian/Ubuntu +if you're on debian/ubuntu, install [musl-tools](https://packages.debian.org/sid/musl-tools) for enable musl-target cross-compilation: + +```shell +apt-get install musl-tools ``` -[target.aarch64-unknown-linux-musl] -linker = "aarch64-linux-musl-gcc" -``` -4. In `build_connectors.sh` replace `cargo build` with `TARGET_CC=x86_64-linux-musl-gcc cargo build` or `TARGET_CC=aarch64-linux-musl-gcc cargo build` -5. Run `./build_connectors.sh`. -6. After connectors are build run `docker compose up`. +### Shared Steps + +1. First you need to build connectors for that you can run `./build_connectors.sh` +2. After connectors are build run `docker compose up -d`. -**Compiling voting contract** +### Compiling voting contract 1. We have a `voting_contract.sol` we have to compile and get its binary in order to deploy it. For this you need to have `solc` installed. You can install it using `brew install solidity` or `sudo apt-get install solc`. 2. Run `solc --combined-json abi,bin --abi --bin voting_contract.sol -o ./voting_contract_files` in contract folder. 3. You will get `voting_contract_files` folder with `voting_contract.abi`, `voting_contract.bin` and `combined_voting_contract.json` which contains both abi and bin since we are only concerned with bin we will use `voting_contract.bin`. and for sake of easiness we have already compiled and imported it in examples folder. -**Running voting_contract example** +### Running voting_contract example 1. This example demonstrate how to interact with smart contract using Analog's wallet. We will deploy a basic contracts storing yes or no votes and displays total votes on voting. 2. Run `cargo run --example voting_contract faucet`. to get some funds to deploy contract. 
@@ -46,7 +40,7 @@ linker = "aarch64-linux-musl-gcc" `cargo run --example voting_contract vote --contract-address "0x678ea0447843f69805146c521afcbcc07d6e28a2"` you will get `CallResponse` as output containing an array: the first uint is the total of `yes` votes and the second the total of `no` votes in the contract. -**Running ethereum example** +### Running ethereum example 1. This example demonstrates how to interact with Ethereum using Analog's wallet. 2. Make sure you have the voting contract deployed. If not, please follow voting_contract example steps 2 and 3.
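Both examples assume the stack started with `docker compose up -d` is reachable. A quick sanity check before running them, assuming the connector exposes the standard Rosetta `/network/list` endpoint on the port published in docker-compose.yml (8081 for the Ethereum connector):

```shell
# List running services and their published ports.
docker compose ps

# Probe the Ethereum connector (port 8081 per docker-compose.yml).
curl -s -X POST http://127.0.0.1:8081/network/list \
    -H 'Content-Type: application/json' \
    -d '{}'
```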