From c506e7dfb0f9e8ab57e1c5e72e2578f3fa5b95f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Federico=20Rodr=C3=ADguez?= Date: Mon, 27 May 2024 20:23:16 -0300 Subject: [PATCH] feat: add zksolc support --- .github/workflows/ci.yml | 1 + .github/workflows/zksync-ci.yml | 74 ++ Cargo.toml | 4 + crates/artifacts/artifacts/Cargo.toml | 1 + crates/artifacts/artifacts/src/lib.rs | 1 + crates/artifacts/solc/src/lib.rs | 2 +- crates/artifacts/zksolc/Cargo.toml | 45 ++ crates/artifacts/zksolc/src/bytecode.rs | 28 + crates/artifacts/zksolc/src/contract.rs | 88 +++ crates/artifacts/zksolc/src/error.rs | 270 +++++++ crates/artifacts/zksolc/src/lib.rs | 169 ++++ .../artifacts/zksolc/src/output_selection.rs | 71 ++ crates/compilers/Cargo.toml | 8 +- crates/compilers/src/compilers/mod.rs | 1 + .../compilers/src/compilers/zksolc/input.rs | 138 ++++ crates/compilers/src/compilers/zksolc/mod.rs | 748 ++++++++++++++++++ .../src/compilers/zksolc/settings.rs | 358 +++++++++ crates/compilers/src/config.rs | 3 +- crates/compilers/src/lib.rs | 2 + .../src/zksync/artifact_output/mod.rs | 1 + .../src/zksync/artifact_output/zk.rs | 285 +++++++ crates/compilers/src/zksync/compile/mod.rs | 2 + .../src/zksync/compile/output/contracts.rs | 239 ++++++ .../src/zksync/compile/output/mod.rs | 589 ++++++++++++++ .../compilers/src/zksync/compile/project.rs | 397 ++++++++++ crates/compilers/src/zksync/mod.rs | 182 +++++ crates/compilers/tests/zksync.rs | 473 +++++++++++ test-data/zksync/in/compiler-in-1.json | 1 + test-data/zksync/library-remapping-in-2.json | 1 + test-data/zksync/library-remapping-in.json | 1 + test-data/zksync/yul-sample/SimpleStore.yul | 11 + 31 files changed, 4190 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/zksync-ci.yml create mode 100644 crates/artifacts/zksolc/Cargo.toml create mode 100644 crates/artifacts/zksolc/src/bytecode.rs create mode 100644 crates/artifacts/zksolc/src/contract.rs create mode 100644 crates/artifacts/zksolc/src/error.rs create mode 100644 crates/artifacts/zksolc/src/lib.rs create mode 100644 crates/artifacts/zksolc/src/output_selection.rs create mode 100644 crates/compilers/src/compilers/zksolc/input.rs create mode 100644 crates/compilers/src/compilers/zksolc/mod.rs create mode 100644 crates/compilers/src/compilers/zksolc/settings.rs create mode 100644 crates/compilers/src/zksync/artifact_output/mod.rs create mode 100644 crates/compilers/src/zksync/artifact_output/zk.rs create mode 100644 crates/compilers/src/zksync/compile/mod.rs create mode 100644 crates/compilers/src/zksync/compile/output/contracts.rs create mode 100644 crates/compilers/src/zksync/compile/output/mod.rs create mode 100644 crates/compilers/src/zksync/compile/project.rs create mode 100644 crates/compilers/src/zksync/mod.rs create mode 100644 crates/compilers/tests/zksync.rs create mode 100644 test-data/zksync/in/compiler-in-1.json create mode 100644 test-data/zksync/library-remapping-in-2.json create mode 100644 test-data/zksync/library-remapping-in.json create mode 100644 test-data/zksync/yul-sample/SimpleStore.yul diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 641342f9..2031c3a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,6 +4,7 @@ on: push: branches: [main] pull_request: + branches: [main] env: CARGO_TERM_COLOR: always diff --git a/.github/workflows/zksync-ci.yml b/.github/workflows/zksync-ci.yml new file mode 100644 index 00000000..2f499b40 --- /dev/null +++ b/.github/workflows/zksync-ci.yml @@ -0,0 +1,74 @@ +name: zksync-ci + +on: + push: + 
branches: + - 'zksync-v**' + pull_request: + branches: + - 'zksync-v**' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + +jobs: + doctests: + name: doc tests + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@1.80.0 + - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true + - name: cargo test + run: cargo test --doc + env: + RUST_TEST_THREADS: 2 + + clippy: + name: clippy + runs-on: ubuntu-latest + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@1.80.0 + with: + components: clippy + - run: cargo clippy --workspace --all-targets --all-features + env: + RUSTFLAGS: -Dwarnings + + fmt: + name: fmt + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install Rust + uses: dtolnay/rust-toolchain@1.80.0 + with: + components: rustfmt + + - name: Run rustfmt + run: cargo fmt -- --check + + cargo-test: + name: cargo-test + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + ref: ${{ github.event.pull_request.head.sha }} + - uses: dtolnay/rust-toolchain@1.80.0 + - name: Run tests + run: cargo test zk + env: + RUST_BACKTRACE: full \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 290681bf..bf02600a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -64,3 +64,7 @@ futures-util = "0.3" tokio = { version = "1.35", features = ["rt-multi-thread"] } snapbox = "0.6.9" + +# zksync +foundry-compilers-artifacts-zksolc = { path = "crates/artifacts/zksolc", version = "0.11.1" } +globset = "0.4" diff --git a/crates/artifacts/artifacts/Cargo.toml b/crates/artifacts/artifacts/Cargo.toml index 0bea015d..90b07ef9 100644 --- a/crates/artifacts/artifacts/Cargo.toml +++ b/crates/artifacts/artifacts/Cargo.toml @@ -17,6 +17,7 @@ workspace = true [dependencies] foundry-compilers-artifacts-solc.workspace = true foundry-compilers-artifacts-vyper.workspace = true +foundry-compilers-artifacts-zksolc.workspace = true [features] async = ["foundry-compilers-artifacts-solc/async"] diff --git a/crates/artifacts/artifacts/src/lib.rs b/crates/artifacts/artifacts/src/lib.rs index da7ab8e8..1bdd7035 100644 --- a/crates/artifacts/artifacts/src/lib.rs +++ b/crates/artifacts/artifacts/src/lib.rs @@ -5,4 +5,5 @@ pub use foundry_compilers_artifacts_solc as solc; pub use foundry_compilers_artifacts_vyper as vyper; +pub use foundry_compilers_artifacts_zksolc as zksolc; pub use solc::*; diff --git a/crates/artifacts/solc/src/lib.rs b/crates/artifacts/solc/src/lib.rs index 1fb8b3c7..2146613e 100644 --- a/crates/artifacts/solc/src/lib.rs +++ b/crates/artifacts/solc/src/lib.rs @@ -1766,7 +1766,7 @@ pub struct StorageLayout { } impl StorageLayout { - fn is_empty(&self) -> bool { + pub fn is_empty(&self) -> bool { self.storage.is_empty() && self.types.is_empty() } } diff --git a/crates/artifacts/zksolc/Cargo.toml b/crates/artifacts/zksolc/Cargo.toml new file mode 100644 index 00000000..0f3b1efd --- /dev/null +++ b/crates/artifacts/zksolc/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "foundry-compilers-artifacts-zksolc" +description = "Rust bindings for ZkSolc JSON artifacts" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] 
+workspace = true
+
+[dependencies]
+foundry-compilers-core.workspace = true
+foundry-compilers-artifacts-solc.workspace = true
+
+serde.workspace = true
+semver.workspace = true
+serde_json.workspace = true
+tracing.workspace = true
+alloy-primitives.workspace = true
+alloy-json-abi.workspace = true
+rayon.workspace = true
+thiserror.workspace = true
+md-5.workspace = true
+yansi.workspace = true
+futures-util = { workspace = true, optional = true }
+tokio = { workspace = true, optional = true }
+
+walkdir = "2.4"
+
+[target.'cfg(windows)'.dependencies]
+path-slash.workspace = true
+
+[dev-dependencies]
+serde_path_to_error = "0.1"
+similar-asserts.workspace = true
+foundry-compilers-core = { workspace = true, features = ["test-utils"] }
+
+[features]
+async = ["dep:tokio", "futures-util", "tokio/fs"]
diff --git a/crates/artifacts/zksolc/src/bytecode.rs b/crates/artifacts/zksolc/src/bytecode.rs
new file mode 100644
index 00000000..8f49b285
--- /dev/null
+++ b/crates/artifacts/zksolc/src/bytecode.rs
@@ -0,0 +1,28 @@
+use std::collections::BTreeMap;
+
+use foundry_compilers_artifacts_solc::{
+    bytecode::{serialize_bytecode_without_prefix, BytecodeObject},
+    CompactBytecode, CompactDeployedBytecode,
+};
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Bytecode {
+    #[serde(serialize_with = "serialize_bytecode_without_prefix")]
+    pub object: BytecodeObject,
+}
+
+// NOTE: the distinction between bytecode and deployed bytecode makes no sense for zkEVM, but
+// we implement these conversions in order to be able to use the Artifacts trait.
+impl From<Bytecode> for CompactBytecode {
+    fn from(bcode: Bytecode) -> Self {
+        Self { object: bcode.object, source_map: None, link_references: BTreeMap::default() }
+    }
+}
+
+impl From<Bytecode> for CompactDeployedBytecode {
+    fn from(bcode: Bytecode) -> Self {
+        Self { bytecode: Some(bcode.into()), immutable_references: BTreeMap::default() }
+    }
+}
diff --git a/crates/artifacts/zksolc/src/contract.rs b/crates/artifacts/zksolc/src/contract.rs
new file mode 100644
index 00000000..ba8c67b1
--- /dev/null
+++ b/crates/artifacts/zksolc/src/contract.rs
@@ -0,0 +1,88 @@
+//! Contract related types.
+use crate::Evm;
+use alloy_json_abi::JsonAbi;
+use foundry_compilers_artifacts_solc::{
+    CompactContractBytecode, CompactContractBytecodeCow, CompactContractRef, DevDoc, StorageLayout,
+    UserDoc,
+};
+use serde::{Deserialize, Serialize};
+use std::{borrow::Cow, collections::BTreeMap};
+
+/// Represents a compiled solidity contract
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Contract {
+    pub abi: Option<JsonAbi>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub metadata: Option<serde_json::Value>,
+    #[serde(default)]
+    pub userdoc: UserDoc,
+    #[serde(default)]
+    pub devdoc: DevDoc,
+    /// The contract optimized IR code.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub ir_optimized: Option<String>,
+    /// The contract storage layout.
+    #[serde(default, skip_serializing_if = "StorageLayout::is_empty")]
+    pub storage_layout: StorageLayout,
+    /// The contract EraVM bytecode hash.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub hash: Option<String>,
+    /// The contract factory dependencies.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub factory_dependencies: Option<BTreeMap<String, String>>,
+    /// The contract missing libraries.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub missing_libraries: Option<Vec<String>>,
+    /// EVM-related outputs
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub evm: Option<Evm>,
+}
+
+// CompactContract variants
+// TODO: for zkEvm, the distinction between bytecode and deployed_bytecode makes little sense,
+// and there are some fields that the output doesn't provide (e.g: source_map)
+// However, we implement these because we get the Artifact trait and can reuse lots of
+// the crate's helpers without needing to duplicate everything. Maybe there's a way
+// we can get all these without having to add the same bytecode twice on each struct.
+// Ideally the Artifacts trait would not be coupled to a specific Contract type
+impl<'a> From<&'a Contract> for CompactContractBytecodeCow<'a> {
+    fn from(artifact: &'a Contract) -> Self {
+        let (bytecode, deployed_bytecode) = if let Some(ref evm) = artifact.evm {
+            (
+                evm.bytecode.clone().map(Into::into).map(Cow::Owned),
+                evm.bytecode.clone().map(Into::into).map(Cow::Owned),
+            )
+        } else {
+            (None, None)
+        };
+        CompactContractBytecodeCow {
+            abi: artifact.abi.as_ref().map(Cow::Borrowed),
+            bytecode,
+            deployed_bytecode,
+        }
+    }
+}
+
+impl From<Contract> for CompactContractBytecode {
+    fn from(c: Contract) -> Self {
+        let bytecode = if let Some(evm) = c.evm { evm.bytecode } else { None };
+        Self {
+            abi: c.abi.map(Into::into),
+            deployed_bytecode: bytecode.clone().map(|b| b.into()),
+            bytecode: bytecode.map(|b| b.into()),
+        }
+    }
+}
+
+impl<'a> From<&'a Contract> for CompactContractRef<'a> {
+    fn from(c: &'a Contract) -> Self {
+        let (bin, bin_runtime) = if let Some(ref evm) = c.evm {
+            (evm.bytecode.as_ref().map(|c| &c.object), evm.bytecode.as_ref().map(|c| &c.object))
+        } else {
+            (None, None)
+        };
+
+        Self { abi: c.abi.as_ref(), bin, bin_runtime }
+    }
+}
diff --git a/crates/artifacts/zksolc/src/error.rs b/crates/artifacts/zksolc/src/error.rs
new file mode 100644
index 00000000..72c5a40a
--- /dev/null
+++ b/crates/artifacts/zksolc/src/error.rs
@@ -0,0 +1,270 @@
+use foundry_compilers_artifacts_solc::error::{Severity, SourceLocation};
+
+use foundry_compilers_artifacts_solc::serde_helpers;
+use serde::{Deserialize, Serialize};
+use std::{fmt, ops::Range};
+use yansi::{Color, Style};
+
+/// The `solc --standard-json` output error.
+#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Error {
+    /// The component type.
+    pub component: String,
+    /// The error code.
+    #[serde(default, with = "serde_helpers::display_from_str_opt")]
+    pub error_code: Option<u64>,
+    /// The formatted error message.
+    pub formatted_message: Option<String>,
+    /// The non-formatted error message.
+    pub message: String,
+    /// The error severity.
+    pub severity: Severity,
+    /// The error location data.
+    pub source_location: Option<SourceLocation>,
+    /// The error type.
+    pub r#type: String,
+}
+
+impl Error {
+    /// Returns `true` if the error is an error.
+    pub const fn is_error(&self) -> bool {
+        self.severity.is_error()
+    }
+
+    /// Returns `true` if the error is a warning.
+    pub const fn is_warning(&self) -> bool {
+        self.severity.is_warning()
+    }
+
+    /// Returns `true` if the error is an info.
+    pub const fn is_info(&self) -> bool {
+        self.severity.is_info()
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // TODO: Adding short msg for zksolc results in duplicate error messages.
+        // Check if this is always the case or if it would be useful to
+        // add it sometimes.
+ //let mut short_msg = self.message.trim(); + let fmtd_msg = self.formatted_message.as_deref().unwrap_or(""); + + /* + if short_msg.is_empty() { + // if the message is empty, try to extract the first line from the formatted message + if let Some(first_line) = fmtd_msg.lines().next() { + // this is something like `ParserError: ` + if let Some((_, s)) = first_line.split_once(':') { + short_msg = s.trim_start(); + } else { + short_msg = first_line; + } + } + } + */ + + // Error (XXXX): Error Message + styled(f, self.severity.color().bold(), |f| self.fmt_severity(f))?; + //fmt_msg(f, short_msg)?; + + let mut lines = fmtd_msg.lines(); + + /* + // skip the first line if it contains the same message as the one we just formatted, + // unless it also contains a source location, in which case the entire error message is an + // old style error message, like: + // path/to/file:line:column: ErrorType: message + if lines.clone().next().map_or(false, |l| { + l.contains(short_msg) && l.bytes().filter(|b| *b == b':').count() < 3 + }) { + let _ = lines.next(); + } + */ + + // format the main source location + fmt_source_location(f, &mut lines)?; + + // format remaining lines as secondary locations + while let Some(line) = lines.next() { + f.write_str("\n")?; + + if let Some((note, msg)) = line.split_once(':') { + styled(f, Self::secondary_style(), |f| f.write_str(note))?; + fmt_msg(f, msg)?; + } else { + f.write_str(line)?; + } + + fmt_source_location(f, &mut lines)?; + } + + Ok(()) + } +} + +impl Error { + /// The style of the diagnostic severity. + pub fn error_style(&self) -> Style { + self.severity.color().bold() + } + + /// The style of the diagnostic message. + pub fn message_style() -> Style { + Color::White.bold() + } + + /// The style of the secondary source location. + pub fn secondary_style() -> Style { + Color::Cyan.bold() + } + + /// The style of the source location highlight. + pub fn highlight_style() -> Style { + Style::new().fg(Color::Yellow) + } + + /// The style of the diagnostics. + pub fn diag_style() -> Style { + Color::Yellow.bold() + } + + /// The style of the source location frame. + pub fn frame_style() -> Style { + Style::new().fg(Color::Blue) + } + + /// Formats the diagnostic severity: + /// + /// ```text + /// Error (XXXX) + /// ``` + fn fmt_severity(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.severity.as_str())?; + if let Some(code) = self.error_code { + write!(f, " ({code})")?; + } + Ok(()) + } +} + +/// Formats the diagnostic message. 
+fn fmt_msg(f: &mut fmt::Formatter<'_>, msg: &str) -> fmt::Result {
+    styled(f, Error::message_style(), |f| {
+        f.write_str(": ")?;
+        f.write_str(msg.trim_start())
+    })
+}
+
+fn fmt_source_location(f: &mut fmt::Formatter<'_>, lines: &mut std::str::Lines<'_>) -> fmt::Result {
+    // --> source
+    if let Some(line) = lines.next() {
+        f.write_str("\n")?;
+
+        let arrow = "-->";
+        if let Some((left, loc)) = line.split_once(arrow) {
+            f.write_str(left)?;
+            styled(f, Error::frame_style(), |f| f.write_str(arrow))?;
+            f.write_str(loc)?;
+        } else {
+            f.write_str(line)?;
+        }
+    }
+
+    // get the next 3 lines
+    let Some(line1) = lines.next() else {
+        return Ok(());
+    };
+    let Some(line2) = lines.next() else {
+        f.write_str("\n")?;
+        f.write_str(line1)?;
+        return Ok(());
+    };
+    let Some(line3) = lines.next() else {
+        f.write_str("\n")?;
+        f.write_str(line1)?;
+        f.write_str("\n")?;
+        f.write_str(line2)?;
+        return Ok(());
+    };
+
+    // line 1, just a frame
+    fmt_framed_location(f, line1, None)?;
+
+    // line 2, frame and code; highlight the text based on line 3's carets
+    let hl_start = line3.find('^');
+    let highlight = hl_start.map(|start| {
+        let end = if line3.contains("^ (") {
+            // highlight the entire line because of "spans across multiple lines" diagnostic
+            line2.len()
+        } else if let Some(carets) = line3[start..].find(|c: char| c != '^') {
+            // highlight the text that the carets point to
+            start + carets
+        } else {
+            // the carets span the entire third line
+            line3.len()
+        }
+        // bound in case carets span longer than the code they point to
+        .min(line2.len());
+        (start.min(end)..end, Error::highlight_style())
+    });
+    fmt_framed_location(f, line2, highlight)?;
+
+    // line 3, frame and maybe highlight, this time till the end unconditionally
+    let highlight = hl_start.map(|i| (i..line3.len(), Error::diag_style()));
+    fmt_framed_location(f, line3, highlight)
+}
+
+/// Colors a single Solidity framed source location line. Part of [`fmt_source_location`].
+fn fmt_framed_location(
+    f: &mut fmt::Formatter<'_>,
+    line: &str,
+    highlight: Option<(Range<usize>, Style)>,
+) -> fmt::Result {
+    f.write_str("\n")?;
+
+    if let Some((space_or_line_number, rest)) = line.split_once('|') {
+        // if the potential frame is not just whitespace or numbers, don't color it
+        if !space_or_line_number.chars().all(|c| c.is_whitespace() || c.is_numeric()) {
+            return f.write_str(line);
+        }
+
+        styled(f, Error::frame_style(), |f| {
+            f.write_str(space_or_line_number)?;
+            f.write_str("|")
+        })?;
+
+        if let Some((range, style)) = highlight {
+            let Range { start, end } = range;
+            // Skip highlighting if the range is not valid unicode.
+            if !line.is_char_boundary(start) || !line.is_char_boundary(end) {
+                f.write_str(rest)
+            } else {
+                let rest_start = line.len() - rest.len();
+                f.write_str(&line[rest_start..start])?;
+                styled(f, style, |f| f.write_str(&line[range]))?;
+                f.write_str(&line[end..])
+            }
+        } else {
+            f.write_str(rest)
+        }
+    } else {
+        f.write_str(line)
+    }
+}
+
+/// Calls `fun` in between [`Style::fmt_prefix`] and [`Style::fmt_suffix`].
+fn styled<F>(f: &mut fmt::Formatter<'_>, style: Style, fun: F) -> fmt::Result
+where
+    F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result,
+{
+    let enabled = yansi::is_enabled();
+    if enabled {
+        style.fmt_prefix(f)?;
+    }
+    fun(f)?;
+    if enabled {
+        style.fmt_suffix(f)?;
+    }
+    Ok(())
+}
diff --git a/crates/artifacts/zksolc/src/lib.rs b/crates/artifacts/zksolc/src/lib.rs
new file mode 100644
index 00000000..504c2656
--- /dev/null
+++ b/crates/artifacts/zksolc/src/lib.rs
@@ -0,0 +1,169 @@
+use foundry_compilers_artifacts_solc::{
+    CompactContractRef, FileToContractsMap, SourceFile, SourceFiles,
+};
+
+use semver::Version;
+use serde::{Deserialize, Serialize};
+use std::{
+    collections::{BTreeMap, HashSet},
+    path::{Path, PathBuf},
+};
+
+pub mod bytecode;
+pub mod contract;
+pub mod error;
+pub mod output_selection;
+
+use self::{bytecode::Bytecode, contract::Contract, error::Error};
+
+/// file -> (contract name -> Contract)
+pub type Contracts = FileToContractsMap<Contract>;
+
+/// Output type `solc` produces
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Default)]
+pub struct CompilerOutput {
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub errors: Vec<Error>,
+    #[serde(default)]
+    pub sources: BTreeMap<PathBuf, SourceFile>,
+    #[serde(default)]
+    pub contracts: FileToContractsMap<Contract>,
+    /// The `solc` compiler version.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub version: Option<String>,
+    /// The `solc` compiler long version.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub long_version: Option<String>,
+    /// The `zksolc` compiler version.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub zk_version: Option<String>,
+    /// The ZKsync solc compiler version (if it was used). This field is
+    /// inserted by this crate and not an actual part of the compiler output.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub zksync_solc_version: Option<Version>,
+}
+
+impl CompilerOutput {
+    /// Whether the output contains a compiler error
+    pub fn has_error(&self) -> bool {
+        self.errors.iter().any(|err| err.severity.is_error())
+    }
+
+    /// Returns the output's source files and contracts separately, wrapped in helper types that
+    /// provide several helper methods
+    pub fn split(self) -> (SourceFiles, OutputContracts) {
+        (SourceFiles(self.sources), OutputContracts(self.contracts))
+    }
+
+    /// Retains only those files the given iterator yields
+    ///
+    /// In other words, removes all contracts for files not included in the iterator
+    pub fn retain_files<'a, I>(&mut self, files: I)
+    where
+        I: IntoIterator<Item = &'a Path>,
+    {
+        // Note: use `to_lowercase` here because solc does not necessarily emit the exact
+        // file name, e.g.
+        // `src/utils/upgradeProxy.sol` is emitted as `src/utils/UpgradeProxy.sol`
+        let files: HashSet<_> =
+            files.into_iter().map(|s| s.to_string_lossy().to_lowercase()).collect();
+        self.contracts.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase()));
+        self.sources.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase()));
+    }
+
+    pub fn merge(&mut self, other: Self) {
+        self.errors.extend(other.errors);
+        self.contracts.extend(other.contracts);
+        self.sources.extend(other.sources);
+    }
+
+    pub fn join_all(&mut self, root: impl AsRef<Path>) {
+        let root = root.as_ref();
+        self.contracts = std::mem::take(&mut self.contracts)
+            .into_iter()
+            .map(|(path, contracts)| (root.join(path), contracts))
+            .collect();
+        self.sources = std::mem::take(&mut self.sources)
+            .into_iter()
+            .map(|(path, source)| (root.join(path), source))
+            .collect();
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Evm {
+    /// The contract EraVM assembly code.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub assembly: Option<String>,
+    /// The contract EVM legacy assembly code.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub legacy_assembly: Option<serde_json::Value>,
+    /// The contract bytecode.
+    /// Is reset by that of EraVM before yielding the compiled project artifacts.
+    pub bytecode: Option<Bytecode>,
+    /// The list of function hashes
+    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
+    pub method_identifiers: BTreeMap<String, String>,
+    /// The extra EVMLA metadata.
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub extra_metadata: Option<ExtraMetadata>,
+}
+
+/// The `solc --standard-json` output contract EVM extra metadata.
+#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct ExtraMetadata {
+    /// The list of recursive functions.
+    #[serde(default = "Vec::new")]
+    pub recursive_functions: Vec<RecursiveFunction>,
+}
+
+/// The `solc --standard-json` output contract EVM recursive function.
+#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct RecursiveFunction {
+    /// The function name.
+    pub name: String,
+    /// The creation code function block tag.
+    pub creation_tag: Option<usize>,
+    /// The runtime code function block tag.
+    pub runtime_tag: Option<usize>,
+    /// The number of input arguments.
+    #[serde(rename = "totalParamSize")]
+    pub input_size: usize,
+    /// The number of output arguments.
+    #[serde(rename = "totalRetParamSize")]
+    pub output_size: usize,
+}
+
+/// A wrapper helper type for the `Contracts` type alias
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+pub struct OutputContracts(pub Contracts);
+
+impl OutputContracts {
+    /// Returns an iterator over all contracts and their source names.
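+    ///
+    /// A minimal usage sketch (the `output` value is hypothetical, and the block is
+    /// `ignore`d so it is not run as a doctest):
+    ///
+    /// ```ignore
+    /// let (_sources, contracts) = output.split();
+    /// for (name, contract) in contracts.into_contracts() {
+    ///     println!("{name}: has ABI: {}", contract.abi.is_some());
+    /// }
+    /// ```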
+    pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
+        self.0.into_values().flatten()
+    }
+
+    /// Iterate over all contracts and their names
+    pub fn contracts_iter(&self) -> impl Iterator<Item = (&String, &Contract)> {
+        self.0.values().flatten()
+    }
+
+    /// Finds the _first_ contract with the given name
+    pub fn find(&self, contract: impl AsRef<str>) -> Option<CompactContractRef<'_>> {
+        let contract_name = contract.as_ref();
+        self.contracts_iter().find_map(|(name, contract)| {
+            (name == contract_name).then(|| CompactContractRef::from(contract))
+        })
+    }
+
+    /// Finds the first contract with the given name and removes it from the set
+    pub fn remove(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
+        let contract_name = contract.as_ref();
+        self.0.values_mut().find_map(|c| c.remove(contract_name))
+    }
+}
diff --git a/crates/artifacts/zksolc/src/output_selection.rs b/crates/artifacts/zksolc/src/output_selection.rs
new file mode 100644
index 00000000..a063fb16
--- /dev/null
+++ b/crates/artifacts/zksolc/src/output_selection.rs
@@ -0,0 +1,71 @@
+use serde::{Deserialize, Serialize};
+
+use std::collections::HashSet;
+
+/// The `solc --standard-json` output selection.
+#[derive(Debug, Default, Serialize, Deserialize, Eq, PartialEq, Clone)]
+pub struct OutputSelection {
+    /// Only the 'all' wildcard is available for robustness reasons.
+    #[serde(rename = "*", skip_serializing_if = "Option::is_none")]
+    pub all: Option<FileOutputSelection>,
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
+pub struct FileOutputSelection {
+    /// The per-file output selections.
+    #[serde(rename = "", skip_serializing_if = "Option::is_none")]
+    pub per_file: Option<HashSet<OutputSelectionFlag>>,
+    /// The per-contract output selections.
+    #[serde(rename = "*", skip_serializing_if = "Option::is_none")]
+    pub per_contract: Option<HashSet<OutputSelectionFlag>>,
+}
+
+/// The `solc --standard-json` expected output selection flag.
+#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum OutputSelectionFlag {
+    /// The ABI JSON.
+    #[serde(rename = "abi")]
+    ABI,
+    /// The metadata.
+    #[serde(rename = "metadata")]
+    Metadata,
+    /// The developer documentation.
+    #[serde(rename = "devdoc")]
+    Devdoc,
+    /// The user documentation.
+    #[serde(rename = "userdoc")]
+    Userdoc,
+    /// The function signature hashes JSON.
+    #[serde(rename = "evm.methodIdentifiers")]
+    MethodIdentifiers,
+    /// The storage layout.
+    #[serde(rename = "storageLayout")]
+    StorageLayout,
+    /// The AST JSON.
+    #[serde(rename = "ast")]
+    AST,
+    /// The Yul IR.
+    #[serde(rename = "irOptimized")]
+    Yul,
+    /// The EVM legacy assembly JSON.
+ #[serde(rename = "evm.legacyAssembly")] + EVMLA, +} + +impl std::fmt::Display for OutputSelectionFlag { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::ABI => write!(f, "abi"), + Self::Metadata => write!(f, "metadata"), + Self::Devdoc => write!(f, "devdoc"), + Self::Userdoc => write!(f, "userdoc"), + Self::MethodIdentifiers => write!(f, "evm.methodIdentifiers"), + Self::StorageLayout => write!(f, "storageLayout"), + Self::AST => write!(f, "ast"), + Self::Yul => write!(f, "irOptimized"), + Self::EVMLA => write!(f, "evm.legacyAssembly"), + } + } +} diff --git a/crates/compilers/Cargo.toml b/crates/compilers/Cargo.toml index ecda8dc5..0694c23e 100644 --- a/crates/compilers/Cargo.toml +++ b/crates/compilers/Cargo.toml @@ -51,6 +51,11 @@ svm = { workspace = true, optional = true } svm-builds = { package = "svm-rs-builds", version = "0.5", default-features = false, optional = true } sha2 = { version = "0.10", default-features = false, optional = true } +# zksync +reqwest = { version = "0.12", default-features = false, optional = true } +walkdir = "2.4" +fs4 = "0.8" + [dev-dependencies] tracing-subscriber = { version = "0.3", default-features = false, features = [ "env-filter", @@ -65,7 +70,7 @@ snapbox.workspace = true foundry-compilers-core = { workspace = true, features = ["test-utils"] } [features] -default = ["rustls"] +default = ["rustls", "async", "svm-solc", "project-util"] test-utils = [] full = ["async", "svm-solc"] @@ -78,6 +83,7 @@ async = [ "tokio/process", "tokio/io-util", "foundry-compilers-artifacts/async", + "reqwest", ] # Enables `svm` to auto-detect and manage `solc` builds. svm-solc = [ diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs index 506ea7ca..d839b0b1 100644 --- a/crates/compilers/src/compilers/mod.rs +++ b/crates/compilers/src/compilers/mod.rs @@ -22,6 +22,7 @@ use std::{ pub mod multi; pub mod solc; pub mod vyper; +pub mod zksolc; pub use vyper::*; /// A compiler version is either installed (available locally) or can be downloaded, from the remote diff --git a/crates/compilers/src/compilers/zksolc/input.rs b/crates/compilers/src/compilers/zksolc/input.rs new file mode 100644 index 00000000..0d83cd9c --- /dev/null +++ b/crates/compilers/src/compilers/zksolc/input.rs @@ -0,0 +1,138 @@ +use super::{settings::ZkSolcSettings, ZkSettings}; +use crate::{ + compilers::{solc::SolcLanguage, CompilerInput}, + solc, +}; +use foundry_compilers_artifacts::{remappings::Remapping, solc::serde_helpers, Source, Sources}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::{ + borrow::Cow, + path::{Path, PathBuf}, +}; + +#[derive(Debug, Clone, Serialize)] +pub struct ZkSolcVersionedInput { + #[serde(flatten)] + pub input: ZkSolcInput, + pub solc_version: Version, + pub cli_settings: solc::CliSettings, +} + +impl CompilerInput for ZkSolcVersionedInput { + type Settings = ZkSolcSettings; + type Language = SolcLanguage; + + // WARN: version is the solc version and NOT the zksolc version + // This is because we use solc's version resolution to figure + // out what solc to pair zksolc with. 
+    fn build(
+        sources: Sources,
+        settings: Self::Settings,
+        language: Self::Language,
+        version: Version,
+    ) -> Self {
+        let ZkSolcSettings { settings, cli_settings } = settings;
+        let input = ZkSolcInput { language, sources, settings }.sanitized(&version);
+
+        Self { solc_version: version, input, cli_settings }
+    }
+
+    fn language(&self) -> Self::Language {
+        self.input.language
+    }
+
+    // TODO: This is the solc_version and not the zksolc version. We store this here because
+    // the input is not associated with a zksolc version and we use solc's version resolution
+    // features to know what solc to use to compile a file with. We should think about
+    // how to best honor this api so the value is not confusing.
+    fn version(&self) -> &Version {
+        &self.solc_version
+    }
+
+    fn sources(&self) -> impl Iterator<Item = (&Path, &Source)> {
+        self.input.sources.iter().map(|(path, source)| (path.as_path(), source))
+    }
+
+    fn compiler_name(&self) -> Cow<'static, str> {
+        "zksolc and solc".into()
+    }
+
+    fn strip_prefix(&mut self, base: &Path) {
+        self.input.strip_prefix(base);
+    }
+}
+
+/// Input type `zksolc` expects.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ZkSolcInput {
+    pub language: SolcLanguage,
+    pub sources: Sources,
+    pub settings: ZkSettings,
+}
+
+/// Default `language` field is set to `"Solidity"`.
+impl Default for ZkSolcInput {
+    fn default() -> Self {
+        Self {
+            language: SolcLanguage::Solidity,
+            sources: Sources::default(),
+            settings: ZkSettings::default(),
+        }
+    }
+}
+
+impl ZkSolcInput {
+    /// Removes the `base` path from all source files
+    pub fn strip_prefix(&mut self, base: impl AsRef<Path>) {
+        let base = base.as_ref();
+        self.sources = std::mem::take(&mut self.sources)
+            .into_iter()
+            .map(|(path, s)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), s))
+            .collect();
+
+        self.settings.strip_prefix(base);
+    }
+
+    /// The flag indicating whether the current [CompilerInput] is
+    /// constructed for the yul sources
+    pub fn is_yul(&self) -> bool {
+        self.language == SolcLanguage::Yul
+    }
+
+    /// Consumes the type and returns a [ZkSolcInput::sanitized] version
+    pub fn sanitized(mut self, version: &Version) -> Self {
+        self.settings.sanitize(version);
+        self
+    }
+
+    pub fn with_remappings(mut self, remappings: Vec<Remapping>) -> Self {
+        if self.language == SolcLanguage::Yul {
+            if !remappings.is_empty() {
+                warn!("omitting remappings supplied for the yul sources");
+            }
+        } else {
+            self.settings.remappings = remappings;
+        }
+
+        self
+    }
+}
+
+/// A `CompilerInput` representation used for verify
+///
+/// This type is an alternative `CompilerInput` but uses non-alphabetic ordering of the `sources`
+/// and instead emits the (Path -> Source) path in the same order as the pairs in the `sources`
+/// `Vec`.
+/// This is used over a map, so we can determine the order in which etherscan will display
+/// the verified contracts
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct StandardJsonCompilerInput {
+    pub language: SolcLanguage,
+    #[serde(with = "serde_helpers::tuple_vec_map")]
+    pub sources: Vec<(PathBuf, Source)>,
+    pub settings: ZkSettings,
+}
+
+impl StandardJsonCompilerInput {
+    pub fn new(sources: Vec<(PathBuf, Source)>, settings: ZkSettings) -> Self {
+        Self { language: SolcLanguage::Solidity, sources, settings }
+    }
+}
diff --git a/crates/compilers/src/compilers/zksolc/mod.rs b/crates/compilers/src/compilers/zksolc/mod.rs
new file mode 100644
index 00000000..153b76da
--- /dev/null
+++ b/crates/compilers/src/compilers/zksolc/mod.rs
@@ -0,0 +1,748 @@
+use self::input::{ZkSolcInput, ZkSolcVersionedInput};
+use crate::{
+    error::{Result, SolcError},
+    resolver::parse::SolData,
+    solc::SolcCompiler,
+    CompilationError, Compiler, CompilerVersion,
+};
+use foundry_compilers_artifacts::{
+    solc::error::SourceLocation,
+    zksolc::{error::Error, CompilerOutput},
+    Severity, SolcLanguage,
+};
+
+use itertools::Itertools;
+use semver::Version;
+use serde::{Deserialize, Serialize};
+use std::{
+    collections::BTreeSet,
+    path::{Path, PathBuf},
+    process::{Command, Output, Stdio},
+    str::FromStr,
+};
+
+#[cfg(feature = "async")]
+use std::{
+    fs::{self, create_dir_all, set_permissions, File},
+    io::Write,
+};
+
+#[cfg(target_family = "unix")]
+#[cfg(feature = "async")]
+use std::os::unix::fs::PermissionsExt;
+
+pub mod input;
+pub mod settings;
+pub use settings::{ZkSettings, ZkSolcSettings};
+
+pub const ZKSOLC: &str = "zksolc";
+pub const ZKSYNC_SOLC_RELEASE: Version = Version::new(1, 0, 1);
+pub const ZKSOLC_VERSION: Version = Version::new(1, 5, 4);
+
+#[derive(Debug, Clone, Serialize)]
+enum ZkSolcOS {
+    LinuxAMD64,
+    LinuxARM64,
+    MacAMD,
+    MacARM,
+}
+
+fn get_operating_system() -> Result<ZkSolcOS> {
+    match std::env::consts::OS {
+        "linux" => match std::env::consts::ARCH {
+            "aarch64" => Ok(ZkSolcOS::LinuxARM64),
+            _ => Ok(ZkSolcOS::LinuxAMD64),
+        },
+        "macos" | "darwin" => match std::env::consts::ARCH {
+            "aarch64" => Ok(ZkSolcOS::MacARM),
+            _ => Ok(ZkSolcOS::MacAMD),
+        },
+        _ => Err(SolcError::msg(format!("Unsupported operating system {}", std::env::consts::OS))),
+    }
+}
+
+impl ZkSolcOS {
+    fn get_zksolc_prefix(&self) -> &str {
+        match self {
+            Self::LinuxAMD64 => "zksolc-linux-amd64-musl-",
+            Self::LinuxARM64 => "zksolc-linux-arm64-musl-",
+            Self::MacAMD => "zksolc-macosx-amd64-",
+            Self::MacARM => "zksolc-macosx-arm64-",
+        }
+    }
+
+    fn get_solc_prefix(&self) -> &str {
+        match self {
+            Self::LinuxAMD64 => "solc-linux-amd64-",
+            Self::LinuxARM64 => "solc-linux-arm64-",
+            Self::MacAMD => "solc-macosx-amd64-",
+            Self::MacARM => "solc-macosx-arm64-",
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct ZkSolcCompiler {
+    pub zksolc: PathBuf,
+    pub solc: SolcCompiler,
+}
+
+#[cfg(feature = "project-util")]
+impl Default for ZkSolcCompiler {
+    fn default() -> Self {
+        let zksolc =
+            ZkSolc::get_path_for_version(&ZKSOLC_VERSION).expect("Could not install zksolc");
+        Self { zksolc, solc: Default::default() }
+    }
+}
+
+impl Compiler for ZkSolcCompiler {
+    type Input = ZkSolcVersionedInput;
+    type CompilationError = Error;
+    type ParsedSource = SolData;
+    type Settings = ZkSolcSettings;
+    type Language = SolcLanguage;
+
+    fn compile(
+        &self,
+        _input: &Self::Input,
+    ) -> Result<crate::compilers::CompilerOutput<Error>> {
+        // This method cannot be implemented until CompilerOutput is decoupled from
+        // evm Contract
+        panic!(
+            "`Compiler::compile` not supported for `ZkSolcCompiler`, should call ZkSolc::compile()"
+        );
+    }
+
+    // NOTE: This is used in the context of matching source files to compiler versions, so
+    // the solc versions are returned
+    fn available_versions(&self, _language: &Self::Language) -> Vec<CompilerVersion> {
+        match &self.solc {
+            SolcCompiler::Specific(solc) => vec![CompilerVersion::Installed(Version::new(
+                solc.version.major,
+                solc.version.minor,
+                solc.version.patch,
+            ))],
+            SolcCompiler::AutoDetect => {
+                let mut all_versions = ZkSolc::solc_installed_versions()
+                    .into_iter()
+                    .map(CompilerVersion::Installed)
+                    .collect::<Vec<_>>();
+                let mut uniques = all_versions
+                    .iter()
+                    .map(|v| {
+                        let v = v.as_ref();
+                        (v.major, v.minor, v.patch)
+                    })
+                    .collect::<std::collections::HashSet<_>>();
+                all_versions.extend(
+                    ZkSolc::solc_available_versions()
+                        .into_iter()
+                        .filter(|v| uniques.insert((v.major, v.minor, v.patch)))
+                        .map(CompilerVersion::Remote),
+                );
+                all_versions.sort_unstable();
+                all_versions
+            }
+        }
+    }
+}
+
+impl ZkSolcCompiler {
+    pub fn zksolc(&self, input: &ZkSolcVersionedInput) -> Result<ZkSolc> {
+        let solc = match &self.solc {
+            SolcCompiler::Specific(solc) => Some(solc.solc.clone()),
+            SolcCompiler::AutoDetect => {
+                #[cfg(test)]
+                crate::take_solc_installer_lock!(_lock);
+
+                let solc_version_without_metadata = format!(
+                    "{}.{}.{}",
+                    input.solc_version.major, input.solc_version.minor, input.solc_version.patch
+                );
+                let maybe_solc =
+                    ZkSolc::find_solc_installed_version(&solc_version_without_metadata)?;
+                if let Some(solc) = maybe_solc {
+                    Some(solc)
+                } else {
+                    #[cfg(feature = "async")]
+                    {
+                        let installed_solc_path =
+                            ZkSolc::solc_blocking_install(&solc_version_without_metadata)?;
+                        Some(installed_solc_path)
+                    }
+                }
+            }
+        };
+
+        let mut zksolc = ZkSolc::new(self.zksolc.clone(), solc)?;
+
+        zksolc.base_path.clone_from(&input.cli_settings.base_path);
+        zksolc.allow_paths.clone_from(&input.cli_settings.allow_paths);
+        zksolc.include_paths.clone_from(&input.cli_settings.include_paths);
+
+        Ok(zksolc)
+    }
+}
+
+/// Version metadata. Will include `zksync_version` if the compiler is ZKsync solc.
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
+pub struct SolcVersionInfo {
+    /// The solc compiler version (e.g: 0.8.20)
+    pub version: Version,
+    /// The full zksync solc compiler version (e.g: 0.8.20-1.0.1)
+    pub zksync_version: Option<Version>,
+}
+
+/// Given a solc path, get both the solc semver and optional zkSync version.
+pub fn get_solc_version_info(path: &Path) -> Result<SolcVersionInfo> {
+    let mut cmd = Command::new(path);
+    cmd.arg("--version").stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped());
+    debug!(?cmd, "getting Solc versions");
+
+    let output = cmd.output().map_err(|e| SolcError::io(e, path))?;
+    trace!(?output);
+
+    if !output.status.success() {
+        return Err(SolcError::solc_output(&output));
+    }
+
+    let stdout = String::from_utf8_lossy(&output.stdout);
+    let lines: Vec<&str> = stdout.lines().filter(|l| !l.trim().is_empty()).collect();
+
+    // Get the solc version from the second line
+    let version = lines.get(1).ok_or_else(|| SolcError::msg("Version not found in Solc output"))?;
+    let version =
+        Version::from_str(&version.trim_start_matches("Version: ").replace(".g++", ".gcc"))?;
+
+    // Check for a ZKsync version in the last line
+    let zksync_version = lines.last().and_then(|line| {
+        if line.starts_with("ZKsync") {
+            let version_str = line.trim_start_matches("ZKsync:").trim();
+            Version::parse(version_str).ok()
+        } else {
+            None
+        }
+    });
+
+    Ok(SolcVersionInfo { version, zksync_version })
+}
+
+/// Abstraction over the `zksolc` command line utility
+///
+/// Supports sync and async functions.
+///
+/// By default the zksolc path is configured as follows, with descending priority:
+/// 1. `ZKSOLC_PATH` environment variable
+/// 2. `zksolc` otherwise
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
+pub struct ZkSolc {
+    /// Path to the `zksolc` executable
+    pub zksolc: PathBuf,
+    /// Value for the --base-path arg
+    pub base_path: Option<PathBuf>,
+    /// Value for the --allow-paths arg
+    pub allow_paths: BTreeSet<PathBuf>,
+    /// Value for the --include-paths arg
+    pub include_paths: BTreeSet<PathBuf>,
+    /// Value for the --solc arg
+    pub solc: Option<PathBuf>,
+    /// Version data for solc
+    pub solc_version_info: SolcVersionInfo,
+}
+
+impl ZkSolc {
+    /// A new instance which points to `zksolc`
+    pub fn new(path: PathBuf, solc: Option<PathBuf>) -> Result<Self> {
+        let default_solc_path = PathBuf::from("solc");
+        let solc_path = solc.as_ref().unwrap_or(&default_solc_path);
+        let solc_version_info = get_solc_version_info(solc_path)?;
+        Ok(Self {
+            zksolc: path,
+            base_path: None,
+            allow_paths: Default::default(),
+            include_paths: Default::default(),
+            solc,
+            solc_version_info,
+        })
+    }
+
+    pub fn get_path_for_version(version: &Version) -> Result<PathBuf> {
+        let maybe_zksolc = Self::find_installed_version(version)?;
+
+        let path =
+            if let Some(zksolc) = maybe_zksolc { zksolc } else { Self::blocking_install(version)? };
+
+        Ok(path)
+    }
+
+    /// Invokes `zksolc --version` and parses the output as a SemVer [`Version`].
+    pub fn get_version_for_path(path: &Path) -> Result<Version> {
+        let mut cmd = Command::new(path);
+        cmd.arg("--version").stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped());
+        debug!(?cmd, "getting ZkSolc version");
+        let output = cmd.output().map_err(map_io_err(path))?;
+        trace!(?output);
+        let version = version_from_output(output)?;
+        debug!(%version);
+        Ok(version)
+    }
+
+    /// Sets zksolc's base path
+    pub fn with_base_path(mut self, base_path: impl Into<PathBuf>) -> Self {
+        self.base_path = Some(base_path.into());
+        self
+    }
+
+    /// Compiles with `--standard-json` and deserializes the output as [`CompilerOutput`].
+    pub fn compile(&self, input: &ZkSolcInput) -> Result<CompilerOutput> {
+        // If solc is the ZKsync fork, the output below is extended with the complete
+        // zksolc version information
+        let output = self.compile_output(input)?;
+        // Only run UTF-8 validation once.
+        let output = std::str::from_utf8(&output).map_err(|_| SolcError::InvalidUtf8)?;
+
+        let mut compiler_output: CompilerOutput = serde_json::from_str(output)?;
+        // Add the zksync version so that there's some way to identify if zksync solc was used,
+        // by looking at the build info
+        compiler_output.zksync_solc_version = self.solc_version_info.zksync_version.clone();
+        Ok(compiler_output)
+    }
+
+    pub fn solc_installed_versions() -> Vec<Version> {
+        if let Ok(dir) = Self::compilers_dir() {
+            let os = get_operating_system().unwrap();
+            let solc_prefix = os.get_solc_prefix();
+            let mut versions: Vec<Version> = walkdir::WalkDir::new(dir)
+                .max_depth(1)
+                .into_iter()
+                .filter_map(std::result::Result::ok)
+                .filter(|e| e.file_type().is_file())
+                .filter_map(|e| e.file_name().to_str().map(|s| s.to_string()))
+                .filter(|e| e.ends_with(&ZKSYNC_SOLC_RELEASE.to_string()))
+                .filter_map(|e| {
+                    e.strip_prefix(solc_prefix)
+                        .and_then(|s| s.split('-').next())
+                        .and_then(|s| Version::parse(s).ok())
+                })
+                .collect();
+            versions.sort();
+            versions
+        } else {
+            vec![]
+        }
+    }
+
+    pub fn solc_available_versions() -> Vec<Version> {
+        let mut ret = vec![];
+        let min_max_patch_by_minor_versions =
+            vec![(4, 12, 26), (5, 0, 17), (6, 0, 12), (7, 0, 6), (8, 0, 27)];
+        for (minor, min_patch, max_patch) in min_max_patch_by_minor_versions {
+            for i in min_patch..=max_patch {
+                ret.push(Version::new(0, minor, i));
+            }
+        }
+
+        ret
+    }
+
+    /// Compiles with `--standard-json` and returns the raw `stdout` output.
+    #[instrument(name = "compile", level = "debug", skip_all)]
+    pub fn compile_output(&self, input: &ZkSolcInput) -> Result<Vec<u8>> {
+        let mut cmd = Command::new(&self.zksolc);
+
+        if !self.allow_paths.is_empty() {
+            cmd.arg("--allow-paths");
+            cmd.arg(self.allow_paths.iter().map(|p| p.display()).join(","));
+        }
+
+        if let Some(base_path) = &self.base_path {
+            for path in self.include_paths.iter().filter(|p| p.as_path() != base_path.as_path()) {
+                cmd.arg("--include-path").arg(path);
+            }
+
+            cmd.arg("--base-path").arg(base_path);
+
+            cmd.current_dir(base_path);
+        }
+
+        // don't pass the solc argument in yul mode (avoids verification)
+        if !input.is_yul() {
+            if let Some(solc) = &self.solc {
+                cmd.arg("--solc").arg(solc);
+            }
+        }
+
+        cmd.arg("--standard-json");
+        cmd.stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped());
+
+        trace!(input=%serde_json::to_string(input).unwrap_or_else(|e| e.to_string()));
+        debug!(?cmd, "compiling");
+
+        let mut child = cmd.spawn().map_err(map_io_err(&self.zksolc))?;
+        debug!("spawned");
+
+        let stdin = child.stdin.as_mut().unwrap();
+        serde_json::to_writer(stdin, input)?;
+        debug!("wrote JSON input to stdin");
+
+        let output = child.wait_with_output().map_err(map_io_err(&self.zksolc))?;
+        debug!(%output.status, output.stderr = ?String::from_utf8_lossy(&output.stderr), "finished");
+
+        compile_output(output)
+    }
+
+    fn compilers_dir() -> Result<PathBuf> {
+        let mut compilers_dir = dirs::home_dir()
+            .ok_or(SolcError::msg("Could not build SolcManager - homedir not found"))?;
+        compilers_dir.push(".zksync");
+        Ok(compilers_dir)
+    }
+
+    fn compiler_path(version: &Version) -> Result<PathBuf> {
+        let os = get_operating_system()?;
+        Ok(Self::compilers_dir()?.join(format!("{}v{}", os.get_zksolc_prefix(), version)))
+    }
+
+    fn solc_path(version_str: &str) -> Result<PathBuf> {
+        let os = get_operating_system()?;
+        Ok(Self::compilers_dir()?.join(format!(
+            "{}{}-{}",
+            os.get_solc_prefix(),
+            version_str,
+            ZKSYNC_SOLC_RELEASE
+        )))
+    }
+
+    /// Install the given zksolc version and block the thread
+    #[cfg(feature = "async")]
+    pub fn blocking_install(version: &Version) -> Result<PathBuf> {
+        let os = get_operating_system()?;
+        let compiler_prefix = os.get_zksolc_prefix();
+        let download_url = if version.pre.is_empty() {
+            format!(
+                "https://github.com/matter-labs/zksolc-bin/releases/download/v{version}/{compiler_prefix}v{version}",
+            )
+        } else {
+            let pre = version.pre.as_str();
+            // Use the version as a string, without pre-release and build metadata
+            let version_str = version.to_string();
+            let version_str = version_str.split('-').next().unwrap();
+            // Matter Labs uses a different repository for pre-releases
+            format!(
+                "https://github.com/matter-labs/era-compiler-solidity/releases/download/{pre}/zksolc-{compiler_prefix}v{version_str}",
+            )
+        };
+        let compilers_dir = Self::compilers_dir()?;
+        if !compilers_dir.exists() {
+            create_dir_all(compilers_dir)
+                .map_err(|e| SolcError::msg(format!("Could not create compilers path: {e}")))?;
+        }
+        let compiler_path = Self::compiler_path(version)?;
+        let lock_path = lock_file_path("zksolc", &version.to_string());
+
+        let label = format!("zksolc-{version}");
+        let install = compiler_blocking_install(compiler_path, lock_path, &download_url, &label);
+
+        match install {
+            Ok(path) => {
+                //crate::report::solc_installation_success(version);
+                Ok(path)
+            }
+            Err(err) => {
+                //crate::report::solc_installation_error(version, &err.to_string());
+                Err(err)
+            }
+        }
+    }
+
+    /// Install the given ZKsync solc version and block the thread
+    #[cfg(feature = "async")]
+    pub fn solc_blocking_install(version_str: &str) -> Result<PathBuf> {
+        let os = get_operating_system()?;
+        let solc_os_namespace = os.get_solc_prefix();
+        let download_url = format!(
+            "https://github.com/matter-labs/era-solidity/releases/download/{version_str}-{ZKSYNC_SOLC_RELEASE}/{solc_os_namespace}{version_str}-{ZKSYNC_SOLC_RELEASE}",
+        );
+
+        let compilers_dir = Self::compilers_dir()?;
+        if !compilers_dir.exists() {
+            create_dir_all(compilers_dir)
+                .map_err(|e| SolcError::msg(format!("Could not create compilers path: {e}")))?;
+        }
+        let solc_path = Self::solc_path(version_str)?;
+        let lock_path = lock_file_path("solc", version_str);
+
+        let label = format!("solc-{version_str}");
+        compiler_blocking_install(solc_path, lock_path, &download_url, &label)
+    }
+
+    pub fn find_installed_version(version: &Version) -> Result<Option<PathBuf>> {
+        let zksolc = Self::compiler_path(version)?;
+
+        if !zksolc.is_file() {
+            return Ok(None);
+        }
+        Ok(Some(zksolc))
+    }
+
+    pub fn find_solc_installed_version(version_str: &str) -> Result<Option<PathBuf>> {
+        let solc = Self::solc_path(version_str)?;
+
+        if !solc.is_file() {
+            return Ok(None);
+        }
+        Ok(Some(solc))
+    }
+}
+
+fn map_io_err(zksolc_path: &Path) -> impl FnOnce(std::io::Error) -> SolcError + '_ {
+    move |err| SolcError::io(err, zksolc_path)
+}
+
+fn compile_output(output: Output) -> Result<Vec<u8>> {
+    if output.status.success() {
+        Ok(output.stdout)
+    } else {
+        Err(SolcError::solc_output(&output))
+    }
+}
+
+fn version_from_output(output: Output) -> Result<Version> {
+    if output.status.success() {
+        let stdout = String::from_utf8_lossy(&output.stdout);
+        let version = stdout
+            .lines()
+            .filter(|l| !l.trim().is_empty())
+            .last()
+            .ok_or_else(|| SolcError::msg("Version not found in zksolc output"))?;
+
+        version
+            .split_whitespace()
+            .find_map(|s| {
+                let trimmed = s.trim_start_matches('v');
+                Version::from_str(trimmed).ok()
+            })
+            .ok_or_else(|| SolcError::msg("Unable to retrieve version from zksolc output"))
+    } else {
+        Err(SolcError::solc_output(&output))
+    }
+}
+
+impl AsRef<Path> for ZkSolc {
+    fn as_ref(&self) -> &Path {
+        &self.zksolc
+    }
+}
+
+impl CompilationError for Error {
+    fn is_warning(&self) -> bool {
+        self.severity.is_warning()
+    }
+
+    fn is_error(&self) -> bool {
+        self.severity.is_error()
+    }
+
+    fn source_location(&self) -> Option<SourceLocation> {
+        self.source_location.clone()
+    }
+
+    fn severity(&self) -> Severity {
+        self.severity
+    }
+
+    fn error_code(&self) -> Option<u64> {
+        self.error_code
+    }
+}
+
+#[cfg(feature = "async")]
+fn compiler_blocking_install(
+    compiler_path: PathBuf,
+    lock_path: PathBuf,
+    download_url: &str,
+    label: &str,
+) -> Result<PathBuf> {
+    use foundry_compilers_core::utils::RuntimeOrHandle;
+    trace!("blocking installing {label}");
+    // An async block is used because the underlying `reqwest::blocking::Client` does not behave
+    // well inside of a Tokio runtime. See: https://github.com/seanmonstar/reqwest/issues/1017
+    RuntimeOrHandle::new().block_on(async {
+        let client = reqwest::Client::new();
+        let response = client
+            .get(download_url)
+            .send()
+            .await
+            .map_err(|e| SolcError::msg(format!("Failed to download {label} file: {e}")))?;
+
+        if response.status().is_success() {
+            let content = response
+                .bytes()
+                .await
+                .map_err(|e| SolcError::msg(format!("Failed to download {label} file: {e}")))?;
+            trace!("downloaded {label}");
+
+            // Lock the file to indicate that installation of this compiler version is in
+            // progress. Wait until the lock file is released, possibly by another parallel
+            // thread trying to install the same compiler version.
+            trace!("try to get lock for {label}");
+            let _lock = try_lock_file(lock_path)?;
+            trace!("got lock for {label}");
+
+            // Only write to the file if it is not already there. The check is done after
+            // acquiring the lock to ensure the thread remains blocked until the required
+            // compiler is fully installed
+            if !compiler_path.exists() {
+                trace!("creating binary for {label}");
+                let mut output_file = File::create(&compiler_path).map_err(|e| {
+                    SolcError::msg(format!("Failed to create output {label} file: {e}"))
+                })?;
+
+                output_file.write_all(&content).map_err(|e| {
+                    SolcError::msg(format!("Failed to write the downloaded {label} file: {e}"))
+                })?;
+
+                set_permissions(&compiler_path, PermissionsExt::from_mode(0o755)).map_err(|e| {
+                    SolcError::msg(format!("Failed to set {label} permissions: {e}"))
+                })?;
+            } else {
+                trace!("found binary for {label}");
+            }
+        } else {
+            return Err(SolcError::msg(format!(
+                "Failed to download {label} file: status code {}",
+                response.status()
+            )));
+        }
+        trace!("{label} installation completed");
+        Ok(compiler_path)
+    })
+}
+
+/// Creates the file and locks it exclusively; this will block if the file is currently locked
+#[cfg(feature = "async")]
+fn try_lock_file(lock_path: PathBuf) -> Result<LockFile> {
+    use fs4::FileExt;
+    let _lock_file = std::fs::OpenOptions::new()
+        .create(true)
+        .truncate(true)
+        .read(true)
+        .write(true)
+        .open(&lock_path)
+        .map_err(|_| SolcError::msg("Error creating lock file"))?;
+    _lock_file.lock_exclusive().map_err(|_| SolcError::msg("Error taking the lock"))?;
+    Ok(LockFile { lock_path, _lock_file })
+}
+
+/// Represents a lockfile that's removed once dropped
+#[cfg(feature = "async")]
+struct LockFile {
+    _lock_file: File,
+    lock_path: PathBuf,
+}
+
+#[cfg(feature = "async")]
+impl Drop for LockFile {
+    fn drop(&mut self) {
+        let _ = fs::remove_file(&self.lock_path);
+    }
+}
+
+/// Returns the lockfile to use for a specific file
+#[cfg(feature = "async")]
+fn lock_file_path(compiler: &str, version: &str) -> PathBuf {
+    ZkSolc::compilers_dir()
+ .expect("could not detect zksolc compilers directory") + .join(format!(".lock-{compiler}-{version}")) +} + +#[cfg(test)] +mod tests { + use similar_asserts::assert_eq; + + use crate::solc::Solc; + + use super::*; + + fn zksolc() -> ZkSolc { + let zksolc_path = ZkSolc::get_path_for_version(&ZKSOLC_VERSION).unwrap(); + let solc_version = "0.8.27"; + + crate::take_solc_installer_lock!(_lock); + let maybe_solc = ZkSolc::find_solc_installed_version(solc_version).unwrap(); + let solc_path = if let Some(solc) = maybe_solc { + solc + } else { + ZkSolc::solc_blocking_install(solc_version).unwrap() + }; + ZkSolc::new(zksolc_path, Some(solc_path)).unwrap() + } + + fn vanilla_solc() -> Solc { + if let Some(solc) = Solc::find_svm_installed_version(&Version::new(0, 8, 18)).unwrap() { + solc + } else { + Solc::blocking_install(&Version::new(0, 8, 18)).unwrap() + } + } + + #[test] + fn zksolc_version_works() { + ZkSolc::get_version_for_path(&zksolc().zksolc).unwrap(); + } + + #[test] + fn get_solc_type_and_version_works_for_zksync_solc() { + let zksolc = zksolc(); + let solc = zksolc.solc.unwrap(); + let solc_v = get_solc_version_info(&solc).unwrap(); + let zksync_v = solc_v.zksync_version.unwrap(); + let prerelease = Version::parse(zksync_v.pre.as_str()).unwrap(); + assert_eq!(solc_v.version.minor, 8); + assert_eq!(prerelease, ZKSYNC_SOLC_RELEASE); + } + + #[test] + fn get_solc_type_and_version_works_for_vanilla_solc() { + let solc = vanilla_solc(); + let solc_v = get_solc_version_info(&solc.solc).unwrap(); + assert_eq!(solc_v.version.minor, 8); + assert!(solc_v.zksync_version.is_none()); + } + + #[test] + fn zksolc_compile_works() { + let input = include_str!("../../../../../test-data/zksync/in/compiler-in-1.json"); + let input: ZkSolcInput = serde_json::from_str(input).unwrap(); + let out = zksolc().compile(&input).unwrap(); + assert!(!out.has_error()); + } + + #[test] + fn zksolc_can_compile_with_remapped_links() { + let input: ZkSolcInput = serde_json::from_str(include_str!( + "../../../../../test-data/zksync/library-remapping-in.json" + )) + .unwrap(); + let out = zksolc().compile(&input).unwrap(); + let (_, mut contracts) = out.split(); + let contract = contracts.remove("LinkTest").unwrap(); + let bytecode = &contract.evm.unwrap().bytecode.unwrap().object; + assert!(!bytecode.is_unlinked()); + } + + #[test] + fn zksolc_can_compile_with_remapped_links_temp_dir() { + let input: ZkSolcInput = serde_json::from_str(include_str!( + "../../../../../test-data/zksync/library-remapping-in-2.json" + )) + .unwrap(); + let out = zksolc().compile(&input).unwrap(); + let (_, mut contracts) = out.split(); + let contract = contracts.remove("LinkTest").unwrap(); + let bytecode = &contract.evm.unwrap().bytecode.unwrap().object; + assert!(!bytecode.is_unlinked()); + } +} diff --git a/crates/compilers/src/compilers/zksolc/settings.rs b/crates/compilers/src/compilers/zksolc/settings.rs new file mode 100644 index 00000000..1711f3c4 --- /dev/null +++ b/crates/compilers/src/compilers/zksolc/settings.rs @@ -0,0 +1,358 @@ +use crate::{ + artifacts::{serde_helpers, EvmVersion, Libraries}, + compilers::CompilerSettings, + solc, OutputSelection, +}; +use foundry_compilers_artifacts::{ + remappings::Remapping, zksolc::output_selection::OutputSelection as ZkOutputSelection, +}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::{ + collections::BTreeSet, + fmt, + path::{Path, PathBuf}, + str::FromStr, +}; + +/// zksolc standard json input settings. 
See:
+/// https://docs.zksync.io/zk-stack/components/compiler/toolchain/solidity.html#standard-json for differences
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ZkSettings {
+    // same
+    /// Change compilation pipeline to go through the Yul intermediate representation. This is
+    /// false by default.
+    #[serde(rename = "viaIR", default, skip_serializing_if = "Option::is_none")]
+    pub via_ir: Option<bool>,
+    // TODO: era-compiler-solidity uses a BTreeSet of strings. In theory the serialization
+    // should be the same, but maybe we should double-check.
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub remappings: Vec<Remapping>,
+    #[serde(
+        default,
+        with = "serde_helpers::display_from_str_opt",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub evm_version: Option<EvmVersion>,
+
+    // check if the same (and use `compilers version`)
+    /// This field can be used to select desired outputs based
+    /// on file and contract names.
+    /// If this field is omitted, then the compiler loads and does type
+    /// checking, but will not generate any outputs apart from errors.
+    #[serde(default)]
+    pub output_selection: ZkOutputSelection,
+
+    #[serde(default)]
+    pub optimizer: Optimizer,
+    /// Metadata settings
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub metadata: Option<SettingsMetadata>,
+    #[serde(default)]
+    pub libraries: Libraries,
+    /// Switch to missing deployable libraries detection mode.
+    /// Contracts are not compiled in this mode, and no compilation artifacts are included.
+    #[serde(default, rename = "detectMissingLibraries")]
+    pub detect_missing_libraries: bool,
+    // zksolc arguments
+    /// Whether to enable EraVM extensions (the former system contract compilation mode).
+    #[serde(default, rename = "enableEraVMExtensions")]
+    pub enable_eravm_extensions: bool,
+    /// The extra LLVM options.
+    #[serde(default, rename = "LLVMOptions", skip_serializing_if = "Vec::is_empty")]
+    pub llvm_options: Vec<String>,
+    /// Whether to compile via EVM assembly.
+    #[serde(default, rename = "forceEVMLA")]
+    pub force_evmla: bool,
+}
+
+// Analogous to SolcSettings for the Zk compiler
+#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub struct ZkSolcSettings {
+    /// JSON settings expected by Solc
+    #[serde(flatten)]
+    pub settings: ZkSettings,
+    /// Additional CLI args configuration
+    #[serde(flatten)]
+    pub cli_settings: solc::CliSettings,
+}
+
+impl ZkSettings {
+    /// Creates a new `Settings` instance with the given `output_selection`
+    pub fn new(output_selection: impl Into<ZkOutputSelection>) -> Self {
+        Self { output_selection: output_selection.into(), ..Default::default() }
+    }
+
+    /// Consumes the type and returns a [ZkSettings::sanitize]d version
+    pub fn sanitized(mut self, solc_version: &Version) -> Self {
+        self.sanitize(solc_version);
+        self
+    }
+
+    /// This will remove/adjust values in the settings that are not compatible with this version.
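A hedged illustration of the `sanitized` helper above. It assumes this artifacts
crate exposes `EvmVersion::Cancun` and that solc 0.8.19 predates Cancun:

let settings = ZkSettings { evm_version: Some(EvmVersion::Cancun), ..Default::default() }
    .sanitized(&Version::new(0, 8, 19));
// 0.8.19 does not know Cancun, so the EVM version is normalized down to a
// target this compiler actually supports.
assert_ne!(settings.evm_version, Some(EvmVersion::Cancun));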
+ pub fn sanitize(&mut self, solc_version: &Version) { + if let Some(ref mut evm_version) = self.evm_version { + self.evm_version = evm_version.normalize_version_solc(solc_version); + } + } + + pub fn strip_prefix(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.remappings.iter_mut().for_each(|r| { + r.strip_prefix(base); + }); + + self.libraries.libs = std::mem::take(&mut self.libraries.libs) + .into_iter() + .map(|(file, libs)| (file.strip_prefix(base).map(Into::into).unwrap_or(file), libs)) + .collect(); + } + + /// Strips `base` from all paths + pub fn with_base_path(mut self, base: impl AsRef) -> Self { + let base = base.as_ref(); + self.remappings.iter_mut().for_each(|r| { + r.strip_prefix(base); + }); + + self.libraries.libs = self + .libraries + .libs + .into_iter() + .map(|(file, libs)| (file.strip_prefix(base).map(Into::into).unwrap_or(file), libs)) + .collect(); + + self + } +} + +impl Default for ZkSettings { + fn default() -> Self { + Self { + optimizer: Default::default(), + metadata: None, + output_selection: Default::default(), + evm_version: Some(EvmVersion::default()), + via_ir: None, + libraries: Default::default(), + remappings: Default::default(), + detect_missing_libraries: false, + enable_eravm_extensions: false, + llvm_options: Default::default(), + force_evmla: false, + } + } +} + +impl CompilerSettings for ZkSolcSettings { + fn update_output_selection(&mut self, _f: impl FnOnce(&mut OutputSelection) + Copy) { + // TODO: see how to support this, noop for now + //f(&mut self.output_selection) + } + + fn can_use_cached(&self, other: &Self) -> bool { + let Self { + settings: + ZkSettings { + via_ir, + remappings, + evm_version, + output_selection, + optimizer, + metadata, + libraries, + detect_missing_libraries, + enable_eravm_extensions, + llvm_options, + force_evmla, + }, + .. + } = self; + + *via_ir == other.settings.via_ir + && *remappings == other.settings.remappings + && *evm_version == other.settings.evm_version + && *output_selection == other.settings.output_selection + && *optimizer == other.settings.optimizer + && *metadata == other.settings.metadata + && *libraries == other.settings.libraries + && *detect_missing_libraries == other.settings.detect_missing_libraries + && *enable_eravm_extensions == other.settings.enable_eravm_extensions + && *llvm_options == other.settings.llvm_options + && *force_evmla == other.settings.force_evmla + } + + fn with_remappings(mut self, remappings: &[Remapping]) -> Self { + self.settings.remappings = remappings.to_vec(); + + self + } + + fn with_allow_paths(mut self, allowed_paths: &BTreeSet) -> Self { + self.cli_settings.allow_paths.clone_from(allowed_paths); + self + } + + fn with_base_path(mut self, base_path: &Path) -> Self { + self.cli_settings.base_path = Some(base_path.to_path_buf()); + self + } + + fn with_include_paths(mut self, include_paths: &BTreeSet) -> Self { + self.cli_settings.include_paths.clone_from(include_paths); + self + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct Optimizer { + // TODO: does this have to be an option? + #[serde(default, skip_serializing_if = "Option::is_none")] + pub enabled: Option, + /// Switch optimizer components on or off in detail. + /// The "enabled" switch above provides two defaults which can be + /// tweaked here. If "details" is given, "enabled" can be omitted. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub details: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub mode: Option, + /// Whether to try to recompile with -Oz if the bytecode is too large. + #[serde(skip_serializing_if = "Option::is_none")] + pub fallback_to_optimizing_for_size: Option, + /// Whether to disable the system request memoization. + #[serde(skip_serializing_if = "Option::is_none")] + pub disable_system_request_memoization: Option, + /// Set the jump table density threshold. + #[serde(skip_serializing_if = "Option::is_none")] + pub jump_table_density_threshold: Option, +} + +impl Optimizer { + pub fn disable(&mut self) { + self.enabled.take(); + } + + pub fn enable(&mut self) { + self.enabled = Some(true) + } +} + +impl Default for Optimizer { + fn default() -> Self { + Self { + enabled: Some(false), + mode: None, + fallback_to_optimizing_for_size: None, + disable_system_request_memoization: None, + jump_table_density_threshold: None, + details: None, + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct OptimizerDetails { + /// The peephole optimizer is always on if no details are given, + /// use details to switch it off. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub peephole: Option, + /// The inliner is always on if no details are given, + /// use details to switch it off. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub inliner: Option, + /// The unused jumpdest remover is always on if no details are given, + /// use details to switch it off. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub jumpdest_remover: Option, + /// Sometimes re-orders literals in commutative operations. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub order_literals: Option, + /// Removes duplicate code blocks + #[serde(default, skip_serializing_if = "Option::is_none")] + pub deduplicate: Option, + /// Common subexpression elimination, this is the most complicated step but + /// can also provide the largest gain. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cse: Option, + /// Optimize representation of literal numbers and strings in code. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub constant_optimizer: Option, +} + +impl OptimizerDetails { + /// Returns true if no settings are set. + pub fn is_empty(&self) -> bool { + self.peephole.is_none() + && self.inliner.is_none() + && self.jumpdest_remover.is_none() + && self.order_literals.is_none() + && self.deduplicate.is_none() + && self.cse.is_none() + && self.constant_optimizer.is_none() + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct SettingsMetadata { + /// Use the given hash method for the metadata hash that is appended to the bytecode. + /// The metadata hash can be removed from the bytecode via option "none". + /// `zksolc` only supports keccak256 + #[serde( + default, + rename = "bytecodeHash", + skip_serializing_if = "Option::is_none", + with = "serde_helpers::display_from_str_opt" + )] + pub bytecode_hash: Option, +} + +impl SettingsMetadata { + pub fn new(hash: BytecodeHash) -> Self { + Self { bytecode_hash: Some(hash) } + } +} + +impl From for SettingsMetadata { + fn from(hash: BytecodeHash) -> Self { + Self { bytecode_hash: Some(hash) } + } +} + +/// Determines the hash method for the metadata hash that is appended to the bytecode. 
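+/// For intuition, a round-trip through the `FromStr`/`Display` impls below:
+/// `"keccak256".parse::<BytecodeHash>()` yields `BytecodeHash::Keccak256`, and
+/// `to_string()` maps it back to `"keccak256"`.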
+/// Zksolc only supports keccak256 +#[derive(Clone, Debug, Default, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum BytecodeHash { + /// Do not include bytecode hash. + #[default] + #[serde(rename = "none")] + None, + /// The default keccak256 hash. + #[serde(rename = "keccak256")] + Keccak256, +} + +impl FromStr for BytecodeHash { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "none" => Ok(Self::None), + "keccak256" => Ok(Self::Keccak256), + s => Err(format!("Unknown bytecode hash: {s}")), + } + } +} + +impl fmt::Display for BytecodeHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = match self { + Self::Keccak256 => "keccak256", + Self::None => "none", + }; + f.write_str(s) + } +} diff --git a/crates/compilers/src/config.rs b/crates/compilers/src/config.rs index 3d9a4566..c8f64a7d 100644 --- a/crates/compilers/src/config.rs +++ b/crates/compilers/src/config.rs @@ -15,6 +15,7 @@ use foundry_compilers_core::{ error::{Result, SolcError, SolcIoError}, utils::{self, strip_prefix_owned}, }; + use serde::{Deserialize, Serialize}; use std::{ collections::BTreeSet, @@ -261,7 +262,6 @@ impl ProjectPathsConfig { libraries: self.libraries.iter().cloned().collect(), } } - /// Same as [`paths`][ProjectPathsConfig::paths] but strips the `root` form all paths. /// /// See: [`ProjectPaths::strip_prefix_all`] @@ -270,7 +270,6 @@ impl ProjectPathsConfig { paths.strip_prefix_all(&self.root); paths } - /// Creates all configured dirs and files pub fn create_all(&self) -> std::result::Result<(), SolcIoError> { if let Some(parent) = self.cache.parent() { diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs index fc2f8c3a..474fd3b4 100644 --- a/crates/compilers/src/lib.rs +++ b/crates/compilers/src/lib.rs @@ -36,6 +36,8 @@ pub use config::{PathStyle, ProjectPaths, ProjectPathsConfig, SolcConfig}; mod filter; pub use filter::{FileFilter, SparseOutputFilter, TestFileFilter}; +pub mod zksync; + pub mod report; /// Utilities for creating, mocking and testing of (temporary) projects diff --git a/crates/compilers/src/zksync/artifact_output/mod.rs b/crates/compilers/src/zksync/artifact_output/mod.rs new file mode 100644 index 00000000..fc488726 --- /dev/null +++ b/crates/compilers/src/zksync/artifact_output/mod.rs @@ -0,0 +1 @@ +pub mod zk; diff --git a/crates/compilers/src/zksync/artifact_output/zk.rs b/crates/compilers/src/zksync/artifact_output/zk.rs new file mode 100644 index 00000000..e78674b8 --- /dev/null +++ b/crates/compilers/src/zksync/artifact_output/zk.rs @@ -0,0 +1,285 @@ +use crate::{ + artifact_output::{ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, OutputContext}, + artifacts::{DevDoc, SourceFile, StorageLayout, UserDoc}, + compile::output::sources::VersionedSourceFiles, + config::ProjectPathsConfig, + error::{Result, SolcIoError}, + zksync::compile::output::contracts::VersionedContracts, +}; +use alloy_json_abi::JsonAbi; +use foundry_compilers_artifacts::{ + solc::{ + CompactBytecode, CompactContract, CompactContractBytecode, CompactContractBytecodeCow, + CompactDeployedBytecode, + }, + zksolc::{bytecode::Bytecode, contract::Contract, Evm}, + SolcLanguage, +}; +use path_slash::PathBufExt; +use serde::{Deserialize, Serialize}; +use std::{ + borrow::Cow, + collections::{BTreeMap, HashSet}, + fs, + path::Path, +}; + +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct ZkContractArtifact { + pub abi: Option, + #[serde(default, skip_serializing_if = 
"Option::is_none")] + pub bytecode: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub assembly: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub method_identifiers: Option>, + //#[serde(default, skip_serializing_if = "Vec::is_empty")] + //pub generated_sources: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub metadata: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub storage_layout: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub userdoc: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub devdoc: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ir_optimized: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub hash: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub factory_dependencies: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub missing_libraries: Option>, + /// The identifier of the source file + #[serde(default, skip_serializing_if = "Option::is_none")] + pub id: Option, +} + +// CompactContract variants +// TODO: for zkEvm, the distinction between bytecode and deployed_bytecode makes little sense, +// and there some fields that the ouptut doesn't provide (e.g: source_map) +// However, we implement these because we get the Artifact trait and can reuse lots of +// the crate's helpers without needing to duplicate everything. Maybe there's a way +// we can get all these without having to add the same bytecode twice on each struct. +// Ideally the Artifacts trait would not be coupled to a specific Contract type +impl<'a> From<&'a ZkContractArtifact> for CompactContractBytecodeCow<'a> { + fn from(artifact: &'a ZkContractArtifact) -> Self { + // TODO: artifact.abi might have None, we need to get this field from solc_metadata + CompactContractBytecodeCow { + abi: artifact.abi.as_ref().map(Cow::Borrowed), + bytecode: artifact.bytecode.clone().map(|b| Cow::Owned(CompactBytecode::from(b))), + deployed_bytecode: artifact + .bytecode + .clone() + .map(|b| Cow::Owned(CompactDeployedBytecode::from(b))), + } + } +} + +impl From for CompactContractBytecode { + fn from(c: ZkContractArtifact) -> Self { + Self { + abi: c.abi.map(Into::into), + deployed_bytecode: c.bytecode.clone().map(|b| b.into()), + bytecode: c.bytecode.clone().map(|b| b.into()), + } + } +} + +impl From for CompactContract { + fn from(c: ZkContractArtifact) -> Self { + // TODO: c.abi might have None, we need to get this field from solc_metadata + Self { + bin: c.bytecode.clone().map(|b| b.object), + bin_runtime: c.bytecode.clone().map(|b| b.object), + abi: c.abi, + } + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] +pub struct ZkArtifactOutput(); + +impl ArtifactOutput for ZkArtifactOutput { + type Artifact = ZkContractArtifact; + + fn contract_to_artifact( + &self, + _file: &Path, + _name: &str, + _contract: foundry_compilers_artifacts::Contract, + _source_file: Option<&SourceFile>, + ) -> Self::Artifact { + panic!("Unsupported use zksync_contract_to_artifact instead"); + } + + fn standalone_source_file_to_artifact( + &self, + _path: &Path, + _file: &crate::VersionedSourceFile, + ) -> Option { + None + } +} + +impl ZkArtifactOutput { + fn zksync_contract_to_artifact( + &self, + _file: &Path, + _name: &str, + contract: Contract, + source_file: Option<&SourceFile>, + ) -> ZkContractArtifact { + let mut artifact_bytecode = None; + let mut artifact_method_identifiers = 
None; + let mut artifact_assembly = None; + + let Contract { + abi, + metadata, + userdoc, + devdoc, + storage_layout, + evm, + ir_optimized, + hash, + factory_dependencies, + missing_libraries, + } = contract; + + if let Some(evm) = evm { + let Evm { + assembly, + bytecode, + method_identifiers, + extra_metadata: _, + legacy_assembly: _, + } = evm; + + artifact_bytecode = bytecode.map(Into::into); + artifact_method_identifiers = Some(method_identifiers); + artifact_assembly = assembly; + } + + ZkContractArtifact { + abi, + hash, + factory_dependencies, + missing_libraries, + storage_layout: Some(storage_layout), + bytecode: artifact_bytecode, + assembly: artifact_assembly, + method_identifiers: artifact_method_identifiers, + metadata, + userdoc: Some(userdoc), + devdoc: Some(devdoc), + ir_optimized, + id: source_file.as_ref().map(|s| s.id), + } + } + + pub fn zksync_on_output( + &self, + contracts: &VersionedContracts, + sources: &VersionedSourceFiles, + layout: &ProjectPathsConfig, + ctx: OutputContext<'_>, + ) -> Result> { + let mut artifacts = self.zksync_output_to_artifacts(contracts, sources, ctx, layout); + fs::create_dir_all(&layout.artifacts).map_err(|err| { + error!(dir=?layout.artifacts, "Failed to create artifacts folder"); + SolcIoError::new(err, &layout.artifacts) + })?; + + artifacts.join_all(&layout.artifacts); + artifacts.write_all()?; + + Ok(artifacts) + } + + /// Convert the compiler output into a set of artifacts + /// + /// **Note:** This does only convert, but _NOT_ write the artifacts to disk, See + /// [`Self::on_output()`] + pub fn zksync_output_to_artifacts( + &self, + contracts: &VersionedContracts, + sources: &VersionedSourceFiles, + ctx: OutputContext<'_>, + layout: &ProjectPathsConfig, + ) -> Artifacts { + let mut artifacts = ArtifactsMap::new(); + + // this tracks all the `SourceFile`s that we successfully mapped to a contract + let mut non_standalone_sources = HashSet::new(); + + // prepopulate taken paths set with cached artifacts + let mut taken_paths_lowercase = ctx + .existing_artifacts + .values() + .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values())) + .map(|a| a.path.to_slash_lossy().to_lowercase()) + .collect::>(); + + let mut files = contracts.keys().collect::>(); + // Iterate starting with top-most files to ensure that they get the shortest paths. 
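+        // E.g. (hypothetical layout): `A.sol` has fewer path components than
+        // `nested/tokens/A.sol`, so it sorts first and claims the short artifact
+        // path; deeper duplicates are disambiguated afterwards.
+        // Sort key: (number of path components, then the path itself).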
+ files.sort_by(|file1, file2| { + (file1.components().count(), file1).cmp(&(file2.components().count(), file2)) + }); + for file in files { + for (name, versioned_contracts) in &contracts[file] { + for contract in versioned_contracts { + // track `SourceFile`s that can be mapped to contracts + let source_file = sources.find_file_and_version(file, &contract.version); + + if let Some(source) = source_file { + non_standalone_sources.insert((source.id, &contract.version)); + } + + let artifact_path = Self::get_artifact_path( + &ctx, + &taken_paths_lowercase, + file, + name, + layout.artifacts.as_path(), + &contract.version, + versioned_contracts.len() > 1, + ); + + taken_paths_lowercase.insert(artifact_path.to_slash_lossy().to_lowercase()); + + trace!( + "use artifact file {:?} for contract file {} {}", + artifact_path, + file.display(), + contract.version + ); + + let artifact = self.zksync_contract_to_artifact( + file, + name, + contract.contract.clone(), + source_file, + ); + + let artifact = ArtifactFile { + artifact, + file: artifact_path, + version: contract.version.clone(), + build_id: contract.build_id.clone(), + }; + + artifacts + .entry(file.to_path_buf()) + .or_default() + .entry(name.to_string()) + .or_default() + .push(artifact); + } + } + } + Artifacts(artifacts) + } +} diff --git a/crates/compilers/src/zksync/compile/mod.rs b/crates/compilers/src/zksync/compile/mod.rs new file mode 100644 index 00000000..9e53532e --- /dev/null +++ b/crates/compilers/src/zksync/compile/mod.rs @@ -0,0 +1,2 @@ +pub mod output; +pub mod project; diff --git a/crates/compilers/src/zksync/compile/output/contracts.rs b/crates/compilers/src/zksync/compile/output/contracts.rs new file mode 100644 index 00000000..d8ca8e53 --- /dev/null +++ b/crates/compilers/src/zksync/compile/output/contracts.rs @@ -0,0 +1,239 @@ +use crate::artifacts::FileToContractsMap; +use foundry_compilers_artifacts::{solc::CompactContractRef, zksolc::contract::Contract}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::{ + collections::BTreeMap, + ops::Deref, + path::{Path, PathBuf}, +}; + +/// file -> [(contract name -> Contract + solc version)] +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +#[serde(transparent)] +pub struct VersionedContracts(pub FileToContractsMap>); + +impl VersionedContracts { + /// Converts all `\\` separators in _all_ paths to `/` + pub fn slash_paths(&mut self) { + #[cfg(windows)] + { + use path_slash::PathExt; + self.0 = std::mem::take(&mut self.0) + .into_iter() + .map(|(path, files)| (Path::new(&path).to_slash_lossy().to_string(), files)) + .collect() + } + } + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + /// Returns an iterator over all files + pub fn files(&self) -> impl Iterator + '_ { + self.0.keys() + } + + /// Finds the _first_ contract with the given name + pub fn find_first(&self, contract: impl AsRef) -> Option> { + let contract_name = contract.as_ref(); + self.contracts().find_map(|(name, contract)| { + (name == contract_name).then(|| CompactContractRef::from(contract)) + }) + } + + /// Finds the contract with matching path and name + pub fn find( + &self, + path: impl AsRef, + contract: impl AsRef, + ) -> Option> { + let contract_path = path.as_ref(); + let contract_name = contract.as_ref(); + self.contracts_with_files().find_map(|(path, name, contract)| { + (path == contract_path && name == contract_name) + .then(|| CompactContractRef::from(contract)) + }) + } + + /// Removes the 
_first_ contract with the given name from the set + pub fn remove_first(&mut self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.0.values_mut().find_map(|all_contracts| { + let mut contract = None; + if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { + if !contracts.is_empty() { + contract = Some(contracts.remove(0).contract); + } + if !contracts.is_empty() { + all_contracts.insert(c, contracts); + } + } + contract + }) + } + + /// Removes the contract with matching path and name + pub fn remove( + &mut self, + path: impl AsRef, + contract: impl AsRef, + ) -> Option { + let contract_name = contract.as_ref(); + let (key, mut all_contracts) = self.0.remove_entry(path.as_ref())?; + let mut contract = None; + if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { + if !contracts.is_empty() { + contract = Some(contracts.remove(0).contract); + } + if !contracts.is_empty() { + all_contracts.insert(c, contracts); + } + } + + if !all_contracts.is_empty() { + self.0.insert(key, all_contracts); + } + contract + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// bytecode, runtime bytecode, and ABI. + pub fn get( + &self, + path: impl AsRef, + contract: impl AsRef, + ) -> Option> { + let contract = contract.as_ref(); + self.0 + .get(path.as_ref()) + .and_then(|contracts| { + contracts.get(contract).and_then(|c| c.first().map(|c| &c.contract)) + }) + .map(CompactContractRef::from) + } + + /// Returns an iterator over all contracts and their names. + pub fn contracts(&self) -> impl Iterator { + self.0 + .values() + .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) + } + + /// Returns an iterator over (`file`, `name`, `Contract`). + pub fn contracts_with_files(&self) -> impl Iterator { + self.0.iter().flat_map(|(file, contracts)| { + contracts + .iter() + .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract))) + }) + } + + /// Returns an iterator over (`file`, `name`, `Contract`, `Version`). + pub fn contracts_with_files_and_version( + &self, + ) -> impl Iterator { + self.0.iter().flat_map(|(file, contracts)| { + contracts.iter().flat_map(move |(name, c)| { + c.iter().map(move |c| (file, name, &c.contract, &c.version)) + }) + }) + } + + /// Returns an iterator over all contracts and their source names. + pub fn into_contracts(self) -> impl Iterator { + self.0.into_values().flat_map(|c| { + c.into_iter() + .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) + }) + } + + /// Returns an iterator over (`file`, `name`, `Contract`) + pub fn into_contracts_with_files(self) -> impl Iterator { + self.0.into_iter().flat_map(|(file, contracts)| { + contracts.into_iter().flat_map(move |(name, c)| { + let file = file.clone(); + c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract)) + }) + }) + } + + /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) + pub fn into_contracts_with_files_and_version( + self, + ) -> impl Iterator { + self.0.into_iter().flat_map(|(file, contracts)| { + contracts.into_iter().flat_map(move |(name, c)| { + let file = file.clone(); + c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract, c.version)) + }) + }) + } + + /// Sets the contract's file paths to `root` adjoined to `self.file`. 
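A usage sketch for the lookup helpers above (the path and contract name are
hypothetical):

fn print_counter(contracts: &VersionedContracts) {
    // `get` yields a `CompactContractRef` exposing the ABI and bytecode,
    // when those outputs were selected at compile time.
    if let Some(c) = contracts.get("src/Counter.sol", "Counter") {
        println!("Counter has {} ABI entries", c.abi.map_or(0, |abi| abi.functions.len()));
    }
}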
+ pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { + let root = root.as_ref(); + self.0 = std::mem::take(&mut self.0) + .into_iter() + .map(|(contract_path, contracts)| (root.join(contract_path), contracts)) + .collect(); + self + } + + /// Removes `base` from all contract paths + pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { + let base = base.as_ref(); + self.0 = std::mem::take(&mut self.0) + .into_iter() + .map(|(contract_path, contracts)| { + ( + contract_path.strip_prefix(base).unwrap_or(&contract_path).to_path_buf(), + contracts, + ) + }) + .collect(); + self + } +} + +impl AsRef>> for VersionedContracts { + fn as_ref(&self) -> &FileToContractsMap> { + &self.0 + } +} + +impl AsMut>> for VersionedContracts { + fn as_mut(&mut self) -> &mut FileToContractsMap> { + &mut self.0 + } +} + +impl Deref for VersionedContracts { + type Target = FileToContractsMap>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl IntoIterator for VersionedContracts { + type Item = (PathBuf, BTreeMap>); + type IntoIter = + std::collections::btree_map::IntoIter>>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +/// A contract and the compiler version used to compile it +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct VersionedContract { + pub contract: Contract, + pub version: Version, + pub build_id: String, +} diff --git a/crates/compilers/src/zksync/compile/output/mod.rs b/crates/compilers/src/zksync/compile/output/mod.rs new file mode 100644 index 00000000..74d502e5 --- /dev/null +++ b/crates/compilers/src/zksync/compile/output/mod.rs @@ -0,0 +1,589 @@ +use crate::{ + artifact_output::{ArtifactId, Artifacts}, + artifacts::error::Severity, + buildinfo::RawBuildInfo, + compile::output::{ + info::ContractInfoRef, + sources::{VersionedSourceFile, VersionedSourceFiles}, + }, + output::Builds, + zksync::{ + artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, + compile::output::contracts::{VersionedContract, VersionedContracts}, + }, + ArtifactOutput, +}; +use foundry_compilers_artifacts::{ + solc::CompactContractRef, + zksolc::{contract::Contract, error::Error, CompilerOutput}, + SolcLanguage, +}; +use foundry_compilers_core::error::{SolcError, SolcIoError}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::{ + fmt, + path::{Path, PathBuf}, +}; +use yansi::Paint; + +pub mod contracts; + +#[derive(Clone, Debug)] +pub struct ProjectCompileOutput { + /// contains the aggregated `CompilerOutput` + pub compiler_output: AggregatedCompilerOutput, + /// all artifact files from `output` that were freshly compiled and written + pub compiled_artifacts: Artifacts, + /// All artifacts that were read from cache + pub cached_artifacts: Artifacts, + /// errors that should be omitted + pub ignored_error_codes: Vec, + /// paths that should be omitted + pub ignored_file_paths: Vec, + /// set minimum level of severity that is treated as an error + pub compiler_severity_filter: Severity, + /// all build infos that were just compiled + pub builds: Builds, +} + +impl ProjectCompileOutput { + /// Converts all `\\` separators in _all_ paths to `/` + pub fn slash_paths(&mut self) { + self.compiler_output.slash_paths(); + self.compiled_artifacts.slash_paths(); + self.cached_artifacts.slash_paths(); + } + + /// All artifacts together with their contract file name and name `:`. + /// + /// This returns a chained iterator of both cached and recompiled contract artifacts. 
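A sketch of consuming both cached and freshly compiled artifacts. It assumes
`ArtifactId::identifier()`, which renders the `path:name` form:

fn list_artifacts(output: &ProjectCompileOutput) {
    for (id, artifact) in output.artifact_ids() {
        // `bytecode` is `None` unless it was part of the output selection.
        println!("{}: has bytecode: {}", id.identifier(), artifact.bytecode.is_some());
    }
}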
+ pub fn artifact_ids(&self) -> impl Iterator { + let Self { cached_artifacts, compiled_artifacts, .. } = self; + cached_artifacts + .artifacts::() + .chain(compiled_artifacts.artifacts::()) + } + + /// All artifacts together with their contract file name and name `:` + /// + /// This returns a chained iterator of both cached and recompiled contract artifacts + pub fn into_artifacts(self) -> impl Iterator { + let Self { cached_artifacts, compiled_artifacts, .. } = self; + cached_artifacts + .into_artifacts::() + .chain(compiled_artifacts.into_artifacts::()) + } + + pub fn with_stripped_file_prefixes(mut self, base: impl AsRef) -> Self { + let base = base.as_ref(); + self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base); + self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base); + self.compiler_output.strip_prefix_all(base); + self + } + + /// Returns whether this type does not contain compiled contracts. + pub fn is_unchanged(&self) -> bool { + self.compiler_output.is_unchanged() + } + + /// Returns whether any errors were emitted by the compiler. + pub fn has_compiler_errors(&self) -> bool { + self.compiler_output.has_error( + &self.ignored_error_codes, + &self.ignored_file_paths, + &self.compiler_severity_filter, + ) + } + + /// Panics if any errors were emitted by the compiler. + #[track_caller] + pub fn assert_success(&self) { + assert!(!self.has_compiler_errors(), "\n{self}\n"); + } + + pub fn versioned_artifacts( + &self, + ) -> impl Iterator { + self.cached_artifacts + .artifact_files() + .chain(self.compiled_artifacts.artifact_files()) + .filter_map(|artifact| { + ZkArtifactOutput::contract_name(&artifact.file) + .map(|name| (name, (&artifact.artifact, &artifact.version))) + }) + } + + pub fn artifacts(&self) -> impl Iterator { + self.versioned_artifacts().map(|(name, (artifact, _))| (name, artifact)) + } + + pub fn output(&self) -> &AggregatedCompilerOutput { + &self.compiler_output + } + + pub fn into_output(self) -> AggregatedCompilerOutput { + self.compiler_output + } + + /// Finds the artifact with matching path and name + pub fn find(&self, path: &Path, name: &str) -> Option<&ZkContractArtifact> { + if let artifact @ Some(_) = self.compiled_artifacts.find(path, name) { + return artifact; + } + self.cached_artifacts.find(path, name) + } + + /// Finds the first contract with the given name + pub fn find_first(&self, name: &str) -> Option<&ZkContractArtifact> { + if let artifact @ Some(_) = self.compiled_artifacts.find_first(name) { + return artifact; + } + self.cached_artifacts.find_first(name) + } + + /// Returns the set of `Artifacts` that were cached and got reused during + /// [`crate::Project::compile()`] + pub fn cached_artifacts(&self) -> &Artifacts { + &self.cached_artifacts + } + + /// Returns the set of `Artifacts` that were compiled with `zksolc` in + /// [`crate::Project::compile()`] + pub fn compiled_artifacts(&self) -> &Artifacts { + &self.compiled_artifacts + } + + /// Removes the artifact with matching path and name + pub fn remove(&mut self, path: &Path, name: &str) -> Option { + if let artifact @ Some(_) = self.compiled_artifacts.remove(path, name) { + return artifact; + } + self.cached_artifacts.remove(path, name) + } + + /// Removes the _first_ contract with the given name from the set + pub fn remove_first(&mut self, contract_name: impl AsRef) -> Option { + let contract_name = contract_name.as_ref(); + if let artifact @ Some(_) = self.compiled_artifacts.remove_first(contract_name) { + return artifact; + 
} + self.cached_artifacts.remove_first(contract_name) + } + + /// Removes the contract with matching path and name using the `:` pattern + /// where `path` is optional. + /// + /// If the `path` segment is `None`, then the first matching `Contract` is returned, see + /// [Self::remove_first] + pub fn remove_contract<'a>( + &mut self, + info: impl Into>, + ) -> Option { + let ContractInfoRef { path, name } = info.into(); + if let Some(path) = path { + self.remove(path[..].as_ref(), &name) + } else { + self.remove_first(&name) + } + } +} + +impl fmt::Display for ProjectCompileOutput { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.compiler_output.is_unchanged() { + f.write_str("Nothing to compile") + } else { + self.compiler_output + .diagnostics( + &self.ignored_error_codes, + &self.ignored_file_paths, + self.compiler_severity_filter, + ) + .fmt(f) + } + } +} + +/// The aggregated output of (multiple) compile jobs +/// +/// This is effectively a solc version aware `CompilerOutput` +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] +pub struct AggregatedCompilerOutput { + /// all errors from all `CompilerOutput` + pub errors: Vec, + /// All source files combined with the solc version used to compile them + pub sources: VersionedSourceFiles, + /// All compiled contracts combined with the solc version used to compile them + pub contracts: VersionedContracts, + // All the `BuildInfo`s of zksolc invocations. + pub build_infos: Vec>, +} + +impl AggregatedCompilerOutput { + /// Converts all `\\` separators in _all_ paths to `/` + pub fn slash_paths(&mut self) { + self.sources.slash_paths(); + self.contracts.slash_paths(); + } + + /// Whether the output contains a compiler error + /// + /// This adheres to the given `compiler_severity_filter` and also considers [Error] with the + /// given [Severity] as errors. For example [Severity::Warning] will consider [Error]s with + /// [Severity::Warning] and [Severity::Error] as errors. + pub fn has_error( + &self, + ignored_error_codes: &[u64], + ignored_file_paths: &[PathBuf], + compiler_severity_filter: &Severity, + ) -> bool { + self.errors.iter().any(|err| { + if err.is_error() { + // [Severity::Error] is always treated as an error + return true; + } + // check if the filter is set to something higher than the error's severity + if compiler_severity_filter.ge(&err.severity) { + if compiler_severity_filter.is_warning() { + // skip ignored error codes and file path from warnings + return self.has_warning(ignored_error_codes, ignored_file_paths); + } + return true; + } + false + }) + } + + /// Checks if there are any compiler warnings that are not ignored by the specified error codes + /// and file paths. + pub fn has_warning(&self, ignored_error_codes: &[u64], ignored_file_paths: &[PathBuf]) -> bool { + self.errors + .iter() + .any(|error| !self.should_ignore(ignored_error_codes, ignored_file_paths, error)) + } + + pub fn should_ignore( + &self, + ignored_error_codes: &[u64], + ignored_file_paths: &[PathBuf], + error: &Error, + ) -> bool { + if !error.is_warning() { + return false; + } + + let mut ignore = false; + + if let Some(code) = error.error_code { + ignore |= ignored_error_codes.contains(&code); + if let Some(loc) = error.source_location.as_ref() { + let path = Path::new(&loc.file); + ignore |= + ignored_file_paths.iter().any(|ignored_path| path.starts_with(ignored_path)); + + // we ignore spdx and contract size warnings in test + // files. 
if we are looking at one of these warnings
+                // from a test file we skip
+                ignore |= self.is_test(path) && (code == 1878 || code == 5574);
+            }
+        }
+
+        ignore
+    }
+
+    /// Returns true if the contract is expected to be a test
+    fn is_test(&self, contract_path: &Path) -> bool {
+        if contract_path.to_string_lossy().ends_with(".t.sol") {
+            return true;
+        }
+
+        self.contracts.contracts_with_files().filter(|(path, _, _)| *path == contract_path).any(
+            |(_, _, contract)| {
+                contract.abi.as_ref().map_or(false, |abi| abi.functions.contains_key("IS_TEST"))
+            },
+        )
+    }
+
+    pub fn diagnostics<'a>(
+        &'a self,
+        ignored_error_codes: &'a [u64],
+        ignored_file_paths: &'a [PathBuf],
+        compiler_severity_filter: Severity,
+    ) -> OutputDiagnostics<'a> {
+        OutputDiagnostics {
+            compiler_output: self,
+            ignored_error_codes,
+            ignored_file_paths,
+            compiler_severity_filter,
+        }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.contracts.is_empty()
+    }
+
+    pub fn is_unchanged(&self) -> bool {
+        self.contracts.is_empty() && self.errors.is_empty()
+    }
+
+    /// Adds a new `CompilerOutput` to the aggregated output.
+    pub fn extend(
+        &mut self,
+        version: Version,
+        build_info: RawBuildInfo<SolcLanguage>,
+        output: CompilerOutput,
+    ) {
+        let build_id = build_info.id.clone();
+        self.build_infos.push(build_info);
+
+        let CompilerOutput { errors, sources, contracts, .. } = output;
+        self.errors.extend(errors);
+
+        for (path, source_file) in sources {
+            let sources = self.sources.as_mut().entry(path).or_default();
+            sources.push(VersionedSourceFile {
+                source_file,
+                version: version.clone(),
+                build_id: build_id.clone(),
+            });
+        }
+
+        for (file_name, new_contracts) in contracts {
+            let contracts = self.contracts.as_mut().entry(file_name).or_default();
+            for (contract_name, contract) in new_contracts {
+                let versioned = contracts.entry(contract_name).or_default();
+                versioned.push(VersionedContract {
+                    contract,
+                    version: version.clone(),
+                    build_id: build_id.clone(),
+                });
+            }
+        }
+    }
+
+    /// Creates all `BuildInfo` files in the given `build_info_dir`.
+    ///
+    /// There can be multiple `BuildInfo`s, since we support multiple versions.
+    ///
+    /// The created files have the md5 hash of `{_format,solcVersion,solcLongVersion,input}` as
+    /// their file name.
+    pub fn write_build_infos(&self, build_info_dir: &Path) -> Result<(), SolcError> {
+        if self.build_infos.is_empty() {
+            return Ok(());
+        }
+        std::fs::create_dir_all(build_info_dir)
+            .map_err(|err| SolcIoError::new(err, build_info_dir))?;
+        for build_info in &self.build_infos {
+            trace!("writing build info file {}", build_info.id);
+            let file_name = format!("{}.json", build_info.id);
+            let file = build_info_dir.join(file_name);
+            std::fs::write(&file, &serde_json::to_string(build_info)?)
+                .map_err(|err| SolcIoError::new(err, file))?;
+        }
+        Ok(())
+    }
+
+    /// Finds the _first_ contract with the given name
+    pub fn find_first(&self, contract: impl AsRef<str>) -> Option<CompactContractRef<'_>> {
+        self.contracts.find_first(contract)
+    }
+
+    /// Removes the _first_ contract with the given name from the set
+    pub fn remove_first(&mut self, contract: impl AsRef<str>) -> Option<Contract> {
+        self.contracts.remove_first(contract)
+    }
+
+    /// Removes the contract with matching path and name
+    pub fn remove(
+        &mut self,
+        path: impl AsRef<Path>,
+        contract: impl AsRef<str>,
+    ) -> Option<Contract> {
+        self.contracts.remove(path, contract)
+    }
+
+    /// Removes the contract with matching path and name using the `<path>:<name>` pattern
+    /// where `path` is optional.
+ /// + /// If the `path` segment is `None`, then the first matching `Contract` is returned, see + /// [Self::remove_first] + pub fn remove_contract<'a>( + &mut self, + info: impl Into>, + ) -> Option { + let ContractInfoRef { path, name } = info.into(); + if let Some(path) = path { + self.remove(Path::new(path.as_ref()), name) + } else { + self.remove_first(name) + } + } + + /// Iterate over all contracts and their names + pub fn contracts_iter(&self) -> impl Iterator { + self.contracts.contracts() + } + + /// Iterate over all contracts and their names + pub fn contracts_into_iter(self) -> impl Iterator { + self.contracts.into_contracts() + } + + /// Returns an iterator over (`file`, `name`, `Contract`) + pub fn contracts_with_files_iter( + &self, + ) -> impl Iterator { + self.contracts.contracts_with_files() + } + + /// Returns an iterator over (`file`, `name`, `Contract`) + pub fn contracts_with_files_into_iter( + self, + ) -> impl Iterator { + self.contracts.into_contracts_with_files() + } + + /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) + pub fn contracts_with_files_and_version_iter( + &self, + ) -> impl Iterator { + self.contracts.contracts_with_files_and_version() + } + + /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) + pub fn contracts_with_files_and_version_into_iter( + self, + ) -> impl Iterator { + self.contracts.into_contracts_with_files_and_version() + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// bytecode, runtime bytecode, and ABI. + pub fn get( + &self, + path: impl AsRef, + contract: impl AsRef, + ) -> Option> { + self.contracts.get(path, contract) + } + + /// Returns the output's source files and contracts separately, wrapped in helper types that + /// provide several helper methods + pub fn split(self) -> (VersionedSourceFiles, VersionedContracts) { + (self.sources, self.contracts) + } + + /// Joins all file path with `root` + pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { + let root = root.as_ref(); + self.contracts.join_all(root); + self.sources.join_all(root); + self + } + + /// Strips the given prefix from all file paths to make them relative to the given + /// `base` argument. + /// + /// Convenience method for [Self::strip_prefix_all()] that consumes the type. 
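A lookup-and-removal sketch against the aggregated output. The path and name
are hypothetical, and it assumes the string conversion for `ContractInfoRef`
parses the `path:name` form:

fn take_counter(output: &mut AggregatedCompilerOutput) -> Option<Contract> {
    // Falls back to `remove_first` when the path segment is omitted.
    output.remove_contract("src/Counter.sol:Counter")
}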
+    pub fn with_stripped_file_prefixes(mut self, base: impl AsRef<Path>) -> Self {
+        let base = base.as_ref();
+        self.contracts.strip_prefix_all(base);
+        self.sources.strip_prefix_all(base);
+        self
+    }
+
+    /// Removes `base` from all contract paths
+    pub fn strip_prefix_all(&mut self, base: impl AsRef<Path>) -> &mut Self {
+        let base = base.as_ref();
+        self.contracts.strip_prefix_all(base);
+        self.sources.strip_prefix_all(base);
+        self
+    }
+}
+
+/// Helper type to implement display for solc errors
+#[derive(Clone, Debug)]
+pub struct OutputDiagnostics<'a> {
+    /// output of the compiled project
+    compiler_output: &'a AggregatedCompilerOutput,
+    /// the error codes to ignore
+    ignored_error_codes: &'a [u64],
+    /// the file paths to ignore
+    ignored_file_paths: &'a [PathBuf],
+    /// set minimum level of severity that is treated as an error
+    compiler_severity_filter: Severity,
+}
+
+impl<'a> OutputDiagnostics<'a> {
+    /// Returns true if there is at least one error of high severity
+    pub fn has_error(&self) -> bool {
+        self.compiler_output.has_error(
+            self.ignored_error_codes,
+            self.ignored_file_paths,
+            &self.compiler_severity_filter,
+        )
+    }
+
+    /// Returns true if there is at least one warning
+    pub fn has_warning(&self) -> bool {
+        self.compiler_output.has_warning(self.ignored_error_codes, self.ignored_file_paths)
+    }
+
+    /// Returns true if the contract is expected to be a test
+    fn is_test<T: AsRef<str>>(&self, contract_path: T) -> bool {
+        if contract_path.as_ref().ends_with(".t.sol") {
+            return true;
+        }
+
+        self.compiler_output.find_first(&contract_path).map_or(false, |contract| {
+            contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST"))
+        })
+    }
+}
+
+impl<'a> fmt::Display for OutputDiagnostics<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("Compiler run ")?;
+        if self.has_error() {
+            Paint::red("failed:")
+        } else if self.has_warning() {
+            Paint::yellow("successful with warnings:")
+        } else {
+            Paint::green("successful!")
+        }
+        .fmt(f)?;
+
+        for err in &self.compiler_output.errors {
+            let mut ignored = false;
+            if err.severity.is_warning() {
+                if let Some(code) = err.error_code {
+                    if let Some(source_location) = &err.source_location {
+                        // we ignore spdx and contract size warnings in test
+                        // files.
if we are looking at one of these warnings + // from a test file we skip + ignored = + self.is_test(&source_location.file) && (code == 1878 || code == 5574); + + // we ignore warnings coming from ignored files + let source_path = Path::new(&source_location.file); + ignored |= self + .ignored_file_paths + .iter() + .any(|ignored_path| source_path.starts_with(ignored_path)); + } + + ignored |= self.ignored_error_codes.contains(&code); + } + } + + if !ignored { + f.write_str("\n")?; + err.fmt(f)?; + } + } + + Ok(()) + } +} diff --git a/crates/compilers/src/zksync/compile/project.rs b/crates/compilers/src/zksync/compile/project.rs new file mode 100644 index 00000000..d44f2fb4 --- /dev/null +++ b/crates/compilers/src/zksync/compile/project.rs @@ -0,0 +1,397 @@ +use crate::{ + artifact_output::Artifacts, + cache::ArtifactsCache, + compilers::{zksolc::ZkSolcCompiler, CompilerInput, CompilerSettings}, + error::Result, + filter::SparseOutputFilter, + output::Builds, + report, + resolver::{parse::SolData, GraphEdges}, + zksolc::input::ZkSolcVersionedInput, + zksync::{ + self, + artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, + compile::output::{AggregatedCompilerOutput, ProjectCompileOutput}, + }, + Graph, Project, Sources, +}; +use foundry_compilers_artifacts::{zksolc::CompilerOutput, SolcLanguage}; +use semver::Version; +use std::{collections::HashMap, path::PathBuf, time::Instant}; + +/// A set of different Solc installations with their version and the sources to be compiled +pub(crate) type VersionedSources = HashMap>; + +#[derive(Debug)] +pub struct ProjectCompiler<'a> { + /// Contains the relationship of the source files and their imports + edges: GraphEdges, + project: &'a Project, + /// how to compile all the sources + sources: CompilerSources, +} + +impl<'a> ProjectCompiler<'a> { + /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's + /// sources. + pub fn new(project: &'a Project) -> Result { + Self::with_sources(project, project.paths.read_input_files()?) + } + + /// Bootstraps the compilation process by resolving the dependency graph of all sources and the + /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel, + /// sequential) + /// + /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows + /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. + pub fn with_sources( + project: &'a Project, + sources: Sources, + ) -> Result { + let graph = Graph::resolve_sources(&project.paths, sources)?; + let (sources, edges) = graph.into_sources_by_version( + project.offline, + &project.locked_versions, + &project.compiler, + )?; + // If there are multiple different versions, and we can use multiple jobs we can compile + // them in parallel. 
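+        // E.g. (hypothetical): sources pinned to `^0.8.19` and `=0.8.24` split
+        // into two `VersionedSources` entries here, and each entry later becomes
+        // one zksolc invocation in `compile_sequential`.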
+ let sources = CompilerSources { sources }; + Ok(Self { edges, project, sources }) + } + + pub fn compile(self) -> Result { + let slash_paths = self.project.slash_paths; + + // drive the compiler statemachine to completion + let mut output = self.preprocess()?.compile()?.write_artifacts()?.write_cache()?; + + if slash_paths { + // ensures we always use `/` paths + output.slash_paths(); + } + + Ok(output) + } + + /// Does basic preprocessing + /// - sets proper source unit names + /// - check cache + fn preprocess(self) -> Result> { + trace!("preprocessing"); + let Self { edges, project, mut sources } = self; + + // convert paths on windows to ensure consistency with the `CompilerOutput` `solc` emits, + // which is unix style `/` + sources.slash_paths(); + + let mut cache = ArtifactsCache::new(project, edges)?; + // retain and compile only dirty sources and all their imports + sources.filter(&mut cache); + + Ok(PreprocessedState { sources, cache }) + } +} + +/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine +/// +/// The main reason is to debug all states individually +#[derive(Debug)] +struct PreprocessedState<'a> { + /// Contains all the sources to compile. + sources: CompilerSources, + + /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled + cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, +} + +impl<'a> PreprocessedState<'a> { + /// advance to the next state by compiling all sources + fn compile(self) -> Result> { + trace!("compiling"); + let PreprocessedState { sources, mut cache } = self; + + let mut output = sources.compile(&mut cache)?; + + // source paths get stripped before handing them over to solc, so solc never uses absolute + // paths, instead `--base-path ` is set. this way any metadata that's derived from + // data (paths) is relative to the project dir and should be independent of the current OS + // disk. 
However internally we still want to keep absolute paths, so we join the + // contracts again + output.join_all(cache.project().root()); + + Ok(CompiledState { output, cache }) + } +} + +/// Represents the state after `zksolc` was successfully invoked +#[derive(Debug)] +struct CompiledState<'a> { + output: AggregatedCompilerOutput, + cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, +} + +impl<'a> CompiledState<'a> { + /// advance to the next state by handling all artifacts + /// + /// Writes all output contracts to disk if enabled in the `Project` and if the build was + /// successful + #[instrument(skip_all, name = "write-artifacts")] + fn write_artifacts(self) -> Result> { + let CompiledState { output, cache } = self; + + let project = cache.project(); + let ctx = cache.output_ctx(); + // write all artifacts via the handler but only if the build succeeded and project wasn't + // configured with `no_artifacts == true` + let compiled_artifacts = if project.no_artifacts { + project.artifacts.zksync_output_to_artifacts( + &output.contracts, + &output.sources, + ctx, + &project.paths, + ) + } else if output.has_error( + &project.ignored_error_codes, + &project.ignored_file_paths, + &project.compiler_severity_filter, + ) { + trace!("skip writing cache file due to solc errors: {:?}", output.errors); + project.artifacts.zksync_output_to_artifacts( + &output.contracts, + &output.sources, + ctx, + &project.paths, + ) + } else { + trace!( + "handling artifact output for {} contracts and {} sources", + output.contracts.len(), + output.sources.len() + ); + // this emits the artifacts via the project's artifacts handler + let artifacts = project.artifacts.zksync_on_output( + &output.contracts, + &output.sources, + &project.paths, + ctx, + )?; + + // emits all the build infos, if they exist + output.write_build_infos(project.build_info_path())?; + + artifacts + }; + + Ok(ArtifactsState { output, cache, compiled_artifacts }) + } +} + +/// Represents the state after all artifacts were written to disk +#[derive(Debug)] +struct ArtifactsState<'a> { + output: AggregatedCompilerOutput, + cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, + compiled_artifacts: Artifacts, +} + +impl<'a> ArtifactsState<'a> { + /// Writes the cache file + /// + /// this concludes the [`Project::compile()`] statemachine + fn write_cache(self) -> Result { + let ArtifactsState { output, cache, compiled_artifacts } = self; + let project = cache.project(); + let ignored_error_codes = project.ignored_error_codes.clone(); + let ignored_file_paths = project.ignored_file_paths.clone(); + let compiler_severity_filter = project.compiler_severity_filter; + let has_error = + output.has_error(&ignored_error_codes, &ignored_file_paths, &compiler_severity_filter); + let skip_write_to_disk = project.no_artifacts || has_error; + trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file"); + + let (cached_artifacts, cached_builds) = + cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?; + + //project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; + // + let builds = Builds( + output + .build_infos + .iter() + .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) + .chain(cached_builds) + .map(|(id, context)| (id, context.with_joined_paths(project.paths.root.as_path()))) + .collect(), + ); + + Ok(ProjectCompileOutput { + compiler_output: output, + compiled_artifacts, + cached_artifacts, + 
ignored_error_codes, + ignored_file_paths, + compiler_severity_filter, + builds, + }) + } +} + +/// Determines how the `solc <-> sources` pairs are executed +#[derive(Debug, Clone)] +struct CompilerSources { + sources: VersionedSources, +} + +impl CompilerSources { + /// Converts all `\\` separators to `/` + /// + /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the + /// VFS (the `ZkSolcInput` as json) under `src/Cheats.sol`. + fn slash_paths(&mut self) { + #[cfg(windows)] + { + use path_slash::PathBufExt; + + self.sources.values_mut().for_each(|versioned_sources| { + versioned_sources.values_mut().for_each(|sources| { + *sources = std::mem::take(sources) + .into_iter() + .map(|(path, source)| { + (PathBuf::from(path.to_slash_lossy().as_ref()), source) + }) + .collect() + }) + }); + } + } + + /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] + fn filter(&mut self, cache: &mut ArtifactsCache<'_, ZkArtifactOutput, ZkSolcCompiler>) { + cache.remove_dirty_sources(); + for versioned_sources in self.sources.values_mut() { + for (version, sources) in versioned_sources { + trace!("Filtering {} sources for {}", sources.len(), version); + cache.filter(sources, version); + trace!( + "Detected {} sources to compile {:?}", + sources.dirty().count(), + sources.dirty_files().collect::>() + ); + } + } + } + + /// Compiles all the files with `ZkSolc` + fn compile( + self, + cache: &mut ArtifactsCache<'_, ZkArtifactOutput, ZkSolcCompiler>, + ) -> Result { + let project = cache.project(); + let graph = cache.graph(); + + let sparse_output = SparseOutputFilter::new(project.sparse_output.as_deref()); + + // Include additional paths collected during graph resolution. + let mut include_paths = project.paths.include_paths.clone(); + include_paths.extend(graph.include_paths().clone()); + + let mut jobs = Vec::new(); + for (language, versioned_sources) in self.sources { + for (version, sources) in versioned_sources { + if sources.is_empty() { + // nothing to compile + trace!("skip {} for empty sources set", version); + continue; + } + + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = project.settings.clone(); + let actually_dirty = + sparse_output.sparse_sources(&sources, &mut opt_settings, graph); + + if actually_dirty.is_empty() { + // nothing to compile for this particular language, all dirty files are in the + // other language set + trace!("skip {} run due to empty source set", version); + continue; + } + + trace!("calling {} with {} sources {:?}", version, sources.len(), sources.keys()); + let zksync_settings = project + .settings + .clone() + .with_base_path(&project.paths.root) + .with_allow_paths(&project.paths.allowed_paths) + .with_include_paths(&include_paths) + .with_remappings(&project.paths.remappings); + + let mut input = ZkSolcVersionedInput::build( + sources, + zksync_settings, + language, + version.clone(), + ); + + input.strip_prefix(project.paths.root.as_path()); + + jobs.push((input, actually_dirty)); + } + } + + let results = compile_sequential(&project.compiler, jobs)?; + + let mut aggregated = AggregatedCompilerOutput::default(); + + for (input, mut output, actually_dirty) in results { + let version = input.version(); + + // Mark all files as seen by the compiler + for file in &actually_dirty { + cache.compiler_seen(file); + } + + let build_info = zksync::raw_build_info_new(&input, &output, project.build_info)?; + + 
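+            // The output currently holds root-relative paths, so filter it down
+            // to the dirty files (stripped to relative form) and then re-join the
+            // project root so downstream consumers see absolute paths again.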
output.retain_files( + actually_dirty + .iter() + .map(|f| f.strip_prefix(project.paths.root.as_path()).unwrap_or(f)), + ); + output.join_all(project.paths.root.as_path()); + + aggregated.extend(version.clone(), build_info, output); + } + + Ok(aggregated) + } +} + +/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s +fn compile_sequential( + zksolc_compiler: &ZkSolcCompiler, + jobs: Vec<(ZkSolcVersionedInput, Vec)>, +) -> Result)>> { + jobs.into_iter() + .map(|(input, actually_dirty)| { + let zksolc = zksolc_compiler.zksolc(&input)?; + + let (compiler_name, version) = + if let Some(zk_version) = zksolc.solc_version_info.zksync_version.as_ref() { + ("zksolc and ZKsync solc".to_string(), zk_version.clone()) + } else { + (input.compiler_name().to_string(), input.version().clone()) + }; + + let start = Instant::now(); + report::compiler_spawn(&compiler_name, &version, actually_dirty.as_slice()); + + let output = zksolc.compile(&input.input)?; + + report::compiler_success(&compiler_name, &version, &start.elapsed()); + + Ok((input, output, actually_dirty)) + }) + .collect() +} diff --git a/crates/compilers/src/zksync/mod.rs b/crates/compilers/src/zksync/mod.rs new file mode 100644 index 00000000..4bf38ec8 --- /dev/null +++ b/crates/compilers/src/zksync/mod.rs @@ -0,0 +1,182 @@ +use std::{ + collections::{BTreeMap, HashSet}, + path::{Path, PathBuf}, +}; + +use alloy_primitives::hex; +use foundry_compilers_artifacts::{zksolc::CompilerOutput, SolcLanguage}; +use foundry_compilers_core::error::SolcError; + +use crate::{ + buildinfo::{BuildContext, RawBuildInfo, ETHERS_FORMAT_VERSION}, + error::Result, + resolver::parse::SolData, + zksolc::{ + input::{StandardJsonCompilerInput, ZkSolcVersionedInput}, + settings::ZkSolcSettings, + ZkSolcCompiler, + }, + CompilerInput, Graph, Project, Source, +}; + +use md5::Digest; + +use self::{artifact_output::zk::ZkArtifactOutput, compile::output::ProjectCompileOutput}; + +pub mod artifact_output; +pub mod compile; + +pub fn project_compile( + project: &Project, +) -> Result { + self::compile::project::ProjectCompiler::new(project)?.compile() +} + +pub fn project_compile_files( + project: &Project, + files: I, +) -> Result +where + I: IntoIterator, + P: Into, +{ + let sources = Source::read_all(files)?; + self::compile::project::ProjectCompiler::with_sources(project, sources)?.compile() +} + +pub fn project_standard_json_input( + project: &Project, + target: &Path, +) -> Result { + tracing::debug!(?target, "standard_json_input for zksync"); + let graph = Graph::::resolve(&project.paths)?; + let target_index = graph + .files() + .get(target) + .ok_or_else(|| SolcError::msg(format!("cannot resolve file at {:?}", target.display())))?; + + let mut sources = Vec::new(); + let mut unique_paths = HashSet::new(); + let (path, source) = graph.node(*target_index).unpack(); + unique_paths.insert(path.clone()); + sources.push((path, source)); + sources.extend( + graph + .all_imported_nodes(*target_index) + .map(|index| graph.node(index).unpack()) + .filter(|(p, _)| unique_paths.insert(p.to_path_buf())), + ); + + let root = project.root(); + let sources = sources + .into_iter() + .map(|(path, source)| (rebase_path(root, path), source.clone())) + .collect(); + + let mut zk_solc_settings: ZkSolcSettings = project.settings.clone(); + // strip the path to the project root from all remappings + zk_solc_settings.settings.remappings = project + .paths + .remappings + .clone() + .into_iter() + .map(|r| 
+// Copied from compilers/lib private method
+fn rebase_path(base: &Path, path: &Path) -> PathBuf {
+    use path_slash::PathExt;
+
+    let mut base_components = base.components();
+    let mut path_components = path.components();
+
+    let mut new_path = PathBuf::new();
+
+    while let Some(path_component) = path_components.next() {
+        let base_component = base_components.next();
+
+        if Some(path_component) != base_component {
+            if base_component.is_some() {
+                new_path.extend(
+                    std::iter::repeat(std::path::Component::ParentDir)
+                        .take(base_components.count() + 1),
+                );
+            }
+
+            new_path.push(path_component);
+            new_path.extend(path_components);
+
+            break;
+        }
+    }
+
+    new_path.to_slash_lossy().into_owned().into()
+}
+
+pub fn build_context_new(
+    input: &ZkSolcVersionedInput,
+    output: &CompilerOutput,
+) -> Result<BuildContext<SolcLanguage>> {
+    let mut source_id_to_path = BTreeMap::new();
+
+    let input_sources = input.sources().map(|(path, _)| path).collect::<HashSet<_>>();
+    for (path, source) in output.sources.iter() {
+        if input_sources.contains(path.as_path()) {
+            source_id_to_path.insert(source.id, path.to_path_buf());
+        }
+    }
+
+    Ok(BuildContext { source_id_to_path, language: input.language() })
+}
+
+pub fn raw_build_info_new(
+    input: &ZkSolcVersionedInput,
+    output: &CompilerOutput,
+    full_build_info: bool,
+) -> Result<RawBuildInfo<SolcLanguage>> {
+    // TODO: evaluate if this should be the zksolc version instead
+    let version = input.solc_version.clone();
+    let build_context = build_context_new(input, output)?;
+
+    let mut hasher = md5::Md5::new();
+
+    hasher.update(ETHERS_FORMAT_VERSION);
+
+    let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch);
+    hasher.update(&solc_short);
+    hasher.update(version.to_string());
+
+    let input = serde_json::to_value(input)?;
+    hasher.update(&serde_json::to_string(&input)?);
+
+    // create the hash for `{_format,solcVersion,solcLongVersion,input}`
+    // N.B. this is not exactly the same as hashing the json representation of these values but
+    // it is the most efficient approach
+    let result = hasher.finalize();
+    let id = hex::encode(result);
+
+    let mut build_info = BTreeMap::new();
+
+    if full_build_info {
+        build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?);
+        build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?);
+        build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?);
+        build_info.insert("input".to_string(), input);
+        build_info.insert("output".to_string(), serde_json::to_value(output)?);
+    }
+
+    Ok(RawBuildInfo { id, build_info, build_context })
+}
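Because `rebase_path` is the subtle piece here (it re-expresses `path` relative to `base`, emitting one `..` per remaining base component when the target lies outside the base), two concrete cases may help, written as a hypothetical unit test against the function above:

```rust
#[cfg(test)]
mod rebase_path_examples {
    use super::rebase_path;
    use std::path::Path;

    #[test]
    fn rebases_inside_and_outside_base() {
        // target below the base: the base prefix is simply stripped
        assert_eq!(
            rebase_path(Path::new("/project"), Path::new("/project/src/Main.sol")),
            Path::new("src/Main.sol")
        );
        // target outside the base: climb out with `..` for each remaining base component
        assert_eq!(
            rebase_path(Path::new("/project/contracts"), Path::new("/project/outer/Util.sol")),
            Path::new("../outer/Util.sol")
        );
    }
}
```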
diff --git a/crates/compilers/tests/zksync.rs b/crates/compilers/tests/zksync.rs
new file mode 100644
index 00000000..efacf653
--- /dev/null
+++ b/crates/compilers/tests/zksync.rs
@@ -0,0 +1,473 @@
+use std::{collections::HashMap, fs, path::PathBuf, str::FromStr};
+
+use foundry_compilers::{
+    buildinfo::BuildInfo,
+    cache::CompilerCache,
+    project_util::*,
+    resolver::parse::SolData,
+    zksolc::{input::ZkSolcInput, ZkSolcCompiler, ZkSolcSettings},
+    zksync::{self, artifact_output::zk::ZkArtifactOutput},
+    Graph, ProjectBuilder, ProjectPathsConfig,
+};
+use foundry_compilers_artifacts::Remapping;
+
+#[test]
+fn zksync_can_compile_dapp_sample() {
+    let _ = tracing_subscriber::fmt()
+        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+        .try_init()
+        .ok();
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
+    let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
+    let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::new(paths).unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Dapp").is_some());
+    compiled.assert_success();
+
+    // nothing to compile
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Dapp").is_some());
+    assert!(compiled.is_unchanged());
+
+    let cache = CompilerCache::<ZkSolcSettings>::read(project.cache_path()).unwrap();
+
+    // delete artifacts
+    std::fs::remove_dir_all(&project.paths().artifacts).unwrap();
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Dapp").is_some());
+    assert!(!compiled.is_unchanged());
+
+    let updated_cache = CompilerCache::<ZkSolcSettings>::read(project.cache_path()).unwrap();
+    assert_eq!(cache, updated_cache);
+}
+
+#[test]
+fn zksync_can_compile_dapp_detect_changes_in_libs() {
+    let _ = tracing_subscriber::fmt()
+        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+        .try_init()
+        .ok();
+    let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
+
+    let remapping = project.paths().libraries[0].join("remapping");
+    project
+        .paths_mut()
+        .remappings
+        .push(Remapping::from_str(&format!("remapping/={}/", remapping.display())).unwrap());
+
+    let src = project
+        .add_source(
+            "Foo",
+            r#"
+    pragma solidity ^0.8.10;
+    import "remapping/Bar.sol";
+
+    contract Foo {}
+   "#,
+        )
+        .unwrap();
+
+    let lib = project
+        .add_lib(
+            "remapping/Bar",
+            r"
+    pragma solidity ^0.8.10;
+
+    contract Bar {}
+    ",
+        )
+        .unwrap();
+
+    let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+    assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),]));
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Foo").is_some());
+    assert!(compiled.find_first("Bar").is_some());
+    compiled.assert_success();
+
+    // nothing to compile
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Foo").is_some());
+    assert!(compiled.is_unchanged());
+
+    let cache = CompilerCache::<ZkSolcSettings>::read(&project.paths().cache).unwrap();
+    assert_eq!(cache.files.len(), 2);
+
+    // overwrite lib
+    project
+        .add_lib(
+            "remapping/Bar",
+            r"
+    pragma solidity ^0.8.10;
+
+    // changed lib
+    contract Bar {}
+    ",
+        )
+        .unwrap();
+
+    let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("Foo").is_some());
+    assert!(compiled.find_first("Bar").is_some());
+    // ensure change is detected
+    assert!(!compiled.is_unchanged());
+}
+
+#[test]
+fn zksync_can_compile_dapp_detect_changes_in_sources() {
+    let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
+
+    let src = project
+        .add_source(
+            "DssSpell.t",
+            r#"
+    pragma solidity ^0.8.10;
+    import "./DssSpell.t.base.sol";
+
+    contract DssSpellTest is DssSpellTestBase { }
+   "#,
+        )
+        .unwrap();
+
+    let base = project
+        .add_source(
+            "DssSpell.t.base",
+            r"
+    pragma solidity ^0.8.10;
+
+    contract DssSpellTestBase {
+        address deployed_spell;
+        function setUp() public {
+            deployed_spell = address(0xA867399B43aF7790aC800f2fF3Fa7387dc52Ec5E);
+        }
+    }
+   ",
+        )
+        .unwrap();
+
+    let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+    assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),]));
+    assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]);
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    compiled.assert_success();
+    assert!(compiled.find_first("DssSpellTest").is_some());
+    assert!(compiled.find_first("DssSpellTestBase").is_some());
+
+    // nothing to compile
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.is_unchanged());
+    assert!(compiled.find_first("DssSpellTest").is_some());
+    assert!(compiled.find_first("DssSpellTestBase").is_some());
+
+    let cache = CompilerCache::<ZkSolcSettings>::read(&project.paths().cache).unwrap();
+    assert_eq!(cache.files.len(), 2);
+
+    let artifacts = compiled.into_artifacts().collect::<HashMap<_, _>>();
+
+    // overwrite import
+    let _ = project
+        .add_source(
+            "DssSpell.t.base",
+            r"
+    pragma solidity ^0.8.10;
+
+    contract DssSpellTestBase {
+        address deployed_spell;
+        function setUp() public {
+            deployed_spell = address(0);
+        }
+    }
+   ",
+        )
+        .unwrap();
+    let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
+    assert_eq!(graph.files().len(), 2);
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.find_first("DssSpellTest").is_some());
+    assert!(compiled.find_first("DssSpellTestBase").is_some());
+    // ensure change is detected
+    assert!(!compiled.is_unchanged());
+
+    // and all recompiled artifacts are different
+    for (p, artifact) in compiled.into_artifacts() {
+        let other = artifacts
+            .iter()
+            .find(|(id, _)| id.name == p.name && id.version == p.version && id.source == p.source)
+            .unwrap()
+            .1;
+        assert_ne!(artifact, *other);
+    }
+}
+#[test]
+fn zksync_can_emit_build_info() {
+    let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
+    project.project_mut().build_info = true;
+    project
+        .add_source(
+            "A",
+            r#"
+pragma solidity ^0.8.10;
+import "./B.sol";
+contract A { }
+"#,
+        )
+        .unwrap();
+
+    project
+        .add_source(
+            "B",
+            r"
+pragma solidity ^0.8.10;
+contract B { }
+",
+        )
+        .unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    compiled.assert_success();
+
+    let info_dir = project.project().build_info_path();
+    assert!(info_dir.exists());
+
+    let mut build_info_count = 0;
+    for entry in fs::read_dir(info_dir).unwrap() {
+        let info =
+            BuildInfo::<ZkSolcInput, foundry_compilers_artifacts::zksolc::CompilerOutput>::read(
+                &entry.unwrap().path(),
+            )
+            .unwrap();
+        assert!(info.output.zksync_solc_version.is_some());
+        build_info_count += 1;
+    }
+    assert_eq!(build_info_count, 1);
+}
+
+#[test]
+fn zksync_can_clean_build_info() {
+    let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
+
+    project.project_mut().build_info = true;
+    project.project_mut().paths.build_infos = project.project_mut().paths.root.join("build-info");
+    project
+        .add_source(
+            "A",
+            r#"
+pragma solidity ^0.8.10;
+import "./B.sol";
+contract A { }
+"#,
+        )
+        .unwrap();
+
+    project
+        .add_source(
+            "B",
+            r"
+pragma solidity ^0.8.10;
+contract B { }
+",
+        )
+        .unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    compiled.assert_success();
+
+    let info_dir = project.project().build_info_path();
+    assert!(info_dir.exists());
+
+    let mut build_info_count = 0;
+    for entry in fs::read_dir(info_dir).unwrap() {
+        let _info =
+            BuildInfo::<ZkSolcInput, foundry_compilers_artifacts::zksolc::CompilerOutput>::read(
+                &entry.unwrap().path(),
+            )
+            .unwrap();
+        build_info_count += 1;
+    }
+    assert_eq!(build_info_count, 1);
+
+    project.project().cleanup().unwrap();
+
+    assert!(!project.project().build_info_path().exists());
+}
+
+#[test]
+fn zksync_cant_compile_a_file_outside_allowed_paths() {
+    // For this test we create the following directory structure:
+    // project_root/
+    // ├── outer/
+    // │   ├── Util.sol
+    // │   └── Helper.sol
+    // └── contracts/
+    //     ├── src/
+    //     │   └── Main.sol
+
+    let tmp_dir = tempfile::tempdir().unwrap();
+    let project_root = tmp_dir.path().to_path_buf();
+    let contracts_dir = tempfile::tempdir_in(&project_root).unwrap();
+
+    fs::create_dir_all(contracts_dir.path().join("src")).unwrap();
+    fs::create_dir_all(project_root.join("outer")).unwrap();
+
+    fs::write(
+        contracts_dir.path().join("src/Main.sol"),
+        r#"
+pragma solidity ^0.8.0;
+import "@outer/Helper.sol";
+contract Main {
+    Helper helper = new Helper();
+    function run() public {}
+}
+"#,
+    )
+    .unwrap();
+
+    fs::write(
+        project_root.join("outer/Helper.sol"),
+        r#"
+pragma solidity ^0.8.0;
+import "./Util.sol";
+contract Helper {
+    Util util = new Util();
+}
+"#,
+    )
+    .unwrap();
+
+    fs::write(
+        project_root.join("outer/Util.sol"),
+        r#"
+pragma solidity ^0.8.0;
+contract Util {}
+"#,
+    )
+    .unwrap();
+
+    let root = contracts_dir.path().to_path_buf();
+    let paths = ProjectPathsConfig::builder()
+        .root(root.clone())
+        .sources(root.join("src"))
+        .remappings(vec![Remapping::from_str("@outer/=../outer/").unwrap()])
+        .build()
+        .unwrap();
+
+    let inner = ProjectBuilder::<ZkSolcCompiler, ZkArtifactOutput>::new(Default::default())
+        .paths(paths)
+        .build(Default::default())
+        .unwrap();
+    let project =
+        TempProject::<ZkSolcCompiler, ZkArtifactOutput>::create_new(contracts_dir, inner).unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    assert!(compiled.has_compiler_errors());
+    assert!(compiled.compiler_output.errors.iter().any(|error| error
+        .formatted_message
+        .as_ref()
+        .map_or(false, |msg| msg.contains("File outside of allowed directories"))));
+}
+
+#[test]
+fn zksync_can_compile_a_file_in_allowed_paths_successfully() {
+    let tmp_dir = tempfile::tempdir().unwrap();
+    let project_root = tmp_dir.path().to_path_buf();
+    let contracts_dir = tempfile::tempdir_in(&project_root).unwrap();
+
+    fs::create_dir_all(contracts_dir.path().join("src")).unwrap();
+    fs::create_dir_all(project_root.join("outer")).unwrap();
+
+    fs::write(
+        contracts_dir.path().join("src/Main.sol"),
+        r#"
+pragma solidity ^0.8.0;
+import "@outer/Helper.sol";
+contract Main {
+    Helper helper = new Helper();
+    function run() public {}
+}
+"#,
+    )
+    .unwrap();
+
+    fs::write(
+        project_root.join("outer/Helper.sol"),
+        r#"
+pragma solidity ^0.8.0;
+import "./Util.sol";
+contract Helper {
+    Util util = new Util();
+}
+"#,
+    )
+    .unwrap();
+
+    fs::write(
+        project_root.join("outer/Util.sol"),
+        r#"
+pragma solidity ^0.8.0;
+contract Util {}
+"#,
+    )
+    .unwrap();
+
+    let root = contracts_dir.path().to_path_buf();
+    let paths = ProjectPathsConfig::builder()
+        .root(root.clone())
+        .sources(root.join("src"))
+        .allowed_paths(vec!["../"])
+        .remappings(vec![Remapping::from_str("@outer/=../outer/").unwrap()])
+        .build()
+        .unwrap();
+
+    let inner = ProjectBuilder::<ZkSolcCompiler, ZkArtifactOutput>::new(Default::default())
+        .paths(paths)
+        .build(Default::default())
+        .unwrap();
+    let project =
+        TempProject::<ZkSolcCompiler, ZkArtifactOutput>::create_new(contracts_dir, inner).unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    compiled.assert_success();
+}
+
+#[test]
+fn zksync_can_compile_yul_sample() {
+    let _ = tracing_subscriber::fmt()
+        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+        .try_init()
+        .ok();
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../test-data/zksync/yul-sample");
+    let paths = ProjectPathsConfig::builder().sources(root);
+    let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::new(paths).unwrap();
+
+    let compiled = zksync::project_compile(project.project()).unwrap();
+    compiled.assert_success();
+
+    let simple_store_artifact = compiled
+        .compiled_artifacts
+        .values()
+        .find_map(|contracts| {
+            contracts
+                .iter()
+                .find(|(name, _)| name.ends_with("SimpleStore.yul"))
+                .and_then(|(_, artifacts)| artifacts.first())
+        })
+        .expect("SimpleStore.yul artifact not found")
+        .artifact
+        .bytecode
+        .clone()
+        .unwrap();
+
+    let yul_bytecode = simple_store_artifact.object.as_bytes().unwrap();
+
+    assert!(!yul_bytecode.is_empty(), "SimpleStore.yul bytecode is empty");
+}
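Worth highlighting from the two allowed-paths tests above: the passing variant differs from the failing one by a single builder call. Without it, zksolc refuses to read `../outer` and reports "File outside of allowed directories"; with it, the identical layout compiles:

```rust
// The decisive difference between the failing and passing variants above:
let paths = ProjectPathsConfig::builder()
    .root(root.clone())
    .sources(root.join("src"))
    .allowed_paths(vec!["../"]) // grants zksolc read access to the parent directory
    .remappings(vec![Remapping::from_str("@outer/=../outer/").unwrap()])
    .build()
    .unwrap();
```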
diff --git a/test-data/zksync/in/compiler-in-1.json b/test-data/zksync/in/compiler-in-1.json
new file mode 100644
index 00000000..a74b01a5
--- /dev/null
+++ b/test-data/zksync/in/compiler-in-1.json
@@ -0,0 +1 @@
+{"language":"Solidity","sources":{"contracts/Contract.sol":{"content":"pragma solidity >0.7.0;\n\ncontract Contract {\n}\n"}},"settings":{"optimizer":{"enabled":false},"outputSelection":{"*":{"*":["abi","evm.methodIdentifiers"],"":["ast"]}}}}
diff --git a/test-data/zksync/library-remapping-in-2.json b/test-data/zksync/library-remapping-in-2.json
new file mode 100644
index 00000000..8c4e897a
--- /dev/null
+++ b/test-data/zksync/library-remapping-in-2.json
@@ -0,0 +1 @@
+{"language":"Solidity","sources":{"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/lib/remapping/MyLib.sol":{"content":"\n// SPDX-License-Identifier: MIT\nlibrary MyLib {\n function foobar(uint256 a) public view returns (uint256) {\n \treturn a * 100;\n }\n}\n"},"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/src/LinkTest.sol":{"content":"\n// SPDX-License-Identifier: MIT\nimport \"remapping/MyLib.sol\";\ncontract LinkTest {\n function foo() public returns (uint256) {\n return MyLib.foobar(1);\n }\n}\n"}},"settings":{"remappings":["remapping/=/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/lib/remapping/","src/=/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/src/"],"optimizer":{"enabled":true},"metadata":{"bytecodeHash":"none"},"outputSelection":{"*":{"":["ast"],"*":["abi"]}},"evmVersion":"london","libraries":{"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/lib/remapping/MyLib.sol":{"MyLib":"0x0000000000000000000000000000000000000000"}}}}
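Both library-remapping fixtures (one above, one below) encode the standard-json `libraries` shape: source file path, then library name, then linked address. A quick sanity check over that shape with plain `serde_json`, reading one fixture relative to the repo root:

```rust
fn check_library_fixture() -> Result<(), Box<dyn std::error::Error>> {
    let raw = std::fs::read_to_string("test-data/zksync/library-remapping-in.json")?;
    let json: serde_json::Value = serde_json::from_str(&raw)?;
    // settings.libraries maps source file -> library name -> linked address
    for (_file, libs) in json["settings"]["libraries"].as_object().unwrap() {
        for (_name, addr) in libs.as_object().unwrap() {
            assert!(addr.as_str().unwrap().starts_with("0x"));
        }
    }
    Ok(())
}
```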
}\n}\n"}},"settings":{"remappings":["remapping/=/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/lib/remapping/","src/=/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/src/"],"optimizer":{"enabled":true},"metadata":{"bytecodeHash":"none"},"outputSelection":{"*":{"":["ast"],"*":["abi"]}},"evmVersion":"london","libraries":{"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/can_create_on_goerli-0vd5EOs/lib/remapping/MyLib.sol":{"MyLib":"0x0000000000000000000000000000000000000000"}}}} diff --git a/test-data/zksync/library-remapping-in.json b/test-data/zksync/library-remapping-in.json new file mode 100644 index 00000000..2a73f137 --- /dev/null +++ b/test-data/zksync/library-remapping-in.json @@ -0,0 +1 @@ +{"language":"Solidity","sources":{"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/tmp_dappPyXsdD/lib/remapping/MyLib.sol":{"content":"\n// SPDX-License-Identifier: MIT\nlibrary MyLib {\n function foobar(uint256 a) public view returns (uint256) {\n \treturn a * 100;\n }\n}\n"},"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/tmp_dappPyXsdD/src/LinkTest.sol":{"content":"\n// SPDX-License-Identifier: MIT\nimport \"remapping/MyLib.sol\";\ncontract LinkTest {\n function foo() public returns (uint256) {\n return MyLib.foobar(1);\n }\n}\n"}},"settings":{"remappings":["remapping/=/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/tmp_dappPyXsdD/lib/remapping/"],"optimizer":{"enabled":false,"runs":200},"outputSelection":{"*":{"":["ast"],"*":["abi"]}},"evmVersion":"london","libraries":{"/private/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/tmp_dappPyXsdD/lib/remapping/MyLib.sol":{"MyLib":"0x0000000000000000000000000000000000000000"}}}} diff --git a/test-data/zksync/yul-sample/SimpleStore.yul b/test-data/zksync/yul-sample/SimpleStore.yul new file mode 100644 index 00000000..cef5d9bc --- /dev/null +++ b/test-data/zksync/yul-sample/SimpleStore.yul @@ -0,0 +1,11 @@ +object "SimpleStore" { + code { + datacopy(0, dataoffset("SimpleStore_deployed"), datasize("SimpleStore_deployed")) + return(0, datasize("SimpleStore_deployed")) + } + object "SimpleStore_deployed" { + code { + calldatacopy(0, 0, 36) // write calldata to memory + } + } +}