Prover Storage Manager #1133

Merged
merged 13 commits on Nov 23, 2023
14 changes: 14 additions & 0 deletions Cargo.lock


1 change: 1 addition & 0 deletions Cargo.toml
@@ -18,6 +18,7 @@ members = [
    "full-node/sov-ethereum",
    "full-node/sov-ledger-rpc",
    "full-node/sov-stf-runner",
    "full-node/sov-prover-storage-manager",
    # Utils
    "utils/zk-cycle-macros",
    "utils/zk-cycle-utils",
2 changes: 1 addition & 1 deletion adapters/avail/src/spec/hash.rs
@@ -2,7 +2,7 @@ use primitive_types::H256;
use serde::{Deserialize, Serialize};
use sov_rollup_interface::da::BlockHashTrait;

-#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, Eq)]
+#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, Eq, Hash)]
pub struct AvailHash(H256);

impl AvailHash {
8 changes: 8 additions & 0 deletions adapters/mock-da/src/types/mod.rs
@@ -1,6 +1,7 @@
mod address;

use std::fmt::Formatter;
use std::hash::Hasher;

pub use address::{MockAddress, MOCK_SEQUENCER_DA_ADDRESS};
use borsh::{BorshDeserialize, BorshSerialize};
@@ -45,6 +46,13 @@ impl From<MockHash> for [u8; 32] {
    }
}

impl std::hash::Hash for MockHash {
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write(&self.0);
        state.finish();
    }
}

impl BlockHashTrait for MockHash {}

/// A mock block header used for testing.
1 change: 0 additions & 1 deletion full-node/db/sov-db/src/schema/tables.rs
@@ -208,7 +208,6 @@ macro_rules! define_table_with_seek_key_codec {
    };
}

-// fn deser(target: &mut &[u8]) -> Result<Self, DeserializationError>;
define_table_with_seek_key_codec!(
    /// The primary source for slot data
    (SlotByNumber) SlotNumber => StoredSlot
5 changes: 2 additions & 3 deletions full-node/db/sov-schema-db/src/lib.rs
@@ -311,14 +311,13 @@ impl SchemaBatch {
        column_writes.insert(key, operation);
    }

-    #[allow(dead_code)]
    pub(crate) fn read<S: Schema>(
        &self,
        key: &impl KeyCodec<S>,
-    ) -> anyhow::Result<Option<Operation>> {
+    ) -> anyhow::Result<Option<&Operation>> {
        let key = key.encode_key()?;
        if let Some(column_writes) = self.last_writes.get(&S::COLUMN_FAMILY_NAME) {
-            return Ok(column_writes.get(&key).cloned());
+            return Ok(column_writes.get(&key));
        }
        Ok(None)
    }
28 changes: 24 additions & 4 deletions full-node/db/sov-schema-db/src/snapshot.rs
@@ -10,7 +10,8 @@ pub type SnapshotId = u64;

/// A trait to make nested calls to several [`SchemaBatch`]s and eventually [`crate::DB`]
pub trait QueryManager {
-    /// Get a value from snapshot or its parents
+    /// Get a value from parents of given [`SnapshotId`]
+    /// In case of unknown [`SnapshotId`] return `Ok(None)`
    fn get<S: Schema>(
        &self,
        snapshot_id: SnapshotId,
@@ -35,6 +36,12 @@ impl<T> ReadOnlyLock<T> {
    }
}

impl<T> From<Arc<RwLock<T>>> for ReadOnlyLock<T> {
    fn from(value: Arc<RwLock<T>>) -> Self {
        Self::new(value)
    }
}

/// Wrapper around [`QueryManager`] that allows to read from snapshots
pub struct DbSnapshot<Q> {
    id: SnapshotId,
@@ -107,7 +114,7 @@ pub struct FrozenDbSnapshot {

impl FrozenDbSnapshot {
    /// Get value from its own cache
-    pub fn get<S: Schema>(&self, key: &impl KeyCodec<S>) -> anyhow::Result<Option<Operation>> {
+    pub fn get<S: Schema>(&self, key: &impl KeyCodec<S>) -> anyhow::Result<Option<&Operation>> {
        self.cache.read(key)
    }

@@ -135,12 +142,25 @@ impl From<FrozenDbSnapshot> for SchemaBatch {
    }
}

-fn decode_operation<S: Schema>(operation: Operation) -> anyhow::Result<Option<S::Value>> {
+fn decode_operation<S: Schema>(operation: &Operation) -> anyhow::Result<Option<S::Value>> {
    match operation {
        Operation::Put { value } => {
-            let value = S::Value::decode_value(&value)?;
+            let value = S::Value::decode_value(value)?;
            Ok(Some(value))
        }
        Operation::Delete => Ok(None),
    }
}

/// QueryManager, which never returns any values
pub struct NoopQueryManager;

impl QueryManager for NoopQueryManager {
    fn get<S: Schema>(
        &self,
        _snapshot_id: SnapshotId,
        _key: &impl KeyCodec<S>,
    ) -> anyhow::Result<Option<S::Value>> {
        Ok(None)
    }
}
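
For orientation, here is a minimal sketch (not part of this diff) of a `QueryManager` that resolves reads against a stack of `FrozenDbSnapshot`s from newest to oldest, in the spirit of the `LinearSnapshotManager` used in the snapshot tests below. The `StackQueryManager` name and the exact import paths are illustrative assumptions; the trait signature and the `Operation` handling follow the code above.

```rust
// Sketch only: struct name and import paths are assumptions, not part of this PR.
use sov_schema_db::schema::{KeyCodec, Schema, ValueCodec};
use sov_schema_db::snapshot::{FrozenDbSnapshot, QueryManager, SnapshotId};
use sov_schema_db::Operation;

/// Holds frozen snapshots ordered from oldest to newest.
#[derive(Default)]
pub struct StackQueryManager {
    snapshots: Vec<FrozenDbSnapshot>,
}

impl QueryManager for StackQueryManager {
    fn get<S: Schema>(
        &self,
        snapshot_id: SnapshotId,
        key: &impl KeyCodec<S>,
    ) -> anyhow::Result<Option<S::Value>> {
        // An unknown snapshot id simply has no parents to read from.
        let upper = (snapshot_id as usize).min(self.snapshots.len());
        // Walk the parents of `snapshot_id`, newest first; the first hit wins.
        for snapshot in self.snapshots[..upper].iter().rev() {
            if let Some(operation) = snapshot.get(key)? {
                return match operation {
                    // A `Put` shadows older values and resolves to the decoded value.
                    Operation::Put { value } => Ok(Some(S::Value::decode_value(value)?)),
                    // A `Delete` shadows older values and resolves to nothing.
                    Operation::Delete => Ok(None),
                };
            }
        }
        // No parent has written this key.
        Ok(None)
    }
}
```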
2 changes: 1 addition & 1 deletion full-node/db/sov-schema-db/tests/snapshot_test.rs
@@ -69,7 +69,7 @@ impl QueryManager for LinearSnapshotManager {
        for snapshot in self.snapshots[..snapshot_id as usize].iter().rev() {
            if let Some(operation) = snapshot.get(key)? {
                return match operation {
-                    Operation::Put { value } => Ok(Some(S::Value::decode_value(&value)?)),
+                    Operation::Put { value } => Ok(Some(S::Value::decode_value(value)?)),
                    Operation::Delete => Ok(None),
                };
            }
25 changes: 25 additions & 0 deletions full-node/sov-prover-storage-manager/Cargo.toml
@@ -0,0 +1,25 @@
[package]
name = "sov-prover-storage-manager"
description = "Hierarchical storage manager for prover storage"
license = { workspace = true }
edition = { workspace = true }
authors = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }

version = { workspace = true }
readme = "README.md"
resolver = "2"

[dependencies]
anyhow = { workspace = true }
sov-rollup-interface = { path = "../../rollup-interface" }
sov-db = { path = "../db/sov-db" }
sov-schema-db = { path = "../db/sov-schema-db" }
sov-state = { path = "../../module-system/sov-state", features = ["native"] }
# TODO: Remove this after fully integrating with `sov-db`
byteorder = { workspace = true, default-features = true }

[dev-dependencies]
sov-mock-da = { path = "../../adapters/mock-da", features = ["native"] }
tempfile = { workspace = true }
3 changes: 3 additions & 0 deletions full-node/sov-prover-storage-manager/README.md
@@ -0,0 +1,3 @@
# `sov-prover-storage-manager`

Implementation of `StorageManager` for `ProverStorage` that can handle forks and re-orgs
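
To make the fork and re-org handling concrete, here is a self-contained conceptual sketch of the bookkeeping such a manager performs; `ForkAwareManager` and its methods are hypothetical illustrations built on std collections, not the `StorageManager` API added by this PR.

```rust
use std::collections::HashMap;

// Conceptual sketch only: every name below is hypothetical and the data model is
// deliberately oversimplified (flat key/value state, one change set per block).
type BlockHash = [u8; 32];
/// `Some(value)` models a put, `None` models a delete.
type ChangeSet = HashMap<Vec<u8>, Option<Vec<u8>>>;

#[derive(Default)]
struct ForkAwareManager {
    /// Pending change sets per block; sibling entries represent competing forks.
    pending: HashMap<BlockHash, ChangeSet>,
    /// State of the finalized chain (stands in for the real database).
    committed: HashMap<Vec<u8>, Vec<u8>>,
}

impl ForkAwareManager {
    /// Remember the changes a block produced without touching committed state,
    /// so several forks can be executed side by side.
    fn save_change_set(&mut self, block: BlockHash, changes: ChangeSet) {
        self.pending.insert(block, changes);
    }

    /// Finalize one block: apply its change set to committed state and drop the
    /// change sets of orphaned siblings (a real manager would keep descendants).
    fn finalize(&mut self, block: &BlockHash) {
        if let Some(changes) = self.pending.remove(block) {
            for (key, value) in changes {
                match value {
                    Some(v) => {
                        self.committed.insert(key, v);
                    }
                    None => {
                        self.committed.remove(&key);
                    }
                }
            }
        }
        self.pending.clear();
    }
}
```

The crate in this PR applies the same idea to `DbSnapshot`s backed by `sov-schema-db` rather than in-memory maps.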
159 changes: 159 additions & 0 deletions full-node/sov-prover-storage-manager/src/dummy_storage.rs
@@ -0,0 +1,159 @@
// This module is a temporary stand-in for `ProverStorage`, until the real module is integrated.
// It will be deleted once integration is complete.
use std::marker::PhantomData;

use byteorder::{BigEndian, ReadBytesExt};
use sov_schema_db::schema::{KeyDecoder, KeyEncoder, Result as CodecResult, ValueCodec};
use sov_schema_db::snapshot::{DbSnapshot, QueryManager};
use sov_schema_db::{define_schema, CodecError};
use sov_state::MerkleProofSpec;

// Oversimplified representation of [`sov_state::ProverStorage`]
pub struct NewProverStorage<Mps: MerkleProofSpec, Q> {
    state_db: DbSnapshot<Q>,
    native_db: DbSnapshot<Q>,
    p: PhantomData<Mps>,
}

impl<Mps: MerkleProofSpec, Q: QueryManager> NewProverStorage<Mps, Q> {
    pub(crate) fn with_db_handlers(
        state_db_snapshot: DbSnapshot<Q>,
        native_db_snapshot: DbSnapshot<Q>,
    ) -> Self {
        NewProverStorage {
            state_db: state_db_snapshot,
            native_db: native_db_snapshot,
            p: Default::default(),
        }
    }

    pub(crate) fn freeze(self) -> (DbSnapshot<Q>, DbSnapshot<Q>) {
        let NewProverStorage {
            state_db,
            native_db,
            ..
        } = self;
        (state_db, native_db)
    }

    #[cfg(test)]
    pub(crate) fn read_state(&self, key: u64) -> anyhow::Result<Option<u64>> {
        let key = DummyField(key);
        Ok(self
            .state_db
            .read::<DummyStateSchema>(&key)?
            .map(Into::into))
    }

    #[cfg(test)]
    pub(crate) fn write_state(&self, key: u64, value: u64) -> anyhow::Result<()> {
        let key = DummyField(key);
        let value = DummyField(value);
        self.state_db.put::<DummyStateSchema>(&key, &value)
    }

    #[cfg(test)]
    pub(crate) fn delete_state(&self, key: u64) -> anyhow::Result<()> {
        let key = DummyField(key);
        self.state_db.delete::<DummyStateSchema>(&key)
    }

    #[cfg(test)]
    pub(crate) fn read_native(&self, key: u64) -> anyhow::Result<Option<u64>> {
        let key = DummyField(key);
        Ok(self
            .native_db
            .read::<DummyNativeSchema>(&key)?
            .map(Into::into))
    }

    #[cfg(test)]
    pub(crate) fn write_native(&self, key: u64, value: u64) -> anyhow::Result<()> {
        let key = DummyField(key);
        let value = DummyField(value);
        self.native_db.put::<DummyNativeSchema>(&key, &value)
    }

    #[cfg(test)]
    pub(crate) fn delete_native(&self, key: u64) -> anyhow::Result<()> {
        let key = DummyField(key);
        self.native_db.delete::<DummyNativeSchema>(&key)
    }
}

// --------------
// The code below is used to emulate the native and state DBs, at an oversimplified level

pub(crate) const DUMMY_STATE_CF: &str = "DummyStateCF";
pub(crate) const DUMMY_NATIVE_CF: &str = "DummyNativeCF";

define_schema!(DummyStateSchema, DummyField, DummyField, DUMMY_STATE_CF);
define_schema!(DummyNativeSchema, DummyField, DummyField, DUMMY_NATIVE_CF);

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) struct DummyField(pub(crate) u64);

impl From<DummyField> for u64 {
    fn from(value: DummyField) -> Self {
        value.0
    }
}

impl DummyField {
    fn as_bytes(&self) -> Vec<u8> {
        self.0.to_be_bytes().to_vec()
    }

    fn from_bytes(data: &[u8]) -> CodecResult<Self> {
        let mut reader = std::io::Cursor::new(data);
        Ok(Self(
            reader
                .read_u64::<BigEndian>()
                .map_err(|e| CodecError::Wrapped(e.into()))?,
        ))
    }
}

impl KeyEncoder<DummyStateSchema> for DummyField {
    fn encode_key(&self) -> CodecResult<Vec<u8>> {
        Ok(self.as_bytes())
    }
}

impl KeyDecoder<DummyStateSchema> for DummyField {
    fn decode_key(data: &[u8]) -> CodecResult<Self> {
        Self::from_bytes(data)
    }
}

impl ValueCodec<DummyStateSchema> for DummyField {
    fn encode_value(&self) -> CodecResult<Vec<u8>> {
        Ok(self.as_bytes())
    }

    fn decode_value(data: &[u8]) -> CodecResult<Self> {
        Self::from_bytes(data)
    }
}

impl KeyEncoder<DummyNativeSchema> for DummyField {
    fn encode_key(&self) -> CodecResult<Vec<u8>> {
        Ok(self.as_bytes())
    }
}

impl KeyDecoder<DummyNativeSchema> for DummyField {
    fn decode_key(data: &[u8]) -> CodecResult<Self> {
        Self::from_bytes(data)
    }
}

impl ValueCodec<DummyNativeSchema> for DummyField {
    fn encode_value(&self) -> CodecResult<Vec<u8>> {
        Ok(self.as_bytes())
    }

    fn decode_value(data: &[u8]) -> CodecResult<Self> {
        Self::from_bytes(data)
    }
}