Skip to content

Commit

Permalink
Merge pull request #12 from Trantorian1/feat/root
Browse files Browse the repository at this point in the history
fix(root): 🐛 Got state root to work
  • Loading branch information
antiyro authored Mar 12, 2024
2 parents c8b20d4 + a7b3f9c commit f28f8bd
Show file tree
Hide file tree
Showing 30 changed files with 541 additions and 539 deletions.
7 changes: 3 additions & 4 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,7 @@ starknet-e2e-test/contracts/build
# vscode settings
.vscode/settings.json

# script files
output_deoxys.json
output_pathfinder.json
# rpc output
output*

tmp/
tmp/
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ git # Deoxys Changelog

## Next release

- fix(root): got state root to work (does not support class root yet)
- refactor(substrate_hash): Substrate hash is now retrieved via rpc client in
`l2.rs`
- fix(workflows): fix toolchain and cache issue
Expand Down
6 changes: 4 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ pallet-grandpa = { default-features = true, git = "https://github.com/massalabs/
pallet-timestamp = { default-features = true, git = "https://github.com/massalabs/polkadot-sdk", branch = "release-polkadot-v1.3.0-std" }

# Bonsai trie dependencies
bonsai-trie = { default-features = false, git = "https://github.com/antiyro/bonsai-trie.git", branch = "oss" }
bonsai-trie = { default-features = false, git = "https://github.com/trantorian1/bonsai-trie.git", branch = "oss" }

# Madara pallets
pallet-starknet = { path = "crates/pallets/starknet", default-features = false, features = [
Expand Down Expand Up @@ -266,7 +266,8 @@ starknet-crypto = { git = "https://github.com/jbcaron/starknet-rs.git", branch =
starknet-ff = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false }
starknet-providers = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false }
starknet-signers = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false }
starknet-types-core = { git = "https://github.com/starknet-io/types-rs", branch = "main", default-features = false }

starknet-types-core = { git = "https://github.com/starknet-io/types-rs.git", branch = "main", default-features = false }

blockifier = { git = "https://github.com/massalabs/blockifier", branch = "no_std-support-7578442-std", default-features = false, features = [
"parity-scale-codec",
Expand Down
4 changes: 4 additions & 0 deletions crates/client/db/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ sc-client-db = { workspace = true, default-features = true }
sp-core = { workspace = true, default-features = true }
sp-database = { workspace = true, default-features = true }
sp-runtime = { workspace = true, default-features = true }
starknet-types-core = { workspace = true, default-features = false, features = [
"hash",
"parity-scale-codec",
] }
starknet_api = { workspace = true, default-features = true, features = [
"parity-scale-codec",
] }
Expand Down
37 changes: 25 additions & 12 deletions crates/client/db/src/bonsai_db.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
use std::marker::PhantomData;
use std::sync::Arc;

use bonsai_trie::id::Id;
use bonsai_trie::{BonsaiDatabase, BonsaiPersistentDatabase, DatabaseKey};
use bonsai_trie::id::{BasicId, Id};
use bonsai_trie::{BonsaiDatabase, BonsaiPersistentDatabase, BonsaiStorage, BonsaiStorageConfig, DatabaseKey};
use kvdb::{DBTransaction, KeyValueDB};
use sp_runtime::traits::Block as BlockT;
use starknet_types_core::hash::{Pedersen, Poseidon};

use crate::error::BonsaiDbError;

#[derive(Debug)]
pub enum TrieColumn {
Class,
Contract,
Storage,
}

#[derive(Debug)]
Expand All @@ -22,6 +22,25 @@ pub enum KeyType {
TrieLog,
}

pub struct BonsaiConfigs<B: BlockT> {
pub contract: BonsaiStorage<BasicId, BonsaiDb<B>, Pedersen>,
pub class: BonsaiStorage<BasicId, BonsaiDb<B>, Poseidon>,
}

impl<B: BlockT> BonsaiConfigs<B> {
pub fn new(contract: BonsaiDb<B>, class: BonsaiDb<B>) -> Self {
let config = BonsaiStorageConfig::default();

let contract =
BonsaiStorage::<_, _, Pedersen>::new(contract, config.clone()).expect("Failed to create bonsai storage");

let class =
BonsaiStorage::<_, _, Poseidon>::new(class, config.clone()).expect("Failed to create bonsai storage");

Self { contract, class }
}
}

impl TrieColumn {
pub fn to_index(&self, key_type: KeyType) -> u32 {
match self {
Expand All @@ -35,11 +54,6 @@ impl TrieColumn {
KeyType::Flat => crate::columns::FLAT_BONSAI_CONTRACTS,
KeyType::TrieLog => crate::columns::LOG_BONSAI_CONTRACTS,
},
TrieColumn::Storage => match key_type {
KeyType::Trie => crate::columns::TRIE_BONSAI_STORAGE,
KeyType::Flat => crate::columns::FLAT_BONSAI_STORAGE,
KeyType::TrieLog => crate::columns::LOG_BONSAI_STORAGE,
},
}
}
}
Expand All @@ -62,7 +76,7 @@ pub fn key_type(key: &DatabaseKey) -> KeyType {
}
}

impl<B: BlockT> BonsaiDatabase for &BonsaiDb<B> {
impl<B: BlockT> BonsaiDatabase for BonsaiDb<B> {
type Batch = DBTransaction;
type DatabaseError = BonsaiDbError;

Expand All @@ -86,8 +100,7 @@ impl<B: BlockT> BonsaiDatabase for &BonsaiDb<B> {
value: &[u8],
batch: Option<&mut Self::Batch>,
) -> Result<Option<Vec<u8>>, Self::DatabaseError> {
// println!("Key and keytype: {:?} {:?}", self.current_column, key_type(key));
let key_type = key_type(key);
let key_type: KeyType = key_type(key);
let column = self.current_column.to_index(key_type);
let key_slice = key.as_slice();
let previous_value = self.db.get(column, key_slice)?;
Expand Down Expand Up @@ -227,7 +240,7 @@ impl BonsaiDatabase for TransactionWrapper {
}

/// This implementation is a stub to mute any error but it is currently not used.
impl<B: BlockT, ID: Id> BonsaiPersistentDatabase<ID> for &BonsaiDb<B> {
impl<B: BlockT, ID: Id> BonsaiPersistentDatabase<ID> for BonsaiDb<B> {
type Transaction = TransactionWrapper;
type DatabaseError = BonsaiDbError;

Expand Down
66 changes: 31 additions & 35 deletions crates/client/db/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
//! flags. Support for custom databases is possible but not supported yet.

mod error;
use bonsai_trie::id::BasicId;
use bonsai_trie::BonsaiStorage;
pub use error::{BonsaiDbError, DbError};

mod mapping_db;
Expand All @@ -24,15 +26,16 @@ mod db_opening_utils;
mod messaging_db;
mod sierra_classes_db;
pub use messaging_db::LastSyncedEventBlock;
use starknet_types_core::hash::{Pedersen, Poseidon};
pub mod bonsai_db;
mod l1_handler_tx_fee;
mod meta_db;

use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::{Arc, Mutex};

use bonsai_db::{BonsaiDb, TrieColumn};
use bonsai_db::{BonsaiConfigs, BonsaiDb, TrieColumn};
use da_db::DaDb;
use l1_handler_tx_fee::L1HandlerTxFeeDb;
use mapping_db::MappingDb;
Expand All @@ -56,7 +59,7 @@ pub(crate) mod columns {
// ===== /!\ ===================================================================================
// MUST BE INCREMENTED WHEN A NEW COLUMN IS ADDED
// ===== /!\ ===================================================================================
pub const NUM_COLUMNS: u32 = 18;
pub const NUM_COLUMNS: u32 = 16;

pub const META: u32 = 0;
pub const BLOCK_MAPPING: u32 = 1;
Expand All @@ -70,14 +73,19 @@ pub(crate) mod columns {
/// This column should only be accessed if the `--cache` flag is enabled.
pub const STARKNET_TRANSACTION_HASHES_CACHE: u32 = 5;

/// This column is used to map starknet block numbers to their block hashes.
///
/// This column should only be accessed if the `--cache` flag is enabled.
pub const STARKNET_BLOCK_HASHES_CACHE: u32 = 6;

/// This column contains last synchronized L1 block.
pub const MESSAGING: u32 = 6;
pub const MESSAGING: u32 = 7;

/// This column contains the Sierra contract classes
pub const SIERRA_CONTRACT_CLASSES: u32 = 7;
pub const SIERRA_CONTRACT_CLASSES: u32 = 8;

/// This column stores the fee paid on l1 for L1Handler transactions
pub const L1_HANDLER_PAID_FEE: u32 = 8;
pub const L1_HANDLER_PAID_FEE: u32 = 9;

/// The bonsai columns are triplicated since we need to set a column for
///
Expand All @@ -87,15 +95,12 @@ pub(crate) mod columns {
/// as defined in https://github.com/keep-starknet-strange/bonsai-trie/blob/oss/src/databases/rocks_db.rs
///
/// For each tries CONTRACTS, CLASSES and STORAGE
pub const TRIE_BONSAI_CONTRACTS: u32 = 9;
pub const FLAT_BONSAI_CONTRACTS: u32 = 10;
pub const LOG_BONSAI_CONTRACTS: u32 = 11;
pub const TRIE_BONSAI_CLASSES: u32 = 12;
pub const FLAT_BONSAI_CLASSES: u32 = 13;
pub const LOG_BONSAI_CLASSES: u32 = 14;
pub const TRIE_BONSAI_STORAGE: u32 = 15;
pub const FLAT_BONSAI_STORAGE: u32 = 16;
pub const LOG_BONSAI_STORAGE: u32 = 17;
pub const TRIE_BONSAI_CONTRACTS: u32 = 10;
pub const FLAT_BONSAI_CONTRACTS: u32 = 11;
pub const LOG_BONSAI_CONTRACTS: u32 = 12;
pub const TRIE_BONSAI_CLASSES: u32 = 13;
pub const FLAT_BONSAI_CLASSES: u32 = 14;
pub const LOG_BONSAI_CLASSES: u32 = 15;
}

pub mod static_keys {
Expand All @@ -109,7 +114,6 @@ pub mod static_keys {
pub struct BonsaiDbs<B: BlockT> {
pub contract: Arc<BonsaiDb<B>>,
pub class: Arc<BonsaiDb<B>>,
pub storage: Arc<BonsaiDb<B>>,
}

/// The Madara client database backend
Expand All @@ -127,7 +131,8 @@ pub struct Backend<B: BlockT> {
messaging: Arc<MessagingDb>,
sierra_classes: Arc<SierraClassesDb>,
l1_handler_paid_fee: Arc<L1HandlerTxFeeDb>,
bonsai: BonsaiDbs<B>,
bonsai_contract: Arc<Mutex<BonsaiStorage<BasicId, BonsaiDb<B>, Pedersen>>>,
bonsai_class: Arc<Mutex<BonsaiStorage<BasicId, BonsaiDb<B>, Poseidon>>>,
}

/// Returns the Starknet database directory.
Expand Down Expand Up @@ -166,15 +171,9 @@ impl<B: BlockT> Backend<B> {
let kvdb: Arc<dyn KeyValueDB> = db.0;
let spdb: Arc<dyn Database<DbHash>> = db.1;

let bonsai_dbs = BonsaiDbs {
contract: Arc::new(BonsaiDb {
db: kvdb.clone(),
_marker: PhantomData,
current_column: TrieColumn::Contract,
}),
class: Arc::new(BonsaiDb { db: kvdb.clone(), _marker: PhantomData, current_column: TrieColumn::Class }),
storage: Arc::new(BonsaiDb { db: kvdb, _marker: PhantomData, current_column: TrieColumn::Storage }),
};
let contract = BonsaiDb { db: kvdb.clone(), _marker: PhantomData, current_column: TrieColumn::Contract };
let class = BonsaiDb { db: kvdb.clone(), _marker: PhantomData, current_column: TrieColumn::Class };
let config = BonsaiConfigs::new(contract, class);

Ok(Self {
mapping: Arc::new(MappingDb::new(spdb.clone(), cache_more_things)),
Expand All @@ -183,7 +182,8 @@ impl<B: BlockT> Backend<B> {
messaging: Arc::new(MessagingDb { db: spdb.clone() }),
sierra_classes: Arc::new(SierraClassesDb { db: spdb.clone() }),
l1_handler_paid_fee: Arc::new(L1HandlerTxFeeDb { db: spdb.clone() }),
bonsai: bonsai_dbs,
bonsai_contract: Arc::new(Mutex::new(config.contract)),
bonsai_class: Arc::new(Mutex::new(config.class)),
})
}

Expand Down Expand Up @@ -212,16 +212,12 @@ impl<B: BlockT> Backend<B> {
&self.sierra_classes
}

pub fn bonsai_contract(&self) -> &Arc<BonsaiDb<B>> {
&self.bonsai.contract
}

pub fn bonsai_class(&self) -> &Arc<BonsaiDb<B>> {
&self.bonsai.class
pub fn bonsai_contract(&self) -> &Arc<Mutex<BonsaiStorage<BasicId, BonsaiDb<B>, Pedersen>>> {
&self.bonsai_contract
}

pub fn bonsai_storage(&self) -> &Arc<BonsaiDb<B>> {
&self.bonsai.storage
pub fn bonsai_class(&self) -> &Arc<Mutex<BonsaiStorage<BasicId, BonsaiDb<B>, Poseidon>>> {
&self.bonsai_class
}

/// Return l1 handler tx paid fee database manager
Expand Down
36 changes: 36 additions & 0 deletions crates/client/db/src/mapping_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use crate::{DbError, DbHash};
/// The mapping to write in db
#[derive(Debug)]
pub struct MappingCommitment<B: BlockT> {
pub block_number: u64,
pub block_hash: B::Hash,
pub starknet_block_hash: StarkHash,
pub starknet_transaction_hashes: Vec<StarkHash>,
Expand Down Expand Up @@ -106,6 +107,12 @@ impl<B: BlockT> MappingDb<B> {
&commitment.starknet_block_hash.encode(),
&commitment.starknet_transaction_hashes.encode(),
);

transaction.set(
crate::columns::STARKNET_BLOCK_HASHES_CACHE,
&commitment.block_number.encode(),
&commitment.starknet_block_hash.encode(),
);
}

self.db.commit(transaction)?;
Expand Down Expand Up @@ -156,4 +163,33 @@ impl<B: BlockT> MappingDb<B> {
None => Ok(None),
}
}

/// Returns the cached block hash of a given block number.
///
/// # Arguments
///
/// * `block_number` - the block number to search for.
///
/// # Returns
///
/// The block hash of a given block number.
///
/// This function may return `None` for two separate reasons:
///
/// - The cache is disabled.
/// - The provided `starknet_hash` is not present in the cache.
pub fn cached_block_hash_from_block_number(
&self,
starknet_block_number: u64,
) -> Result<Option<StarkHash>, DbError> {
if !self.cache_more_things {
// The cache is not enabled, no need to even touch the database.
return Ok(None);
}

match self.db.get(crate::columns::STARKNET_BLOCK_HASHES_CACHE, &starknet_block_number.encode()) {
Some(raw) => Ok(Some(<StarkHash>::decode(&mut &raw[..])?)),
None => Ok(None),
}
}
}
2 changes: 2 additions & 0 deletions crates/client/mapping-sync/src/sync_blocks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ where

// Success, we write the Starknet to Substate hashes mapping to db
let mapping_commitment = mc_db::MappingCommitment {
block_number: digest_starknet_block.header().block_number,
block_hash: substrate_block_hash,
starknet_block_hash: digest_starknet_block_hash.into(),
starknet_transaction_hashes: digest_starknet_block
Expand Down Expand Up @@ -90,6 +91,7 @@ where
};
let block_hash = block.header().hash::<H>();
let mapping_commitment = mc_db::MappingCommitment::<B> {
block_number: block.header().block_number,
block_hash: substrate_block_hash,
starknet_block_hash: block_hash.into(),
starknet_transaction_hashes: Vec::new(),
Expand Down
Loading

0 comments on commit f28f8bd

Please sign in to comment.