diff --git a/block-producer/Cargo.toml b/block-producer/Cargo.toml index cb88c54d4..2e48a4593 100644 --- a/block-producer/Cargo.toml +++ b/block-producer/Cargo.toml @@ -10,13 +10,15 @@ edition = "2021" rust-version = "1.73" [dev-dependencies] +miden-crypto = { workspace = true } miden-mock = { package = "miden-mock", git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "main", default-features = false } +once_cell = { version = "1.18" } winterfell = "0.7" [dependencies] anyhow = { version = "1.0" } async-trait = { version = "0.1" } -clap = { version = "4.3" , features = ["derive"] } +clap = { version = "4.3", features = ["derive"] } figment = { version = "0.10", features = ["toml", "env"] } itertools = { version = "0.12" } miden-air = { package = "miden-air", git = "https://github.com/0xPolygonMiden/miden-vm.git", branch = "next", default-features = false } @@ -25,10 +27,10 @@ miden-node-utils = { path = "../utils" } miden_objects = { workspace = true } miden_stdlib = { package = "miden-stdlib", git = "https://github.com/0xPolygonMiden/miden-vm.git", branch = "next", default-features = false } miden_vm = { package = "miden-vm", git = "https://github.com/0xPolygonMiden/miden-vm.git", branch = "next", default-features = false } -serde = { version = "1.0" , features = ["derive"] } +serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } tokio = { version = "1.29", features = ["rt-multi-thread", "net", "macros", "sync", "time"] } toml = { version = "0.8" } tonic = { version = "0.10" } tracing = { version = "0.1" } -tracing-subscriber = { version = "0.3" , features = ["fmt"] } +tracing-subscriber = { version = "0.3", features = ["fmt"] } diff --git a/block-producer/src/batch_builder/mod.rs b/block-producer/src/batch_builder/mod.rs index 5af7ca2db..62bbadbef 100644 --- a/block-producer/src/batch_builder/mod.rs +++ b/block-producer/src/batch_builder/mod.rs @@ -64,12 +64,9 @@ impl TransactionBatch { SimpleSmt::with_contiguous_leaves( CREATED_NOTES_SMT_DEPTH, - created_notes - .into_iter() - .flat_map(|note_envelope| { - [note_envelope.note_hash().into(), note_envelope.metadata().into()] - }) - .collect::>(), + created_notes.into_iter().flat_map(|note_envelope| { + [note_envelope.note_hash().into(), note_envelope.metadata().into()] + }), )? 
}; diff --git a/block-producer/src/block_builder/errors.rs b/block-producer/src/block_builder/errors.rs index 93a70f496..9df194899 100644 --- a/block-producer/src/block_builder/errors.rs +++ b/block-producer/src/block_builder/errors.rs @@ -4,11 +4,12 @@ use thiserror::Error; use crate::store::{ApplyBlockError, BlockInputsError}; -use super::prover::CREATED_NOTES_TREE_INSERTION_DEPTH; +use super::prover::block_witness::CREATED_NOTES_TREE_INSERTION_DEPTH; + #[derive(Debug, Error, PartialEq, Eq)] pub enum BuildBlockError { - #[error("failed to update account root: {0}")] - AccountRootUpdateFailed(#[from] BlockProverError), + #[error("failed to compute new block: {0}")] + BlockProverFailed(#[from] BlockProverError), #[error("failed to apply block: {0}")] ApplyBlockFailed(#[from] ApplyBlockError), #[error("failed to get block inputs from store: {0}")] diff --git a/block-producer/src/block_builder/mod.rs b/block-producer/src/block_builder/mod.rs index 1c47876cd..d6d4e2431 100644 --- a/block-producer/src/block_builder/mod.rs +++ b/block-producer/src/block_builder/mod.rs @@ -7,10 +7,10 @@ use crate::{block::Block, store::Store, SharedTxBatch}; pub mod errors; -mod prover; +pub(crate) mod prover; use self::{ errors::BuildBlockError, - prover::{BlockProver, BlockWitness}, + prover::{block_witness::BlockWitness, BlockProver}, }; #[cfg(test)] diff --git a/block-producer/src/block_builder/prover/block_witness.rs b/block-producer/src/block_builder/prover/block_witness.rs new file mode 100644 index 000000000..626f22982 --- /dev/null +++ b/block-producer/src/block_builder/prover/block_witness.rs @@ -0,0 +1,258 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use miden_node_proto::domain::BlockInputs; +use miden_objects::{ + accounts::AccountId, + crypto::merkle::{EmptySubtreeRoots, MerkleStore, MmrPeaks}, + BlockHeader, Digest, Felt, ToAdviceInputs, +}; +use miden_vm::{crypto::MerklePath, AdviceInputs, StackInputs}; + +use crate::{ + batch_builder, + block_builder::errors::{BlockProverError, BuildBlockError}, + SharedTxBatch, +}; + +// CONSTANTS +// ================================================================================================= + +/// The depth at which we insert roots from the batches. +pub(crate) const CREATED_NOTES_TREE_INSERTION_DEPTH: u8 = 8; + +/// The depth of the created notes tree in the block. +pub(crate) const CREATED_NOTES_TREE_DEPTH: u8 = + CREATED_NOTES_TREE_INSERTION_DEPTH + batch_builder::CREATED_NOTES_SMT_DEPTH; + +pub(crate) const MAX_BATCHES_PER_BLOCK: usize = + 2_usize.pow(CREATED_NOTES_TREE_INSERTION_DEPTH as u32); + +// BLOCK WITNESS +// ================================================================================================= + +/// Provides inputs to the `BlockKernel` so that it can generate the new header. 
+#[derive(Debug, PartialEq)] +pub struct BlockWitness { + pub(super) updated_accounts: BTreeMap<AccountId, AccountUpdate>, + /// (batch_index, created_notes_root) for batches that contain notes + pub(super) batch_created_notes_roots: BTreeMap<usize, Digest>, + pub(super) chain_peaks: MmrPeaks, + pub(super) prev_header: BlockHeader, +} + +impl BlockWitness { + pub fn new( + block_inputs: BlockInputs, + batches: Vec<SharedTxBatch>, + ) -> Result<Self, BuildBlockError> { + Self::validate_inputs(&block_inputs, &batches)?; + + let updated_accounts = { + let mut account_initial_states: BTreeMap<AccountId, Digest> = + batches.iter().flat_map(|batch| batch.account_initial_states()).collect(); + + let mut account_merkle_proofs: BTreeMap<AccountId, MerklePath> = block_inputs + .account_states + .into_iter() + .map(|record| (record.account_id, record.proof)) + .collect(); + + batches + .iter() + .flat_map(|batch| batch.updated_accounts()) + .map(|(account_id, final_state_hash)| { + let initial_state_hash = account_initial_states + .remove(&account_id) + .expect("already validated that key exists"); + let proof = account_merkle_proofs + .remove(&account_id) + .expect("already validated that key exists"); + + ( + account_id, + AccountUpdate { + initial_state_hash, + final_state_hash, + proof, + }, + ) + }) + .collect() + }; + + let batch_created_notes_roots = batches + .iter() + .enumerate() + .filter_map(|(batch_index, batch)| { + if batch.created_notes().next().is_none() { + None + } else { + Some((batch_index, batch.created_notes_root())) + } + }) + .collect(); + + Ok(Self { + updated_accounts, + batch_created_notes_roots, + chain_peaks: block_inputs.chain_peaks, + prev_header: block_inputs.block_header, + }) + } + + pub(super) fn into_program_inputs( + self + ) -> Result<(AdviceInputs, StackInputs), BlockProverError> { + let stack_inputs = { + // Note: `StackInputs::new()` reverses the input vector, so we need to construct the stack + // from the bottom to the top + let mut stack_inputs = Vec::new(); + + // Chain MMR stack inputs + { + stack_inputs.extend(self.prev_header.hash()); + stack_inputs.extend(self.chain_peaks.hash_peaks()); + } + + // Notes stack inputs + { + let num_created_notes_roots = self.batch_created_notes_roots.len(); + for (batch_index, batch_created_notes_root) in self.batch_created_notes_roots { + stack_inputs.extend(batch_created_notes_root); + + let batch_index = u64::try_from(batch_index) + .expect("can't be more than 2^64 - 1 notes created"); + stack_inputs.push(Felt::from(batch_index)); + } + + let empty_root = EmptySubtreeRoots::entry(CREATED_NOTES_TREE_DEPTH, 0); + stack_inputs.extend(*empty_root); + stack_inputs.push(Felt::from( + u64::try_from(num_created_notes_roots) + .expect("can't be more than 2^64 - 1 notes created"), + )); + } + + // Account stack inputs + let mut num_accounts_updated: u64 = 0; + for (idx, (&account_id, account_update)) in self.updated_accounts.iter().enumerate() { + stack_inputs.push(account_id.into()); + stack_inputs.extend(account_update.final_state_hash); + + let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); + num_accounts_updated = idx + 1; + } + + // append initial account root + stack_inputs.extend(self.prev_header.account_root()); + + // append number of accounts updated + stack_inputs.push(num_accounts_updated.into()); + + StackInputs::new(stack_inputs) + }; + + let advice_inputs = { + let mut merkle_store = MerkleStore::default(); + merkle_store + .add_merkle_paths(self.updated_accounts.into_iter().map( + |( + account_id, + AccountUpdate { + initial_state_hash, + final_state_hash: _, + proof, + }, + )| { (u64::from(account_id),
initial_state_hash, proof) }, + )) + .map_err(BlockProverError::InvalidMerklePaths)?; + + let mut advice_inputs = AdviceInputs::default().with_merkle_store(merkle_store); + self.chain_peaks.to_advice_inputs(&mut advice_inputs); + + advice_inputs + }; + + Ok((advice_inputs, stack_inputs)) + } + + // HELPERS + // --------------------------------------------------------------------------------------------- + + fn validate_inputs( + block_inputs: &BlockInputs, + batches: &[SharedTxBatch], + ) -> Result<(), BuildBlockError> { + // TODO: + // - Block height returned for each nullifier is 0. + + if batches.len() > MAX_BATCHES_PER_BLOCK { + return Err(BuildBlockError::TooManyBatchesInBlock(batches.len())); + } + + Self::validate_account_states(block_inputs, batches)?; + + Ok(()) + } + + /// Validate that initial account states coming from the batches are the same as the account + /// states returned from the store + fn validate_account_states( + block_inputs: &BlockInputs, + batches: &[SharedTxBatch], + ) -> Result<(), BuildBlockError> { + let batches_initial_states: BTreeMap<AccountId, Digest> = + batches.iter().flat_map(|batch| batch.account_initial_states()).collect(); + + let accounts_in_batches: BTreeSet<AccountId> = + batches_initial_states.keys().cloned().collect(); + let accounts_in_store: BTreeSet<AccountId> = block_inputs + .account_states + .iter() + .map(|record| &record.account_id) + .cloned() + .collect(); + + if accounts_in_batches == accounts_in_store { + let accounts_with_different_hashes: Vec<AccountId> = block_inputs + .account_states + .iter() + .filter_map(|record| { + let hash_in_store = record.account_hash; + let hash_in_batches = batches_initial_states + .get(&record.account_id) + .expect("we already verified that account id is contained in batches"); + + if hash_in_store == *hash_in_batches { + None + } else { + Some(record.account_id) + } + }) + .collect(); + + if accounts_with_different_hashes.is_empty() { + Ok(()) + } else { + Err(BuildBlockError::InconsistentAccountStates(accounts_with_different_hashes)) + } + } else { + // The batches and store don't modify the same set of accounts + let union: BTreeSet<AccountId> = + accounts_in_batches.union(&accounts_in_store).cloned().collect(); + let intersection: BTreeSet<AccountId> = + accounts_in_batches.intersection(&accounts_in_store).cloned().collect(); + + let difference: Vec<AccountId> = union.difference(&intersection).cloned().collect(); + + Err(BuildBlockError::InconsistentAccountIds(difference)) + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) struct AccountUpdate { + pub initial_state_hash: Digest, + pub final_state_hash: Digest, + pub proof: MerklePath, +} diff --git a/block-producer/src/block_builder/prover/mod.rs b/block-producer/src/block_builder/prover/mod.rs index 78889815c..eb5908095 100644 --- a/block-producer/src/block_builder/prover/mod.rs +++ b/block-producer/src/block_builder/prover/mod.rs @@ -1,22 +1,11 @@ -use std::{ - collections::{BTreeMap, BTreeSet}, - time::{SystemTime, UNIX_EPOCH}, -}; - -use miden_air::ExecutionOptions; -use miden_node_proto::domain::BlockInputs; -use miden_objects::{ - accounts::AccountId, - assembly::Assembler, - crypto::merkle::{EmptySubtreeRoots, MerkleStore}, - BlockHeader, Digest, Felt, -}; +use std::time::{SystemTime, UNIX_EPOCH}; + +use miden_air::{ExecutionOptions, Felt}; +use miden_objects::{assembly::Assembler, BlockHeader, Digest, ONE}; use miden_stdlib::StdLibrary; -use miden_vm::{ - crypto::MerklePath, execute, AdviceInputs, DefaultHost, MemAdviceProvider, Program, StackInputs, -}; +use miden_vm::{execute, DefaultHost, MemAdviceProvider,
Program}; -use crate::{batch_builder, SharedTxBatch}; +use self::block_witness::BlockWitness; use super::{errors::BlockProverError, BuildBlockError}; @@ -26,12 +15,10 @@ pub const ACCOUNT_ROOT_WORD_IDX: usize = 0; /// The index of the word at which the note root is stored on the output stack. pub const NOTE_ROOT_WORD_IDX: usize = 4; -/// The depth at which we insert roots from the batches. -pub(crate) const CREATED_NOTES_TREE_INSERTION_DEPTH: u8 = 8; +/// The index of the word at which the chain MMR root is stored on the output stack. +pub const CHAIN_MMR_ROOT_WORD_IDX: usize = 8; -/// The depth of the created notes tree in the block. -pub(crate) const CREATED_NOTES_TREE_DEPTH: u8 = - CREATED_NOTES_TREE_INSERTION_DEPTH + batch_builder::CREATED_NOTES_SMT_DEPTH; +pub mod block_witness; #[cfg(test)] mod tests; @@ -43,6 +30,9 @@ mod tests; /// NEW_ACCOUNT_HASH_n, account_id_n] const BLOCK_KERNEL_MASM: &str = " use.std::collections::smt64 +use.std::collections::mmr + +const.CHAIN_MMR_PTR=1000 #! Compute the account root #! @@ -116,22 +106,51 @@ proc.compute_note_root # => [ROOT_{n-1}] end -# Stack: [<account data>, <note data>] +#! Compute the chain MMR root +#! +#! Stack: [ PREV_CHAIN_MMR_HASH, PREV_BLOCK_HASH_TO_INSERT ] +#! Advice map: PREV_CHAIN_MMR_HASH -> NUM_LEAVES || peak_0 || .. || peak_{n-1} || <padding> +#! +#! Output: [ CHAIN_MMR_ROOT ] +proc.compute_chain_mmr_root + push.CHAIN_MMR_PTR movdn.4 + # => [ PREV_CHAIN_MMR_HASH, chain_mmr_ptr, PREV_BLOCK_HASH_TO_INSERT ] + + # load the chain MMR (as of previous block) at memory location CHAIN_MMR_PTR + exec.mmr::unpack + # => [ PREV_BLOCK_HASH_TO_INSERT ] + + push.CHAIN_MMR_PTR movdn.4 + # => [ PREV_BLOCK_HASH_TO_INSERT, chain_mmr_ptr ] + + # add PREV_BLOCK_HASH_TO_INSERT to chain MMR + exec.mmr::add + # => [ ] + + # Compute new MMR root + push.CHAIN_MMR_PTR exec.mmr::pack + # => [ CHAIN_MMR_ROOT ] +end + +# Stack: [<account data>, <note data>, <chain MMR data>] begin exec.compute_account_root mem_storew.0 dropw - #=> [<note data>] + # => [<note data>, <chain MMR data>] + + exec.compute_note_root mem_storew.1 dropw + # => [<chain MMR data>] - exec.compute_note_root - #=> [ ] + exec.compute_chain_mmr_root + # => [ CHAIN_MMR_ROOT ] # Load output on stack - padw mem_loadw.0 - #=> [ ACCOUNT_ROOT, NOTE_ROOT] + padw mem_loadw.1 padw mem_loadw.0 + #=> [ ACCOUNT_ROOT, NOTE_ROOT, CHAIN_MMR_ROOT ] end "; #[derive(Debug)] -pub(super) struct BlockProver { +pub(crate) struct BlockProver { kernel: Program, } @@ -157,13 +176,12 @@ impl BlockProver { &self, witness: BlockWitness, ) -> Result<BlockHeader, BuildBlockError> { - let prev_hash = witness.prev_header.prev_hash(); - let block_num = witness.prev_header.block_num(); + let prev_hash = witness.prev_header.hash(); + let block_num = witness.prev_header.block_num() + ONE; let version = witness.prev_header.version(); - let (account_root, note_root) = self.compute_roots(witness)?; + let (account_root, note_root, chain_root) = self.compute_roots(witness)?; - let chain_root = Digest::default(); let nullifier_root = Digest::default(); let batch_root = Digest::default(); let proof_hash = Digest::default(); @@ -190,8 +208,8 @@ impl BlockProver { fn compute_roots( &self, witness: BlockWitness, - ) -> Result<(Digest, Digest), BlockProverError> { - let (advice_inputs, stack_inputs) = witness.into_parts()?; + ) -> Result<(Digest, Digest, Digest), BlockProverError> { + let (advice_inputs, stack_inputs) = witness.into_program_inputs()?; let host = { let advice_provider = MemAdviceProvider::from(advice_inputs); @@ -212,221 +230,11 @@ impl BlockProver { .get_stack_word(NOTE_ROOT_WORD_IDX) .ok_or(BlockProverError::InvalidRootOutput("note".to_string()))?; -
Ok((new_account_root.into(), new_note_root.into())) - } -} - -// BLOCK WITNESS -// ================================================================================================= - -/// Provides inputs to the `BlockKernel` so that it can generate the new header -#[derive(Debug, PartialEq, Eq)] -pub(super) struct BlockWitness { - updated_accounts: BTreeMap, - /// (batch_index, created_notes_root) for batches that contain notes - batch_created_notes_roots: Vec<(usize, Digest)>, - prev_header: BlockHeader, -} - -impl BlockWitness { - pub(super) fn new( - block_inputs: BlockInputs, - batches: Vec, - ) -> Result { - Self::validate_inputs(&block_inputs, &batches)?; - - let updated_accounts = { - let mut account_initial_states: BTreeMap = - batches.iter().flat_map(|batch| batch.account_initial_states()).collect(); - - let mut account_merkle_proofs: BTreeMap = block_inputs - .account_states - .into_iter() - .map(|record| (record.account_id, record.proof)) - .collect(); - - batches - .iter() - .flat_map(|batch| batch.updated_accounts()) - .map(|(account_id, final_state_hash)| { - let initial_state_hash = account_initial_states - .remove(&account_id) - .expect("already validated that key exists"); - let proof = account_merkle_proofs - .remove(&account_id) - .expect("already validated that key exists"); - - ( - account_id, - AccountUpdate { - initial_state_hash, - final_state_hash, - proof, - }, - ) - }) - .collect() - }; - - let batch_created_notes_roots = batches - .iter() - .enumerate() - .filter_map(|(batch_index, batch)| { - if batch.created_notes().next().is_none() { - None - } else { - Some((batch_index, batch.created_notes_root())) - } - }) - .collect(); - - Ok(Self { - updated_accounts, - batch_created_notes_roots, - prev_header: block_inputs.block_header, - }) - } - - fn validate_inputs( - block_inputs: &BlockInputs, - batches: &[SharedTxBatch], - ) -> Result<(), BuildBlockError> { - // TODO: - // - Block height returned for each nullifier is 0. - - // Validate that there aren't too many batches in the block. 
- if batches.len() > 2usize.pow(CREATED_NOTES_TREE_INSERTION_DEPTH.into()) { - return Err(BuildBlockError::TooManyBatchesInBlock(batches.len())); - } - - Self::validate_account_states(block_inputs, batches)?; - - Ok(()) - } - - /// Validate that initial account states coming from the batches are the same as the account - /// states returned from the store - fn validate_account_states( - block_inputs: &BlockInputs, - batches: &[SharedTxBatch], - ) -> Result<(), BuildBlockError> { - let batches_initial_states: BTreeMap = - batches.iter().flat_map(|batch| batch.account_initial_states()).collect(); - - let accounts_in_batches: BTreeSet = - batches_initial_states.keys().cloned().collect(); - let accounts_in_store: BTreeSet = block_inputs - .account_states - .iter() - .map(|record| &record.account_id) - .cloned() - .collect(); - - if accounts_in_batches == accounts_in_store { - let accounts_with_different_hashes: Vec = block_inputs - .account_states - .iter() - .filter_map(|record| { - let hash_in_store = record.account_hash; - let hash_in_batches = batches_initial_states - .get(&record.account_id) - .expect("we already verified that account id is contained in batches"); - - if hash_in_store == *hash_in_batches { - None - } else { - Some(record.account_id) - } - }) - .collect(); - - if accounts_with_different_hashes.is_empty() { - Ok(()) - } else { - Err(BuildBlockError::InconsistentAccountStates(accounts_with_different_hashes)) - } - } else { - // The batches and store don't modify the same set of accounts - let union: BTreeSet = - accounts_in_batches.union(&accounts_in_store).cloned().collect(); - let intersection: BTreeSet = - accounts_in_batches.intersection(&accounts_in_store).cloned().collect(); - - let difference: Vec = union.difference(&intersection).cloned().collect(); - - Err(BuildBlockError::InconsistentAccountIds(difference)) - } - } - - fn into_parts(self) -> Result<(AdviceInputs, StackInputs), BlockProverError> { - let stack_inputs = { - // Note: `StackInputs::new()` reverses the input vector, so we need to construct the stack - // from the bottom to the top - let mut stack_inputs = Vec::new(); - - // Notes stack inputs - { - let num_created_notes_roots = self.batch_created_notes_roots.len(); - for (batch_index, batch_created_notes_root) in self.batch_created_notes_roots { - stack_inputs.extend(batch_created_notes_root); - - let batch_index = u64::try_from(batch_index) - .expect("can't be more than 2^64 - 1 notes created"); - stack_inputs.push(Felt::from(batch_index)); - } - - let empty_root = EmptySubtreeRoots::entry(CREATED_NOTES_TREE_DEPTH, 0); - stack_inputs.extend(*empty_root); - stack_inputs.push(Felt::from( - u64::try_from(num_created_notes_roots) - .expect("can't be more than 2^64 - 1 notes created"), - )); - } - - // Account stack inputs - let mut num_accounts_updated: u64 = 0; - for (idx, (&account_id, account_update)) in self.updated_accounts.iter().enumerate() { - stack_inputs.push(account_id.into()); - stack_inputs.extend(account_update.final_state_hash); - - let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); - num_accounts_updated = idx + 1; - } - - // append initial account root - stack_inputs.extend(self.prev_header.account_root()); - - // append number of accounts updated - stack_inputs.push(num_accounts_updated.into()); - - StackInputs::new(stack_inputs) - }; - - let advice_inputs = { - let mut merkle_store = MerkleStore::default(); - merkle_store - .add_merkle_paths(self.updated_accounts.into_iter().map( - |( - account_id, - 
AccountUpdate { - initial_state_hash, - final_state_hash: _, - proof, - }, - )| { (u64::from(account_id), initial_state_hash, proof) }, - )) - .map_err(BlockProverError::InvalidMerklePaths)?; - - AdviceInputs::default().with_merkle_store(merkle_store) - }; + let new_chain_mmr_root = execution_output + .stack_outputs() + .get_stack_word(CHAIN_MMR_ROOT_WORD_IDX) + .ok_or(BlockProverError::InvalidRootOutput("chain mmr".to_string()))?; - Ok((advice_inputs, stack_inputs)) + Ok((new_account_root.into(), new_note_root.into(), new_chain_mmr_root.into())) } } - -#[derive(Debug, PartialEq, Eq)] -pub(super) struct AccountUpdate { - pub initial_state_hash: Digest, - pub final_state_hash: Digest, - pub proof: MerklePath, -} diff --git a/block-producer/src/block_builder/prover/tests.rs b/block-producer/src/block_builder/prover/tests.rs index 506e62bf5..8bf11727b 100644 --- a/block-producer/src/block_builder/prover/tests.rs +++ b/block-producer/src/block_builder/prover/tests.rs @@ -1,18 +1,25 @@ use std::sync::Arc; -use miden_air::FieldElement; +use miden_crypto::merkle::Mmr; use miden_mock::mock::block::mock_block_header; -use miden_node_proto::domain::AccountInputRecord; +use miden_node_proto::domain::{AccountInputRecord, BlockInputs}; use miden_objects::{ - crypto::merkle::MmrPeaks, + accounts::AccountId, + crypto::merkle::{EmptySubtreeRoots, MmrPeaks}, notes::{NoteEnvelope, NoteMetadata}, + ZERO, }; -use miden_vm::crypto::SimpleSmt; +use miden_vm::crypto::{MerklePath, SimpleSmt}; use crate::{ batch_builder::TransactionBatch, + block_builder::prover::block_witness::CREATED_NOTES_TREE_DEPTH, store::Store, - test_utils::{DummyProvenTxGenerator, MockStoreSuccess}, + test_utils::{ + block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, + DummyProvenTxGenerator, MockStoreSuccessBuilder, + }, + SharedTxBatch, }; use super::*; @@ -27,12 +34,12 @@ use super::*; #[test] fn test_block_witness_validation_inconsistent_account_ids() { let tx_gen = DummyProvenTxGenerator::new(); - let account_id_1 = AccountId::new_unchecked(Felt::ZERO); - let account_id_2 = AccountId::new_unchecked(Felt::ONE); + let account_id_1 = AccountId::new_unchecked(ZERO); + let account_id_2 = AccountId::new_unchecked(ONE); let account_id_3 = AccountId::new_unchecked(Felt::new(42)); let block_inputs_from_store: BlockInputs = { - let block_header = mock_block_header(Felt::ZERO, None, None, &[]); + let block_header = mock_block_header(ZERO, None, None, &[]); let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); let account_states = vec![ @@ -99,8 +106,8 @@ fn test_block_witness_validation_inconsistent_account_ids() { #[test] fn test_block_witness_validation_inconsistent_account_hashes() { let tx_gen = DummyProvenTxGenerator::new(); - let account_id_1 = AccountId::new_unchecked(Felt::ZERO); - let account_id_2 = AccountId::new_unchecked(Felt::ONE); + let account_id_1 = AccountId::new_unchecked(ZERO); + let account_id_2 = AccountId::new_unchecked(ONE); let account_1_hash_store = Digest::new([Felt::from(1u64), Felt::from(2u64), Felt::from(3u64), Felt::from(4u64)]); @@ -108,7 +115,7 @@ fn test_block_witness_validation_inconsistent_account_hashes() { Digest::new([Felt::from(4u64), Felt::from(3u64), Felt::from(2u64), Felt::from(1u64)]); let block_inputs_from_store: BlockInputs = { - let block_header = mock_block_header(Felt::ZERO, None, None, &[]); + let block_header = mock_block_header(ZERO, None, None, &[]); let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); let account_states = vec![ @@ -207,13 +214,15 @@ async 
fn test_compute_account_root_success() { // Set up store's account SMT // --------------------------------------------------------------------------------------------- - let store = MockStoreSuccess::new( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - BTreeSet::new(), - ); + let store = MockStoreSuccessBuilder::new() + .initial_accounts( + account_ids + .iter() + .zip(account_initial_states.iter()) + .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), + ) + .build(); + // Block prover // --------------------------------------------------------------------------------------------- @@ -249,18 +258,20 @@ async fn test_compute_account_root_success() { // Update SMT by hand to get new root // --------------------------------------------------------------------------------------------- - store - .update_accounts( + let block = MockBlockBuilder::new(&store) + .await + .account_updates( account_ids .iter() .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), + .map(|(&account_id, &account_hash)| (account_id, account_hash.into())) + .collect(), ) - .await; + .build(); // Compare roots // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), store.account_root().await); + assert_eq!(block_header.account_root(), block.header.account_root()); } /// Test that the current account root is returned if the batches are empty @@ -287,13 +298,15 @@ async fn test_compute_account_root_empty_batches() { // Set up store's account SMT // --------------------------------------------------------------------------------------------- - let store = MockStoreSuccess::new( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - BTreeSet::new(), - ); + let store = MockStoreSuccessBuilder::new() + .initial_accounts( + account_ids + .iter() + .zip(account_initial_states.iter()) + .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), + ) + .build(); + // Block prover // --------------------------------------------------------------------------------------------- @@ -322,7 +335,7 @@ async fn test_compute_note_root_empty_batches_success() { // Set up store // --------------------------------------------------------------------------------------------- - let store = MockStoreSuccess::new(std::iter::empty(), BTreeSet::new()); + let store = MockStoreSuccessBuilder::new().build(); // Block prover // --------------------------------------------------------------------------------------------- @@ -351,7 +364,7 @@ async fn test_compute_note_root_empty_notes_success() { // Set up store // --------------------------------------------------------------------------------------------- - let store = MockStoreSuccess::new(std::iter::empty(), BTreeSet::new()); + let store = MockStoreSuccessBuilder::new().build(); // Block prover // --------------------------------------------------------------------------------------------- @@ -406,7 +419,7 @@ async fn test_compute_note_root_success() { // Set up store // --------------------------------------------------------------------------------------------- - let store = MockStoreSuccess::new(std::iter::empty(), BTreeSet::new()); + let store = MockStoreSuccessBuilder::new().build(); // Block prover // 
--------------------------------------------------------------------------------------------- @@ -468,3 +481,57 @@ async fn test_compute_note_root_success() { // --------------------------------------------------------------------------------------------- assert_eq!(block_header.note_root(), notes_smt.root()); } + +// CHAIN MMR ROOT TESTS +// ================================================================================================= + +/// Test that the chain mmr root is as expected if the batches are empty +#[tokio::test] +async fn test_compute_chain_mmr_root_empty_mmr() { + let store = MockStoreSuccessBuilder::new().build(); + + let expected_block_header = build_expected_block_header(&store, &[]).await; + let actual_block_header = build_actual_block_header(&store, Vec::new()).await; + + assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); +} + +/// add header to non-empty MMR (1 peak), and check that we get the expected commitment +#[tokio::test] +async fn test_compute_chain_mmr_root_mmr_1_peak() { + let initial_chain_mmr = { + let mut mmr = Mmr::new(); + mmr.add(Digest::default()); + + mmr + }; + + let store = MockStoreSuccessBuilder::new().initial_chain_mmr(initial_chain_mmr).build(); + + let expected_block_header = build_expected_block_header(&store, &[]).await; + let actual_block_header = build_actual_block_header(&store, Vec::new()).await; + + assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); +} + +/// add header to an MMR with 17 peaks, and check that we get the expected commitment +#[tokio::test] +async fn test_compute_chain_mmr_root_mmr_17_peaks() { + let initial_chain_mmr = { + let mut mmr = Mmr::new(); + for _ in 0..(2_u32.pow(17) - 1) { + mmr.add(Digest::default()); + } + + assert_eq!(mmr.peaks(mmr.forest()).unwrap().peaks().len(), 17); + + mmr + }; + + let store = MockStoreSuccessBuilder::new().initial_chain_mmr(initial_chain_mmr).build(); + + let expected_block_header = build_expected_block_header(&store, &[]).await; + let actual_block_header = build_actual_block_header(&store, Vec::new()).await; + + assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); +} diff --git a/block-producer/src/block_builder/tests.rs b/block-producer/src/block_builder/tests.rs index c21860d94..ef528364c 100644 --- a/block-producer/src/block_builder/tests.rs +++ b/block-producer/src/block_builder/tests.rs @@ -1,5 +1,3 @@ -use std::collections::BTreeSet; - // block builder tests (higher level) // 1. 
`apply_block()` is called use super::*; @@ -8,7 +6,7 @@ use miden_air::Felt; use crate::{ batch_builder::TransactionBatch, - test_utils::{DummyProvenTxGenerator, MockStoreFailure, MockStoreSuccess}, + test_utils::{DummyProvenTxGenerator, MockStoreFailure, MockStoreSuccessBuilder}, }; /// Tests that `build_block()` succeeds when the transaction batches are not empty @@ -18,10 +16,11 @@ async fn test_apply_block_called_nonempty_batches() { let account_id = AccountId::new_unchecked(42u64.into()); let account_initial_hash: Digest = [Felt::from(1u64), Felt::from(1u64), Felt::from(1u64), Felt::from(1u64)].into(); - let store = Arc::new(MockStoreSuccess::new( - std::iter::once((account_id, account_initial_hash)), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(std::iter::once((account_id, account_initial_hash))) + .build(), + ); let block_builder = DefaultBlockBuilder::new(store.clone()); @@ -52,10 +51,11 @@ async fn test_apply_block_called_empty_batches() { let account_id = AccountId::new_unchecked(42u64.into()); let account_hash: Digest = [Felt::from(1u64), Felt::from(1u64), Felt::from(1u64), Felt::from(1u64)].into(); - let store = Arc::new(MockStoreSuccess::new( - std::iter::once((account_id, account_hash)), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(std::iter::once((account_id, account_hash))) + .build(), + ); let block_builder = DefaultBlockBuilder::new(store.clone()); diff --git a/block-producer/src/state_view/tests/apply_block.rs b/block-producer/src/state_view/tests/apply_block.rs index b39ee8180..f1e48052f 100644 --- a/block-producer/src/state_view/tests/apply_block.rs +++ b/block-producer/src/state_view/tests/apply_block.rs @@ -6,7 +6,7 @@ use std::iter; -use crate::test_utils::MockStoreSuccess; +use crate::test_utils::{block::MockBlockBuilder, MockStoreSuccessBuilder}; use super::*; @@ -16,10 +16,11 @@ async fn test_apply_block_ab1() { let tx_gen = DummyProvenTxGenerator::new(); let account: MockPrivateAccount<3> = MockPrivateAccount::from(0); - let store = Arc::new(MockStoreSuccess::new( - iter::once((account.id, account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(iter::once((account.id, account.states[0]))) + .build(), + ); let tx = tx_gen.dummy_proven_tx_with_params( account.id, @@ -35,9 +36,16 @@ async fn test_apply_block_ab1() { let verify_tx_res = state_view.verify_tx(tx.into()).await; assert!(verify_tx_res.is_ok()); - let block = Arc::new(get_dummy_block(vec![account], Vec::new())); + let block = MockBlockBuilder::new(&store) + .await + .account_updates( + std::iter::once(account) + .map(|mock_account| (mock_account.id, mock_account.states[1])) + .collect(), + ) + .build(); - let apply_block_res = state_view.apply_block(block).await; + let apply_block_res = state_view.apply_block(Arc::new(block)).await; assert!(apply_block_res.is_ok()); assert_eq!(*store.num_apply_block_called.read().await, 1); @@ -50,13 +58,16 @@ async fn test_apply_block_ab2() { let (txs, accounts): (Vec<_>, Vec<_>) = get_txs_and_accounts(&tx_gen, 3).unzip(); - let store = Arc::new(MockStoreSuccess::new( - accounts - .clone() - .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts( + accounts + .clone() + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[0])), + ) + .build(), + ); 
let state_view = DefaulStateView::new(store.clone()); @@ -67,11 +78,19 @@ async fn test_apply_block_ab2() { } // All except the first account will go into the block. - let accounts_in_block = accounts.iter().skip(1).cloned().collect(); - - let block = Arc::new(get_dummy_block(accounts_in_block, Vec::new())); - - let apply_block_res = state_view.apply_block(block).await; + let accounts_in_block: Vec = accounts.iter().skip(1).cloned().collect(); + + let block = MockBlockBuilder::new(&store) + .await + .account_updates( + accounts_in_block + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[1])) + .collect(), + ) + .build(); + + let apply_block_res = state_view.apply_block(Arc::new(block)).await; assert!(apply_block_res.is_ok()); let accounts_still_in_flight = state_view.accounts_in_flight.read().await; @@ -88,13 +107,16 @@ async fn test_apply_block_ab3() { let (txs, accounts): (Vec<_>, Vec<_>) = get_txs_and_accounts(&tx_gen, 3).unzip(); - let store = Arc::new(MockStoreSuccess::new( - accounts - .clone() - .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts( + accounts + .clone() + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[0])), + ) + .build(), + ); let state_view = DefaulStateView::new(store.clone()); @@ -104,9 +126,18 @@ async fn test_apply_block_ab3() { assert!(verify_tx_res.is_ok()); } - let block = Arc::new(get_dummy_block(accounts.clone(), Vec::new())); - - let apply_block_res = state_view.apply_block(block).await; + let block = MockBlockBuilder::new(&store) + .await + .account_updates( + accounts + .clone() + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[1])) + .collect(), + ) + .build(); + + let apply_block_res = state_view.apply_block(Arc::new(block)).await; assert!(apply_block_res.is_ok()); // Craft a new transaction which tries to consume the same note that was consumed in in the diff --git a/block-producer/src/state_view/tests/mod.rs b/block-producer/src/state_view/tests/mod.rs index 6ecda9fae..d795a0e2b 100644 --- a/block-producer/src/state_view/tests/mod.rs +++ b/block-producer/src/state_view/tests/mod.rs @@ -1,6 +1,6 @@ use super::*; -use miden_objects::{transaction::ConsumedNoteInfo, BlockHeader, Felt, Hasher}; +use miden_objects::{transaction::ConsumedNoteInfo, Hasher}; use crate::test_utils::{DummyProvenTxGenerator, MockPrivateAccount}; @@ -10,15 +10,23 @@ mod verify_tx; // HELPERS // ------------------------------------------------------------------------------------------------- -pub fn consumed_note_by_index(index: u8) -> ConsumedNoteInfo { - ConsumedNoteInfo::new(Hasher::hash(&[index]), Hasher::hash(&[index, index])) +pub fn consumed_note_by_index(index: u32) -> ConsumedNoteInfo { + ConsumedNoteInfo::new( + Hasher::hash(&index.to_be_bytes()), + Hasher::hash( + &[index.to_be_bytes(), index.to_be_bytes()] + .into_iter() + .flatten() + .collect::>(), + ), + ) } /// Returns `num` transactions, and the corresponding account they modify. 
/// The transactions each consume a single different note pub fn get_txs_and_accounts( tx_gen: &DummyProvenTxGenerator, - num: u8, + num: u32, ) -> impl Iterator + '_ { (0..num).map(|index| { let account = MockPrivateAccount::from(index); @@ -33,33 +41,3 @@ pub fn get_txs_and_accounts( (Arc::new(tx), account) }) } - -pub fn get_dummy_block( - updated_accounts: Vec, - new_nullifiers: Vec, -) -> Block { - let header = BlockHeader::new( - Digest::default(), - Felt::new(42), - Digest::default(), - Digest::default(), - Digest::default(), - Digest::default(), - Digest::default(), - Digest::default(), - Felt::new(0), - Felt::new(42), - ); - - let updated_accounts = updated_accounts - .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[1])) - .collect(); - - Block { - header, - updated_accounts, - created_notes: Vec::new(), - produced_nullifiers: new_nullifiers, - } -} diff --git a/block-producer/src/state_view/tests/verify_tx.rs b/block-producer/src/state_view/tests/verify_tx.rs index 0f1bafd2d..74a580173 100644 --- a/block-producer/src/state_view/tests/verify_tx.rs +++ b/block-producer/src/state_view/tests/verify_tx.rs @@ -14,7 +14,7 @@ use std::iter; use tokio::task::JoinSet; -use crate::test_utils::MockStoreSuccess; +use crate::test_utils::MockStoreSuccessBuilder; use super::*; @@ -26,12 +26,15 @@ async fn test_verify_tx_happy_path() { let (txs, accounts): (Vec, Vec) = get_txs_and_accounts(&tx_gen, 3).unzip(); - let store = Arc::new(MockStoreSuccess::new( - accounts - .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts( + accounts + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[0])), + ) + .build(), + ); let state_view = DefaulStateView::new(store); @@ -50,12 +53,15 @@ async fn test_verify_tx_happy_path_concurrent() { let (txs, accounts): (Vec, Vec) = get_txs_and_accounts(&tx_gen, 3).unzip(); - let store = Arc::new(MockStoreSuccess::new( - accounts - .into_iter() - .map(|mock_account| (mock_account.id, mock_account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts( + accounts + .into_iter() + .map(|mock_account| (mock_account.id, mock_account.states[0])), + ) + .build(), + ); let state_view = Arc::new(DefaulStateView::new(store)); @@ -78,10 +84,11 @@ async fn test_verify_tx_vt1() { let account = MockPrivateAccount::<3>::from(0); - let store = Arc::new(MockStoreSuccess::new( - iter::once((account.id, account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(iter::once((account.id, account.states[0]))) + .build(), + ); // The transaction's initial account hash uses `account.states[1]`, where the store expects // `account.states[0]` @@ -114,7 +121,7 @@ async fn test_verify_tx_vt2() { let account_not_in_store: MockPrivateAccount<3> = MockPrivateAccount::from(0); // Notice: account is not added to the store - let store = Arc::new(MockStoreSuccess::new(iter::empty(), BTreeSet::new())); + let store = Arc::new(MockStoreSuccessBuilder::new().build()); let tx = tx_gen.dummy_proven_tx_with_params( account_not_in_store.id, @@ -147,10 +154,12 @@ async fn test_verify_tx_vt3() { let consumed_note_in_store = consumed_note_by_index(0); // Notice: `consumed_note_in_store` is added to the store - let store = Arc::new(MockStoreSuccess::new( - iter::once((account.id, account.states[0])), - 
BTreeSet::from_iter(iter::once(consumed_note_in_store.nullifier())), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(iter::once((account.id, account.states[0]))) + .initial_nullifiers(BTreeSet::from_iter(iter::once(consumed_note_in_store.nullifier()))) + .build(), + ); let tx = tx_gen.dummy_proven_tx_with_params( account.id, @@ -179,10 +188,11 @@ async fn test_verify_tx_vt4() { let account: MockPrivateAccount<3> = MockPrivateAccount::from(0); - let store = Arc::new(MockStoreSuccess::new( - iter::once((account.id, account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts(iter::once((account.id, account.states[0]))) + .build(), + ); let tx1 = tx_gen.dummy_proven_tx_with_params( account.id, @@ -224,12 +234,15 @@ async fn test_verify_tx_vt5() { let consumed_note_in_both_txs = consumed_note_by_index(0); // Notice: `consumed_note_in_both_txs` is NOT in the store - let store = Arc::new(MockStoreSuccess::new( - vec![account_1, account_2] - .into_iter() - .map(|account| (account.id, account.states[0])), - BTreeSet::new(), - )); + let store = Arc::new( + MockStoreSuccessBuilder::new() + .initial_accounts( + vec![account_1, account_2] + .into_iter() + .map(|account| (account.id, account.states[0])), + ) + .build(), + ); let tx1 = tx_gen.dummy_proven_tx_with_params( account_1.id, diff --git a/block-producer/src/test_utils/account.rs b/block-producer/src/test_utils/account.rs index a54430683..d5355dffb 100644 --- a/block-producer/src/test_utils/account.rs +++ b/block-producer/src/test_utils/account.rs @@ -37,12 +37,11 @@ impl MockPrivateAccount { } } -impl From for MockPrivateAccount { +impl From for MockPrivateAccount { /// Each index gives rise to a different account ID - fn from(index: u8) -> Self { - let mut init_seed: [u8; 32] = [0; 32]; - init_seed[0] = index; + fn from(index: u32) -> Self { + let init_seed: Vec<_> = index.to_be_bytes().into_iter().chain([0u8; 28]).collect(); - Self::new(init_seed) + Self::new(init_seed.try_into().unwrap()) } } diff --git a/block-producer/src/test_utils/batch.rs b/block-producer/src/test_utils/batch.rs new file mode 100644 index 000000000..c66bf0181 --- /dev/null +++ b/block-producer/src/test_utils/batch.rs @@ -0,0 +1,33 @@ +use std::sync::Arc; + +use crate::{batch_builder::TransactionBatch, test_utils::MockProvenTxBuilder}; + +pub trait TransactionBatchConstructor { + /// Returns a `TransactionBatch` with `notes_per_tx.len()` transactions, where the i'th + /// transaction has `notes_per_tx[i]` notes created + fn from_notes_created(notes_per_tx: &[u64]) -> Self; + + /// Returns a `TransactionBatch` which contains `num_txs_in_batch` transactions + fn from_txs(num_txs_in_batch: u64) -> Self; +} + +impl TransactionBatchConstructor for TransactionBatch { + fn from_notes_created(notes_per_tx: &[u64]) -> Self { + let txs: Vec<_> = notes_per_tx + .iter() + .map(|&num_notes| MockProvenTxBuilder::new().num_notes_created(num_notes).build()) + .map(Arc::new) + .collect(); + + Self::new(txs).unwrap() + } + + fn from_txs(num_txs_in_batch: u64) -> Self { + let txs: Vec<_> = (0..num_txs_in_batch) + .map(|_| MockProvenTxBuilder::new().build()) + .map(Arc::new) + .collect(); + + Self::new(txs).unwrap() + } +} diff --git a/block-producer/src/test_utils/block.rs b/block-producer/src/test_utils/block.rs new file mode 100644 index 000000000..9d43260a9 --- /dev/null +++ b/block-producer/src/test_utils/block.rs @@ -0,0 +1,173 @@ +use std::sync::Arc; + +use 
miden_node_proto::domain::BlockInputs; +use miden_objects::{accounts::AccountId, crypto::merkle::Mmr, BlockHeader, Digest, ONE, ZERO}; +use miden_vm::crypto::SimpleSmt; + +use crate::{ + batch_builder::TransactionBatch, + block::Block, + block_builder::prover::{block_witness::BlockWitness, BlockProver}, + store::Store, +}; + +use super::MockStoreSuccess; + +/// Constructs the block we expect to be built given the store state, and a set of transaction +/// batches to be applied +pub async fn build_expected_block_header( + store: &MockStoreSuccess, + batches: &[TransactionBatch], +) -> BlockHeader { + let last_block_header = *store.last_block_header.read().await; + + // Compute new account root + let updated_accounts: Vec<(AccountId, Digest)> = + batches.iter().flat_map(|batch| batch.updated_accounts()).collect(); + let new_account_root = { + let mut store_accounts = store.accounts.read().await.clone(); + for (account_id, new_account_state) in updated_accounts.iter() { + store_accounts + .update_leaf(u64::from(*account_id), new_account_state.into()) + .unwrap(); + } + + store_accounts.root() + }; + + // Compute created notes root + // FIXME: compute the right root. Needs + // https://github.com/0xPolygonMiden/crypto/issues/220#issuecomment-1823911017 + let new_created_notes_root = Digest::default(); + + // Compute new chain MMR root + let new_chain_mmr_root = { + let mut store_chain_mmr = store.chain_mmr.read().await.clone(); + + store_chain_mmr.add(last_block_header.hash()); + + store_chain_mmr.peaks(store_chain_mmr.forest()).unwrap().hash_peaks() + }; + + // Build header + BlockHeader::new( + last_block_header.hash(), + last_block_header.block_num() + ONE, + new_chain_mmr_root, + new_account_root, + // FIXME: FILL IN CORRECT NULLIFIER ROOT + Digest::default(), + // FIXME: FILL IN CORRECT CREATED NOTES ROOT + new_created_notes_root, + Digest::default(), + Digest::default(), + ZERO, + ONE, + ) +} + +/// Builds the "actual" block header; i.e. 
the block header built using the Miden VM, used in the +/// node +pub async fn build_actual_block_header( + store: &MockStoreSuccess, + batches: Vec, +) -> BlockHeader { + let updated_accounts: Vec<(AccountId, Digest)> = + batches.iter().flat_map(|batch| batch.updated_accounts()).collect(); + let produced_nullifiers: Vec = + batches.iter().flat_map(|batch| batch.produced_nullifiers()).collect(); + + let block_inputs_from_store: BlockInputs = store + .get_block_inputs( + updated_accounts.iter().map(|(account_id, _)| account_id), + produced_nullifiers.iter(), + ) + .await + .unwrap(); + + let block_witness = + BlockWitness::new(block_inputs_from_store, batches.into_iter().map(Arc::new).collect()) + .unwrap(); + + BlockProver::new().prove(block_witness).unwrap() +} + +#[derive(Debug)] +pub struct MockBlockBuilder { + store_accounts: SimpleSmt, + store_chain_mmr: Mmr, + last_block_header: BlockHeader, + + updated_accounts: Option>, + created_notes: Option>, + produced_nullifiers: Option>, +} + +impl MockBlockBuilder { + pub async fn new(store: &MockStoreSuccess) -> Self { + Self { + store_accounts: store.accounts.read().await.clone(), + store_chain_mmr: store.chain_mmr.read().await.clone(), + last_block_header: *store.last_block_header.read().await, + + updated_accounts: None, + created_notes: None, + produced_nullifiers: None, + } + } + + pub fn account_updates( + mut self, + updated_accounts: Vec<(AccountId, Digest)>, + ) -> Self { + for (account_id, new_account_state) in updated_accounts.iter() { + self.store_accounts + .update_leaf(u64::from(*account_id), new_account_state.into()) + .unwrap(); + } + + self.updated_accounts = Some(updated_accounts); + + self + } + + pub fn created_notes( + mut self, + created_notes: Vec, + ) -> Self { + self.created_notes = Some(created_notes); + + self + } + + pub fn produced_nullifiers( + mut self, + produced_nullifiers: Vec, + ) -> Self { + self.produced_nullifiers = Some(produced_nullifiers); + + self + } + + pub fn build(self) -> Block { + let header = BlockHeader::new( + self.last_block_header.hash(), + self.last_block_header.block_num() + ONE, + self.store_chain_mmr.peaks(self.store_chain_mmr.forest()).unwrap().hash_peaks(), + self.store_accounts.root(), + Digest::default(), + Digest::default(), + Digest::default(), + Digest::default(), + ZERO, + ONE, + ); + + Block { + header, + updated_accounts: self.updated_accounts.unwrap_or_default(), + created_notes: self.created_notes.unwrap_or_default(), + produced_nullifiers: self.produced_nullifiers.unwrap_or_default(), + } + } +} diff --git a/block-producer/src/test_utils/mod.rs b/block-producer/src/test_utils/mod.rs index 9cdb64614..8ddf42a7c 100644 --- a/block-producer/src/test_utils/mod.rs +++ b/block-producer/src/test_utils/mod.rs @@ -5,10 +5,14 @@ use tokio::sync::RwLock; use miden_objects::{accounts::AccountId, Digest}; mod proven_tx; -pub use proven_tx::DummyProvenTxGenerator; +pub use proven_tx::{DummyProvenTxGenerator, MockProvenTxBuilder}; mod store; -pub use store::{MockStoreFailure, MockStoreSuccess}; +pub use store::{MockStoreFailure, MockStoreSuccess, MockStoreSuccessBuilder}; mod account; pub use account::MockPrivateAccount; + +pub mod block; + +pub mod batch; diff --git a/block-producer/src/test_utils/proven_tx.rs b/block-producer/src/test_utils/proven_tx.rs index b4c672745..41786fba2 100644 --- a/block-producer/src/test_utils/proven_tx.rs +++ b/block-producer/src/test_utils/proven_tx.rs @@ -1,13 +1,17 @@ //! 
FibSmall taken from the `fib_small` example in `winterfell` +use std::sync::{Arc, Mutex}; + use miden_air::{ExecutionProof, HashFunction}; +use miden_crypto::hash::rpo::Rpo256; use miden_mock::constants::ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN; use miden_objects::{ accounts::AccountId, - notes::NoteEnvelope, + notes::{NoteEnvelope, NoteMetadata}, transaction::{ConsumedNoteInfo, ProvenTransaction}, - Digest, + Digest, ONE, ZERO, }; +use once_cell::sync::Lazy; use winterfell::{ crypto::{hashers::Blake3_192, DefaultRandomCoin}, math::fields::f64::BaseElement, @@ -19,6 +23,78 @@ use winterfell::{ TransitionConstraintDegree, }; +use super::MockPrivateAccount; + +/// Keeps track how many accounts were created as a source of randomness +static NUM_ACCOUNTS_CREATED: Lazy<Arc<Mutex<u32>>> = Lazy::new(|| Arc::new(Mutex::new(0))); + +/// Keeps track how many notes were created as a source of randomness +static NUM_NOTES_CREATED: Lazy<Arc<Mutex<u64>>> = Lazy::new(|| Arc::new(Mutex::new(0))); + +pub struct MockProvenTxBuilder { + mock_account: MockPrivateAccount, + notes_created: Option<Vec<NoteEnvelope>>, +} + +impl MockProvenTxBuilder { + pub fn new() -> Self { + let account_index: u32 = { + let mut locked_num_accounts_created = NUM_ACCOUNTS_CREATED.lock().unwrap(); + + let account_index = *locked_num_accounts_created; + + *locked_num_accounts_created += 1; + + account_index + }; + Self { + mock_account: account_index.into(), + notes_created: None, + } + } + + pub fn num_notes_created( + mut self, + num_notes_created_in_tx: u64, + ) -> Self { + let mut locked_num_notes_created = NUM_NOTES_CREATED.lock().unwrap(); + + let notes_created: Vec<_> = (*locked_num_notes_created + ..(*locked_num_notes_created + num_notes_created_in_tx)) + .map(|note_index| { + let note_hash = Rpo256::hash(&note_index.to_be_bytes()); + + NoteEnvelope::new(note_hash, NoteMetadata::new(self.mock_account.id, ONE, ZERO)) + }) + .collect(); + + // update state + self.notes_created = Some(notes_created); + *locked_num_notes_created += num_notes_created_in_tx; + + self + } + + pub fn build(self) -> ProvenTransaction { + ProvenTransaction::new( + self.mock_account.id, + self.mock_account.states[0], + self.mock_account.states[1], + Vec::new(), + self.notes_created.unwrap_or_default(), + None, + Digest::default(), + ExecutionProof::new(StarkProof::new_dummy(), HashFunction::Blake3_192), + ) + } +} + +impl Default for MockProvenTxBuilder { + fn default() -> Self { + Self::new() + } +} + /// We need to generate a new `ProvenTransaction` every time because it doesn't /// derive `Clone`. Doing it this way allows us to compute the `StarkProof` /// once, and clone it for each new `ProvenTransaction`.
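Not part of the patch: a short sketch of how the new test helpers above might be combined. It relies only on APIs introduced in this diff (`MockProvenTxBuilder`, `TransactionBatch::new`, and the `TransactionBatchConstructor` trait); the counts and assertions are illustrative assumptions, not code from the PR.

```rust
use std::sync::Arc;

use crate::{
    batch_builder::TransactionBatch,
    test_utils::{batch::TransactionBatchConstructor, MockProvenTxBuilder},
};

#[test]
fn sketch_building_batches_from_mock_txs() {
    // A single mock proven transaction that creates three notes...
    let tx = MockProvenTxBuilder::new().num_notes_created(3).build();

    // ...wrapped into a one-transaction batch (batches hold shared proven transactions).
    let batch = TransactionBatch::new(vec![Arc::new(tx)]).unwrap();
    assert!(batch.created_notes().next().is_some());

    // The constructor trait shortcuts the same two steps for common test shapes:
    // three transactions creating 1, 2 and 3 notes, and five note-less transactions.
    let batch_with_notes = TransactionBatch::from_notes_created(&[1, 2, 3]);
    let batch_without_notes = TransactionBatch::from_txs(5);
    assert!(batch_with_notes.created_notes().next().is_some());
    assert!(batch_without_notes.created_notes().next().is_none());
}
```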
diff --git a/block-producer/src/test_utils/store.rs b/block-producer/src/test_utils/store.rs index 31a6e9750..28bbcbd7f 100644 --- a/block-producer/src/test_utils/store.rs +++ b/block-producer/src/test_utils/store.rs @@ -1,7 +1,6 @@ use async_trait::async_trait; -use miden_air::{Felt, FieldElement}; use miden_node_proto::domain::{AccountInputRecord, BlockInputs}; -use miden_objects::{crypto::merkle::MmrPeaks, BlockHeader, EMPTY_WORD}; +use miden_objects::{crypto::merkle::Mmr, BlockHeader, EMPTY_WORD, ONE, ZERO}; use miden_vm::crypto::SimpleSmt; use crate::{ @@ -12,50 +11,105 @@ use crate::{ use super::*; -pub struct MockStoreSuccess { - /// Map account id -> account hash - accounts: Arc<RwLock<SimpleSmt>>, - - /// Stores the nullifiers of the notes that were consumed - consumed_nullifiers: Arc<RwLock<BTreeSet<Digest>>>, +const ACCOUNT_SMT_DEPTH: u8 = 64; - /// The number of times `apply_block()` was called - pub num_apply_block_called: Arc<RwLock<u32>>, +/// Builds a [`MockStoreSuccess`] +#[derive(Debug, Default)] +pub struct MockStoreSuccessBuilder { + accounts: Option<SimpleSmt>, + consumed_nullifiers: Option<BTreeSet<Digest>>, + chain_mmr: Option<Mmr>, } -impl MockStoreSuccess { - /// Initializes the known accounts from provided mock accounts, where the account hash in the - /// store is the first state in `MockAccount.states`. - pub fn new( +impl MockStoreSuccessBuilder { + /// FIXME: the store always needs to be properly initialized with initial accounts + /// see https://github.com/0xPolygonMiden/miden-node/issues/79 + pub fn new() -> Self { + Self::default() + } + + pub fn initial_accounts( + mut self, accounts: impl Iterator<Item = (AccountId, Digest)>, + ) -> Self { + let accounts_smt = { + let accounts = + accounts.into_iter().map(|(account_id, hash)| (account_id.into(), hash.into())); + + SimpleSmt::with_leaves(ACCOUNT_SMT_DEPTH, accounts).unwrap() + }; + + self.accounts = Some(accounts_smt); + + self + } + + pub fn initial_nullifiers( + mut self, consumed_nullifiers: BTreeSet<Digest>, ) -> Self { - let accounts: Vec<_> = accounts - .into_iter() - .map(|(account_id, hash)| (account_id.into(), hash.into())) - .collect(); - let store_accounts = SimpleSmt::with_leaves(64, accounts).unwrap(); + self.consumed_nullifiers = Some(consumed_nullifiers); - Self { - accounts: Arc::new(RwLock::new(store_accounts)), - consumed_nullifiers: Arc::new(RwLock::new(consumed_nullifiers)), - num_apply_block_called: Arc::new(RwLock::new(0)), - } + self } - /// Update some accounts in the store - pub async fn update_accounts( - &self, - updated_accounts: impl Iterator<Item = (AccountId, Digest)>, - ) { - let mut locked_accounts = self.accounts.write().await; - for (account_id, new_account_state) in updated_accounts { - locked_accounts - .update_leaf(account_id.into(), new_account_state.into()) - .unwrap(); + pub fn initial_chain_mmr( + mut self, + chain_mmr: Mmr, + ) -> Self { + self.chain_mmr = Some(chain_mmr); + + self + } + + pub fn build(self) -> MockStoreSuccess { + let accounts_smt = self.accounts.unwrap_or(SimpleSmt::new(ACCOUNT_SMT_DEPTH).unwrap()); + let chain_mmr = self.chain_mmr.unwrap_or_default(); + + let initial_block_header = BlockHeader::new( + Digest::default(), + ZERO, + chain_mmr.peaks(chain_mmr.forest()).unwrap().hash_peaks(), + accounts_smt.root(), + Digest::default(), + // FIXME: FILL IN CORRECT VALUE + Digest::default(), + Digest::default(), + Digest::default(), + ZERO, + ONE, + ); + + MockStoreSuccess { + accounts: Arc::new(RwLock::new(accounts_smt)), + consumed_nullifiers: Arc::new(RwLock::new( + self.consumed_nullifiers.unwrap_or_default(), + )), + chain_mmr: Arc::new(RwLock::new(chain_mmr)), + last_block_header:
Arc::new(RwLock::new(initial_block_header)), + num_apply_block_called: Arc::new(RwLock::new(0)), } } +} + +pub struct MockStoreSuccess { + /// Map account id -> account hash + pub accounts: Arc<RwLock<SimpleSmt>>, + + /// Stores the nullifiers of the notes that were consumed + pub consumed_nullifiers: Arc<RwLock<BTreeSet<Digest>>>, + + // Stores the chain MMR + pub chain_mmr: Arc<RwLock<Mmr>>, + + // Stores the header of the last applied block + pub last_block_header: Arc<RwLock<BlockHeader>>, + + /// The number of times `apply_block()` was called + pub num_apply_block_called: Arc<RwLock<u32>>, +} + +impl MockStoreSuccess { pub async fn account_root(&self) -> Digest { let locked_accounts = self.accounts.read().await; @@ -73,14 +127,28 @@ impl ApplyBlock for MockStoreSuccess { let mut locked_accounts = self.accounts.write().await; let mut locked_consumed_nullifiers = self.consumed_nullifiers.write().await; + // update accounts for &(account_id, account_hash) in block.updated_accounts.iter() { locked_accounts.update_leaf(account_id.into(), account_hash.into()).unwrap(); } + debug_assert_eq!(locked_accounts.root(), block.header.account_root()); + // update nullifiers let mut new_nullifiers: BTreeSet<Digest> = block.produced_nullifiers.iter().cloned().collect(); locked_consumed_nullifiers.append(&mut new_nullifiers); + // update chain mmr with new block header hash + { + let mut chain_mmr = self.chain_mmr.write().await; + + chain_mmr.add(block.header.hash()); + } + + // update last block header + *self.last_block_header.write().await = block.header; + + // update num_apply_block_called *self.num_apply_block_called.write().await += 1; Ok(()) @@ -123,31 +191,11 @@ impl Store for MockStoreSuccess { updated_accounts: impl Iterator<Item = &AccountId> + Send, _produced_nullifiers: impl Iterator<Item = &Digest> + Send, ) -> Result<BlockInputs, BlockInputsError> { - let block_header = { - let prev_hash: Digest = Digest::default(); - let chain_root: Digest = Digest::default(); - let acct_root: Digest = self.account_root().await; - let nullifier_root: Digest = Digest::default(); - let note_root: Digest = Digest::default(); - let batch_root: Digest = Digest::default(); - let proof_hash: Digest = Digest::default(); - - BlockHeader::new( - prev_hash, - Felt::ZERO, - chain_root, - acct_root, - nullifier_root, - note_root, - batch_root, - proof_hash, - Felt::ZERO, - Felt::ONE, - ) + let chain_peaks = { + let locked_chain_mmr = self.chain_mmr.read().await; + locked_chain_mmr.peaks(locked_chain_mmr.forest()).unwrap() }; - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - let account_states = { let locked_accounts = self.accounts.read().await; @@ -166,7 +214,7 @@ impl Store for MockStoreSuccess { }; Ok(BlockInputs { block_header: *self.last_block_header.read().await, chain_peaks, account_states, // TODO: return a proper nullifiers iterator
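For reference, a sketch of how a test might seed the rebuilt mock store with accounts, nullifiers, and a chain MMR. The builder methods and types come from the diff above; the concrete values are illustrative assumptions only.

```rust
use std::{collections::BTreeSet, iter};

use miden_objects::{crypto::merkle::Mmr, Digest};

use crate::test_utils::{MockPrivateAccount, MockStoreSuccess, MockStoreSuccessBuilder};

fn sketch_seeded_store() -> MockStoreSuccess {
    // A mock account whose first state acts as the initial hash in the store's account SMT.
    let account: MockPrivateAccount<3> = MockPrivateAccount::from(0);

    // A one-leaf chain MMR, as in test_compute_chain_mmr_root_mmr_1_peak above.
    let mut chain_mmr = Mmr::new();
    chain_mmr.add(Digest::default());

    MockStoreSuccessBuilder::new()
        .initial_accounts(iter::once((account.id, account.states[0])))
        .initial_nullifiers(BTreeSet::new())
        .initial_chain_mmr(chain_mmr)
        .build()
}
```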