From 5e9b75b03b56c6196b0807f8f87353820ea95b48 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Thu, 23 Jan 2025 01:05:51 -0800 Subject: [PATCH 1/7] chore: increment crate versions to v0.8.0 and MSRV to 1.84 --- CHANGELOG.md | 6 ++++++ Cargo.lock | 10 +++++----- Cargo.toml | 8 ++++---- README.md | 4 ++-- bin/proving-service/Cargo.toml | 6 +++--- crates/miden-lib/Cargo.toml | 2 +- crates/miden-lib/src/transaction/memory.rs | 1 + crates/miden-objects/Cargo.toml | 2 +- crates/miden-tx/Cargo.toml | 2 +- docs/index.md | 2 +- rust-toolchain.toml | 2 +- 11 files changed, 26 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 68e23fc97..522a380e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 0.8.0 (TBD) + +### Changes + +- [BREAKING] Incremented minimum supported Rust version to 1.84. + ## 0.7.0 (2025-01-22) ### Highlights diff --git a/Cargo.lock b/Cargo.lock index 8b348eaa4..f354ff921 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "addr2line" @@ -1880,7 +1880,7 @@ dependencies = [ [[package]] name = "miden-lib" -version = "0.7.0" +version = "0.8.0" dependencies = [ "miden-assembly", "miden-objects", @@ -1935,7 +1935,7 @@ dependencies = [ [[package]] name = "miden-objects" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "assert_matches", @@ -1988,7 +1988,7 @@ dependencies = [ [[package]] name = "miden-proving-service" -version = "0.7.0" +version = "0.8.0" dependencies = [ "async-trait", "axum", @@ -2058,7 +2058,7 @@ dependencies = [ [[package]] name = "miden-tx" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "assert_matches", diff --git a/Cargo.toml b/Cargo.toml index 834db8127..6faffb034 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,7 @@ members = [ [workspace.package] edition = "2021" -rust-version = "1.82" +rust-version = "1.84" license = "MIT" authors = ["Miden contributors"] homepage = "https://polygon.technology/polygon-miden" @@ -36,11 +36,11 @@ lto = true assembly = { package = "miden-assembly", version = "0.12", default-features = false } assert_matches = { version = "1.5", default-features = false } miden-crypto = { version = "0.13", default-features = false } -miden-lib = { path = "crates/miden-lib", version = "0.7", default-features = false } -miden-objects = { path = "crates/miden-objects", version = "0.7", default-features = false } +miden-lib = { path = "crates/miden-lib", version = "0.8", default-features = false } +miden-objects = { path = "crates/miden-objects", version = "0.8", default-features = false } miden-prover = { version = "0.12", default-features = false } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { path = "crates/miden-tx", version = "0.7", default-features = false } +miden-tx = { path = "crates/miden-tx", version = "0.8", default-features = false } miden-verifier = { version = "0.12", default-features = false } rand = { version = "0.8", 
default-features = false } thiserror = { version = "2.0", default-features = false } diff --git a/README.md b/README.md index 2893c5a78..27b1a5575 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xPolygonMiden/miden-base/blob/main/LICENSE) [![test](https://github.com/0xPolygonMiden/miden-base/actions/workflows/test.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-base/actions/workflows/test.yml) [![build](https://github.com/0xPolygonMiden/miden-base/actions/workflows/build.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-base/actions/workflows/build.yml) -[![RUST_VERSION](https://img.shields.io/badge/rustc-1.82+-lightgray.svg)](https://www.rust-lang.org/tools/install) +[![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![GitHub Release](https://img.shields.io/github/release/0xPolygonMiden/miden-base)](https://github.com/0xPolygonMiden/miden-base/releases/) Description and core structures for the Miden Rollup protocol. @@ -23,7 +23,7 @@ If you want to join the technical discussion or learn more about the project, pl ## Status and features -Polygon Miden is currently on release v0.7. This is an early version of the protocol and its components. We expect to keep making changes (including breaking changes) to all components. +Polygon Miden is currently on release v0.8. This is an early version of the protocol and its components. We expect to keep making changes (including breaking changes) to all components. 
### Feature highlights diff --git a/bin/proving-service/Cargo.toml b/bin/proving-service/Cargo.toml index f7fdec068..f9ad82844 100644 --- a/bin/proving-service/Cargo.toml +++ b/bin/proving-service/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-proving-service" -version = "0.7.0" +version = "0.8.0" description = "Miden rollup proving service" readme = "README.md" keywords = ["miden", "proving-service"] @@ -21,7 +21,7 @@ concurrent = ["miden-tx/concurrent"] [dependencies] async-trait = "0.1" -axum = {version = "0.7" } +axum = { version = "0.7" } bytes = "1.0" clap = { version = "4.5", features = ["derive"] } figment = { version = "0.10", features = ["toml", "env"] } @@ -45,7 +45,7 @@ tokio = { version = "1.38", features = ["full"] } tokio-stream = { version = "0.1", features = [ "net" ]} toml = { version = "0.8" } thiserror = { workspace = true } -tonic = { version = "0.12", default-features = false, features = ["prost", "codegen", "transport"] } +tonic = { version = "0.12", default-features = false, features = ["codegen", "prost", "transport"] } tonic-health = { version = "0.12" } tonic-web = { version = "0.12" } tracing = { version = "0.1" } diff --git a/crates/miden-lib/Cargo.toml b/crates/miden-lib/Cargo.toml index dd5461cec..bd77f8d18 100644 --- a/crates/miden-lib/Cargo.toml +++ b/crates/miden-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-lib" -version = "0.7.0" +version = "0.8.0" description = "Standard library of the Miden rollup" readme = "README.md" categories = ["no-std"] diff --git a/crates/miden-lib/src/transaction/memory.rs b/crates/miden-lib/src/transaction/memory.rs index a92e76d2d..6e0ca413c 100644 --- a/crates/miden-lib/src/transaction/memory.rs +++ b/crates/miden-lib/src/transaction/memory.rs @@ -259,6 +259,7 @@ pub const NATIVE_ACCT_STORAGE_SLOTS_SECTION_PTR: MemoryAddress = /// The size of the memory segment allocated to each note. 
pub const NOTE_MEM_SIZE: MemoryAddress = 2048; +#[allow(clippy::empty_line_after_outer_attr)] #[rustfmt::skip] // INPUT NOTES DATA // ------------------------------------------------------------------------------------------------ diff --git a/crates/miden-objects/Cargo.toml b/crates/miden-objects/Cargo.toml index e7ed161a3..08587ef18 100644 --- a/crates/miden-objects/Cargo.toml +++ b/crates/miden-objects/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-objects" -version = "0.7.0" +version = "0.8.0" description = "Core components of the Miden rollup" readme = "README.md" categories = ["no-std"] diff --git a/crates/miden-tx/Cargo.toml b/crates/miden-tx/Cargo.toml index b56327019..606a41705 100644 --- a/crates/miden-tx/Cargo.toml +++ b/crates/miden-tx/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-tx" -version = "0.7.0" +version = "0.8.0" description = "Miden rollup transaction compiler, executor, and prover" readme = "README.md" categories = ["no-std"] diff --git a/docs/index.md b/docs/index.md index 68f0488bc..f7da3f122 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,7 +16,7 @@ If you want to join the technical discussion, please check out the following: ## Status and features -Polygon Miden is currently on release v0.7. This is an early version of the protocol and its components. +Polygon Miden is currently on release v0.8. This is an early version of the protocol and its components. > **Important** > We expect breaking changes on all components. 
diff --git a/rust-toolchain.toml b/rust-toolchain.toml index a1c01e041..252a5088e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.82" +channel = "1.84" components = ["rustfmt", "rust-src", "clippy"] targets = ["wasm32-unknown-unknown"] profile = "minimal" From 89f54bcc6dfdee819ba10ae81a81cdf23102107a Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Thu, 23 Jan 2025 12:11:53 +0100 Subject: [PATCH 2/7] feat: Compile account components from files rather than constants (#1097) --- crates/miden-lib/build.rs | 64 ++++++++++++--------- crates/miden-lib/src/account/auth/mod.rs | 7 ++- crates/miden-lib/src/account/faucets/mod.rs | 7 ++- crates/miden-lib/src/account/wallets/mod.rs | 7 ++- 4 files changed, 54 insertions(+), 31 deletions(-) diff --git a/crates/miden-lib/build.rs b/crates/miden-lib/build.rs index 5f0b5de38..5d2c3eff9 100644 --- a/crates/miden-lib/build.rs +++ b/crates/miden-lib/build.rs @@ -75,7 +75,11 @@ fn main() -> Result<()> { )?; // compile account components - compile_account_components(&target_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), assembler)?; + compile_account_components( + &source_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), + &target_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), + assembler, + )?; generate_kernel_error_constants(&source_dir)?; @@ -233,7 +237,7 @@ fn parse_proc_offsets(filename: impl AsRef) -> Result Result<()> { @@ -283,33 +287,37 @@ fn compile_note_scripts(source_dir: &Path, target_dir: &Path, assembler: Assembl Ok(()) } -// COMPILE DEFAULT ACCOUNT COMPONENTS +// COMPILE ACCOUNT COMPONENTS // ================================================================================================ -const BASIC_WALLET_CODE: &str = " - export.::miden::contracts::wallets::basic::receive_asset - export.::miden::contracts::wallets::basic::create_note - export.::miden::contracts::wallets::basic::move_asset_to_note -"; - -const RPO_FALCON_AUTH_CODE: &str = " - 
export.::miden::contracts::auth::basic::auth_tx_rpo_falcon512 -"; - -const BASIC_FUNGIBLE_FAUCET_CODE: &str = " - export.::miden::contracts::faucets::basic_fungible::distribute - export.::miden::contracts::faucets::basic_fungible::burn -"; - -/// Compiles the default account components into a MASL library and stores the complied files in -/// `target_dir`. -fn compile_account_components(target_dir: &Path, assembler: Assembler) -> Result<()> { - for (component_name, component_code) in [ - ("basic_wallet", BASIC_WALLET_CODE), - ("rpo_falcon_512", RPO_FALCON_AUTH_CODE), - ("basic_fungible_faucet", BASIC_FUNGIBLE_FAUCET_CODE), - ] { - let component_library = assembler.clone().assemble_library([component_code])?; +/// Compiles the account components in `source_dir` into MASL libraries and stores the compiled +/// files in `target_dir`. +fn compile_account_components( + source_dir: &Path, + target_dir: &Path, + assembler: Assembler, +) -> Result<()> { + if !target_dir.exists() { + fs::create_dir_all(target_dir).unwrap(); + } + + for masm_file_path in get_masm_files(source_dir).unwrap() { + let component_name = masm_file_path + .file_stem() + .expect("masm file should have a file stem") + .to_str() + .expect("file stem should be valid UTF-8") + .to_owned(); + + // Read the source code to string instead of passing it to assemble_library directly since + // that would attempt to interpret the path as a LibraryPath which would fail. 
+ let component_source_code = fs::read_to_string(masm_file_path) + .expect("reading the component's MASM source code should succeed"); + + let component_library = assembler + .clone() + .assemble_library([component_source_code]) + .expect("library assembly should succeed"); let component_file_path = target_dir.join(component_name).with_extension(Library::LIBRARY_EXTENSION); component_library.write_to_file(component_file_path).into_diagnostic()?; diff --git a/crates/miden-lib/src/account/auth/mod.rs b/crates/miden-lib/src/account/auth/mod.rs index ce93fafed..8a25fe578 100644 --- a/crates/miden-lib/src/account/auth/mod.rs +++ b/crates/miden-lib/src/account/auth/mod.rs @@ -8,11 +8,16 @@ use crate::account::components::rpo_falcon_512_library; /// An [`AccountComponent`] implementing the RpoFalcon512 signature scheme for authentication of /// transactions. /// -/// Its exported procedures are: +/// It reexports the procedures from `miden::contracts::auth::basic`. When linking against this +/// component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be available to the +/// assembler which is the case when using [`TransactionKernel::assembler()`][kasm]. The procedures +/// of this component are: /// - `auth_tx_rpo_falcon512`, which can be used to verify a signature provided via the advice stack /// to authenticate a transaction. /// /// This component supports all account types. +/// +/// [kasm]: crate::transaction::TransactionKernel::assembler pub struct RpoFalcon512 { public_key: PublicKey, } diff --git a/crates/miden-lib/src/account/faucets/mod.rs b/crates/miden-lib/src/account/faucets/mod.rs index 0238e2ef4..1c0388107 100644 --- a/crates/miden-lib/src/account/faucets/mod.rs +++ b/crates/miden-lib/src/account/faucets/mod.rs @@ -15,7 +15,10 @@ use crate::account::{auth::RpoFalcon512, components::basic_fungible_faucet_libra /// An [`AccountComponent`] implementing a basic fungible faucet. 
/// -/// Its exported procedures are: +/// It reexports the procedures from `miden::contracts::faucets::basic_fungible`. When linking +/// against this component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be +/// available to the assembler which is the case when using +/// [`TransactionKernel::assembler()`][kasm]. The procedures of this component are: /// - `distribute`, which mints an assets and create a note for the provided recipient. /// - `burn`, which burns the provided asset. /// @@ -24,6 +27,8 @@ use crate::account::{auth::RpoFalcon512, components::basic_fungible_faucet_libra /// authentication. /// /// This component supports accounts of type [`AccountType::FungibleFaucet`]. +/// +/// [kasm]: crate::transaction::TransactionKernel::assembler pub struct BasicFungibleFaucet { symbol: TokenSymbol, decimals: u8, diff --git a/crates/miden-lib/src/account/wallets/mod.rs b/crates/miden-lib/src/account/wallets/mod.rs index 2d89e2530..9d8e0e33d 100644 --- a/crates/miden-lib/src/account/wallets/mod.rs +++ b/crates/miden-lib/src/account/wallets/mod.rs @@ -15,7 +15,10 @@ use crate::account::{auth::RpoFalcon512, components::basic_wallet_library}; /// An [`AccountComponent`] implementing a basic wallet. /// -/// Its exported procedures are: +/// It reexports the procedures from `miden::contracts::wallets::basic`. When linking against this +/// component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be available to the +/// assembler which is the case when using [`TransactionKernel::assembler()`][kasm]. The procedures +/// of this component are: /// - `receive_asset`, which can be used to add an asset to the account. /// - `create_note`, which can be used to create a new note without any assets attached to it. /// - `move_asset_to_note`, which can be used to remove the specified asset from the account and add @@ -25,6 +28,8 @@ use crate::account::{auth::RpoFalcon512, components::basic_wallet_library}; /// providing authentication. 
/// /// This component supports all account types. +/// +/// [kasm]: crate::transaction::TransactionKernel::assembler pub struct BasicWallet; impl From for AccountComponent { From 9930c363cc7d793d6235e06a676e9d79b8bf09fb Mon Sep 17 00:00:00 2001 From: Santiago Pittella <87827390+SantiagoPittella@users.noreply.github.com> Date: Tue, 28 Jan 2025 02:27:01 -0300 Subject: [PATCH 3/7] chore: restructure proving service clients crate (#1102) --- CHANGELOG.md | 1 + crates/miden-proving-service-client/build.rs | 2 +- crates/miden-proving-service-client/src/lib.rs | 7 +------ .../src/{ => tx_prover}/generated/mod.rs | 0 .../src/{ => tx_prover}/generated/nostd/api.rs | 0 .../src/{ => tx_prover}/generated/nostd/mod.rs | 0 .../src/{ => tx_prover}/generated/std/api.rs | 0 .../src/{ => tx_prover}/generated/std/mod.rs | 0 .../src/{prover.rs => tx_prover/mod.rs} | 7 +++++-- 9 files changed, 8 insertions(+), 9 deletions(-) rename crates/miden-proving-service-client/src/{ => tx_prover}/generated/mod.rs (100%) rename crates/miden-proving-service-client/src/{ => tx_prover}/generated/nostd/api.rs (100%) rename crates/miden-proving-service-client/src/{ => tx_prover}/generated/nostd/mod.rs (100%) rename crates/miden-proving-service-client/src/{ => tx_prover}/generated/std/api.rs (100%) rename crates/miden-proving-service-client/src/{ => tx_prover}/generated/std/mod.rs (100%) rename crates/miden-proving-service-client/src/{prover.rs => tx_prover/mod.rs} (95%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 318987fa8..ca180f898 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Changes - [BREAKING] Incremented minimum supported Rust version to 1.84. +- [BREAKING] Moved `generated` module from `miden-proving-service-client` crate to `tx_prover::generated` hierarchy (#1102). 
## 0.7.1 (2025-01-24) - `miden-objects` crate only diff --git a/crates/miden-proving-service-client/build.rs b/crates/miden-proving-service-client/build.rs index ad10d95b7..e5167ad51 100644 --- a/crates/miden-proving-service-client/build.rs +++ b/crates/miden-proving-service-client/build.rs @@ -49,7 +49,7 @@ fn copy_proto_files() -> miette::Result<()> { fn compile_tonic_client_proto() -> miette::Result<()> { let crate_root = PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set")); - let dst_dir = crate_root.join("src").join("generated"); + let dst_dir = crate_root.join("src").join("tx_prover").join("generated"); // Remove `api.rs` if it exists. fs::remove_file(dst_dir.join("api.rs")).into_diagnostic().ok(); diff --git a/crates/miden-proving-service-client/src/lib.rs b/crates/miden-proving-service-client/src/lib.rs index a12fd0251..221c029a3 100644 --- a/crates/miden-proving-service-client/src/lib.rs +++ b/crates/miden-proving-service-client/src/lib.rs @@ -13,12 +13,7 @@ extern crate std; use thiserror::Error; #[cfg(feature = "tx-prover")] -pub mod generated; - -#[cfg(feature = "tx-prover")] -mod prover; -#[cfg(feature = "tx-prover")] -pub use prover::RemoteTransactionProver; +pub mod tx_prover; /// Protobuf definition for the Miden proving service pub const SERVICE_PROTO: &str = include_str!("../proto/api.proto"); diff --git a/crates/miden-proving-service-client/src/generated/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs similarity index 100% rename from crates/miden-proving-service-client/src/generated/mod.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/mod.rs diff --git a/crates/miden-proving-service-client/src/generated/nostd/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/api.rs similarity index 100% rename from crates/miden-proving-service-client/src/generated/nostd/api.rs rename to 
crates/miden-proving-service-client/src/tx_prover/generated/nostd/api.rs diff --git a/crates/miden-proving-service-client/src/generated/nostd/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs similarity index 100% rename from crates/miden-proving-service-client/src/generated/nostd/mod.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs diff --git a/crates/miden-proving-service-client/src/generated/std/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/api.rs similarity index 100% rename from crates/miden-proving-service-client/src/generated/std/api.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/std/api.rs diff --git a/crates/miden-proving-service-client/src/generated/std/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs similarity index 100% rename from crates/miden-proving-service-client/src/generated/std/mod.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs diff --git a/crates/miden-proving-service-client/src/prover.rs b/crates/miden-proving-service-client/src/tx_prover/mod.rs similarity index 95% rename from crates/miden-proving-service-client/src/prover.rs rename to crates/miden-proving-service-client/src/tx_prover/mod.rs index 58385cbda..b96bdb4ad 100644 --- a/crates/miden-proving-service-client/src/prover.rs +++ b/crates/miden-proving-service-client/src/tx_prover/mod.rs @@ -1,12 +1,15 @@ +pub mod generated; + use alloc::{ boxed::Box, string::{String, ToString}, }; +use generated::api_client::ApiClient; use miden_objects::transaction::{ProvenTransaction, TransactionWitness}; use miden_tx::{utils::sync::RwLock, TransactionProver, TransactionProverError}; -use crate::{generated::api_client::ApiClient, RemoteProverError}; +use crate::RemoteProverError; // REMOTE TRANSACTION PROVER // ================================================================================================ @@ -84,7 
+87,7 @@ impl TransactionProver for RemoteTransactionProver { .ok_or_else(|| TransactionProverError::other("client should be connected"))? .clone(); - let request = tonic::Request::new(crate::generated::ProveTransactionRequest { + let request = tonic::Request::new(generated::ProveTransactionRequest { transaction_witness: tx_witness.to_bytes(), }); From beddc2a2fe180b0c84a806efed188800080d0938 Mon Sep 17 00:00:00 2001 From: Santiago Pittella <87827390+SantiagoPittella@users.noreply.github.com> Date: Thu, 30 Jan 2025 17:11:36 -0300 Subject: [PATCH 4/7] feat: update workers service in proxy (#1107) * feat: use different service for workers update fix: move changelog entry to right place * review: use http/2 for update workers endpoint * review: replace service with endpoint in the changelog * review: re-arrange ProxyConfig struct * review: update command documentation * review: rename to LoadBalancerUpdateService * review: add error message in the empty query params case * review: move response creation function to update worker file * fix: typo in LoadBalancerUpdateService * review: make function private * review: rename health check frequency --- CHANGELOG.md | 1 + bin/proving-service/src/commands/mod.rs | 3 + bin/proving-service/src/commands/proxy.rs | 13 +- .../src/commands/update_workers.rs | 13 +- bin/proving-service/src/proxy/health_check.rs | 86 ++++++++++ bin/proving-service/src/proxy/mod.rs | 144 +--------------- .../src/proxy/update_workers.rs | 155 ++++++++++++++++++ bin/proving-service/src/proxy/worker.rs | 3 +- bin/proving-service/src/utils.rs | 47 +----- 9 files changed, 279 insertions(+), 186 deletions(-) create mode 100644 bin/proving-service/src/proxy/health_check.rs create mode 100644 bin/proving-service/src/proxy/update_workers.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index df4d16868..3ba1693d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - [BREAKING] Incremented minimum supported Rust version to 1.84. 
- [BREAKING] Moved `generated` module from `miden-proving-service-client` crate to `tx_prover::generated` hierarchy (#1102). +- Added an endpoint to the `miden-proving-service` to update the workers (#1107). ## 0.7.2 (2025-01-28) - `miden-objects` crate only diff --git a/bin/proving-service/src/commands/mod.rs b/bin/proving-service/src/commands/mod.rs index 42d6c11ba..db7bacff8 100644 --- a/bin/proving-service/src/commands/mod.rs +++ b/bin/proving-service/src/commands/mod.rs @@ -27,6 +27,8 @@ pub struct ProxyConfig { pub host: String, /// Port of the proxy. pub port: u16, + /// Worker update service port. + pub workers_update_port: u16, /// Maximum time in seconds to complete the entire request. pub timeout_secs: u64, /// Maximum time in seconds to establish a connection. @@ -61,6 +63,7 @@ impl Default for ProxyConfig { health_check_interval_secs: 1, prometheus_host: "127.0.0.1".into(), prometheus_port: 6192, + workers_update_port: 8083, } } } diff --git a/bin/proving-service/src/commands/proxy.rs b/bin/proving-service/src/commands/proxy.rs index 60d246148..f898bb66d 100644 --- a/bin/proving-service/src/commands/proxy.rs +++ b/bin/proving-service/src/commands/proxy.rs @@ -4,13 +4,14 @@ use pingora::{ lb::Backend, prelude::{background_service, Opt}, server::Server, + services::listening::Service, }; use pingora_proxy::http_proxy_service; use tracing::warn; use crate::{ error::TxProverServiceError, - proxy::{LoadBalancer, LoadBalancerState}, + proxy::{update_workers::LoadBalancerUpdateService, LoadBalancer, LoadBalancerState}, utils::MIDEN_PROVING_SERVICE, }; @@ -58,8 +59,17 @@ impl StartProxy { let worker_lb = LoadBalancerState::new(workers, &proxy_config).await?; let health_check_service = background_service("health_check", worker_lb); + let worker_lb = health_check_service.task(); + let updater_service = LoadBalancerUpdateService::new(worker_lb.clone()); + + let mut update_workers_service = + Service::new("update_workers".to_string(), updater_service); + 
update_workers_service.add_tcp( + format!("{}:{}", proxy_config.host, proxy_config.workers_update_port).as_str(), + ); + // Set up the load balancer let mut lb = http_proxy_service(&server.configuration, LoadBalancer(worker_lb)); @@ -84,6 +94,7 @@ impl StartProxy { server.add_service(prometheus_service_http); server.add_service(health_check_service); + server.add_service(update_workers_service); server.add_service(lb); tokio::task::spawn_blocking(|| server.run_forever()) .await diff --git a/bin/proving-service/src/commands/update_workers.rs b/bin/proving-service/src/commands/update_workers.rs index 6bde5a532..9a0e0762e 100644 --- a/bin/proving-service/src/commands/update_workers.rs +++ b/bin/proving-service/src/commands/update_workers.rs @@ -40,19 +40,17 @@ pub struct UpdateWorkers { } impl UpdateWorkers { - /// Makes a requests to the proxy to update the workers. + /// Makes a request to the update workers endpoint to update the workers. /// /// It works by sending a GET request to the proxy with the query parameters. The query /// parameters are serialized from the struct fields. /// - /// This method will work only if the proxy is running and the user is in the same computer as - /// the proxy, since the proxy checks for the source IP address and checks that the sender is - /// localhost. + /// It will use the same host as the proxy and the workers update port from the configuration + /// file. /// /// The request will return the new number of workers in the X-Worker-Count header. /// /// # Errors - /// - If a tokio runtime cannot be created. /// - If the query parameters cannot be serialized. /// - If the request fails. /// - If the status code is not successful.
@@ -68,7 +66,10 @@ impl UpdateWorkers { let proxy_config = ProxyConfig::load_config_from_file()?; // Create the full URL - let url = format!("http://{}:{}?{}", proxy_config.host, proxy_config.port, query_params); + let url = format!( + "http://{}:{}?{}", + proxy_config.host, proxy_config.workers_update_port, query_params + ); // Create an HTTP/2 client let client = Client::builder() diff --git a/bin/proving-service/src/proxy/health_check.rs b/bin/proving-service/src/proxy/health_check.rs new file mode 100644 index 000000000..06946472a --- /dev/null +++ b/bin/proving-service/src/proxy/health_check.rs @@ -0,0 +1,86 @@ +use std::time::Duration; + +use axum::async_trait; +use pingora::{prelude::sleep, server::ShutdownWatch, services::background::BackgroundService}; +use tonic::transport::Channel; +use tonic_health::pb::health_client::HealthClient; +use tracing::debug_span; + +use super::{ + metrics::{WORKER_COUNT, WORKER_UNHEALTHY}, + LoadBalancerState, +}; +use crate::error::TxProverServiceError; + +/// Implement the BackgroundService trait for the LoadBalancer +/// +/// A [BackgroundService] can be run as part of a Pingora application to add supporting logic that +/// exists outside of the request/response lifecycle. +/// +/// We use this implementation to periodically check the health of the workers and update the list +/// of available workers. +#[async_trait] +impl BackgroundService for LoadBalancerState { + /// Starts the health check background service. + /// + /// This function is called when the Pingora server tries to start all the services. The + /// background service can return at anytime or wait for the `shutdown` signal. + /// + /// The health check background service will periodically check the health of the workers + /// using the gRPC health check protocol. If a worker is not healthy, it will be removed from + /// the list of available workers. + /// + /// # Errors + /// - If the worker has an invalid URI. 
+ async fn start(&self, _shutdown: ShutdownWatch) { + Box::pin(async move { + loop { + // Create a new spawn to perform the health check + let span = debug_span!("proxy:health_check"); + let _guard = span.enter(); + + let mut workers = self.workers.write().await; + let initial_workers_len = workers.len(); + + // Perform health checks on workers and retain healthy ones + let healthy_workers = self.check_workers_health(workers.iter_mut()).await; + + // Update the worker list with healthy workers + *workers = healthy_workers; + + // Update the worker count and worker unhealhy count metrics + WORKER_COUNT.set(workers.len() as i64); + let unhealthy_workers = initial_workers_len - workers.len(); + WORKER_UNHEALTHY.inc_by(unhealthy_workers as u64); + + // Sleep for the defined interval before the next health check + sleep(self.health_check_interval).await; + } + }) + .await; + } +} + +// HELPERS +// ================================================================================================ + +/// Create a gRPC [HealthClient] for the given worker address. +/// +/// # Errors +/// - [TxProverServiceError::InvalidURI] if the worker address is invalid. +/// - [TxProverServiceError::ConnectionFailed] if the connection to the worker fails. +pub async fn create_health_check_client( + address: String, + connection_timeout: Duration, + total_timeout: Duration, +) -> Result, TxProverServiceError> { + let channel = Channel::from_shared(format!("http://{}", address)) + .map_err(|err| TxProverServiceError::InvalidURI(err, address.clone()))? 
+ .connect_timeout(connection_timeout) + .timeout(total_timeout) + .connect() + .await + .map_err(|err| TxProverServiceError::ConnectionFailed(err, address))?; + + Ok(HealthClient::new(channel)) +} diff --git a/bin/proving-service/src/proxy/mod.rs b/bin/proving-service/src/proxy/mod.rs index 7e770322d..aefde3eed 100644 --- a/bin/proving-service/src/proxy/mod.rs +++ b/bin/proving-service/src/proxy/mod.rs @@ -1,7 +1,5 @@ use std::{ collections::VecDeque, - future::Future, - pin::Pin, sync::{Arc, LazyLock}, time::{Duration, Instant}, }; @@ -11,22 +9,20 @@ use bytes::Bytes; use metrics::{ QUEUE_LATENCY, QUEUE_SIZE, RATE_LIMITED_REQUESTS, RATE_LIMIT_VIOLATIONS, REQUEST_COUNT, REQUEST_FAILURE_COUNT, REQUEST_LATENCY, REQUEST_RETRIES, WORKER_BUSY, WORKER_COUNT, - WORKER_REQUEST_COUNT, WORKER_UNHEALTHY, + WORKER_REQUEST_COUNT, }; use pingora::{ http::ResponseHeader, lb::Backend, prelude::*, protocols::Digest, - server::ShutdownWatch, - services::background::BackgroundService, upstreams::peer::{Peer, ALPN}, }; use pingora_core::{upstreams::peer::HttpPeer, Result}; use pingora_limits::rate::Rate; use pingora_proxy::{ProxyHttp, Session}; -use tokio::{sync::RwLock, time::sleep}; -use tracing::{debug_span, error, info, info_span, warn, Span}; +use tokio::sync::RwLock; +use tracing::{error, info, info_span, warn, Span}; use uuid::Uuid; use worker::Worker; @@ -38,16 +34,15 @@ use crate::{ error::TxProverServiceError, utils::{ create_queue_full_response, create_response_with_error_message, - create_too_many_requests_response, create_workers_updated_response, MIDEN_PROVING_SERVICE, + create_too_many_requests_response, MIDEN_PROVING_SERVICE, }, }; +mod health_check; pub mod metrics; +pub(crate) mod update_workers; mod worker; -/// Localhost address -const LOCALHOST_ADDR: &str = "127.0.0.1"; - // LOAD BALANCER STATE // ================================================================================================ @@ -61,7 +56,7 @@ pub struct LoadBalancerState { 
max_retries_per_request: usize, max_req_per_sec: isize, available_workers_polling_time: Duration, - health_check_frequency: Duration, + health_check_interval: Duration, } impl LoadBalancerState { @@ -99,7 +94,7 @@ impl LoadBalancerState { available_workers_polling_time: Duration::from_millis( config.available_workers_polling_time_ms, ), - health_check_frequency: Duration::from_secs(config.health_check_interval_secs), + health_check_interval: Duration::from_secs(config.health_check_interval_secs), }) } @@ -191,66 +186,6 @@ impl LoadBalancerState { self.workers.read().await.iter().filter(|w| !w.is_available()).count() } - /// Handles the update workers request. - /// - /// # Behavior - /// - Reads the HTTP request from the session. - /// - If query parameters are present, attempts to parse them as an `UpdateWorkers` object. - /// - If the parsing fails, returns an error response. - /// - If successful, updates the list of workers by calling `update_workers`. - /// - If the update is successful, returns the count of available workers. - /// - /// # Errors - /// - If the HTTP request cannot be read. - /// - If the query parameters cannot be parsed. - /// - If the workers cannot be updated. - /// - If the response cannot be created. - pub async fn handle_update_workers_request( - &self, - session: &mut Session, - ) -> Option> { - let http_session = session.as_downstream_mut(); - - // Attempt to read the HTTP request - if let Err(err) = http_session.read_request().await { - let error_message = format!("Failed to read request: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - } - - // Extract and parse query parameters, if there are not any, return early to continue - // processing the request as a regular proving request. 
- let query_params = match http_session.req_header().as_ref().uri.query() { - Some(params) => params, - None => { - return None; - }, - }; - - // Parse the query parameters - let update_workers: Result = serde_qs::from_str(query_params); - let update_workers = match update_workers { - Ok(workers) => workers, - Err(err) => { - let error_message = format!("Failed to parse query parameters: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - }, - }; - - // Update workers and handle potential errors - if let Err(err) = self.update_workers(update_workers).await { - let error_message = format!("Failed to update workers: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - } - - // Successfully updated workers - info!("Workers updated successfully"); - let workers_count = self.num_workers().await; - Some(create_workers_updated_response(session, workers_count).await) - } - /// Check the health of the workers and returns a list of healthy workers. /// /// Performs a health check on each worker using the gRPC health check protocol. 
If a worker @@ -425,7 +360,7 @@ impl ProxyHttp for LoadBalancer { Some(addr) => addr.to_string(), None => { return create_response_with_error_message( - session, + session.as_downstream_mut(), "No socket address".to_string(), ) .await; @@ -434,13 +369,6 @@ impl ProxyHttp for LoadBalancer { info!("Client address: {:?}", client_addr); - // Special handling for localhost - if client_addr.contains(LOCALHOST_ADDR) { - if let Some(response) = self.0.handle_update_workers_request(session).await { - return response; - } - } - // Increment the request count REQUEST_COUNT.inc(); @@ -746,57 +674,3 @@ impl ProxyHttp for ProxyHttpDefaultImpl { unimplemented!("This is a dummy implementation, should not be called") } } - -/// Implement the BackgroundService trait for the LoadBalancer -/// -/// A [BackgroundService] can be run as part of a Pingora application to add supporting logic that -/// exists outside of the request/response lifecycle. -/// -/// We use this implementation to periodically check the health of the workers and update the list -/// of available workers. -impl BackgroundService for LoadBalancerState { - /// Starts the health check background service. - /// - /// This function is called when the Pingora server tries to start all the services. The - /// background service can return at anytime or wait for the `shutdown` signal. - /// - /// The health check background service will periodically check the health of the workers - /// using the gRPC health check protocol. If a worker is not healthy, it will be removed from - /// the list of available workers. - /// - /// # Errors - /// - If the worker has an invalid URI. 
- fn start<'life0, 'async_trait>( - &'life0 self, - _shutdown: ShutdownWatch, - ) -> Pin + ::core::marker::Send + 'async_trait>> - where - 'life0: 'async_trait, - Self: 'async_trait, - { - Box::pin(async move { - loop { - // Create a new spawn to perform the health check - let span = debug_span!("proxy:health_check"); - let _guard = span.enter(); - - let mut workers = self.workers.write().await; - let initial_workers_len = workers.len(); - - // Perform health checks on workers and retain healthy ones - let healthy_workers = self.check_workers_health(workers.iter_mut()).await; - - // Update the worker list with healthy workers - *workers = healthy_workers; - - // Update the worker count and worker unhealhy count metrics - WORKER_COUNT.set(workers.len() as i64); - let unhealthy_workers = initial_workers_len - workers.len(); - WORKER_UNHEALTHY.inc_by(unhealthy_workers as u64); - - // Sleep for the defined interval before the next health check - sleep(self.health_check_frequency).await; - } - }) - } -} diff --git a/bin/proving-service/src/proxy/update_workers.rs b/bin/proving-service/src/proxy/update_workers.rs new file mode 100644 index 000000000..cd19ecb99 --- /dev/null +++ b/bin/proving-service/src/proxy/update_workers.rs @@ -0,0 +1,155 @@ +use core::fmt; +use std::sync::Arc; + +use axum::async_trait; +use pingora::{ + apps::{HttpServerApp, HttpServerOptions}, + http::ResponseHeader, + protocols::{http::ServerSession, Stream}, + server::ShutdownWatch, +}; +use tracing::{error, info}; + +use super::LoadBalancerState; +use crate::{ + commands::update_workers::UpdateWorkers, + utils::{create_response_with_error_message, MIDEN_PROVING_SERVICE}, +}; + +/// The Load Balancer Updater Service. +/// +/// This service is responsible for updating the list of workers in the load balancer. +pub(crate) struct LoadBalancerUpdateService { + lb_state: Arc, + server_opts: HttpServerOptions, +} + +/// Manually implement Debug for LoadBalancerUpdateService. 
+/// [HttpServerOptions] does not implement Debug, so we cannot derive Debug for +/// [LoadBalancerUpdateService], which is needed for the tracing instrumentation. +impl fmt::Debug for LoadBalancerUpdateService { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("LBUpdaterService").field("lb_state", &self.lb_state).finish() + } +} + +impl LoadBalancerUpdateService { + pub(crate) fn new(lb_state: Arc) -> Self { + let mut server_opts = HttpServerOptions::default(); + server_opts.h2c = true; + + Self { lb_state, server_opts } + } +} + +#[async_trait] +impl HttpServerApp for LoadBalancerUpdateService { + /// Handles the update workers request. + /// + /// # Behavior + /// - Reads the HTTP request from the session. + /// - If query parameters are present, attempts to parse them as an `UpdateWorkers` object. + /// - If the parsing fails, returns an error response. + /// - If successful, updates the list of workers by calling `update_workers`. + /// - If the update is successful, returns the count of available workers. + /// + /// # Errors + /// - If the HTTP request cannot be read. + /// - If the query parameters cannot be parsed. + /// - If the workers cannot be updated. + /// - If the response cannot be created. 
+ #[tracing::instrument(target = MIDEN_PROVING_SERVICE, name = "lb_updater_service:process_new_http", skip(http))] + async fn process_new_http( + self: &Arc, + mut http: ServerSession, + _shutdown: &ShutdownWatch, + ) -> Option { + match http.read_request().await { + Ok(res) => { + if !res { + error!("Failed to read request header"); + create_response_with_error_message( + &mut http, + "Failed to read request header".to_string(), + ) + .await + .ok(); + return None; + } + }, + Err(e) => { + error!("HTTP server fails to read from downstream: {e}"); + create_response_with_error_message( + &mut http, + format!("HTTP server fails to read from downstream: {e}"), + ) + .await + .ok(); + return None; + }, + } + + info!("Successfully get a new request to update workers"); + + // Extract and parse query parameters, if there are not any, return early. + let query_params = match http.req_header().as_ref().uri.query() { + Some(params) => params, + None => { + let error_message = "No query parameters provided".to_string(); + error!("{}", error_message); + create_response_with_error_message(&mut http, error_message).await.ok(); + return None; + }, + }; + + let update_workers: Result = serde_qs::from_str(query_params); + let update_workers = match update_workers { + Ok(workers) => workers, + Err(err) => { + let error_message = format!("Failed to parse query parameters: {}", err); + error!("{}", error_message); + create_response_with_error_message(&mut http, error_message).await.ok(); + return None; + }, + }; + + // Update workers and handle potential errors + if let Err(err) = self.lb_state.update_workers(update_workers).await { + let error_message = format!("Failed to update workers: {}", err); + error!("{}", error_message); + create_response_with_error_message(&mut http, error_message).await.ok(); + return None; + } + + create_workers_updated_response(&mut http, self.lb_state.num_workers().await) + .await + .ok(); + + info!("Successfully updated workers"); + + None + } + + /// 
Provide HTTP server options used to override default behavior. This function will be called + /// every time a new connection is processed. + fn server_options(&self) -> Option<&HttpServerOptions> { + Some(&self.server_opts) + } +} + +// HELPERS +// ================================================================================================ + +/// Create a 200 response for updated workers +/// +/// It will set the X-Worker-Count header to the number of workers. +async fn create_workers_updated_response( + session: &mut ServerSession, + workers: usize, +) -> pingora_core::Result { + let mut header = ResponseHeader::build(200, None)?; + header.insert_header("X-Worker-Count", workers.to_string())?; + session.set_keepalive(None); + session.write_response_header(Box::new(header)).await?; + Ok(true) +} diff --git a/bin/proving-service/src/proxy/worker.rs b/bin/proving-service/src/proxy/worker.rs index 0d8cf3e4b..fed2746ce 100644 --- a/bin/proving-service/src/proxy/worker.rs +++ b/bin/proving-service/src/proxy/worker.rs @@ -7,7 +7,8 @@ use tonic_health::pb::{ }; use tracing::error; -use crate::{error::TxProverServiceError, utils::create_health_check_client}; +use super::health_check::create_health_check_client; +use crate::error::TxProverServiceError; // WORKER // ================================================================================================ diff --git a/bin/proving-service/src/utils.rs b/bin/proving-service/src/utils.rs index 915fbcc84..1c3b22ada 100644 --- a/bin/proving-service/src/utils.rs +++ b/bin/proving-service/src/utils.rs @@ -1,5 +1,3 @@ -use std::time::Duration; - use opentelemetry::{trace::TracerProvider as _, KeyValue}; use opentelemetry_sdk::{ runtime, @@ -10,13 +8,11 @@ use opentelemetry_semantic_conventions::{ resource::{SERVICE_NAME, SERVICE_VERSION}, SCHEMA_URL, }; -use pingora::{http::ResponseHeader, Error, ErrorType}; +use pingora::{http::ResponseHeader, protocols::http::ServerSession, Error, ErrorType}; use pingora_proxy::Session; 
-use tonic::transport::Channel; -use tonic_health::pb::health_client::HealthClient; use tracing_subscriber::{layer::SubscriberExt, Registry}; -use crate::{error::TxProverServiceError, proxy::metrics::QUEUE_DROP_COUNT}; +use crate::proxy::metrics::QUEUE_DROP_COUNT; pub const MIDEN_PROVING_SERVICE: &str = "miden-proving-service"; @@ -137,51 +133,16 @@ pub async fn create_too_many_requests_response( Ok(true) } -/// Create a 200 response for updated workers -/// -/// It will set the X-Worker-Count header to the number of workers. -pub async fn create_workers_updated_response( - session: &mut Session, - workers: usize, -) -> pingora_core::Result { - let mut header = ResponseHeader::build(200, None)?; - header.insert_header("X-Worker-Count", workers.to_string())?; - session.set_keepalive(None); - session.write_response_header(Box::new(header), true).await?; - Ok(true) -} - /// Create a 400 response with an error message /// /// It will set the X-Error-Message header to the error message. pub async fn create_response_with_error_message( - session: &mut Session, + session: &mut ServerSession, error_msg: String, ) -> pingora_core::Result { let mut header = ResponseHeader::build(400, None)?; header.insert_header("X-Error-Message", error_msg)?; session.set_keepalive(None); - session.write_response_header(Box::new(header), true).await?; + session.write_response_header(Box::new(header)).await?; Ok(true) } - -/// Create a gRPC [HealthClient] for the given worker address. -/// -/// # Errors -/// - [TxProverServiceError::InvalidURI] if the worker address is invalid. -/// - [TxProverServiceError::ConnectionFailed] if the connection to the worker fails. -pub async fn create_health_check_client( - address: String, - connection_timeout: Duration, - total_timeout: Duration, -) -> Result, TxProverServiceError> { - let channel = Channel::from_shared(format!("http://{}", address)) - .map_err(|err| TxProverServiceError::InvalidURI(err, address.clone()))? 
- .connect_timeout(connection_timeout) - .timeout(total_timeout) - .connect() - .await - .map_err(|err| TxProverServiceError::ConnectionFailed(err, address))?; - - Ok(HealthClient::new(channel)) -} From 74edeace8e8bb973ad25daee664f26ea5ad93c65 Mon Sep 17 00:00:00 2001 From: Santiago Pittella <87827390+SantiagoPittella@users.noreply.github.com> Date: Thu, 30 Jan 2025 18:13:45 -0300 Subject: [PATCH 5/7] chore: rename api.proto file to tx_prover.proto (#1110) --- CHANGELOG.md | 1 + bin/proving-service/build.rs | 13 +++++++------ .../proto/{api.proto => tx_prover.proto} | 2 +- bin/proving-service/src/generated/mod.rs | 4 ++-- .../src/generated/{api.rs => tx_prover.rs} | 11 +++++++---- crates/miden-proving-service-client/build.rs | 17 +++++++++-------- .../proto/tx_prover.proto | 2 +- crates/miden-proving-service-client/src/lib.rs | 2 +- .../src/tx_prover/generated/mod.rs | 4 ++-- .../src/tx_prover/generated/nostd/mod.rs | 2 +- .../generated/nostd/{api.rs => tx_prover.rs} | 7 +++++-- .../src/tx_prover/generated/std/mod.rs | 2 +- .../generated/std/{api.rs => tx_prover.rs} | 7 +++++-- .../proto/api.proto => proto/tx_prover.proto | 2 +- 14 files changed, 44 insertions(+), 32 deletions(-) rename bin/proving-service/proto/{api.proto => tx_prover.proto} (94%) rename bin/proving-service/src/generated/{api.rs => tx_prover.rs} (97%) rename proto/api.proto => crates/miden-proving-service-client/proto/tx_prover.proto (94%) rename crates/miden-proving-service-client/src/tx_prover/generated/nostd/{api.rs => tx_prover.rs} (94%) rename crates/miden-proving-service-client/src/tx_prover/generated/std/{api.rs => tx_prover.rs} (94%) rename crates/miden-proving-service-client/proto/api.proto => proto/tx_prover.proto (94%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ba1693d7..1d12deca7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - [BREAKING] Incremented minimum supported Rust version to 1.84. 
- [BREAKING] Moved `generated` module from `miden-proving-service-client` crate to `tx_prover::generated` hierarchy (#1102). - Added an endpoint to the `miden-proving-service` to update the workers (#1107). +- Renamed the protobuf file of the transaction prover to `tx_prover.proto` (#1110). ## 0.7.2 (2025-01-28) - `miden-objects` crate only diff --git a/bin/proving-service/build.rs b/bin/proving-service/build.rs index 264d3dd2d..536ac1a1f 100644 --- a/bin/proving-service/build.rs +++ b/bin/proving-service/build.rs @@ -33,10 +33,11 @@ fn main() -> miette::Result<()> { // HELPER FUNCTIONS // ================================================================================================ -/// Copies all api.proto file from the root proto directory to the proto directory of this crate. +/// Copies the tx_prover.proto file from the root proto directory to the proto directory of this +/// crate. fn copy_proto_files() -> miette::Result<()> { - let src_file = format!("{REPO_PROTO_DIR}/api.proto"); - let dest_file = format!("{CRATE_PROTO_DIR}/api.proto"); + let src_file = format!("{REPO_PROTO_DIR}/tx_prover.proto"); + let dest_file = format!("{CRATE_PROTO_DIR}/tx_prover.proto"); fs::remove_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; fs::create_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; @@ -50,14 +51,14 @@ fn compile_tonic_server_proto() -> miette::Result<()> { PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set")); let dst_dir = crate_root.join("src").join("generated"); - // Remove `api.rs` if it exists. - fs::remove_file(dst_dir.join("api.rs")).into_diagnostic().ok(); + // Remove `tx_prover.rs` if it exists. 
+ fs::remove_file(dst_dir.join("tx_prover.rs")).into_diagnostic().ok(); let out_dir = env::var("OUT_DIR").into_diagnostic()?; let file_descriptor_path = PathBuf::from(out_dir).join("file_descriptor_set.bin"); let proto_dir: PathBuf = CRATE_PROTO_DIR.into(); - let protos = &[proto_dir.join("api.proto")]; + let protos = &[proto_dir.join("tx_prover.proto")]; let includes = &[proto_dir]; let file_descriptors = protox::compile(protos, includes)?; diff --git a/bin/proving-service/proto/api.proto b/bin/proving-service/proto/tx_prover.proto similarity index 94% rename from bin/proving-service/proto/api.proto rename to bin/proving-service/proto/tx_prover.proto index 4555b326f..de39c685c 100644 --- a/bin/proving-service/proto/api.proto +++ b/bin/proving-service/proto/tx_prover.proto @@ -1,6 +1,6 @@ // Specification of the user facing gRPC API. syntax = "proto3"; -package api; +package tx_prover; service Api { rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {} diff --git a/bin/proving-service/src/generated/mod.rs b/bin/proving-service/src/generated/mod.rs index 78397c954..86e50a776 100644 --- a/bin/proving-service/src/generated/mod.rs +++ b/bin/proving-service/src/generated/mod.rs @@ -2,9 +2,9 @@ use miden_objects::transaction::ProvenTransaction; use miden_tx::utils::{Deserializable, DeserializationError, Serializable}; #[rustfmt::skip] -pub mod api; +pub mod tx_prover; -pub use api::*; +pub use tx_prover::*; // CONVERSIONS // ================================================================================================ diff --git a/bin/proving-service/src/generated/api.rs b/bin/proving-service/src/generated/tx_prover.rs similarity index 97% rename from bin/proving-service/src/generated/api.rs rename to bin/proving-service/src/generated/tx_prover.rs index fa143409e..73c46a4f5 100644 --- a/bin/proving-service/src/generated/api.rs +++ b/bin/proving-service/src/generated/tx_prover.rs @@ -116,9 +116,12 @@ pub mod api_client { ) })?; let codec = 
tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction"); + let path = http::uri::PathAndQuery::from_static( + "/tx_prover.Api/ProveTransaction", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction")); + req.extensions_mut() + .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction")); self.inner.unary(req, path, codec).await } } @@ -220,7 +223,7 @@ pub mod api_server { } fn call(&mut self, req: http::Request) -> Self::Future { match req.uri().path() { - "/api.Api/ProveTransaction" => { + "/tx_prover.Api/ProveTransaction" => { #[allow(non_camel_case_types)] struct ProveTransactionSvc(pub Arc); impl< @@ -298,7 +301,7 @@ pub mod api_server { } } /// Generated gRPC service name - pub const SERVICE_NAME: &str = "api.Api"; + pub const SERVICE_NAME: &str = "tx_prover.Api"; impl tonic::server::NamedService for ApiServer { const NAME: &'static str = SERVICE_NAME; } diff --git a/crates/miden-proving-service-client/build.rs b/crates/miden-proving-service-client/build.rs index e5167ad51..fdee85e76 100644 --- a/crates/miden-proving-service-client/build.rs +++ b/crates/miden-proving-service-client/build.rs @@ -34,10 +34,11 @@ fn main() -> miette::Result<()> { // HELPER FUNCTIONS // ================================================================================================ -/// Copies all api.proto file from the root proto directory to the proto directory of this crate. +/// Copies the tx_prover.proto file from the root proto directory to the proto directory of this +/// crate. 
fn copy_proto_files() -> miette::Result<()> { - let src_file = format!("{REPO_PROTO_DIR}/api.proto"); - let dest_file = format!("{CRATE_PROTO_DIR}/api.proto"); + let src_file = format!("{REPO_PROTO_DIR}/tx_prover.proto"); + let dest_file = format!("{CRATE_PROTO_DIR}/tx_prover.proto"); fs::remove_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; fs::create_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; @@ -51,14 +52,14 @@ fn compile_tonic_client_proto() -> miette::Result<()> { PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set")); let dst_dir = crate_root.join("src").join("tx_prover").join("generated"); - // Remove `api.rs` if it exists. - fs::remove_file(dst_dir.join("api.rs")).into_diagnostic().ok(); + // Remove `tx_prover.rs` if it exists. + fs::remove_file(dst_dir.join("tx_prover.rs")).into_diagnostic().ok(); let out_dir = env::var("OUT_DIR").into_diagnostic()?; let file_descriptor_path = PathBuf::from(out_dir).join("file_descriptor_set.bin"); let proto_dir: PathBuf = CRATE_PROTO_DIR.into(); - let protos = &[proto_dir.join("api.proto")]; + let protos = &[proto_dir.join("tx_prover.proto")]; let includes = &[proto_dir]; let file_descriptors = protox::compile(protos, includes)?; @@ -70,9 +71,9 @@ fn compile_tonic_client_proto() -> miette::Result<()> { build_tonic_client(&file_descriptor_path, &std_path, protos, includes, false)?; build_tonic_client(&file_descriptor_path, &nostd_path, protos, includes, true)?; - // Replace `std` references with `core` and `alloc` in `api.rs`. + // Replace `std` references with `core` and `alloc` in `tx_prover.rs`. 
// (Only for nostd version) - let nostd_file_path = nostd_path.join("api.rs"); + let nostd_file_path = nostd_path.join("tx_prover.rs"); let file_content = fs::read_to_string(&nostd_file_path).into_diagnostic()?; let updated_content = file_content .replace("std::result", "core::result") diff --git a/proto/api.proto b/crates/miden-proving-service-client/proto/tx_prover.proto similarity index 94% rename from proto/api.proto rename to crates/miden-proving-service-client/proto/tx_prover.proto index 4555b326f..de39c685c 100644 --- a/proto/api.proto +++ b/crates/miden-proving-service-client/proto/tx_prover.proto @@ -1,6 +1,6 @@ // Specification of the user facing gRPC API. syntax = "proto3"; -package api; +package tx_prover; service Api { rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {} diff --git a/crates/miden-proving-service-client/src/lib.rs b/crates/miden-proving-service-client/src/lib.rs index 221c029a3..3b18eea80 100644 --- a/crates/miden-proving-service-client/src/lib.rs +++ b/crates/miden-proving-service-client/src/lib.rs @@ -16,7 +16,7 @@ use thiserror::Error; pub mod tx_prover; /// Protobuf definition for the Miden proving service -pub const SERVICE_PROTO: &str = include_str!("../proto/api.proto"); +pub const TX_PROVER_PROTO: &str = include_str!("../proto/tx_prover.proto"); /// ERRORS /// =============================================================================================== diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs index 11bbbfa4c..8f4f2baf6 100644 --- a/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs +++ b/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs @@ -7,12 +7,12 @@ compile_error!("The `std` feature cannot be used when targeting `wasm32`."); #[cfg(feature = "std")] mod std; #[cfg(feature = "std")] -pub use std::api::*; +pub use std::tx_prover::*; #[cfg(not(feature = 
"std"))] mod nostd; #[cfg(not(feature = "std"))] -pub use nostd::api::*; +pub use nostd::tx_prover::*; // CONVERSIONS // ================================================================================================ diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs index 1b28e9b38..18134780a 100644 --- a/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs +++ b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs @@ -1,2 +1,2 @@ #[rustfmt::skip] -pub mod api; +pub mod tx_prover; diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/nostd/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs similarity index 94% rename from crates/miden-proving-service-client/src/tx_prover/generated/nostd/api.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs index 8c84619ac..73a6ed46e 100644 --- a/crates/miden-proving-service-client/src/tx_prover/generated/nostd/api.rs +++ b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs @@ -105,9 +105,12 @@ pub mod api_client { ) })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction"); + let path = http::uri::PathAndQuery::from_static( + "/tx_prover.Api/ProveTransaction", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction")); + req.extensions_mut() + .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction")); self.inner.unary(req, path, codec).await } } diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs index 1b28e9b38..18134780a 100644 --- a/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs +++ 
b/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs @@ -1,2 +1,2 @@ #[rustfmt::skip] -pub mod api; +pub mod tx_prover; diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/std/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs similarity index 94% rename from crates/miden-proving-service-client/src/tx_prover/generated/std/api.rs rename to crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs index 6abe53d07..717b7bb1a 100644 --- a/crates/miden-proving-service-client/src/tx_prover/generated/std/api.rs +++ b/crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs @@ -116,9 +116,12 @@ pub mod api_client { ) })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction"); + let path = http::uri::PathAndQuery::from_static( + "/tx_prover.Api/ProveTransaction", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction")); + req.extensions_mut() + .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction")); self.inner.unary(req, path, codec).await } } diff --git a/crates/miden-proving-service-client/proto/api.proto b/proto/tx_prover.proto similarity index 94% rename from crates/miden-proving-service-client/proto/api.proto rename to proto/tx_prover.proto index 4555b326f..de39c685c 100644 --- a/crates/miden-proving-service-client/proto/api.proto +++ b/proto/tx_prover.proto @@ -1,6 +1,6 @@ // Specification of the user facing gRPC API. 
syntax = "proto3"; -package api; +package tx_prover; service Api { rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {} From 723e47c88f9d388ce0195da07a2c7b74b6b5210b Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Mon, 3 Feb 2025 08:50:59 +1300 Subject: [PATCH 6/7] refactor: rename AccountData struct (#1116) --- CHANGELOG.md | 2 +- .../src/account/{data.rs => file.rs} | 52 +++++++++---------- crates/miden-objects/src/account/mod.rs | 4 +- 3 files changed, 29 insertions(+), 29 deletions(-) rename crates/miden-objects/src/account/{data.rs => file.rs} (74%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d12deca7..17ddc0485 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ - [BREAKING] Moved `generated` module from `miden-proving-service-client` crate to `tx_prover::generated` hierarchy (#1102). - Added an endpoint to the `miden-proving-service` to update the workers (#1107). - Renamed the protobuf file of the transaction prover to `tx_prover.proto` (#1110). +- [BREAKING] Renamed `AccountData` to `AccountFile` (#1116). ## 0.7.2 (2025-01-28) - `miden-objects` crate only @@ -22,7 +23,6 @@ - Added missing doc comments (#1100). - Fixed setting of supporting types when instantiating `AccountComponent` from templates (#1103). 
- ## 0.7.0 (2025-01-22) ### Highlights diff --git a/crates/miden-objects/src/account/data.rs b/crates/miden-objects/src/account/file.rs similarity index 74% rename from crates/miden-objects/src/account/data.rs rename to crates/miden-objects/src/account/file.rs index 2f8789558..ccc227c3b 100644 --- a/crates/miden-objects/src/account/data.rs +++ b/crates/miden-objects/src/account/file.rs @@ -15,22 +15,22 @@ use super::{ Account, AuthSecretKey, Word, }; -// ACCOUNT DATA +// ACCOUNT FILE // ================================================================================================ -/// Account data contains a complete description of an account, including the [Account] struct as +/// Account file contains a complete description of an account, including the [Account] struct as /// well as account seed and account authentication info. /// /// The intent of this struct is to provide an easy way to serialize and deserialize all /// account-related data as a single unit (e.g., to/from files). 
#[derive(Debug, Clone)] -pub struct AccountData { +pub struct AccountFile { pub account: Account, pub account_seed: Option, pub auth_secret_key: AuthSecretKey, } -impl AccountData { +impl AccountFile { pub fn new(account: Account, account_seed: Option, auth: AuthSecretKey) -> Self { Self { account, @@ -39,14 +39,14 @@ impl AccountData { } } + /// Serialises and writes binary [AccountFile] to specified file #[cfg(feature = "std")] - /// Serialises and writes binary AccountData to specified file pub fn write(&self, filepath: impl AsRef) -> io::Result<()> { fs::write(filepath, self.to_bytes()) } + /// Reads from file and tries to deserialise an [AccountFile] #[cfg(feature = "std")] - /// Reads from file and tries to deserialise an AccountData pub fn read(filepath: impl AsRef) -> io::Result { let mut file = File::open(filepath)?; let mut buffer = Vec::new(); @@ -54,16 +54,16 @@ impl AccountData { file.read_to_end(&mut buffer)?; let mut reader = SliceReader::new(&buffer); - Ok(AccountData::read_from(&mut reader).map_err(|_| io::ErrorKind::InvalidData)?) + Ok(AccountFile::read_from(&mut reader).map_err(|_| io::ErrorKind::InvalidData)?) 
} } // SERIALIZATION // ================================================================================================ -impl Serializable for AccountData { +impl Serializable for AccountFile { fn write_into(&self, target: &mut W) { - let AccountData { + let AccountFile { account, account_seed, auth_secret_key: auth, @@ -75,7 +75,7 @@ impl Serializable for AccountData { } } -impl Deserializable for AccountData { +impl Deserializable for AccountFile { fn read_from(source: &mut R) -> Result { let account = Account::read_from(source)?; let account_seed = >::read_from(source)?; @@ -102,14 +102,14 @@ mod tests { #[cfg(feature = "std")] use tempfile::tempdir; - use super::AccountData; + use super::AccountFile; use crate::{ account::{storage, Account, AccountCode, AccountId, AuthSecretKey, Felt, Word}, asset::AssetVault, testing::account_id::ACCOUNT_ID_REGULAR_ACCOUNT_IMMUTABLE_CODE_ON_CHAIN, }; - fn build_account_data() -> AccountData { + fn build_account_file() -> AccountFile { let id = AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_IMMUTABLE_CODE_ON_CHAIN).unwrap(); let code = AccountCode::mock(); @@ -121,19 +121,19 @@ mod tests { let account_seed = Some(Word::default()); let auth_secret_key = AuthSecretKey::RpoFalcon512(SecretKey::new()); - AccountData::new(account, account_seed, auth_secret_key) + AccountFile::new(account, account_seed, auth_secret_key) } #[test] fn test_serde() { - let account_data = build_account_data(); - let serialized = account_data.to_bytes(); - let deserialized = AccountData::read_from_bytes(&serialized).unwrap(); - assert_eq!(deserialized.account, account_data.account); - assert_eq!(deserialized.account_seed, account_data.account_seed); + let account_file = build_account_file(); + let serialized = account_file.to_bytes(); + let deserialized = AccountFile::read_from_bytes(&serialized).unwrap(); + assert_eq!(deserialized.account, account_file.account); + assert_eq!(deserialized.account_seed, account_file.account_seed); assert_eq!( 
deserialized.auth_secret_key.to_bytes(), - account_data.auth_secret_key.to_bytes() + account_file.auth_secret_key.to_bytes() ); } @@ -141,17 +141,17 @@ mod tests { #[test] fn test_serde_file() { let dir = tempdir().unwrap(); - let filepath = dir.path().join("account_data.mac"); + let filepath = dir.path().join("account_file.mac"); - let account_data = build_account_data(); - account_data.write(filepath.as_path()).unwrap(); - let deserialized = AccountData::read(filepath.as_path()).unwrap(); + let account_file = build_account_file(); + account_file.write(filepath.as_path()).unwrap(); + let deserialized = AccountFile::read(filepath.as_path()).unwrap(); - assert_eq!(deserialized.account, account_data.account); - assert_eq!(deserialized.account_seed, account_data.account_seed); + assert_eq!(deserialized.account, account_file.account); + assert_eq!(deserialized.account_seed, account_file.account_seed); assert_eq!( deserialized.auth_secret_key.to_bytes(), - account_data.auth_secret_key.to_bytes() + account_file.auth_secret_key.to_bytes() ); } } diff --git a/crates/miden-objects/src/account/mod.rs b/crates/miden-objects/src/account/mod.rs index 3bb2f2fee..2893733c5 100644 --- a/crates/miden-objects/src/account/mod.rs +++ b/crates/miden-objects/src/account/mod.rs @@ -39,8 +39,8 @@ pub use storage::{AccountStorage, AccountStorageHeader, StorageMap, StorageSlot, mod header; pub use header::AccountHeader; -mod data; -pub use data::AccountData; +mod file; +pub use file::AccountFile; // ACCOUNT // ================================================================================================ From e82dee03de7589ef3fb12b7fd901cef25ae5535d Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Tue, 4 Feb 2025 10:25:26 +0100 Subject: [PATCH 7/7] Migrate batch kernel parts from miden-node (#1112) * feat: Add miden-batch-prover crate * feat: Add `BatchId` * Introduce `AccountUpdateError` * feat: Add `ProposedBatch` * feat: Add `LocalBatchProver` * feat: Add `ProvenBatch` * feat: 
Migrate `LocalBatchProver` from node * chore: Rename `NoteAuthenticationInfo` * feat:Add batch expiration block num * chore: Use core instead of std for `Display` * feat: Migrate `MockProvenTxBuilder` * feat: Test tx ordering in batches * feat: Add `BatchAccountUpdate` * chore: Extend test assertions * feat: Refactor and document batch output note tracker * feat: Add input/output notes commitment test * feat: Remove `BlockNumber::from_usize` * feat: Check for duplicate input notes * feat: Add unauthenticated/authenticated scenario tests * chore: Misc doc fixes * feat: Move `ProposedBatch` and `ProvenBatch` to objects * chore: Add changelog entry * feat: Use MockChain for batch tests (partially) * chore: Move most validation to `ProposedBatch` * feat: Use MockChain for all tests * feat: Implement note authentication and tests for it * feat: Verify transactions in batch * chore: Document error conditions on proposed batch * feat: Check max input/output notes and account updates in batch * feat: Check for duplicate transactions in batch * feat: Add tests with a circular note dependency * feat: Rename batch prover crate * chore: Address review comments * chore: Unbox the `BlockHeader` * chore: Remove unused dependencies * feat: Use `InputNotes` rather than `Vec` * feat: Compute batch ID as sequential hash over tx ID and account ID * chore: Remove `BlockInclusionProof` and `NoteInclusionProofs` * chore: Address more review comments * chore: Move tests into dedicated file * feat: Add `block_num` field to `ProvenTransaction` * feat: Add `ChainMmr::from_mmr` * feat: Add `BatchAccountUpdate::into_parts` * feat: Add `ProvenBatch::produced_nullifiers` * feat: Add `BatchId::compute_from_ids` * feat: Expose `BatchProveError` and rename `BatchProposeError` * feat: Remove `compute` prefix from batch id constructors * chore: Move `ChainMmr::from_mmr` behind test flag * chore: Address review comments * feat: Disallow empty transaction batches * chore: Address review comments --- 
CHANGELOG.md | 1 + Cargo.lock | 27 + Cargo.toml | 1 + .../miden-objects/src/batch/account_update.rs | 161 +++++ crates/miden-objects/src/batch/batch_id.rs | 65 ++ crates/miden-objects/src/batch/mod.rs | 12 + crates/miden-objects/src/batch/note_tree.rs | 5 + .../miden-objects/src/batch/proposed_batch.rs | 508 +++++++++++++++ .../miden-objects/src/batch/proven_batch.rs | 93 +++ .../miden-objects/src/block/block_number.rs | 5 - crates/miden-objects/src/block/mod.rs | 6 +- crates/miden-objects/src/errors.rs | 105 +++- crates/miden-objects/src/lib.rs | 6 +- crates/miden-objects/src/testing/chain_mmr.rs | 46 ++ crates/miden-objects/src/testing/mod.rs | 1 + .../src/transaction/chain_mmr.rs | 20 +- .../miden-objects/src/transaction/inputs.rs | 13 + .../src/transaction/proven_tx.rs | 20 + crates/miden-tx-batch-prover/Cargo.toml | 41 ++ crates/miden-tx-batch-prover/README.md | 7 + crates/miden-tx-batch-prover/src/errors.rs | 12 + crates/miden-tx-batch-prover/src/lib.rs | 18 + .../src/local_batch_prover.rs | 58 ++ .../miden-tx-batch-prover/src/testing/mod.rs | 2 + .../src/testing/proven_tx_builder.rs | 111 ++++ crates/miden-tx-batch-prover/src/tests/mod.rs | 1 + .../src/tests/proposed_batch.rs | 591 ++++++++++++++++++ crates/miden-tx/src/prover/mod.rs | 2 + crates/miden-tx/src/testing/mock_chain/mod.rs | 35 +- crates/miden-tx/src/tests/mod.rs | 2 +- crates/miden-tx/src/verifier/mod.rs | 2 +- crates/miden-tx/tests/integration/main.rs | 2 +- 32 files changed, 1939 insertions(+), 40 deletions(-) create mode 100644 crates/miden-objects/src/batch/account_update.rs create mode 100644 crates/miden-objects/src/batch/batch_id.rs create mode 100644 crates/miden-objects/src/batch/proposed_batch.rs create mode 100644 crates/miden-objects/src/batch/proven_batch.rs create mode 100644 crates/miden-objects/src/testing/chain_mmr.rs create mode 100644 crates/miden-tx-batch-prover/Cargo.toml create mode 100644 crates/miden-tx-batch-prover/README.md create mode 100644 
crates/miden-tx-batch-prover/src/errors.rs create mode 100644 crates/miden-tx-batch-prover/src/lib.rs create mode 100644 crates/miden-tx-batch-prover/src/local_batch_prover.rs create mode 100644 crates/miden-tx-batch-prover/src/testing/mod.rs create mode 100644 crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs create mode 100644 crates/miden-tx-batch-prover/src/tests/mod.rs create mode 100644 crates/miden-tx-batch-prover/src/tests/proposed_batch.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 17ddc0485..0d1144cc3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ - Added an endpoint to the `miden-proving-service` to update the workers (#1107). - Renamed the protobuf file of the transaction prover to `tx_prover.proto` (#1110). - [BREAKING] Renamed `AccountData` to `AccountFile` (#1116). +- Implement transaction batch prover in Rust (#1112). ## 0.7.2 (2025-01-28) - `miden-objects` crate only diff --git a/Cargo.lock b/Cargo.lock index 625e70ab0..e71b0dd5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2076,6 +2076,22 @@ dependencies = [ "winter-maybe-async", ] +[[package]] +name = "miden-tx-batch-prover" +version = "0.8.0" +dependencies = [ + "anyhow", + "miden-core", + "miden-crypto", + "miden-lib", + "miden-objects", + "miden-processor", + "miden-tx", + "rand", + "thiserror 2.0.11", + "winterfell", +] + [[package]] name = "miden-verifier" version = "0.12.0" @@ -5092,6 +5108,17 @@ dependencies = [ "winter-utils", ] +[[package]] +name = "winterfell" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6bdcd01333bbf4a349d8d13f269281524bd6d1a36ae3a853187f0665bf1cfd4" +dependencies = [ + "winter-air", + "winter-prover", + "winter-verifier", +] + [[package]] name = "write16" version = "1.0.0" diff --git a/Cargo.toml b/Cargo.toml index 6faffb034..1328ea6d2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "bin/bench-tx", "bin/proving-service", + 
"crates/miden-tx-batch-prover", "crates/miden-lib", "crates/miden-objects", "crates/miden-proving-service-client", diff --git a/crates/miden-objects/src/batch/account_update.rs b/crates/miden-objects/src/batch/account_update.rs new file mode 100644 index 000000000..0dab76cd5 --- /dev/null +++ b/crates/miden-objects/src/batch/account_update.rs @@ -0,0 +1,161 @@ +use alloc::vec::Vec; + +use vm_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; +use vm_processor::{DeserializationError, Digest}; + +use crate::{ + account::{delta::AccountUpdateDetails, AccountId}, + errors::BatchAccountUpdateError, + transaction::{ProvenTransaction, TransactionId}, +}; + +// BATCH ACCOUNT UPDATE +// ================================================================================================ + +/// Represents the changes made to an account resulting from executing a batch of transactions. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BatchAccountUpdate { + /// ID of the updated account. + account_id: AccountId, + + /// Commitment to the state of the account before this update is applied. + /// + /// Equal to `Digest::default()` for new accounts. + initial_state_commitment: Digest, + + /// Commitment to the state of the account after this update is applied. + final_state_commitment: Digest, + + /// IDs of all transactions that updated the account. + transactions: Vec, + + /// A set of changes which can be applied to the previous account state (i.e. `initial_state`) + /// to get the new account state. For private accounts, this is set to + /// [`AccountUpdateDetails::Private`]. + details: AccountUpdateDetails, +} + +impl BatchAccountUpdate { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a [`BatchAccountUpdate`] by cloning the update and other details from the provided + /// [`ProvenTransaction`]. 
+ pub fn from_transaction(transaction: &ProvenTransaction) -> Self { + Self { + account_id: transaction.account_id(), + initial_state_commitment: transaction.account_update().init_state_hash(), + final_state_commitment: transaction.account_update().final_state_hash(), + transactions: vec![transaction.id()], + details: transaction.account_update().details().clone(), + } + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the ID of the updated account. + pub fn account_id(&self) -> AccountId { + self.account_id + } + + /// Returns a commitment to the state of the account before this update is applied. + /// + /// This is equal to [`Digest::default()`] for new accounts. + pub fn initial_state_commitment(&self) -> Digest { + self.initial_state_commitment + } + + /// Returns a commitment to the state of the account after this update is applied. + pub fn final_state_commitment(&self) -> Digest { + self.final_state_commitment + } + + /// Returns a slice of [`TransactionId`]s that updated this account's state. + pub fn transactions(&self) -> &[TransactionId] { + &self.transactions + } + + /// Returns the contained [`AccountUpdateDetails`]. + /// + /// This update can be used to build the new account state from the previous account state. + pub fn details(&self) -> &AccountUpdateDetails { + &self.details + } + + /// Returns `true` if the account update details are for a private account. + pub fn is_private(&self) -> bool { + self.details.is_private() + } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Merges the transaction's update into this account update. + /// + /// # Errors + /// + /// Returns an error if: + /// - The account ID of the merging transaction does not match the account ID of the existing + /// update. 
+ /// - The merging transaction's initial state commitment does not match the final state + /// commitment of the current update. + /// - If the underlying [`AccountUpdateDetails::merge`] fails. + pub fn merge_proven_tx( + &mut self, + tx: &ProvenTransaction, + ) -> Result<(), BatchAccountUpdateError> { + if self.account_id != tx.account_id() { + return Err(BatchAccountUpdateError::AccountUpdateIdMismatch { + transaction: tx.id(), + expected_account_id: self.account_id, + actual_account_id: tx.account_id(), + }); + } + + if self.final_state_commitment != tx.account_update().init_state_hash() { + return Err(BatchAccountUpdateError::AccountUpdateInitialStateMismatch(tx.id())); + } + + self.details = self.details.clone().merge(tx.account_update().details().clone()).map_err( + |source_err| BatchAccountUpdateError::TransactionUpdateMergeError(tx.id(), source_err), + )?; + self.final_state_commitment = tx.account_update().final_state_hash(); + self.transactions.push(tx.id()); + + Ok(()) + } + + // CONVERSIONS + // -------------------------------------------------------------------------------------------- + + /// Consumes the update and returns the non-[`Copy`] parts. 
+ pub fn into_parts(self) -> (Vec, AccountUpdateDetails) { + (self.transactions, self.details) + } +} + +// SERIALIZATION +// ================================================================================================ + +impl Serializable for BatchAccountUpdate { + fn write_into(&self, target: &mut W) { + self.account_id.write_into(target); + self.initial_state_commitment.write_into(target); + self.final_state_commitment.write_into(target); + self.transactions.write_into(target); + self.details.write_into(target); + } +} + +impl Deserializable for BatchAccountUpdate { + fn read_from(source: &mut R) -> Result { + Ok(Self { + account_id: AccountId::read_from(source)?, + initial_state_commitment: Digest::read_from(source)?, + final_state_commitment: Digest::read_from(source)?, + transactions: >::read_from(source)?, + details: AccountUpdateDetails::read_from(source)?, + }) + } +} diff --git a/crates/miden-objects/src/batch/batch_id.rs b/crates/miden-objects/src/batch/batch_id.rs new file mode 100644 index 000000000..ae7100eab --- /dev/null +++ b/crates/miden-objects/src/batch/batch_id.rs @@ -0,0 +1,65 @@ +use alloc::{string::String, vec::Vec}; + +use vm_core::{Felt, ZERO}; +use vm_processor::Digest; + +use crate::{ + account::AccountId, + transaction::{ProvenTransaction, TransactionId}, + Hasher, +}; + +// BATCH ID +// ================================================================================================ + +/// Uniquely identifies a batch of transactions, i.e. both +/// [`ProposedBatch`](crate::batch::ProposedBatch) and [`ProvenBatch`](crate::batch::ProvenBatch). +/// +/// This is a sequential hash of the tuple `(TRANSACTION_ID || [account_id_prefix, +/// account_id_suffix, 0, 0])` of all transactions and the accounts their executed against in the +/// batch. +#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd)] +pub struct BatchId(Digest); + +impl BatchId { + /// Calculates a batch ID from the given set of transactions. 
+ pub fn from_transactions<'tx, T>(txs: T) -> Self + where + T: Iterator, + { + Self::from_ids(txs.map(|tx| (tx.id(), tx.account_id()))) + } + + /// Calculates a batch ID from the given transaction ID and account ID tuple. + pub fn from_ids(iter: impl Iterator) -> Self { + let mut elements: Vec = Vec::new(); + for (tx_id, account_id) in iter { + elements.extend_from_slice(tx_id.as_elements()); + let [account_id_prefix, account_id_suffix] = <[Felt; 2]>::from(account_id); + elements.extend_from_slice(&[account_id_prefix, account_id_suffix, ZERO, ZERO]); + } + + Self(Hasher::hash_elements(&elements)) + } + + /// Returns the elements representation of this batch ID. + pub fn as_elements(&self) -> &[Felt] { + self.0.as_elements() + } + + /// Returns the byte representation of this batch ID. + pub fn as_bytes(&self) -> [u8; 32] { + self.0.as_bytes() + } + + /// Returns a big-endian, hex-encoded string. + pub fn to_hex(&self) -> String { + self.0.to_hex() + } +} + +impl core::fmt::Display for BatchId { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.to_hex()) + } +} diff --git a/crates/miden-objects/src/batch/mod.rs b/crates/miden-objects/src/batch/mod.rs index 23c43becc..dd0c0d433 100644 --- a/crates/miden-objects/src/batch/mod.rs +++ b/crates/miden-objects/src/batch/mod.rs @@ -1,2 +1,14 @@ mod note_tree; pub use note_tree::BatchNoteTree; + +mod batch_id; +pub use batch_id::BatchId; + +mod account_update; +pub use account_update::BatchAccountUpdate; + +mod proven_batch; +pub use proven_batch::ProvenBatch; + +mod proposed_batch; +pub use proposed_batch::ProposedBatch; diff --git a/crates/miden-objects/src/batch/note_tree.rs b/crates/miden-objects/src/batch/note_tree.rs index a0d0b5536..e6e98de31 100644 --- a/crates/miden-objects/src/batch/note_tree.rs +++ b/crates/miden-objects/src/batch/note_tree.rs @@ -35,4 +35,9 @@ impl BatchNoteTree { pub fn root(&self) -> RpoDigest { self.0.root() } + + /// Returns the number of 
non-empty leaves in this tree. + pub fn num_leaves(&self) -> usize { + self.0.num_leaves() + } } diff --git a/crates/miden-objects/src/batch/proposed_batch.rs b/crates/miden-objects/src/batch/proposed_batch.rs new file mode 100644 index 000000000..39aba4bd1 --- /dev/null +++ b/crates/miden-objects/src/batch/proposed_batch.rs @@ -0,0 +1,508 @@ +use alloc::{ + collections::{btree_map::Entry, BTreeMap, BTreeSet}, + sync::Arc, + vec::Vec, +}; + +use crate::{ + account::AccountId, + batch::{BatchAccountUpdate, BatchId, BatchNoteTree}, + block::{BlockHeader, BlockNumber}, + errors::ProposedBatchError, + note::{NoteHeader, NoteId, NoteInclusionProof}, + transaction::{ + ChainMmr, InputNoteCommitment, InputNotes, OutputNote, ProvenTransaction, TransactionId, + }, + MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, +}; + +/// A proposed batch of transactions with all necessary data to validate it. +/// +/// See [`ProposedBatch::new`] for what a proposed batch expects and guarantees. +/// +/// This type is fairly large, so consider boxing it. +#[derive(Debug, Clone)] +pub struct ProposedBatch { + /// The transactions of this batch. + transactions: Vec>, + /// The header is boxed as it has a large stack size. + block_header: BlockHeader, + /// The chain MMR used to authenticate: + /// - all unauthenticated notes that can be authenticated, + /// - all block hashes referenced by the transactions in the batch. + chain_mmr: ChainMmr, + /// The note inclusion proofs for unauthenticated notes that were consumed in the batch which + /// can be authenticated. + unauthenticated_note_proofs: BTreeMap, + /// The ID of the batch, which is a cryptographic commitment to the transactions in the batch. + id: BatchId, + /// A map from account ID's updated in this batch to the aggregated update from all + /// transaction's that touched the account. + account_updates: BTreeMap, + /// The block number at which the batch will expire. 
This is the minimum of all transaction's + /// expiration block number. + batch_expiration_block_num: BlockNumber, + /// The input note commitment of the transaction batch. This consists of all authenticated + /// notes that transactions in the batch consume as well as unauthenticated notes whose + /// authentication is delayed to the block kernel. + input_notes: InputNotes, + /// The SMT over the output notes of this batch. + output_notes_tree: BatchNoteTree, + /// The output notes of this batch. This consists of all notes created by transactions in the + /// batch that are not consumed within the same batch. + output_notes: Vec, +} + +impl ProposedBatch { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`ProposedBatch`] from the provided parts. + /// + /// # Inputs + /// + /// - The given transactions must be correctly ordered. That is, if two transactions A and B + /// update the same account in this order, meaning A's initial account state commitment + /// matches the account state before any transactions are executed and B's initial account + /// state commitment matches the final account state commitment of A, then A must come before + /// B. + /// - The chain MMR should contain all block headers + /// - that are referenced by note inclusion proofs in `unauthenticated_note_proofs`. + /// - that are referenced by a transaction in the batch. + /// - The `unauthenticated_note_proofs` should contain [`NoteInclusionProof`]s for any + /// unauthenticated note consumed by the transaction's in the batch which can be + /// authenticated. This means it is not required that every unauthenticated note has an entry + /// in this map for two reasons. + /// - Unauthenticated note authentication can be delayed to the block kernel. 
+ /// - Another transaction in the batch creates an output note matching an unauthenticated + /// input note, in which case inclusion in the chain does not need to be proven. + /// - The block header's block number must be greater or equal to the highest block number + /// referenced by any transaction. This is not verified explicitly, but will implicitly cause + /// an error during validating that each reference block of a transaction is in the chain MMR. + /// + /// # Errors + /// + /// Returns an error if: + /// + /// - The number of input notes exceeds [`MAX_INPUT_NOTES_PER_BATCH`]. + /// - Note that unauthenticated notes that are created in the same batch do not count. Any + /// other input notes, unauthenticated or not, do count. + /// - The number of output notes exceeds [`MAX_OUTPUT_NOTES_PER_BATCH`]. + /// - Note that output notes that are consumed in the same batch as unauthenticated input + /// notes do not count. + /// - Any note is consumed more than once. + /// - Any note is created more than once. + /// - The number of account updates exceeds [`MAX_ACCOUNTS_PER_BATCH`]. + /// - Note that any number of transactions against the same account count as one update. + /// - The chain MMRs chain length does not match the block header's block number. This means the + /// chain MMR should not contain the block header itself as it is added to the MMR in the + /// batch kernel. + /// - The chain MMRs hashed peaks do not match the block header's chain root. + /// - The reference block of any transaction is not in the chain MMR. + /// - The note inclusion proof for an unauthenticated note fails to verify. + /// - The block referenced by a note inclusion proof for an unauthenticated note is missing from + /// the chain MMR. + /// - The transactions in the proposed batch which update the same account are not correctly + /// ordered. + /// - The provided list of transactions is empty. 
An empty batch is pointless and would + /// potentially result in the same [`BatchId`] for two empty batches which would mean batch + /// IDs are no longer unique. + /// - There are duplicate transactions. + pub fn new( + transactions: Vec>, + block_header: BlockHeader, + chain_mmr: ChainMmr, + unauthenticated_note_proofs: BTreeMap, + ) -> Result { + // Check for empty or duplicate transactions. + // -------------------------------------------------------------------------------------------- + + if transactions.is_empty() { + return Err(ProposedBatchError::EmptyTransactionBatch); + } + + let mut transaction_set = BTreeSet::new(); + for tx in transactions.iter() { + if !transaction_set.insert(tx.id()) { + return Err(ProposedBatchError::DuplicateTransaction { transaction_id: tx.id() }); + } + } + + // Verify block header and chain MMR match. + // -------------------------------------------------------------------------------------------- + + if chain_mmr.chain_length() != block_header.block_num() { + return Err(ProposedBatchError::InconsistentChainLength { + expected: block_header.block_num(), + actual: chain_mmr.chain_length(), + }); + } + + let hashed_peaks = chain_mmr.peaks().hash_peaks(); + if hashed_peaks != block_header.chain_root() { + return Err(ProposedBatchError::InconsistentChainRoot { + expected: block_header.chain_root(), + actual: hashed_peaks, + }); + } + + // Verify all block references from the transactions are in the chain. + // -------------------------------------------------------------------------------------------- + + // Aggregate block references into a set since the chain MMR does not index by hash. + let mut block_references = + BTreeSet::from_iter(chain_mmr.block_headers().map(BlockHeader::hash)); + // Insert the block referenced by the batch to consider it authenticated. We can assume this + // because the block kernel will verify the block hash as it is a public input to the batch + // kernel. 
+ block_references.insert(block_header.hash()); + + for tx in transactions.iter() { + if !block_references.contains(&tx.block_ref()) { + return Err(ProposedBatchError::MissingTransactionBlockReference { + block_reference: tx.block_ref(), + transaction_id: tx.id(), + }); + } + } + + // Aggregate individual tx-level account updates into a batch-level account update - one per + // account. + // -------------------------------------------------------------------------------------------- + + // Populate batch output notes and updated accounts. + let mut account_updates = BTreeMap::::new(); + let mut batch_expiration_block_num = BlockNumber::from(u32::MAX); + for tx in transactions.iter() { + // Merge account updates so that state transitions A->B->C become A->C. + match account_updates.entry(tx.account_id()) { + Entry::Vacant(vacant) => { + let batch_account_update = BatchAccountUpdate::from_transaction(tx); + vacant.insert(batch_account_update); + }, + Entry::Occupied(occupied) => { + // This returns an error if the transactions are not correctly ordered, e.g. if + // B comes before A. + occupied.into_mut().merge_proven_tx(tx).map_err(|source| { + ProposedBatchError::AccountUpdateError { + account_id: tx.account_id(), + source, + } + })?; + }, + }; + + // The expiration block of the batch is the minimum of all transaction's expiration + // block. + batch_expiration_block_num = batch_expiration_block_num.min(tx.expiration_block_num()); + } + + if account_updates.len() > MAX_ACCOUNTS_PER_BATCH { + return Err(ProposedBatchError::TooManyAccountUpdates(account_updates.len())); + } + + // Check for duplicates in input notes. + // -------------------------------------------------------------------------------------------- + + // Check for duplicate input notes both within a transaction and across transactions. + // This also includes authenticated notes, as the transaction kernel doesn't check for + // duplicates. 
+ let mut input_note_map = BTreeMap::new(); + + for tx in transactions.iter() { + for note in tx.input_notes() { + let nullifier = note.nullifier(); + if let Some(first_transaction_id) = input_note_map.insert(nullifier, tx.id()) { + return Err(ProposedBatchError::DuplicateInputNote { + note_nullifier: nullifier, + first_transaction_id, + second_transaction_id: tx.id(), + }); + } + } + } + + // Create input and output note set of the batch. + // -------------------------------------------------------------------------------------------- + + // Check for duplicate output notes and remove all output notes from the batch output note + // set that are consumed by transactions. + let mut output_notes = BatchOutputNoteTracker::new(transactions.iter().map(AsRef::as_ref))?; + let mut input_notes = vec![]; + + for tx in transactions.iter() { + for input_note in tx.input_notes().iter() { + // Header is present only for unauthenticated input notes. + let input_note = match input_note.header() { + Some(input_note_header) => { + if output_notes.remove_note(input_note_header)? { + // If a transaction consumes an unauthenticated note that is also + // created in this batch, it is removed from the set of output notes. + // We `continue` so that the input note is not added to the set of input + // notes of the batch. That way the note appears in neither input nor + // output set. + continue; + } + + // If an inclusion proof for an unauthenticated note is provided and the + // proof is valid, it means the note is part of the chain and we can mark it + // as authenticated by erasing the note header. 
+ if let Some(proof) = + unauthenticated_note_proofs.get(&input_note_header.id()) + { + let note_block_header = chain_mmr + .get_block(proof.location().block_num()) + .ok_or_else(|| { + ProposedBatchError::UnauthenticatedInputNoteBlockNotInChainMmr { + block_number: proof.location().block_num(), + note_id: input_note_header.id(), + } + })?; + + authenticate_unauthenticated_note( + input_note_header, + proof, + note_block_header, + )?; + + // Erase the note header from the input note. + InputNoteCommitment::from(input_note.nullifier()) + } else { + input_note.clone() + } + }, + None => input_note.clone(), + }; + input_notes.push(input_note); + } + } + + let output_notes = output_notes.into_notes(); + + if input_notes.len() > MAX_INPUT_NOTES_PER_BATCH { + return Err(ProposedBatchError::TooManyInputNotes(input_notes.len())); + } + // SAFETY: This is safe as we have checked for duplicates and the max number of input notes + // in a batch. + let input_notes = InputNotes::new_unchecked(input_notes); + + if output_notes.len() > MAX_OUTPUT_NOTES_PER_BATCH { + return Err(ProposedBatchError::TooManyOutputNotes(output_notes.len())); + } + + // Build the output notes SMT. + // -------------------------------------------------------------------------------------------- + + // SAFETY: We can `expect` here because: + // - the batch output note tracker already returns an error for duplicate output notes, + // - we have checked that the number of output notes is <= 2^BATCH_NOTE_TREE_DEPTH. + let output_notes_tree = BatchNoteTree::with_contiguous_leaves( + output_notes.iter().map(|note| (note.id(), note.metadata())), + ) + .expect("there should be no duplicate notes and there should be <= 2^BATCH_NOTE_TREE_DEPTH notes"); + + // Compute batch ID. 
+ // -------------------------------------------------------------------------------------------- + + let id = BatchId::from_transactions(transactions.iter().map(AsRef::as_ref)); + + Ok(Self { + id, + transactions, + block_header, + chain_mmr, + unauthenticated_note_proofs, + account_updates, + batch_expiration_block_num, + input_notes, + output_notes, + output_notes_tree, + }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns a slice of the [`ProvenTransaction`]s in the batch. + pub fn transactions(&self) -> &[Arc] { + &self.transactions + } + + /// Returns the map of account IDs mapped to their [`BatchAccountUpdate`]s. + /// + /// If an account was updated by multiple transactions, the [`BatchAccountUpdate`] is the result + /// of merging the individual updates. + /// + /// For example, suppose an account's state before this batch is `A` and the batch contains two + /// transactions that updated it. Applying the first transaction results in intermediate state + /// `B`, and applying the second one results in state `C`. Then the returned update represents + /// the state transition from `A` to `C`. + pub fn account_updates(&self) -> &BTreeMap { + &self.account_updates + } + + /// The ID of this batch. See [`BatchId`] for details on how it is computed. + pub fn id(&self) -> BatchId { + self.id + } + + /// Returns the block number at which the batch will expire. + pub fn batch_expiration_block_num(&self) -> BlockNumber { + self.batch_expiration_block_num + } + + /// Returns the [`InputNotes`] of this batch. + pub fn input_notes(&self) -> &InputNotes { + &self.input_notes + } + + /// Returns the output notes of the batch. + /// + /// This is the aggregation of all output notes by the transactions in the batch, except the + /// ones that were consumed within the batch itself. 
+ pub fn output_notes(&self) -> &[OutputNote] { + &self.output_notes + } + + /// Returns the [`BatchNoteTree`] representing the output notes of the batch. + pub fn output_notes_tree(&self) -> &BatchNoteTree { + &self.output_notes_tree + } + + /// Consumes the proposed batch and returns its underlying parts. + #[allow(clippy::type_complexity)] + pub fn into_parts( + self, + ) -> ( + Vec>, + BlockHeader, + ChainMmr, + BTreeMap, + BatchId, + BTreeMap, + InputNotes, + BatchNoteTree, + Vec, + BlockNumber, + ) { + ( + self.transactions, + self.block_header, + self.chain_mmr, + self.unauthenticated_note_proofs, + self.id, + self.account_updates, + self.input_notes, + self.output_notes_tree, + self.output_notes, + self.batch_expiration_block_num, + ) + } +} + +// BATCH OUTPUT NOTE TRACKER +// ================================================================================================ + +/// A helper struct to track output notes. +/// Its main purpose is to check for duplicates and allow for removal of output notes that are +/// consumed in the same batch, so are not output notes of the batch. +/// +/// The approach for this is that the output note set is initialized to the union of all output +/// notes of the transactions in the batch. +/// Then (outside of this struct) all input notes of transactions in the batch which are also output +/// notes can be removed, as they are considered consumed within the batch and will not be visible +/// as created or consumed notes for the batch. +#[derive(Debug)] +struct BatchOutputNoteTracker { + /// An index from [`NoteId`]s to the transaction that creates the note and the note itself. + /// The transaction ID is tracked to produce better errors when a duplicate note is + /// encountered. + output_notes: BTreeMap, +} + +impl BatchOutputNoteTracker { + /// Constructs a new output note tracker from the given transactions. 
+ /// + /// # Errors + /// + /// Returns an error if: + /// - any output note is created more than once (by the same or different transactions). + fn new<'a>( + txs: impl Iterator, + ) -> Result { + let mut output_notes = BTreeMap::new(); + for tx in txs { + for note in tx.output_notes().iter() { + if let Some((first_transaction_id, _)) = + output_notes.insert(note.id(), (tx.id(), note.clone())) + { + return Err(ProposedBatchError::DuplicateOutputNote { + note_id: note.id(), + first_transaction_id, + second_transaction_id: tx.id(), + }); + } + } + } + + Ok(Self { output_notes }) + } + + /// Attempts to remove the given input note from the output note set. + /// + /// Returns `true` if the given note existed in the output note set and was removed from it, + /// `false` otherwise. + /// + /// # Errors + /// + /// Returns an error if: + /// - the given note has a corresponding note in the output note set with the same [`NoteId`] + /// but their hashes differ (i.e. their metadata is different). + pub fn remove_note( + &mut self, + input_note_header: &NoteHeader, + ) -> Result { + let id = input_note_header.id(); + if let Some((_, output_note)) = self.output_notes.remove(&id) { + // Check if the notes with the same ID have differing hashes. + // This could happen if the metadata of the notes is different, which we consider an + // error. + let input_hash = input_note_header.hash(); + let output_hash = output_note.hash(); + if output_hash != input_hash { + return Err(ProposedBatchError::NoteHashesMismatch { id, input_hash, output_hash }); + } + + return Ok(true); + } + + Ok(false) + } + + /// Consumes the tracker and returns a [`Vec`] of output notes sorted by [`NoteId`]. 
+ pub fn into_notes(self) -> Vec { + self.output_notes.into_iter().map(|(_, (_, output_note))| output_note).collect() + } +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Validates whether the provided header of an unauthenticated note belongs to the note tree of the +/// specified block header. +fn authenticate_unauthenticated_note( + note_header: &NoteHeader, + proof: &NoteInclusionProof, + block_header: &BlockHeader, +) -> Result<(), ProposedBatchError> { + let note_index = proof.location().node_index_in_block().into(); + let note_hash = note_header.hash(); + proof + .note_path() + .verify(note_index, note_hash, &block_header.note_root()) + .map_err(|source| ProposedBatchError::UnauthenticatedNoteAuthenticationFailed { + note_id: note_header.id(), + block_num: proof.location().block_num(), + source, + }) +} diff --git a/crates/miden-objects/src/batch/proven_batch.rs b/crates/miden-objects/src/batch/proven_batch.rs new file mode 100644 index 000000000..162247100 --- /dev/null +++ b/crates/miden-objects/src/batch/proven_batch.rs @@ -0,0 +1,93 @@ +use alloc::{collections::BTreeMap, vec::Vec}; + +use crate::{ + account::AccountId, + batch::{BatchAccountUpdate, BatchId, BatchNoteTree}, + block::BlockNumber, + note::Nullifier, + transaction::{InputNoteCommitment, InputNotes, OutputNote}, +}; + +/// A transaction batch with an execution proof. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ProvenBatch { + id: BatchId, + account_updates: BTreeMap, + input_notes: InputNotes, + output_notes_smt: BatchNoteTree, + output_notes: Vec, + batch_expiration_block_num: BlockNumber, +} + +impl ProvenBatch { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`ProvenBatch`] from the provided parts. 
+ pub fn new( + id: BatchId, + account_updates: BTreeMap, + input_notes: InputNotes, + output_notes_smt: BatchNoteTree, + output_notes: Vec, + batch_expiration_block_num: BlockNumber, + ) -> Self { + Self { + id, + account_updates, + input_notes, + output_notes_smt, + output_notes, + batch_expiration_block_num, + } + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// The ID of this batch. See [`BatchId`] for details on how it is computed. + pub fn id(&self) -> BatchId { + self.id + } + + /// Returns the block number at which the batch will expire. + pub fn batch_expiration_block_num(&self) -> BlockNumber { + self.batch_expiration_block_num + } + + /// Returns the map of account IDs mapped to their [`BatchAccountUpdate`]s. + /// + /// If an account was updated by multiple transactions, the [`BatchAccountUpdate`] is the result + /// of merging the individual updates. + /// + /// For example, suppose an account's state before this batch is `A` and the batch contains two + /// transactions that updated it. Applying the first transaction results in intermediate state + /// `B`, and applying the second one results in state `C`. Then the returned update represents + /// the state transition from `A` to `C`. + pub fn account_updates(&self) -> &BTreeMap { + &self.account_updates + } + + /// Returns the [`InputNotes`] of this batch. + pub fn input_notes(&self) -> &InputNotes { + &self.input_notes + } + + /// Returns an iterator over the nullifiers produced in this batch. + pub fn produced_nullifiers(&self) -> impl Iterator + use<'_> { + self.input_notes.iter().map(InputNoteCommitment::nullifier) + } + + /// Returns the output notes of the batch. + /// + /// This is the aggregation of all output notes by the transactions in the batch, except the + /// ones that were consumed within the batch itself. 
+ pub fn output_notes(&self) -> &[OutputNote] { + &self.output_notes + } + + /// Returns the [`BatchNoteTree`] representing the output notes of the batch. + pub fn output_notes_tree(&self) -> &BatchNoteTree { + &self.output_notes_smt + } +} diff --git a/crates/miden-objects/src/block/block_number.rs b/crates/miden-objects/src/block/block_number.rs index 851ed4f00..656af8049 100644 --- a/crates/miden-objects/src/block/block_number.rs +++ b/crates/miden-objects/src/block/block_number.rs @@ -40,11 +40,6 @@ impl BlockNumber { BlockNumber((epoch as u32) << BlockNumber::EPOCH_LENGTH_EXPONENT) } - /// Creates a `BlockNumber` from a `usize`. - pub fn from_usize(value: usize) -> Self { - BlockNumber(value as u32) - } - /// Returns the epoch to which this block number belongs. pub const fn block_epoch(&self) -> u16 { (self.0 >> BlockNumber::EPOCH_LENGTH_EXPONENT) as u16 diff --git a/crates/miden-objects/src/block/mod.rs b/crates/miden-objects/src/block/mod.rs index 500c24a0e..f80cede62 100644 --- a/crates/miden-objects/src/block/mod.rs +++ b/crates/miden-objects/src/block/mod.rs @@ -7,8 +7,10 @@ use super::{ mod header; pub use header::BlockHeader; + mod block_number; pub use block_number::BlockNumber; + mod note_tree; pub use note_tree::{BlockNoteIndex, BlockNoteTree}; @@ -231,8 +233,8 @@ pub fn compute_tx_hash( ) -> Digest { let mut elements = vec![]; for (transaction_id, account_id) in updated_accounts { - let account_id_felts: [Felt; 2] = account_id.into(); - elements.extend_from_slice(&[account_id_felts[0], account_id_felts[1], ZERO, ZERO]); + let [account_id_prefix, account_id_suffix] = <[Felt; 2]>::from(account_id); + elements.extend_from_slice(&[account_id_prefix, account_id_suffix, ZERO, ZERO]); elements.extend_from_slice(transaction_id.as_elements()); } diff --git a/crates/miden-objects/src/errors.rs b/crates/miden-objects/src/errors.rs index 38da1fa1d..a47752c69 100644 --- a/crates/miden-objects/src/errors.rs +++ b/crates/miden-objects/src/errors.rs @@ -22,7 +22,9 
@@ use crate::{ }, block::BlockNumber, note::{NoteAssets, NoteExecutionHint, NoteTag, NoteType, Nullifier}, - ACCOUNT_UPDATE_MAX_SIZE, MAX_INPUTS_PER_NOTE, MAX_INPUT_NOTES_PER_TX, MAX_OUTPUT_NOTES_PER_TX, + transaction::TransactionId, + ACCOUNT_UPDATE_MAX_SIZE, MAX_ACCOUNTS_PER_BATCH, MAX_INPUTS_PER_NOTE, + MAX_INPUT_NOTES_PER_BATCH, MAX_INPUT_NOTES_PER_TX, MAX_OUTPUT_NOTES_PER_TX, }; // ACCOUNT COMPONENT TEMPLATE ERROR @@ -182,6 +184,23 @@ pub enum AccountDeltaError { NotAFungibleFaucetId(AccountId), } +// BATCH ACCOUNT UPDATE ERROR +// ================================================================================================ + +#[derive(Debug, Error)] +pub enum BatchAccountUpdateError { + #[error("account update for account {expected_account_id} cannot be merged with update from transaction {transaction} which was executed against account {actual_account_id}")] + AccountUpdateIdMismatch { + transaction: TransactionId, + expected_account_id: AccountId, + actual_account_id: AccountId, + }, + #[error("final state commitment in account update from transaction {0} does not match initial state of current update")] + AccountUpdateInitialStateMismatch(TransactionId), + #[error("failed to merge account delta from transaction {0}")] + TransactionUpdateMergeError(TransactionId, #[source] AccountDeltaError), +} + // ASSET ERROR // ================================================================================================ @@ -429,6 +448,90 @@ pub enum ProvenTransactionError { }, } +// BATCH ERROR +// ================================================================================================ + +#[derive(Debug, Error)] +pub enum ProposedBatchError { + #[error( + "transaction batch has {0} input notes but at most {MAX_INPUT_NOTES_PER_BATCH} are allowed" + )] + TooManyInputNotes(usize), + + #[error( + "transaction batch has {0} output notes but at most {MAX_OUTPUT_NOTES_PER_BATCH} are allowed" + )] + TooManyOutputNotes(usize), + + #[error( + "transaction batch has 
{0} account updates but at most {MAX_ACCOUNTS_PER_BATCH} are allowed" + )] + TooManyAccountUpdates(usize), + + #[error("transaction batch must contain at least one transaction")] + EmptyTransactionBatch, + + #[error("transaction {transaction_id} appears twice in the proposed batch input")] + DuplicateTransaction { transaction_id: TransactionId }, + + #[error("transaction {second_transaction_id} consumes the note with nullifier {note_nullifier} that is also consumed by another transaction {first_transaction_id} in the batch")] + DuplicateInputNote { + note_nullifier: Nullifier, + first_transaction_id: TransactionId, + second_transaction_id: TransactionId, + }, + + #[error("transaction {second_transaction_id} creates the note with id {note_id} that is also created by another transaction {first_transaction_id} in the batch")] + DuplicateOutputNote { + note_id: NoteId, + first_transaction_id: TransactionId, + second_transaction_id: TransactionId, + }, + + #[error("note hashes mismatch for note {id}: (input: {input_hash}, output: {output_hash})")] + NoteHashesMismatch { + id: NoteId, + input_hash: Digest, + output_hash: Digest, + }, + + #[error("failed to merge transaction delta into account {account_id}")] + AccountUpdateError { + account_id: AccountId, + source: BatchAccountUpdateError, + }, + + #[error("unable to prove unauthenticated note inclusion because block {block_number} in which note with id {note_id} was created is not in chain mmr")] + UnauthenticatedInputNoteBlockNotInChainMmr { + block_number: BlockNumber, + note_id: NoteId, + }, + + #[error( + "unable to prove unauthenticated note inclusion of note {note_id} in block {block_num}" + )] + UnauthenticatedNoteAuthenticationFailed { + note_id: NoteId, + block_num: BlockNumber, + source: MerkleError, + }, + + #[error("chain mmr has length {actual} which does not match block number {expected} ")] + InconsistentChainLength { + expected: BlockNumber, + actual: BlockNumber, + }, + + #[error("chain mmr has root 
{actual} which does not match block header's root {expected}")] + InconsistentChainRoot { expected: Digest, actual: Digest }, + + #[error("block {block_reference} referenced by transaction {transaction_id} is not in the chain mmr")] + MissingTransactionBlockReference { + block_reference: Digest, + transaction_id: TransactionId, + }, +} + // BLOCK VALIDATION ERROR // ================================================================================================ diff --git a/crates/miden-objects/src/lib.rs b/crates/miden-objects/src/lib.rs index 069a43230..c0753ca92 100644 --- a/crates/miden-objects/src/lib.rs +++ b/crates/miden-objects/src/lib.rs @@ -24,9 +24,9 @@ mod errors; pub use constants::*; pub use errors::{ - AccountDeltaError, AccountError, AccountIdError, AssetError, AssetVaultError, BlockError, - ChainMmrError, NoteError, ProvenTransactionError, TransactionInputError, - TransactionOutputError, TransactionScriptError, + AccountDeltaError, AccountError, AccountIdError, AssetError, AssetVaultError, + BatchAccountUpdateError, BlockError, ChainMmrError, NoteError, ProposedBatchError, + ProvenTransactionError, TransactionInputError, TransactionOutputError, TransactionScriptError, }; pub use miden_crypto::hash::rpo::{Rpo256 as Hasher, RpoDigest as Digest}; pub use vm_core::{Felt, FieldElement, StarkField, Word, EMPTY_WORD, ONE, WORD_SIZE, ZERO}; diff --git a/crates/miden-objects/src/testing/chain_mmr.rs b/crates/miden-objects/src/testing/chain_mmr.rs new file mode 100644 index 000000000..833d7fe8e --- /dev/null +++ b/crates/miden-objects/src/testing/chain_mmr.rs @@ -0,0 +1,46 @@ +use miden_crypto::merkle::{Mmr, PartialMmr}; + +use crate::{block::BlockHeader, transaction::ChainMmr, ChainMmrError}; + +impl ChainMmr { + /// Converts the [`Mmr`] into a [`ChainMmr`] by selectively copying all leaves that are in the + /// given `blocks` iterator. 
+ /// + /// This tracks all blocks in the given iterator in the [`ChainMmr`] except for the block whose + /// block number equals [`Mmr::forest`], which is the current chain length. + /// + /// # Panics + /// + /// Due to being only available in test scenarios, this function panics when one of the given + /// blocks does not exist in the provided mmr. + pub fn from_mmr( + mmr: &Mmr, + blocks: impl IntoIterator + Clone, + ) -> Result + where + I: Iterator, + { + // We do not include the latest block as it is used as the reference block and is added to + // the MMR by the transaction or batch kernel. + + let target_forest = mmr.forest() - 1; + let peaks = mmr + .peaks_at(target_forest) + .expect("target_forest should be smaller than forest of the mmr"); + let mut partial_mmr = PartialMmr::from_peaks(peaks); + + for block_num in blocks + .clone() + .into_iter() + .map(|header| header.block_num().as_usize()) + .filter(|block_num| *block_num < target_forest) + { + let leaf = mmr.get(block_num).expect("error: block num does not exist"); + let path = + mmr.open_at(block_num, target_forest).expect("error: block proof").merkle_path; + partial_mmr.track(block_num, leaf, &path).expect("error: partial mmr track"); + } + + ChainMmr::new(partial_mmr, blocks) + } +} diff --git a/crates/miden-objects/src/testing/mod.rs b/crates/miden-objects/src/testing/mod.rs index d84a91f76..1e58e5508 100644 --- a/crates/miden-objects/src/testing/mod.rs +++ b/crates/miden-objects/src/testing/mod.rs @@ -11,6 +11,7 @@ pub mod account_component; pub mod account_id; pub mod asset; pub mod block; +pub mod chain_mmr; pub mod constants; pub mod note; pub mod storage; diff --git a/crates/miden-objects/src/transaction/chain_mmr.rs b/crates/miden-objects/src/transaction/chain_mmr.rs index 431193d3b..764a1c42a 100644 --- a/crates/miden-objects/src/transaction/chain_mmr.rs +++ b/crates/miden-objects/src/transaction/chain_mmr.rs @@ -1,4 +1,4 @@ -use alloc::{collections::BTreeMap, vec::Vec}; +use 
alloc::collections::BTreeMap; use vm_core::utils::{Deserializable, Serializable}; @@ -43,11 +43,13 @@ impl ChainMmr { /// partial MMR. /// - The same block appears more than once in the provided list of block headers. /// - The partial MMR does not track authentication paths for any of the specified blocks. - pub fn new(mmr: PartialMmr, blocks: Vec) -> Result { + pub fn new( + mmr: PartialMmr, + blocks: impl IntoIterator, + ) -> Result { let chain_length = mmr.forest(); - let mut block_map = BTreeMap::new(); - for block in blocks.into_iter() { + for block in blocks { if block.block_num().as_usize() >= chain_length { return Err(ChainMmrError::block_num_too_big(chain_length, block.block_num())); } @@ -67,6 +69,11 @@ impl ChainMmr { // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- + /// Returns the underlying [`PartialMmr`]. + pub fn mmr(&self) -> &PartialMmr { + &self.mmr + } + /// Returns peaks of this MMR. pub fn peaks(&self) -> MmrPeaks { self.mmr.peaks() @@ -91,6 +98,11 @@ impl ChainMmr { self.blocks.get(&block_num) } + /// Returns an iterator over the block headers in this chain MMR. + pub fn block_headers(&self) -> impl Iterator { + self.blocks.values() + } + // DATA MUTATORS // -------------------------------------------------------------------------------------------- diff --git a/crates/miden-objects/src/transaction/inputs.rs b/crates/miden-objects/src/transaction/inputs.rs index 93f9dda5c..d4a770101 100644 --- a/crates/miden-objects/src/transaction/inputs.rs +++ b/crates/miden-objects/src/transaction/inputs.rs @@ -206,6 +206,19 @@ impl InputNotes { Ok(Self { notes, commitment }) } + /// Returns new [`InputNotes`] instantiated from the provided vector of notes without checking + /// their validity. + /// + /// This is exposed for use in transaction batches, but should generally not be used. 
+ /// + /// # Warning + /// + /// This does not run the checks from [`InputNotes::new`], so the latter should be preferred. + pub fn new_unchecked(notes: Vec) -> Self { + let commitment = build_input_note_commitment(¬es); + Self { notes, commitment } + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- diff --git a/crates/miden-objects/src/transaction/proven_tx.rs b/crates/miden-objects/src/transaction/proven_tx.rs index 55989b985..76ea35c98 100644 --- a/crates/miden-objects/src/transaction/proven_tx.rs +++ b/crates/miden-objects/src/transaction/proven_tx.rs @@ -34,6 +34,12 @@ pub struct ProvenTransaction { /// while for public notes this will also contain full note details. output_notes: OutputNotes, + /// [`BlockNumber`] of the transaction's reference block. + /// + /// This is not needed for proving the transaction, but it is useful for the node to lookup the + /// block. + block_num: BlockNumber, + /// The block hash of the last known block at the time the transaction was executed. block_ref: Digest, @@ -75,6 +81,11 @@ impl ProvenTransaction { &self.proof } + /// Returns the number of the reference block the transaction was executed against. + pub fn block_num(&self) -> BlockNumber { + self.block_num + } + /// Returns the block reference the transaction was executed against. 
pub fn block_ref(&self) -> Digest { self.block_ref @@ -153,6 +164,7 @@ impl Serializable for ProvenTransaction { self.account_update.write_into(target); self.input_notes.write_into(target); self.output_notes.write_into(target); + self.block_num.write_into(target); self.block_ref.write_into(target); self.expiration_block_num.write_into(target); self.proof.write_into(target); @@ -166,6 +178,7 @@ impl Deserializable for ProvenTransaction { let input_notes = >::read_from(source)?; let output_notes = OutputNotes::read_from(source)?; + let block_num = BlockNumber::read_from(source)?; let block_ref = Digest::read_from(source)?; let expiration_block_num = BlockNumber::read_from(source)?; let proof = ExecutionProof::read_from(source)?; @@ -182,6 +195,7 @@ impl Deserializable for ProvenTransaction { account_update, input_notes, output_notes, + block_num, block_ref, expiration_block_num, proof, @@ -217,6 +231,9 @@ pub struct ProvenTransactionBuilder { /// List of [OutputNote]s of all notes created by the transaction. output_notes: Vec, + /// [`BlockNumber`] of the transaction's reference block. + block_num: BlockNumber, + /// Block [Digest] of the transaction's reference block. 
block_ref: Digest, @@ -236,6 +253,7 @@ impl ProvenTransactionBuilder { account_id: AccountId, initial_account_hash: Digest, final_account_hash: Digest, + block_num: BlockNumber, block_ref: Digest, expiration_block_num: BlockNumber, proof: ExecutionProof, @@ -247,6 +265,7 @@ impl ProvenTransactionBuilder { account_update_details: AccountUpdateDetails::Private, input_notes: Vec::new(), output_notes: Vec::new(), + block_num, block_ref, expiration_block_num, proof, @@ -310,6 +329,7 @@ impl ProvenTransactionBuilder { account_update, input_notes, output_notes, + block_num: self.block_num, block_ref: self.block_ref, expiration_block_num: self.expiration_block_num, proof: self.proof, diff --git a/crates/miden-tx-batch-prover/Cargo.toml b/crates/miden-tx-batch-prover/Cargo.toml new file mode 100644 index 000000000..240db4fce --- /dev/null +++ b/crates/miden-tx-batch-prover/Cargo.toml @@ -0,0 +1,41 @@ +[package] +name = "miden-tx-batch-prover" +version = "0.8.0" +description = "Miden rollup transaction batch executor and prover" +readme = "README.md" +categories = ["no-std"] +keywords = ["miden", "batch", "prover"] +license.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true +edition.workspace = true + +[lib] +bench = false + +[features] +default = ["std"] +std = [ + "miden-objects/std", + "miden-tx/std", + "miden-crypto/std", + "vm-core/std", + "vm-processor/std", +] + +[dependencies] +miden-crypto = { workspace = true } +miden-tx = { workspace = true } +miden-objects = { workspace = true } +thiserror = { workspace = true } +vm-core = { workspace = true } +vm-processor = { workspace = true } + +[dev-dependencies] +anyhow = { version = "1.0", features = ["std", "backtrace"] } +miden-lib = { workspace = true, features = ["std", "testing"] } +miden-tx = { workspace = true, features = ["std", "testing"] } +rand = { workspace = true, features = ["small_rng"] } +winterfell = { version = "0.11" } diff 
--git a/crates/miden-tx-batch-prover/README.md b/crates/miden-tx-batch-prover/README.md new file mode 100644 index 000000000..85d4babb7 --- /dev/null +++ b/crates/miden-tx-batch-prover/README.md @@ -0,0 +1,7 @@ +# Miden Transaction Batch Prover + +This crate contains tools for executing and proving Miden transaction batches. + +## License + +This project is [MIT licensed](../LICENSE). diff --git a/crates/miden-tx-batch-prover/src/errors.rs b/crates/miden-tx-batch-prover/src/errors.rs new file mode 100644 index 000000000..e0500f239 --- /dev/null +++ b/crates/miden-tx-batch-prover/src/errors.rs @@ -0,0 +1,12 @@ +use miden_objects::transaction::TransactionId; +use miden_tx::TransactionVerifierError; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum BatchProveError { + #[error("failed to verify transaction {transaction_id} in transaction batch")] + TransactionVerificationFailed { + transaction_id: TransactionId, + source: TransactionVerifierError, + }, +} diff --git a/crates/miden-tx-batch-prover/src/lib.rs b/crates/miden-tx-batch-prover/src/lib.rs new file mode 100644 index 000000000..aa7d26f38 --- /dev/null +++ b/crates/miden-tx-batch-prover/src/lib.rs @@ -0,0 +1,18 @@ +#![no_std] + +#[cfg_attr(test, macro_use)] +extern crate alloc; + +#[cfg(feature = "std")] +extern crate std; + +mod local_batch_prover; +pub use local_batch_prover::LocalBatchProver; + +pub mod errors; + +#[cfg(test)] +pub mod testing; + +#[cfg(test)] +mod tests; diff --git a/crates/miden-tx-batch-prover/src/local_batch_prover.rs b/crates/miden-tx-batch-prover/src/local_batch_prover.rs new file mode 100644 index 000000000..be3efb5bd --- /dev/null +++ b/crates/miden-tx-batch-prover/src/local_batch_prover.rs @@ -0,0 +1,58 @@ +use miden_objects::batch::{ProposedBatch, ProvenBatch}; +use miden_tx::TransactionVerifier; + +use crate::errors::BatchProveError; + +// LOCAL BATCH PROVER +// ================================================================================================ + +/// A local 
prover for transaction batches, proving the transactions in a [`ProposedBatch`] and +/// returning a [`ProvenBatch`]. +pub struct LocalBatchProver { + proof_security_level: u32, +} + +impl LocalBatchProver { + /// Creates a new [`LocalBatchProver`] instance. + pub fn new(proof_security_level: u32) -> Self { + Self { proof_security_level } + } + + /// Attempts to prove the [`ProposedBatch`] into a [`ProvenBatch`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - a proof of any transaction in the batch fails to verify. + pub fn prove(&self, proposed_batch: ProposedBatch) -> Result { + let ( + transactions, + _block_header, + _block_chain, + _authenticatable_unauthenticated_notes, + id, + updated_accounts, + input_notes, + output_notes_smt, + output_notes, + batch_expiration_block_num, + ) = proposed_batch.into_parts(); + + let verifier = TransactionVerifier::new(self.proof_security_level); + + for tx in transactions { + verifier.verify(&tx).map_err(|source| { + BatchProveError::TransactionVerificationFailed { transaction_id: tx.id(), source } + })?; + } + + Ok(ProvenBatch::new( + id, + updated_accounts, + input_notes, + output_notes_smt, + output_notes, + batch_expiration_block_num, + )) + } +} diff --git a/crates/miden-tx-batch-prover/src/testing/mod.rs b/crates/miden-tx-batch-prover/src/testing/mod.rs new file mode 100644 index 000000000..8f5eeaba3 --- /dev/null +++ b/crates/miden-tx-batch-prover/src/testing/mod.rs @@ -0,0 +1,2 @@ +mod proven_tx_builder; +pub(crate) use proven_tx_builder::MockProvenTxBuilder; diff --git a/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs b/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs new file mode 100644 index 000000000..5a4b30def --- /dev/null +++ b/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs @@ -0,0 +1,111 @@ +use alloc::vec::Vec; + +use anyhow::Context; +use miden_crypto::merkle::MerklePath; +use miden_objects::{ + account::AccountId, + block::BlockNumber, + 
note::{Note, NoteInclusionProof, Nullifier}, + transaction::{InputNote, OutputNote, ProvenTransaction, ProvenTransactionBuilder}, + vm::ExecutionProof, +}; +use vm_processor::Digest; +use winterfell::Proof; + +/// A builder to build mocked [`ProvenTransaction`]s. +pub struct MockProvenTxBuilder { + account_id: AccountId, + initial_account_commitment: Digest, + final_account_commitment: Digest, + block_reference: Option<Digest>, + expiration_block_num: BlockNumber, + output_notes: Option<Vec<OutputNote>>, + input_notes: Option<Vec<InputNote>>, + nullifiers: Option<Vec<Nullifier>>, +} + +impl MockProvenTxBuilder { + /// Creates a new builder for a transaction executed against the given account with its initial + /// and final state commitment. + pub fn with_account( + account_id: AccountId, + initial_account_commitment: Digest, + final_account_commitment: Digest, + ) -> Self { + Self { + account_id, + initial_account_commitment, + final_account_commitment, + block_reference: None, + expiration_block_num: BlockNumber::from(u32::MAX), + output_notes: None, + input_notes: None, + nullifiers: None, + } + } + + /// Adds authenticated notes to the transaction. + #[must_use] + pub fn authenticated_notes(mut self, notes: Vec<Note>) -> Self { + let mock_proof = + NoteInclusionProof::new(BlockNumber::from(0), 0, MerklePath::new(vec![])).unwrap(); + self.input_notes = Some( + notes + .into_iter() + .map(|note| InputNote::authenticated(note, mock_proof.clone())) + .collect(), + ); + + self + } + + /// Adds unauthenticated notes to the transaction. + #[must_use] + pub fn unauthenticated_notes(mut self, notes: Vec<Note>) -> Self { + self.input_notes = Some(notes.into_iter().map(InputNote::unauthenticated).collect()); + + self + } + + /// Sets the transaction's expiration block number. + #[must_use] + pub fn expiration_block_num(mut self, expiration_block_num: BlockNumber) -> Self { + self.expiration_block_num = expiration_block_num; + + self + } + + /// Adds notes to the transaction's output notes. 
+ #[must_use] + pub fn output_notes(mut self, notes: Vec) -> Self { + self.output_notes = Some(notes); + + self + } + + /// Sets the transaction's block reference. + #[must_use] + pub fn block_reference(mut self, block_reference: Digest) -> Self { + self.block_reference = Some(block_reference); + + self + } + + /// Builds the [`ProvenTransaction`] and returns potential errors. + pub fn build(self) -> anyhow::Result { + ProvenTransactionBuilder::new( + self.account_id, + self.initial_account_commitment, + self.final_account_commitment, + BlockNumber::from(0), + self.block_reference.unwrap_or_default(), + self.expiration_block_num, + ExecutionProof::new(Proof::new_dummy(), Default::default()), + ) + .add_input_notes(self.input_notes.unwrap_or_default()) + .add_input_notes(self.nullifiers.unwrap_or_default()) + .add_output_notes(self.output_notes.unwrap_or_default()) + .build() + .context("failed to build proven transaction") + } +} diff --git a/crates/miden-tx-batch-prover/src/tests/mod.rs b/crates/miden-tx-batch-prover/src/tests/mod.rs new file mode 100644 index 000000000..19fd2c568 --- /dev/null +++ b/crates/miden-tx-batch-prover/src/tests/mod.rs @@ -0,0 +1 @@ +mod proposed_batch; diff --git a/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs b/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs new file mode 100644 index 000000000..7adc23c22 --- /dev/null +++ b/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs @@ -0,0 +1,591 @@ +use alloc::sync::Arc; +use std::collections::BTreeMap; + +use anyhow::Context; +use miden_crypto::merkle::MerkleError; +use miden_lib::transaction::TransactionKernel; +use miden_objects::{ + account::{Account, AccountId}, + batch::ProposedBatch, + block::BlockNumber, + note::{Note, NoteType}, + testing::{account_id::AccountIdBuilder, note::NoteBuilder}, + transaction::{ChainMmr, InputNote, InputNoteCommitment, OutputNote}, + BatchAccountUpdateError, ProposedBatchError, +}; +use miden_tx::testing::{Auth, MockChain}; +use 
rand::{rngs::SmallRng, SeedableRng}; +use vm_core::assert_matches; +use vm_processor::Digest; + +use crate::testing::MockProvenTxBuilder; + +fn mock_account_id(num: u8) -> AccountId { + AccountIdBuilder::new().build_with_rng(&mut SmallRng::from_seed([num; 32])) +} + +pub fn mock_note(num: u8) -> Note { + let sender = mock_account_id(num); + NoteBuilder::new(sender, SmallRng::from_seed([num; 32])) + .build(&TransactionKernel::assembler().with_debug_mode(true)) + .unwrap() +} + +pub fn mock_output_note(num: u8) -> OutputNote { + OutputNote::Full(mock_note(num)) +} + +struct TestSetup { + chain: MockChain, + account1: Account, + account2: Account, +} + +fn setup_chain() -> TestSetup { + let mut chain = MockChain::new(); + let account1 = chain.add_new_wallet(Auth::NoAuth); + let account2 = chain.add_new_wallet(Auth::NoAuth); + chain.seal_block(None); + + TestSetup { chain, account1, account2 } +} + +/// Tests that proposing a batch with an empty list of transactions returns an error. +#[test] +fn empty_transaction_batch() -> anyhow::Result<()> { + let TestSetup { chain, .. } = setup_chain(); + let block1 = chain.block_header(1); + + let error = ProposedBatch::new(vec![], block1, chain.chain(), BTreeMap::default()).unwrap_err(); + + assert_matches!(error, ProposedBatchError::EmptyTransactionBatch); + + Ok(()) +} + +/// Tests that a note created and consumed in the same batch are erased from the input and +/// output note commitments. 
+#[test] +fn note_created_and_consumed_in_same_batch() -> anyhow::Result<()> { + let TestSetup { mut chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + let block2 = chain.seal_block(None); + + let note = mock_note(40); + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .output_notes(vec![OutputNote::Full(note.clone())]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note.clone()]) + .build()?; + + let batch = ProposedBatch::new( + [tx1, tx2].into_iter().map(Arc::new).collect(), + block2.header(), + chain.chain(), + BTreeMap::default(), + )?; + + assert_eq!(batch.input_notes().num_notes(), 0); + assert_eq!(batch.output_notes().len(), 0); + assert_eq!(batch.output_notes_tree().num_leaves(), 0); + + Ok(()) +} + +/// Tests that an error is returned if the same unauthenticated input note appears multiple +/// times in different transactions. 
+#[test] +fn duplicate_unauthenticated_input_notes() -> anyhow::Result<()> { + let TestSetup { chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + + let note = mock_note(50); + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note.clone()]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note.clone()]) + .build()?; + + let error = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::DuplicateInputNote { + note_nullifier, + first_transaction_id, + second_transaction_id + } if note_nullifier == note.nullifier() && + first_transaction_id == tx1.id() && + second_transaction_id == tx2.id() + ); + + Ok(()) +} + +/// Tests that an error is returned if the same authenticated input note appears multiple +/// times in different transactions. 
+#[test] +fn duplicate_authenticated_input_notes() -> anyhow::Result<()> { + let TestSetup { mut chain, account1, account2 } = setup_chain(); + let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?; + let block1 = chain.block_header(1); + let block2 = chain.seal_block(None); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .authenticated_notes(vec![note.clone()]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .authenticated_notes(vec![note.clone()]) + .build()?; + + let error = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block2.header(), + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::DuplicateInputNote { + note_nullifier, + first_transaction_id, + second_transaction_id + } if note_nullifier == note.nullifier() && + first_transaction_id == tx1.id() && + second_transaction_id == tx2.id() + ); + + Ok(()) +} + +/// Tests that an error is returned if the same input note appears multiple times in different +/// transactions as an unauthenticated or authenticated note. 
+#[test] +fn duplicate_mixed_input_notes() -> anyhow::Result<()> { + let TestSetup { mut chain, account1, account2 } = setup_chain(); + let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?; + let block1 = chain.block_header(1); + let block2 = chain.seal_block(None); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note.clone()]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .authenticated_notes(vec![note.clone()]) + .build()?; + + let error = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block2.header(), + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::DuplicateInputNote { + note_nullifier, + first_transaction_id, + second_transaction_id + } if note_nullifier == note.nullifier() && + first_transaction_id == tx1.id() && + second_transaction_id == tx2.id() + ); + + Ok(()) +} + +/// Tests that an error is returned if the same output note appears multiple times in different +/// transactions. 
+#[test] +fn duplicate_output_notes() -> anyhow::Result<()> { + let TestSetup { chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + + let note0 = mock_output_note(50); + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .output_notes(vec![note0.clone()]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .output_notes(vec![note0.clone()]) + .build()?; + + let error = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::DuplicateOutputNote { + note_id, + first_transaction_id, + second_transaction_id + } if note_id == note0.id() && + first_transaction_id == tx1.id() && + second_transaction_id == tx2.id()); + + Ok(()) +} + +/// Test that an unauthenticated input note for which a proof exists is converted into an +/// authenticated one and becomes part of the batch's input note commitment. +#[test] +fn unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> { + let TestSetup { mut chain, account1, account2 } = setup_chain(); + let note0 = chain.add_p2id_note(account2.id(), account1.id(), &[], NoteType::Private, None)?; + let note1 = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?; + // The just created note will be provable against block2. + let block2 = chain.seal_block(None); + let block3 = chain.seal_block(None); + let block4 = chain.seal_block(None); + + // Consume the authenticated note as an unauthenticated one in the transaction. 
+ let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block3.hash()) + .unauthenticated_notes(vec![note1.clone()]) + .build()?; + + let input_note0 = chain.available_notes_map().get(&note0.id()).expect("note not found"); + let note_inclusion_proof0 = input_note0.proof().expect("note should be of type authenticated"); + + let input_note1 = chain.available_notes_map().get(&note1.id()).expect("note not found"); + let note_inclusion_proof1 = input_note1.proof().expect("note should be of type authenticated"); + + // The chain MMR will contain all blocks in the mock chain, in particular block2 which both note + // inclusion proofs need for verification. + let chain_mmr = chain.chain(); + + // Case 1: Error: A wrong proof is passed. + // -------------------------------------------------------------------------------------------- + + let error = ProposedBatch::new( + [tx1.clone()].into_iter().map(Arc::new).collect(), + block4.header(), + chain_mmr.clone(), + BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof0.clone())]), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::UnauthenticatedNoteAuthenticationFailed { + note_id, + block_num, + source: MerkleError::ConflictingRoots { .. }, + } if note_id == note1.id() && + block_num == block2.header().block_num() + ); + + // Case 2: Error: The block referenced by the (valid) note inclusion proof is missing. + // -------------------------------------------------------------------------------------------- + + // Make a clone of the chain mmr where block2 is missing. 
+ let mut mmr = chain_mmr.mmr().clone(); + mmr.untrack(block2.header().block_num().as_usize()); + let blocks = chain_mmr + .block_headers() + .filter(|header| header.block_num() != block2.header().block_num()) + .copied(); + + let error = ProposedBatch::new( + [tx1.clone()].into_iter().map(Arc::new).collect(), + block4.header(), + ChainMmr::new(mmr, blocks).context("failed to build chain mmr with missing block")?, + BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof1.clone())]), + ) + .unwrap_err(); + + assert_matches!( + error, + ProposedBatchError::UnauthenticatedInputNoteBlockNotInChainMmr { + block_number, + note_id + } if block_number == note_inclusion_proof1.location().block_num() && + note_id == input_note1.id() + ); + + // Case 3: Success: The correct proof is passed. + // -------------------------------------------------------------------------------------------- + + let batch = ProposedBatch::new( + [tx1].into_iter().map(Arc::new).collect(), + block4.header(), + chain_mmr, + BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof1.clone())]), + )?; + + // We expect the unauthenticated input note to have become an authenticated one, + // meaning it is part of the input note commitment. + assert_eq!(batch.input_notes().num_notes(), 1); + assert!(batch + .input_notes() + .iter() + .any(|commitment| commitment == &InputNoteCommitment::from(input_note1))); + assert_eq!(batch.output_notes().len(), 0); + + Ok(()) +} + +/// Test that an authenticated input note that is also created in the same batch does not error +/// and instead is marked as consumed. +/// - This requires a nullifier collision on the input and output note which is very unlikely in +/// practice. +/// - This makes the created note unspendable as its nullifier is added to the nullifier tree. 
+/// - The batch kernel cannot return an error in this case as it can't detect this condition due to +/// only having the nullifier for authenticated input notes _but_ not having the nullifier for +/// private output notes. +/// - We test this to ensure the kernel does something reasonable in this case and it is not an +/// attack vector. +#[test] +fn authenticated_note_created_in_same_batch() -> anyhow::Result<()> { + let TestSetup { mut chain, account1, account2 } = setup_chain(); + let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?; + let block1 = chain.block_header(1); + let block2 = chain.seal_block(None); + + let note0 = mock_note(50); + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .output_notes(vec![OutputNote::Full(note0.clone())]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .authenticated_notes(vec![note.clone()]) + .build()?; + + let batch = ProposedBatch::new( + [tx1, tx2].into_iter().map(Arc::new).collect(), + block2.header(), + chain.chain(), + BTreeMap::default(), + )?; + + assert_eq!(batch.input_notes().num_notes(), 1); + assert_eq!(batch.output_notes().len(), 1); + assert_eq!(batch.output_notes_tree().num_leaves(), 1); + + Ok(()) +} + +/// Test that multiple transactions against the same account +/// 1) can be correctly executed when in the right order, +/// 2) and that an error is returned if they are incorrectly ordered. +#[test] +fn multiple_transactions_against_same_account() -> anyhow::Result<()> { + let TestSetup { chain, account1, .. } = setup_chain(); + let block1 = chain.block_header(1); + + // Use some random hash as the initial state commitment of tx1. 
+ let initial_state_commitment = Digest::default(); + let tx1 = + MockProvenTxBuilder::with_account(account1.id(), initial_state_commitment, account1.hash()) + .block_reference(block1.hash()) + .output_notes(vec![mock_output_note(0)]) + .build()?; + + // Use some random hash as the final state commitment of tx2. + let final_state_commitment = mock_note(10).hash(); + let tx2 = + MockProvenTxBuilder::with_account(account1.id(), account1.hash(), final_state_commitment) + .block_reference(block1.hash()) + .build()?; + + // Success: Transactions are correctly ordered. + let batch = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + )?; + + assert_eq!(batch.account_updates().len(), 1); + // Assert that the initial state commitment from tx1 is used and the final state commitment + // from tx2. + assert_eq!( + batch.account_updates().get(&account1.id()).unwrap().initial_state_commitment(), + initial_state_commitment + ); + assert_eq!( + batch.account_updates().get(&account1.id()).unwrap().final_state_commitment(), + final_state_commitment + ); + + // Error: Transactions are incorrectly ordered. + let error = ProposedBatch::new( + [tx2.clone(), tx1.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!( + error, + ProposedBatchError::AccountUpdateError { + source: BatchAccountUpdateError::AccountUpdateInitialStateMismatch(tx_id), + .. + } if tx_id == tx1.id() + ); + + Ok(()) +} + +/// Tests that the input and outputs notes commitment is correctly computed. +/// - Notes created and consumed in the same batch are erased from these commitments. +/// - The input note commitment is sorted by the order in which the notes appeared in the batch. +/// - The output note commitment is sorted by [`NoteId`]. 
+#[test] +fn input_and_output_notes_commitment() -> anyhow::Result<()> { + let TestSetup { chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + + let note0 = mock_output_note(50); + let note1 = mock_note(60); + let note2 = mock_output_note(70); + let note3 = mock_output_note(80); + let note4 = mock_note(90); + let note5 = mock_note(100); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note1.clone(), note5.clone()]) + .output_notes(vec![note0.clone()]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note4.clone()]) + .output_notes(vec![OutputNote::Full(note1.clone()), note2.clone(), note3.clone()]) + .build()?; + + let batch = ProposedBatch::new( + [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + )?; + + // We expect note1 to be erased from the input/output notes as it is created and consumed + // in the batch. + let mut expected_output_notes = [note0, note2, note3]; + // We expect a vector sorted by NoteId. + expected_output_notes.sort_unstable_by_key(OutputNote::id); + + assert_eq!(batch.output_notes().len(), 3); + assert_eq!(batch.output_notes(), expected_output_notes); + + assert_eq!(batch.output_notes_tree().num_leaves(), 3); + + // Input notes are sorted by the order in which they appeared in the batch. + assert_eq!(batch.input_notes().num_notes(), 2); + assert_eq!( + batch.input_notes().clone().into_vec(), + &[ + InputNoteCommitment::from(&InputNote::unauthenticated(note5)), + InputNoteCommitment::from(&InputNote::unauthenticated(note4)), + ] + ); + + Ok(()) +} + +/// Tests that the expiration block number of a batch is the minimum of all contained transactions. 
+#[test] +fn batch_expiration() -> anyhow::Result<()> { + let TestSetup { chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .expiration_block_num(BlockNumber::from(35)) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .expiration_block_num(BlockNumber::from(30)) + .build()?; + + let batch = ProposedBatch::new( + [tx1, tx2].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + )?; + + assert_eq!(batch.batch_expiration_block_num(), BlockNumber::from(30)); + + Ok(()) +} + +/// Tests that passing duplicate transactions in a batch returns an error. +#[test] +fn duplicate_transaction() -> anyhow::Result<()> { + let TestSetup { chain, account1, .. } = setup_chain(); + let block1 = chain.block_header(1); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + .block_reference(block1.hash()) + .expiration_block_num(BlockNumber::from(35)) + .build()?; + + let error = ProposedBatch::new( + [tx1.clone(), tx1.clone()].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + ) + .unwrap_err(); + + assert_matches!(error, ProposedBatchError::DuplicateTransaction { transaction_id } if transaction_id == tx1.id()); + + Ok(()) +} + +/// Tests that transactions with a circular dependency between notes are accepted: +/// TX 1: Inputs [X] -> Outputs [Y] +/// TX 2: Inputs [Y] -> Outputs [X] +#[test] +fn circular_note_dependency() -> anyhow::Result<()> { + let TestSetup { chain, account1, account2 } = setup_chain(); + let block1 = chain.block_header(1); + + let note_x = mock_note(20); + let note_y = mock_note(30); + + let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash()) + 
.block_reference(block1.hash()) + .unauthenticated_notes(vec![note_x.clone()]) + .output_notes(vec![OutputNote::Full(note_y.clone())]) + .build()?; + let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash()) + .block_reference(block1.hash()) + .unauthenticated_notes(vec![note_y.clone()]) + .output_notes(vec![OutputNote::Full(note_x.clone())]) + .build()?; + + let batch = ProposedBatch::new( + [tx1, tx2].into_iter().map(Arc::new).collect(), + block1, + chain.chain(), + BTreeMap::default(), + )?; + + assert_eq!(batch.input_notes().num_notes(), 0); + assert_eq!(batch.output_notes().len(), 0); + + Ok(()) +} diff --git a/crates/miden-tx/src/prover/mod.rs b/crates/miden-tx/src/prover/mod.rs index e6445e665..c1c5ed14c 100644 --- a/crates/miden-tx/src/prover/mod.rs +++ b/crates/miden-tx/src/prover/mod.rs @@ -95,6 +95,7 @@ impl TransactionProver for LocalTransactionProver { let account = tx_inputs.account(); let input_notes = tx_inputs.input_notes(); + let block_num = tx_inputs.block_header().block_num(); let block_hash = tx_inputs.block_header().hash(); // execute and prove @@ -137,6 +138,7 @@ impl TransactionProver for LocalTransactionProver { account.id(), account.init_hash(), tx_outputs.account.hash(), + block_num, block_hash, tx_outputs.expiration_block_num, proof, diff --git a/crates/miden-tx/src/testing/mock_chain/mod.rs b/crates/miden-tx/src/testing/mock_chain/mod.rs index 4e873aa29..c346cb1bc 100644 --- a/crates/miden-tx/src/testing/mock_chain/mod.rs +++ b/crates/miden-tx/src/testing/mock_chain/mod.rs @@ -17,7 +17,7 @@ use miden_objects::{ }, crypto::{ dsa::rpo_falcon512::SecretKey, - merkle::{Mmr, MmrError, PartialMmr, Smt}, + merkle::{Mmr, Smt}, }, note::{Note, NoteId, NoteInclusionProof, NoteType, Nullifier}, testing::account_code::DEFAULT_AUTH_SCRIPT, @@ -633,8 +633,8 @@ impl MockChain { input_notes.push(InputNote::Unauthenticated { note: note.clone() }) } - let block_headers: Vec = 
block_headers_map.values().cloned().collect(); - let mmr = mmr_to_chain_mmr(&self.chain, &block_headers).unwrap(); + let block_headers = block_headers_map.values().cloned(); + let mmr = ChainMmr::from_mmr(&self.chain, block_headers).unwrap(); TransactionInputs::new( account, @@ -782,8 +782,11 @@ impl MockChain { /// Gets the latest [ChainMmr]. pub fn chain(&self) -> ChainMmr { - let block_headers: Vec = self.blocks.iter().map(|b| b.header()).collect(); - mmr_to_chain_mmr(&self.chain, &block_headers).unwrap() + // We cannot pass the latest block as that would violate the condition in the transaction + // inputs that the chain length of the mmr must match the number of the reference block. + let block_headers = self.blocks.iter().map(|b| b.header()).take(self.blocks.len() - 1); + + ChainMmr::from_mmr(&self.chain, block_headers).unwrap() } /// Gets a reference to [BlockHeader] with `block_number`. @@ -801,6 +804,11 @@ impl MockChain { self.available_notes.values().cloned().collect() } + /// Returns the map of note IDs to consumable input notes. + pub fn available_notes_map(&self) -> &BTreeMap { + &self.available_notes + } + /// Get the reference to the accounts hash tree. pub fn accounts(&self) -> &SimpleSmt { &self.accounts @@ -816,20 +824,3 @@ enum AccountState { New, Exists, } - -// HELPER FUNCTIONS -// ================================================================================================ - -/// Converts the MMR into partial MMR by copying all leaves from MMR to partial MMR. 
-fn mmr_to_chain_mmr(mmr: &Mmr, blocks: &[BlockHeader]) -> Result { - let target_forest = mmr.forest() - 1; - let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks_at(target_forest)?); - - for i in 0..target_forest { - let node = mmr.get(i)?; - let path = mmr.open_at(i, target_forest)?.merkle_path; - partial_mmr.track(i, node, &path)?; - } - - Ok(ChainMmr::new(partial_mmr, blocks.to_vec()).unwrap()) -} diff --git a/crates/miden-tx/src/tests/mod.rs b/crates/miden-tx/src/tests/mod.rs index 09ba08157..45e75cc45 100644 --- a/crates/miden-tx/src/tests/mod.rs +++ b/crates/miden-tx/src/tests/mod.rs @@ -829,7 +829,7 @@ fn prove_witness_and_verify() { let serialized_transaction = proven_transaction.to_bytes(); let proven_transaction = ProvenTransaction::read_from_bytes(&serialized_transaction).unwrap(); let verifier = TransactionVerifier::new(MIN_PROOF_SECURITY_LEVEL); - assert!(verifier.verify(proven_transaction).is_ok()); + assert!(verifier.verify(&proven_transaction).is_ok()); } // TEST TRANSACTION SCRIPT diff --git a/crates/miden-tx/src/verifier/mod.rs b/crates/miden-tx/src/verifier/mod.rs index 6e6b8e6ae..d42b4944c 100644 --- a/crates/miden-tx/src/verifier/mod.rs +++ b/crates/miden-tx/src/verifier/mod.rs @@ -30,7 +30,7 @@ impl TransactionVerifier { /// Returns an error if: /// - Transaction verification fails. /// - The security level of the verified proof is insufficient. 
- pub fn verify(&self, transaction: ProvenTransaction) -> Result<(), TransactionVerifierError> { + pub fn verify(&self, transaction: &ProvenTransaction) -> Result<(), TransactionVerifierError> { // build stack inputs and outputs let stack_inputs = TransactionKernel::build_input_stack( transaction.account_id(), diff --git a/crates/miden-tx/tests/integration/main.rs b/crates/miden-tx/tests/integration/main.rs index 9842315a6..a43fee853 100644 --- a/crates/miden-tx/tests/integration/main.rs +++ b/crates/miden-tx/tests/integration/main.rs @@ -61,7 +61,7 @@ pub fn prove_and_verify_transaction( // Verify that the generated proof is valid let verifier = TransactionVerifier::new(miden_objects::MIN_PROOF_SECURITY_LEVEL); - verifier.verify(proven_transaction) + verifier.verify(&proven_transaction) } #[cfg(test)]