From 8437e82f7a346f08a1bfa34cc171d2a4ff95dfde Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Thu, 23 Jan 2025 17:48:08 -0800 Subject: [PATCH 01/17] chore: increment crate versions to v0.8.0 and MSRV to 1.84 --- CHANGELOG.md | 4 ++++ Cargo.lock | 18 +++++++++--------- Cargo.toml | 14 +++++++------- README.md | 4 ++-- rust-toolchain.toml | 2 +- 5 files changed, 23 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ed656ea..1824a4db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Changes + +- [BREAKING] Updated minimum Rust version to 1.84. + ## v0.7.0 (2025-01-23) ### Enhancements diff --git a/Cargo.lock b/Cargo.lock index a1363a3c..9009ea63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -1481,7 +1481,7 @@ dependencies = [ [[package]] name = "miden-faucet" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "axum", @@ -1574,7 +1574,7 @@ dependencies = [ [[package]] name = "miden-node" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "clap", @@ -1595,7 +1595,7 @@ dependencies = [ [[package]] name = "miden-node-block-producer" -version = "0.7.0" +version = "0.8.0" dependencies = [ "assert_matches", "async-trait", @@ -1623,7 +1623,7 @@ dependencies = [ [[package]] name = "miden-node-proto" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "hex", @@ -1640,7 +1640,7 @@ dependencies = [ [[package]] name = "miden-node-rpc" -version = "0.7.0" +version = "0.8.0" dependencies = [ "miden-node-proto", "miden-node-utils", @@ -1656,7 +1656,7 @@ dependencies = [ [[package]] name = "miden-node-store" -version = "0.7.0" +version = "0.8.0" dependencies = [ "assert_matches", "deadpool-sqlite", @@ -1685,7 +1685,7 @@ dependencies = [ [[package]] name = "miden-node-utils" -version = "0.7.0" +version = "0.8.0" 
dependencies = [ "anyhow", "figment", @@ -1751,7 +1751,7 @@ dependencies = [ [[package]] name = "miden-rpc-proto" -version = "0.7.0" +version = "0.8.0" [[package]] name = "miden-stdlib" diff --git a/Cargo.toml b/Cargo.toml index 92dbb184..ff800d2d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,8 +15,8 @@ resolver = "2" [workspace.package] edition = "2021" -rust-version = "1.82" -version = "0.7.0" +rust-version = "1.84" +version = "0.8.0" license = "MIT" authors = ["Miden contributors"] homepage = "https://polygon.technology/polygon-miden" @@ -28,12 +28,12 @@ readme = "README.md" assert_matches = { version = "1.5" } miden-air = { version = "0.12" } miden-lib = { version = "0.7" } -miden-node-block-producer = { path = "crates/block-producer", version = "0.7" } -miden-node-proto = { path = "crates/proto", version = "0.7" } -miden-node-rpc = { path = "crates/rpc", version = "0.7" } -miden-node-store = { path = "crates/store", version = "0.7" } +miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } +miden-node-proto = { path = "crates/proto", version = "0.8" } +miden-node-rpc = { path = "crates/rpc", version = "0.8" } +miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } -miden-node-utils = { path = "crates/utils", version = "0.7" } +miden-node-utils = { path = "crates/utils", version = "0.8" } miden-objects = { version = "0.7" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } diff --git a/README.md b/README.md index 6ea1de0c..39048821 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xPolygonMiden/miden-node/blob/main/LICENSE) [![test](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml) 
-[![RUST_VERSION](https://img.shields.io/badge/rustc-1.82+-lightgray.svg)](https://www.rust-lang.org/tools/install) +[![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![crates.io](https://img.shields.io/crates/v/miden-node)](https://crates.io/crates/miden-node) This repository holds the Miden node; that is, the software which processes transactions and creates blocks for the Miden rollup. @@ -58,7 +58,7 @@ sudo dpkg -i $package_name.deb ### Install using `cargo` -Install Rust version **1.82** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). +Install Rust version **1.84** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). Depending on the platform, you may need to install additional libraries. For example, on Ubuntu 22.04 the following command ensures that all required libraries are installed. diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 6ad542bb..217b108a 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.82" +channel = "1.84" components = ["rustfmt", "rust-src", "clippy"] profile = "minimal" From 44394e4a5b8c9f4313c1059486a4e474a1741a5d Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Sun, 26 Jan 2025 21:57:14 +1300 Subject: [PATCH 02/17] ci: fmt check for TOML files (#645) --- .github/workflows/lint.yml | 10 +++++ .taplo.toml | 6 +++ Cargo.toml | 75 ++++++++++++++++---------------- Makefile | 12 ++++- bin/faucet/Cargo.toml | 62 +++++++++++++------------- bin/node/Cargo.toml | 48 ++++++++++---------- config/genesis.toml | 10 ++--- config/miden-faucet.toml | 10 ++--- config/miden-node.toml | 6 +-- crates/block-producer/Cargo.toml | 66 ++++++++++++++-------------- crates/proto/Cargo.toml | 36 +++++++-------- crates/rpc-proto/Cargo.toml | 24 +++++----- crates/rpc/Cargo.toml | 36 
+++++++-------- crates/store/Cargo.toml | 50 ++++++++++----------- crates/test-macro/Cargo.toml | 22 +++++----- crates/utils/Cargo.toml | 44 +++++++++---------- rust-toolchain.toml | 6 +-- rustfmt.toml | 34 +++++++-------- 18 files changed, 292 insertions(+), 265 deletions(-) create mode 100644 .taplo.toml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5c898fde..756c80f0 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -44,6 +44,16 @@ jobs: - name: Clippy run: make clippy + toml: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: taiki-e/install-action@v2 + with: + tool: taplo-cli + - run: make toml-check + doc: name: doc runs-on: ubuntu-latest diff --git a/.taplo.toml b/.taplo.toml new file mode 100644 index 00000000..b735451f --- /dev/null +++ b/.taplo.toml @@ -0,0 +1,6 @@ +[formatting] +align_entries = true +column_width = 120 +reorder_arrays = true +reorder_inline_tables = true +reorder_keys = true diff --git a/Cargo.toml b/Cargo.toml index ff800d2d..ccf2199d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,62 +1,63 @@ [workspace] members = [ - "bin/node", "bin/faucet", + "bin/node", "crates/block-producer", "crates/proto", - "crates/rpc-proto", "crates/rpc", + "crates/rpc-proto", "crates/store", - "crates/utils", "crates/test-macro", + "crates/utils", ] resolver = "2" [workspace.package] -edition = "2021" +authors = ["Miden contributors"] +edition = "2021" +exclude = [".github/"] +homepage = "https://polygon.technology/polygon-miden" +license = "MIT" +readme = "README.md" +repository = "https://github.com/0xPolygonMiden/miden-node" rust-version = "1.84" -version = "0.8.0" -license = "MIT" -authors = ["Miden contributors"] -homepage = "https://polygon.technology/polygon-miden" -repository = "https://github.com/0xPolygonMiden/miden-node" -exclude = [".github/"] -readme = "README.md" +version = "0.8.0" [workspace.dependencies] -assert_matches = { version = "1.5" } 
-miden-air = { version = "0.12" } -miden-lib = { version = "0.7" } +assert_matches = { version = "1.5" } +miden-air = { version = "0.12" } +miden-lib = { version = "0.7" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } -miden-node-proto = { path = "crates/proto", version = "0.8" } -miden-node-rpc = { path = "crates/rpc", version = "0.8" } -miden-node-store = { path = "crates/store", version = "0.8" } -miden-node-test-macro = { path = "crates/test-macro" } -miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { version = "0.7" } -miden-processor = { version = "0.12" } -miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { version = "0.7" } -prost = { version = "0.13" } -rand = { version = "0.8" } -thiserror = { version = "2.0", default-features = false } -tokio = { version = "1.40", features = ["rt-multi-thread"] } -tokio-stream = { version = "0.1" } -tonic = { version = "0.12" } -tracing = { version = "0.1" } -tracing-subscriber = { version = "0.3", features = ["fmt", "json", "env-filter"] } +miden-node-proto = { path = "crates/proto", version = "0.8" } +miden-node-rpc = { path = "crates/rpc", version = "0.8" } +miden-node-store = { path = "crates/store", version = "0.8" } +miden-node-test-macro = { path = "crates/test-macro" } +miden-node-utils = { path = "crates/utils", version = "0.8" } +miden-objects = { version = "0.7" } +miden-processor = { version = "0.12" } +miden-stdlib = { version = "0.12", default-features = false } +miden-tx = { version = "0.7" } +prost = { version = "0.13" } +rand = { version = "0.8" } +thiserror = { version = "2.0", default-features = false } +tokio = { version = "1.40", features = ["rt-multi-thread"] } +tokio-stream = { version = "0.1" } +tonic = { version = "0.12" } +tracing = { version = "0.1" } +tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } # Lints are set to warn for development, which are promoted to errors in 
CI. [workspace.lints.clippy] # Pedantic lints are set to a lower priority which allows lints in the group to be selectively enabled. pedantic = { level = "warn", priority = -1 } -cast_possible_truncation = "allow" # Overly many instances especially regarding indices. -ignored_unit_patterns = "allow" # Stylistic choice. + +cast_possible_truncation = "allow" # Overly many instances especially regarding indices. +ignored_unit_patterns = "allow" # Stylistic choice. large_types_passed_by_value = "allow" # Triggered by BlockHeader being Copy + 334 bytes. -missing_errors_doc = "allow" # TODO: fixup and enable this. -missing_panics_doc = "allow" # TODO: fixup and enable this. -module_name_repetitions = "allow" # Many triggers, and is a stylistic choice. -must_use_candidate = "allow" # This marks many fn's which isn't helpful. +missing_errors_doc = "allow" # TODO: fixup and enable this. +missing_panics_doc = "allow" # TODO: fixup and enable this. +module_name_repetitions = "allow" # Many triggers, and is a stylistic choice. +must_use_candidate = "allow" # This marks many fn's which isn't helpful. should_panic_without_expect = "allow" # We don't care about the specific panic message. # End of pedantic lints. 
diff --git a/Makefile b/Makefile index b8f6bc3c..92039013 100644 --- a/Makefile +++ b/Makefile @@ -31,8 +31,18 @@ format-check: ## Runs Format using nightly toolchain but only in check mode cargo +nightly fmt --all --check +.PHONY: toml +toml: ## Runs Format for all TOML files + taplo fmt + + +.PHONY: toml-check +toml-check: ## Runs Format for all TOML files but only in check mode + taplo fmt --check --verbose + + .PHONY: lint -lint: format fix clippy ## Runs all linting tasks at once (Clippy, fixing, formatting) +lint: format fix clippy toml ## Runs all linting tasks at once (Clippy, fixing, formatting) # --- docs ---------------------------------------------------------------------------------------- diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index cce8844f..ce942f12 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -1,44 +1,44 @@ [package] -name = "miden-faucet" -version.workspace = true -description = "Miden node token faucet" -readme = "README.md" -keywords = ["miden", "node", "faucet"] -edition.workspace = true +authors.workspace = true +description = "Miden node token faucet" +edition.workspace = true +homepage.workspace = true +keywords = ["faucet", "miden", "node"] +license.workspace = true +name = "miden-faucet" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -anyhow = "1.0" -axum = { version = "0.7", features = ["tokio"] } -clap = { version = "4.5", features = ["derive", "string"] } -http = "1.1" -http-body-util = "0.1" -miden-lib = { workspace = true } +anyhow = "1.0" +axum = { version = "0.7", features = ["tokio"] } +clap = { version = "4.5", features = ["derive", "string"] } +http = "1.1" +http-body-util = "0.1" +miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = 
true } -miden-objects = { workspace = true } -miden-tx = { workspace = true, features = ["concurrent"] } -mime = "0.3" -rand = { workspace = true } -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -static-files = "0.2" -thiserror = { workspace = true } -tokio = { workspace = true, features = ["fs"] } -toml = { version = "0.8" } -tonic = { workspace = true } -tower = "0.5" -tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-tx = { workspace = true, features = ["concurrent"] } +mime = "0.3" +rand = { workspace = true } +rand_chacha = "0.3" +serde = { version = "1.0", features = ["derive"] } +static-files = "0.2" +thiserror = { workspace = true } +tokio = { workspace = true, features = ["fs"] } +toml = { version = "0.8" } +tonic = { workspace = true } +tower = "0.5" +tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } +tracing = { workspace = true } [build-dependencies] # Required to inject build metadata. 
miden-node-utils = { workspace = true, features = ["vergen"] } -static-files = "0.2" +static-files = "0.2" diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 37eda9a1..0f1e505f 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -1,15 +1,15 @@ [package] -name = "miden-node" -version.workspace = true -description = "Miden node binary" -readme.workspace = true -keywords = ["miden", "node"] -edition.workspace = true +authors.workspace = true +description = "Miden node binary" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node"] +license.workspace = true +name = "miden-node" +readme.workspace = true +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -18,23 +18,23 @@ workspace = true tracing-forest = ["miden-node-block-producer/tracing-forest"] [dependencies] -anyhow = { version = "1.0" } -clap = { version = "4.5", features = ["derive", "string"] } -miden-lib = { workspace = true } +anyhow = { version = "1.0" } +clap = { version = "4.5", features = ["derive", "string"] } +miden-lib = { workspace = true } miden-node-block-producer = { workspace = true } -miden-node-rpc = { workspace = true } -miden-node-store = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -rand = { workspace = true } -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros"] } -toml = { version = "0.8" } -tracing = { workspace = true } +miden-node-rpc = { workspace = true } +miden-node-store = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +rand = { workspace = true } +rand_chacha = "0.3" +serde = { version = "1.0", features = ["derive"] } +tokio = { workspace = true, features = ["macros", 
"net", "rt-multi-thread"] } +toml = { version = "0.8" } +tracing = { workspace = true } [dev-dependencies] -figment = { version = "0.10", features = ["toml", "env", "test"] } +figment = { version = "0.10", features = ["env", "test", "toml"] } miden-node-utils = { workspace = true, features = ["tracing-forest"] } [build-dependencies] diff --git a/config/genesis.toml b/config/genesis.toml index 8c74a4dd..52d98d14 100644 --- a/config/genesis.toml +++ b/config/genesis.toml @@ -1,11 +1,11 @@ # This is an example genesis input file for the Miden node. -version = 1 timestamp = 1672531200 +version = 1 [[accounts]] -type = "BasicFungibleFaucet" +auth_scheme = "RpoFalcon512" +decimals = 12 +max_supply = 1000000 storage_mode = "public" -auth_scheme = "RpoFalcon512" token_symbol = "POL" -decimals = 12 -max_supply = 1000000 +type = "BasicFungibleFaucet" diff --git a/config/miden-faucet.toml b/config/miden-faucet.toml index 8124e33d..6005ef6a 100644 --- a/config/miden-faucet.toml +++ b/config/miden-faucet.toml @@ -1,5 +1,5 @@ -endpoint = { host = "localhost", port = 8080 } -node_url = "http://localhost:57291" -timeout_ms = 10000 -asset_amount_options = [100, 500, 1000] -faucet_account_path = "accounts/faucet.mac" +asset_amount_options = [100, 1000, 500] +endpoint = { host = "localhost", port = 8080 } +faucet_account_path = "accounts/faucet.mac" +node_url = "http://localhost:57291" +timeout_ms = 10000 diff --git a/config/miden-node.toml b/config/miden-node.toml index b4b133c1..d260f189 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -13,7 +13,7 @@ endpoint = { host = "0.0.0.0", port = 57291 } [store] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-store', 1)) % 2**16 -endpoint = { host = "localhost", port = 28943 } +blockstore_dir = "/opt/miden/blocks" database_filepath = "/opt/miden/miden-store.sqlite3" -genesis_filepath = "/opt/miden/genesis.dat" -blockstore_dir = "/opt/miden/blocks" +endpoint = { host = "localhost", port = 28943 } 
+genesis_filepath = "/opt/miden/genesis.dat" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 27d52be8..e48af5ee 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -1,15 +1,15 @@ [package] -name = "miden-node-block-producer" -version.workspace = true -description = "Miden node's block producer component" -readme = "README.md" -keywords = ["miden", "node", "block-producer"] -edition.workspace = true +authors.workspace = true +description = "Miden node's block producer component" +edition.workspace = true +homepage.workspace = true +keywords = ["block-producer", "miden", "node"] +license.workspace = true +name = "miden-node-block-producer" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -18,31 +18,31 @@ workspace = true tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] -async-trait = { version = "0.1" } -itertools = { version = "0.13" } -miden-lib = { workspace = true } +async-trait = { version = "0.1" } +itertools = { version = "0.13" } +miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-processor = { workspace = true } -miden-stdlib = { workspace = true } -miden-tx = { workspace = true } -rand = { version = "0.8" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros", "sync", "time"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-processor = { workspace = true } +miden-stdlib = { workspace = true } +miden-tx = { workspace = true } +rand = { version = 
"0.8" } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } [dev-dependencies] -assert_matches = { workspace = true} -miden-air = { workspace = true } -miden-lib = { workspace = true, features = ["testing"] } +assert_matches = { workspace = true } +miden-air = { workspace = true } +miden-lib = { workspace = true, features = ["testing"] } miden-node-test-macro = { path = "../test-macro" } -miden-objects = { workspace = true, features = ["testing"] } -miden-tx = { workspace = true, features = ["testing"] } -pretty_assertions = "1.4" -rand_chacha = { version = "0.3", default-features = false } -tokio = { workspace = true, features = ["test-util"] } -winterfell = { version = "0.11" } +miden-objects = { workspace = true, features = ["testing"] } +miden-tx = { workspace = true, features = ["testing"] } +pretty_assertions = "1.4" +rand_chacha = { version = "0.3", default-features = false } +tokio = { workspace = true, features = ["test-util"] } +winterfell = { version = "0.11" } diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 445a93a7..84cf5ae3 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -1,33 +1,33 @@ [package] -name = "miden-node-proto" -version.workspace = true -description = "Miden node message definitions (Store, Block Producer and RPC)" -readme = "README.md" -keywords = ["miden", "node", "protobuf", "rpc"] -edition.workspace = true +authors.workspace = true +description = "Miden node message definitions (Store, Block Producer and RPC)" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "protobuf", "rpc"] +license.workspace = true +name = "miden-node-proto" +readme = "README.md" +repository.workspace = true rust-version.workspace = true 
-license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -hex = { version = "0.4" } +hex = { version = "0.4" } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -prost = { workspace = true } -thiserror = { workspace = true } -tonic = { workspace = true } +miden-objects = { workspace = true } +prost = { workspace = true } +thiserror = { workspace = true } +tonic = { workspace = true } [dev-dependencies] proptest = { version = "1.5" } [build-dependencies] -anyhow = { version = "1.0" } -prost = { workspace = true } +anyhow = { version = "1.0" } +prost = { workspace = true } prost-build = { version = "0.13" } -protox = { version = "0.7" } +protox = { version = "0.7" } tonic-build = { version = "0.12" } diff --git a/crates/rpc-proto/Cargo.toml b/crates/rpc-proto/Cargo.toml index a9015ed6..d4ee4136 100644 --- a/crates/rpc-proto/Cargo.toml +++ b/crates/rpc-proto/Cargo.toml @@ -1,20 +1,20 @@ [package] -name = "miden-rpc-proto" -version.workspace = true -description = "Miden node RPC message definitions" -readme = "README.md" -keywords = ["miden", "node", "protobuf", "rpc"] -categories = ["no-std::no-alloc"] -edition.workspace = true +authors.workspace = true +categories = ["no-std::no-alloc"] +description = "Miden node RPC message definitions" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "protobuf", "rpc"] +license.workspace = true +name = "miden-rpc-proto" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [features] default = ["std"] -std = [] +std = [] diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 3dfba524..59391509 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -1,15 
+1,15 @@ [package] -name = "miden-node-rpc" -version.workspace = true -description = "Miden node's front-end RPC server" -readme = "README.md" -keywords = ["miden", "node", "rpc"] -edition.workspace = true +authors.workspace = true +description = "Miden node's front-end RPC server" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "rpc"] +license.workspace = true +name = "miden-node-rpc" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -17,14 +17,14 @@ workspace = true [dependencies] miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-tx = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tonic-web = { version = "0.12" } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-tx = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tonic-web = { version = "0.12" } +tracing = { workspace = true } [dev-dependencies] miden-node-utils = { workspace = true, features = ["tracing-forest"] } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 8eec38f2..f987304b 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -1,36 +1,36 @@ [package] -name = "miden-node-store" -version.workspace = true -description = "Miden node's state store component" -readme = "README.md" -keywords = ["miden", "node", "store"] -edition.workspace = true +authors.workspace = true 
+description = "Miden node's state store component" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "store"] +license.workspace = true +name = "miden-node-store" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -deadpool-sqlite = { version = "0.9.0", features = ["rt_tokio_1"] } -hex = { version = "0.4" } -miden-lib = { workspace = true } -miden-node-proto = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -rusqlite = { version = "0.32.1", features = ["array", "buildtime_bindgen", "bundled"] } +deadpool-sqlite = { version = "0.9.0", features = ["rt_tokio_1"] } +hex = { version = "0.4" } +miden-lib = { workspace = true } +miden-node-proto = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +rusqlite = { version = "0.32.1", features = ["array", "buildtime_bindgen", "bundled"] } rusqlite_migration = { version = "1.3" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["fs", "net", "macros", "rt-multi-thread"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["fs", "macros", "net", "rt-multi-thread"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } [dev-dependencies] -assert_matches = { workspace = true} +assert_matches = { workspace = true } miden-node-utils = { workspace = true, features = ["tracing-forest"] } -miden-objects = { workspace = true, features = ["testing"] } +miden-objects = { 
workspace = true, features = ["testing"] } diff --git a/crates/test-macro/Cargo.toml b/crates/test-macro/Cargo.toml index 4f1b3b15..b07307cf 100644 --- a/crates/test-macro/Cargo.toml +++ b/crates/test-macro/Cargo.toml @@ -1,22 +1,22 @@ [package] -name = "miden-node-test-macro" -version = "0.1.0" -description = "Miden node's test macro" -readme = "README.md" -keywords = ["miden", "node", "utils", "macro"] -edition.workspace = true +authors.workspace = true +description = "Miden node's test macro" +edition.workspace = true +homepage.workspace = true +keywords = ["macro", "miden", "node", "utils"] +license.workspace = true +name = "miden-node-test-macro" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version = "0.1.0" [lints] workspace = true [dependencies] quote = { version = "1.0" } -syn = { version = "2.0" , features = ["full", "extra-traits"]} +syn = { version = "2.0", features = ["extra-traits", "full"] } [lib] proc-macro = true diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 001f0d46..13474ae7 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -1,36 +1,36 @@ [package] -name = "miden-node-utils" -version.workspace = true -description = "Miden node's shared utilities" -readme = "README.md" -keywords = ["miden", "node", "utils"] -edition.workspace = true +authors.workspace = true +description = "Miden node's shared utilities" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "utils"] +license.workspace = true +name = "miden-node-utils" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [features] # Enables depedencies intended for build script generation of version 
metadata. -vergen = ["dep:vergen", "dep:vergen-gitcl"] +vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] -anyhow = { version = "1.0" } -figment = { version = "0.10", features = ["toml", "env"] } -itertools = { version = "0.12" } -miden-objects = { workspace = true } -rand = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tonic = { workspace = true } -tracing = { workspace = true } -tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } +anyhow = { version = "1.0" } +figment = { version = "0.10", features = ["env", "toml"] } +itertools = { version = "0.12" } +miden-objects = { workspace = true } +rand = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tonic = { workspace = true } +tracing = { workspace = true } +tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. 
-vergen = { "version" = "9.0", optional = true } +vergen = { "version" = "9.0", optional = true } vergen-gitcl = { version = "1.0", features = ["cargo", "rustc"], optional = true } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 217b108a..31c2f264 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.84" -components = ["rustfmt", "rust-src", "clippy"] -profile = "minimal" +channel = "1.84" +components = ["clippy", "rust-src", "rustfmt"] +profile = "minimal" diff --git a/rustfmt.toml b/rustfmt.toml index 59ee9ac8..20bb9535 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,18 +1,18 @@ -edition = "2021" -array_width = 80 -attr_fn_like_width = 80 -chain_width = 80 -comment_width = 100 -condense_wildcard_suffixes = true -fn_call_width = 80 -group_imports = "StdExternalCrate" -imports_granularity = "Crate" -newline_style = "Unix" -match_block_trailing_comma = true -single_line_if_else_max_width = 60 +array_width = 80 +attr_fn_like_width = 80 +chain_width = 80 +comment_width = 100 +condense_wildcard_suffixes = true +edition = "2021" +fn_call_width = 80 +group_imports = "StdExternalCrate" +imports_granularity = "Crate" +match_block_trailing_comma = true +newline_style = "Unix" +single_line_if_else_max_width = 60 single_line_let_else_max_width = 60 -struct_lit_width = 40 -struct_variant_width = 40 -use_field_init_shorthand = true -use_try_shorthand = true -wrap_comments = true +struct_lit_width = 40 +struct_variant_width = 40 +use_field_init_shorthand = true +use_try_shorthand = true +wrap_comments = true From 485082828ffa72c3cf72f4cae28c435673ce691c Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Tue, 28 Jan 2025 20:00:49 +1300 Subject: [PATCH 03/17] chore: update itertools and axum (#646) --- Cargo.lock | 73 +++++++++++++++++++++++++++++--- Cargo.toml | 1 + bin/faucet/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 2 +- crates/utils/Cargo.toml | 2 +- 5 
files changed, 71 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9009ea63..c7616520 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -197,16 +197,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", "bytes", "futures-util", "http", "http-body", "http-body-util", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core 0.5.0", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", "hyper", "hyper-util", "itoa", - "matchit", + "matchit 0.8.4", "memchr", "mime", "percent-encoding", @@ -242,6 +269,25 @@ dependencies = [ "sync_wrapper", "tower-layer", "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", "tracing", ] @@ -1203,6 +1249,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.14" @@ -1403,6 +1458,12 @@ version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.7.4" @@ -1484,7 +1545,7 @@ name = "miden-faucet" version = "0.8.0" dependencies = [ "anyhow", - "axum", + "axum 0.8.1", "clap", "http", "http-body-util", @@ -1599,7 +1660,7 @@ version = "0.8.0" dependencies = [ "assert_matches", "async-trait", - "itertools 0.13.0", + "itertools 0.14.0", "miden-air", "miden-lib", "miden-node-proto", @@ -1689,7 +1750,7 @@ version = "0.8.0" dependencies = [ "anyhow", "figment", - "itertools 0.12.1", + "itertools 0.14.0", "miden-objects", "rand", "serde", @@ -3063,7 +3124,7 @@ checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum", + "axum 0.7.9", "base64", "bytes", "h2", diff --git a/Cargo.toml b/Cargo.toml index ccf2199d..33c7a64b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ version = "0.8.0" [workspace.dependencies] assert_matches = { version = "1.5" } +itertools = { version = "0.14" } miden-air = { version = "0.12" } miden-lib = { version = "0.7" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index ce942f12..16cf9b18 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -16,7 +16,7 @@ workspace = true [dependencies] anyhow = "1.0" -axum = { version = "0.7", features = ["tokio"] } +axum = { version = "0.8", features = ["tokio"] } clap = { version = "4.5", features = ["derive", "string"] } http = "1.1" http-body-util = "0.1" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index e48af5ee..d9b15f3c 100644 --- 
a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -19,7 +19,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] async-trait = { version = "0.1" } -itertools = { version = "0.13" } +itertools = { workspace = true } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 13474ae7..78c921c5 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,7 +21,7 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } -itertools = { version = "0.12" } +itertools = { workspace = true } miden-objects = { workspace = true } rand = { workspace = true } serde = { version = "1.0", features = ["derive"] } From 8e361c38b4799d06042f57170fc95ee282a19a0c Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Tue, 28 Jan 2025 21:25:31 +1300 Subject: [PATCH 04/17] ci: add workspace-lints to Makefile and lint.yml (#648) --- .github/workflows/lint.yml | 11 +++++++++++ Makefile | 7 ++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 756c80f0..e4ac4540 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -54,6 +54,17 @@ jobs: tool: taplo-cli - run: make toml-check + workspace-lints: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: taiki-e/install-action@v2 + with: + tool: cargo-workspace-lints + - run: | + make workspace-check + doc: name: doc runs-on: ubuntu-latest diff --git a/Makefile b/Makefile index 92039013..5a6d691b 100644 --- a/Makefile +++ b/Makefile @@ -41,8 +41,13 @@ toml-check: ## Runs Format for all TOML files but only in check mode taplo fmt --check --verbose +.PHONY: workspace-check +workspace-check: ## Runs a 
check that all packages have `lints.workspace = true` + cargo workspace-lints + + .PHONY: lint -lint: format fix clippy toml ## Runs all linting tasks at once (Clippy, fixing, formatting) +lint: format fix clippy toml workspace-check ## Runs all linting tasks at once (Clippy, fixing, formatting, workspace) # --- docs ---------------------------------------------------------------------------------------- From 5b55f309a1975d24b990ac3dc599cdbb8ddd05d6 Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Thu, 30 Jan 2025 22:33:28 +1300 Subject: [PATCH 05/17] chore: remove Endpoint and Protocol types (#654) --- CHANGELOG.md | 1 + Cargo.lock | 291 ++++++++++++++++++++++++++++ Cargo.toml | 1 + bin/faucet/Cargo.toml | 1 + bin/faucet/src/client.rs | 2 +- bin/faucet/src/config.rs | 38 ++-- bin/faucet/src/main.rs | 8 +- bin/node/Cargo.toml | 1 + bin/node/src/config.rs | 39 ++-- config/miden-node.toml | 6 +- crates/block-producer/Cargo.toml | 1 + crates/block-producer/src/config.rs | 38 ++-- crates/block-producer/src/server.rs | 5 +- crates/rpc/Cargo.toml | 1 + crates/rpc/src/config.rs | 41 ++-- crates/rpc/src/server/api.rs | 9 +- crates/rpc/src/server/mod.rs | 5 +- crates/store/Cargo.toml | 1 + crates/store/src/config.rs | 31 ++- crates/store/src/server/mod.rs | 5 +- crates/utils/src/config.rs | 61 +----- 21 files changed, 441 insertions(+), 145 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d89a0cd5..ac87a64a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Changes - [BREAKING] Updated minimum Rust version to 1.84. +- [BREAKING] `Endpoint` configuration simplified to a single string (#654). 
## v0.7.2 (2025-01-29) diff --git a/Cargo.lock b/Cargo.lock index 264d733b..f08cd3ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -782,6 +782,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dissimilar" version = "1.0.9" @@ -1184,12 +1195,151 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + 
"utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + [[package]] name = "indenter" version = "0.3.3" @@ -1393,6 +1543,12 @@ version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -1578,6 +1734,7 @@ dependencies = [ "tower 0.5.2", "tower-http 0.6.2", "tracing", + "url", ] [[package]] @@ -1664,6 +1821,7 @@ dependencies = [ "tokio", "toml", "tracing", + "url", ] [[package]] @@ -1691,6 +1849,7 @@ dependencies = [ "tokio-stream", "tonic", "tracing", + "url", "winterfell", ] @@ -1725,6 +1884,7 @@ dependencies = [ "tonic", "tonic-web", "tracing", + "url", ] [[package]] @@ -1746,6 +1906,7 @@ dependencies = [ "tokio-stream", "tonic", "tracing", + "url", ] [[package]] @@ -2813,6 +2974,12 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "static-files" version = "0.2.4" @@ -2890,6 +3057,17 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "target-triple" version = "0.1.3" @@ -3043,6 +3221,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version = "1.43.0" @@ -3439,6 +3627,30 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -3996,12 +4208,48 @@ dependencies = [ "bitflags", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -4022,3 +4270,46 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index 33c7a64b..bdfa58e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ tokio-stream = { version = "0.1" } tonic = { version = "0.12" } tracing = { version = "0.1" } 
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } +url = { version = "2.5", features = ["serde"] } # Lints are set to warn for development, which are promoted to errors in CI. [workspace.lints.clippy] diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 16cf9b18..83194cc0 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -37,6 +37,7 @@ tonic = { workspace = true } tower = "0.5" tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } tracing = { workspace = true } +url = { workspace = true } [build-dependencies] # Required to inject build metadata. diff --git a/bin/faucet/src/client.rs b/bin/faucet/src/client.rs index 44a9ee19..faf9b17b 100644 --- a/bin/faucet/src/client.rs +++ b/bin/faucet/src/client.rs @@ -200,7 +200,7 @@ impl FaucetClient { pub async fn initialize_faucet_client( config: &FaucetConfig, ) -> Result<(ApiClient, BlockHeader, ChainMmr), ClientError> { - let endpoint = tonic::transport::Endpoint::try_from(config.node_url.clone()) + let endpoint = tonic::transport::Endpoint::try_from(config.node_url.to_string()) .context("Failed to parse node URL from configuration file")? 
.timeout(Duration::from_millis(config.timeout_ms)); diff --git a/bin/faucet/src/config.rs b/bin/faucet/src/config.rs index 24eb6a96..20b49b27 100644 --- a/bin/faucet/src/config.rs +++ b/bin/faucet/src/config.rs @@ -3,10 +3,9 @@ use std::{ path::PathBuf, }; -use miden_node_utils::config::{ - Endpoint, Protocol, DEFAULT_FAUCET_SERVER_PORT, DEFAULT_NODE_RPC_PORT, -}; +use miden_node_utils::config::{DEFAULT_FAUCET_SERVER_PORT, DEFAULT_NODE_RPC_PORT}; use serde::{Deserialize, Serialize}; +use url::Url; // Faucet config // ================================================================================================ @@ -20,10 +19,10 @@ pub const DEFAULT_RPC_TIMEOUT_MS: u64 = 10000; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct FaucetConfig { - /// Endpoint of the faucet - pub endpoint: Endpoint, + /// Endpoint of the faucet in the format `:` + pub endpoint: Url, /// Node RPC gRPC endpoint in the format `http://[:]` - pub node_url: String, + pub node_url: Url, /// Timeout for RPC requests in milliseconds pub timeout_ms: u64, /// Possible options on the amount of asset that should be dispersed on each faucet request @@ -44,15 +43,30 @@ impl Display for FaucetConfig { impl Default for FaucetConfig { fn default() -> Self { Self { - endpoint: Endpoint { - host: "0.0.0.0".to_string(), - port: DEFAULT_FAUCET_SERVER_PORT, - protocol: Protocol::Http, - }, - node_url: Endpoint::localhost(DEFAULT_NODE_RPC_PORT).to_string(), + endpoint: Url::parse(format!("http://0.0.0.0:{DEFAULT_FAUCET_SERVER_PORT}").as_str()) + .unwrap(), + node_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_NODE_RPC_PORT}").as_str()) + .unwrap(), timeout_ms: DEFAULT_RPC_TIMEOUT_MS, asset_amount_options: vec![100, 500, 1000], faucet_account_path: DEFAULT_FAUCET_ACCOUNT_PATH.into(), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::FaucetConfig; + + #[tokio::test] + async fn default_faucet_config() 
{ + // Default does not panic + let config = FaucetConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 35dd9be6..f7ea8c91 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -123,9 +123,11 @@ async fn main() -> anyhow::Result<()> { ) .with_state(faucet_state); - let listener = TcpListener::bind((config.endpoint.host.as_str(), config.endpoint.port)) - .await - .context("Failed to bind TCP listener")?; + let socket_addr = config.endpoint.socket_addrs(|| None)?.into_iter().next().ok_or( + anyhow::anyhow!("Couldn't get any socket addrs for endpoint: {}", config.endpoint), + )?; + let listener = + TcpListener::bind(socket_addr).await.context("Failed to bind TCP listener")?; info!(target: COMPONENT, endpoint = %config.endpoint, "Server started"); diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 0f1e505f..8487908e 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -32,6 +32,7 @@ serde = { version = "1.0", features = ["derive"] } tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread"] } toml = { version = "0.8" } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] figment = { version = "0.10", features = ["env", "test", "toml"] } diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index 21a01892..4fef3981 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -1,8 +1,8 @@ use miden_node_block_producer::config::BlockProducerConfig; use miden_node_rpc::config::RpcConfig; use miden_node_store::config::StoreConfig; -use miden_node_utils::config::Endpoint; use serde::{Deserialize, Serialize}; +use url::Url; /// Node top-level configuration. 
#[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] @@ -17,7 +17,7 @@ pub struct NodeConfig { #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] struct NormalizedRpcConfig { - endpoint: Endpoint, + endpoint: Url, } /// A specialized variant of [`BlockProducerConfig`] with redundant fields within [`NodeConfig`] @@ -25,7 +25,7 @@ struct NormalizedRpcConfig { #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] struct NormalizedBlockProducerConfig { - endpoint: Endpoint, + endpoint: Url, verify_tx_proofs: bool, } @@ -56,14 +56,14 @@ impl NodeConfig { let block_producer = BlockProducerConfig { endpoint: block_producer.endpoint, - store_url: store.endpoint_url(), + store_url: store.endpoint.clone(), verify_tx_proofs: block_producer.verify_tx_proofs, }; let rpc = RpcConfig { endpoint: rpc.endpoint, - store_url: store.endpoint_url(), - block_producer_url: block_producer.endpoint_url(), + store_url: store.endpoint.clone(), + block_producer_url: block_producer.endpoint.clone(), }; (block_producer, rpc, store) @@ -74,7 +74,8 @@ impl NodeConfig { mod tests { use figment::Jail; use miden_node_store::config::StoreConfig; - use miden_node_utils::config::{load_config, Endpoint, Protocol}; + use miden_node_utils::config::load_config; + use url::Url; use super::NodeConfig; use crate::{ @@ -89,14 +90,14 @@ mod tests { NODE_CONFIG_FILE_PATH, r#" [block_producer] - endpoint = { host = "127.0.0.1", port = 8080 } + endpoint = "http://127.0.0.1:8080" verify_tx_proofs = true [rpc] - endpoint = { host = "127.0.0.1", port = 8080, protocol = "Http" } + endpoint = "http://127.0.0.1:8080" [store] - endpoint = { host = "127.0.0.1", port = 8080, protocol = "Https" } + endpoint = "https://127.0.0.1:8080" database_filepath = "local.sqlite3" genesis_filepath = "genesis.dat" blockstore_dir = "blocks" @@ -109,26 +110,14 @@ mod tests 
{ config, NodeConfig { block_producer: NormalizedBlockProducerConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::default() - }, + endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), verify_tx_proofs: true }, rpc: NormalizedRpcConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::Http - }, + endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), }, store: StoreConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::Https - }, + endpoint: Url::parse("https://127.0.0.1:8080").unwrap(), database_filepath: "local.sqlite3".into(), genesis_filepath: "genesis.dat".into(), blockstore_dir: "blocks".into() diff --git a/config/miden-node.toml b/config/miden-node.toml index d260f189..4356ce51 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -2,18 +2,18 @@ [block_producer] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-block-producer', 1)) % 2**16 -endpoint = { host = "localhost", port = 48046 } +endpoint = "http://127.0.0.1:48046" # enables or disables the verification of transaction proofs before they are accepted into the # transaction queue. 
verify_tx_proofs = true [rpc] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-rpc', 1)) % 2**16 -endpoint = { host = "0.0.0.0", port = 57291 } +endpoint = "http://0.0.0.0:57291" [store] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-store', 1)) % 2**16 blockstore_dir = "/opt/miden/blocks" database_filepath = "/opt/miden/miden-store.sqlite3" -endpoint = { host = "localhost", port = 28943 } +endpoint = "http://127.0.0.1:28943" genesis_filepath = "/opt/miden/genesis.dat" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index d9b15f3c..d01bfebe 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -34,6 +34,7 @@ tokio = { workspace = true, features = ["macros", "net", "rt-multi-th tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/block-producer/src/config.rs b/crates/block-producer/src/config.rs index e95b9fe8..0ad19741 100644 --- a/crates/block-producer/src/config.rs +++ b/crates/block-producer/src/config.rs @@ -1,7 +1,8 @@ use std::fmt::{Display, Formatter}; -use miden_node_utils::config::{Endpoint, DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_STORE_PORT}; +use miden_node_utils::config::{DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_STORE_PORT}; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -10,10 +11,10 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct BlockProducerConfig { - pub endpoint: Endpoint, + pub endpoint: Url, /// Store gRPC endpoint in the format `http://[:]`. 
- pub store_url: String, + pub store_url: Url, /// Enable or disable the verification of transaction proofs before they are accepted into the /// transaction queue. @@ -24,12 +25,6 @@ pub struct BlockProducerConfig { pub verify_tx_proofs: bool, } -impl BlockProducerConfig { - pub fn endpoint_url(&self) -> String { - self.endpoint.to_string() - } -} - impl Display for BlockProducerConfig { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( @@ -42,9 +37,30 @@ impl Display for BlockProducerConfig { impl Default for BlockProducerConfig { fn default() -> Self { Self { - endpoint: Endpoint::localhost(DEFAULT_BLOCK_PRODUCER_PORT), - store_url: Endpoint::localhost(DEFAULT_STORE_PORT).to_string(), + endpoint: Url::parse( + format!("http://127.0.0.1:{DEFAULT_BLOCK_PRODUCER_PORT}").as_str(), + ) + .unwrap(), + store_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), verify_tx_proofs: true, } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::BlockProducerConfig; + + #[tokio::test] + async fn default_block_producer_config() { + // Default does not panic + let config = BlockProducerConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 5c7a42ec..2eaf0e50 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, net::ToSocketAddrs}; +use std::collections::HashMap; use miden_node_proto::generated::{ block_producer::api_server, requests::SubmitProvenTransactionRequest, @@ -66,8 +66,9 @@ impl BlockProducer { let rpc_listener = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) 
.map_err(ApiError::EndpointToSocketFailed)? + .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string())) .map(TcpListener::bind)? diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 59391509..51670ff9 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -25,6 +25,7 @@ tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tonic-web = { version = "0.12" } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] miden-node-utils = { workspace = true, features = ["tracing-forest"] } diff --git a/crates/rpc/src/config.rs b/crates/rpc/src/config.rs index 07dac077..a3b3bb2a 100644 --- a/crates/rpc/src/config.rs +++ b/crates/rpc/src/config.rs @@ -1,9 +1,10 @@ use std::fmt::{Display, Formatter}; use miden_node_utils::config::{ - Endpoint, Protocol, DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_NODE_RPC_PORT, DEFAULT_STORE_PORT, + DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_NODE_RPC_PORT, DEFAULT_STORE_PORT, }; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -11,11 +12,11 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct RpcConfig { - pub endpoint: Endpoint, + pub endpoint: Url, /// Store gRPC endpoint in the format `http://[:]`. - pub store_url: String, + pub store_url: Url, /// Block producer gRPC endpoint in the format `http://[:]`. 
- pub block_producer_url: String, + pub block_producer_url: Url, } impl RpcConfig { @@ -36,13 +37,31 @@ impl Display for RpcConfig { impl Default for RpcConfig { fn default() -> Self { Self { - endpoint: Endpoint { - host: "0.0.0.0".to_string(), - port: DEFAULT_NODE_RPC_PORT, - protocol: Protocol::default(), - }, - store_url: Endpoint::localhost(DEFAULT_STORE_PORT).to_string(), - block_producer_url: Endpoint::localhost(DEFAULT_BLOCK_PRODUCER_PORT).to_string(), + endpoint: Url::parse(format!("http://0.0.0.0:{DEFAULT_NODE_RPC_PORT}").as_str()) + .unwrap(), + store_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), + block_producer_url: Url::parse( + format!("http://127.0.0.1:{DEFAULT_BLOCK_PRODUCER_PORT}").as_str(), + ) + .unwrap(), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::RpcConfig; + + #[tokio::test] + async fn default_rpc_config() { + // Default does not panic + let config = RpcConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index 609b8743..976ed4fc 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -41,14 +41,15 @@ pub struct RpcApi { impl RpcApi { pub(super) async fn from_config(config: &RpcConfig) -> Result { - let store = store_client::ApiClient::connect(config.store_url.clone()).await?; - info!(target: COMPONENT, store_endpoint = config.store_url, "Store client initialized"); + let store = store_client::ApiClient::connect(config.store_url.to_string()).await?; + info!(target: COMPONENT, store_endpoint = config.store_url.as_str(), "Store client initialized"); let block_producer = - block_producer_client::ApiClient::connect(config.block_producer_url.clone()).await?; + 
block_producer_client::ApiClient::connect(config.block_producer_url.to_string()) + .await?; info!( target: COMPONENT, - block_producer_endpoint = config.block_producer_url, + block_producer_endpoint = config.block_producer_url.as_str(), "Block producer client initialized", ); diff --git a/crates/rpc/src/server/mod.rs b/crates/rpc/src/server/mod.rs index dcef246b..d728eab4 100644 --- a/crates/rpc/src/server/mod.rs +++ b/crates/rpc/src/server/mod.rs @@ -1,5 +1,3 @@ -use std::net::ToSocketAddrs; - use api::RpcApi; use miden_node_proto::generated::rpc::api_server; use miden_node_utils::errors::ApiError; @@ -33,8 +31,9 @@ impl Rpc { let addr = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) .map_err(ApiError::EndpointToSocketFailed)? + .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string()))?; diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index f987304b..b1fdf261 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -29,6 +29,7 @@ tokio = { workspace = true, features = ["fs", "macros", "net", "rt- tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/store/src/config.rs b/crates/store/src/config.rs index 3cac20a7..3a065bcf 100644 --- a/crates/store/src/config.rs +++ b/crates/store/src/config.rs @@ -3,8 +3,9 @@ use std::{ path::PathBuf, }; -use miden_node_utils::config::{Endpoint, DEFAULT_STORE_PORT}; +use miden_node_utils::config::DEFAULT_STORE_PORT; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -13,7 +14,7 @@ use serde::{Deserialize, Serialize}; #[serde(deny_unknown_fields)] pub struct StoreConfig { /// Defines the listening socket. 
- pub endpoint: Endpoint, + pub endpoint: Url, /// `SQLite` database file pub database_filepath: PathBuf, /// Genesis file @@ -22,12 +23,6 @@ pub struct StoreConfig { pub blockstore_dir: PathBuf, } -impl StoreConfig { - pub fn endpoint_url(&self) -> String { - self.endpoint.to_string() - } -} - impl Display for StoreConfig { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( @@ -41,10 +36,28 @@ impl Default for StoreConfig { fn default() -> Self { const NODE_STORE_DIR: &str = "./"; Self { - endpoint: Endpoint::localhost(DEFAULT_STORE_PORT), + endpoint: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), database_filepath: PathBuf::from(NODE_STORE_DIR.to_string() + "miden-store.sqlite3"), genesis_filepath: PathBuf::from(NODE_STORE_DIR.to_string() + "genesis.dat"), blockstore_dir: PathBuf::from(NODE_STORE_DIR.to_string() + "blocks"), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::StoreConfig; + + #[tokio::test] + async fn default_store_config() { + // Default does not panic + let config = StoreConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index f73c9e9f..6ef429d0 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -1,4 +1,4 @@ -use std::{net::ToSocketAddrs, sync::Arc}; +use std::sync::Arc; use miden_node_proto::generated::store::api_server; use miden_node_utils::errors::ApiError; @@ -44,8 +44,9 @@ impl Store { let addr = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) .map_err(ApiError::EndpointToSocketFailed)? 
+ .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string()))?; diff --git a/crates/utils/src/config.rs b/crates/utils/src/config.rs index 5c70315d..4f79c6c4 100644 --- a/crates/utils/src/config.rs +++ b/crates/utils/src/config.rs @@ -1,73 +1,16 @@ -use std::{ - fmt::{Display, Formatter}, - io, - net::{SocketAddr, ToSocketAddrs}, - path::Path, - vec, -}; +use std::path::Path; use figment::{ providers::{Format, Toml}, Figment, }; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; pub const DEFAULT_NODE_RPC_PORT: u16 = 57291; pub const DEFAULT_BLOCK_PRODUCER_PORT: u16 = 48046; pub const DEFAULT_STORE_PORT: u16 = 28943; pub const DEFAULT_FAUCET_SERVER_PORT: u16 = 8080; -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize, Default)] -pub enum Protocol { - #[default] - Http, - Https, -} -/// The `(host, port)` pair for the server's listening socket. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] -pub struct Endpoint { - /// Host used by the store. - pub host: String, - /// Port number used by the store. - pub port: u16, - /// Protocol type: http or https. 
- #[serde(default)] - pub protocol: Protocol, -} - -impl Endpoint { - pub fn localhost(port: u16) -> Self { - Endpoint { - host: "localhost".to_string(), - port, - protocol: Protocol::default(), - } - } -} - -impl ToSocketAddrs for Endpoint { - type Iter = vec::IntoIter; - fn to_socket_addrs(&self) -> io::Result { - (self.host.as_ref(), self.port).to_socket_addrs() - } -} - -impl Display for Endpoint { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let Endpoint { protocol, host, port } = self; - f.write_fmt(format_args!("{protocol}://{host}:{port}")) - } -} - -impl Display for Protocol { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - Protocol::Http => f.write_str("http"), - Protocol::Https => f.write_str("https"), - } - } -} - /// Loads the user configuration. /// /// This function will look for the configuration file at the provided path. If the path is From ab0701dd0eae8d8dd7dc7499332f2478cd36021a Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Mon, 3 Feb 2025 09:03:21 +0200 Subject: [PATCH 06/17] feat: open-telemetry exporter (#660) --- CHANGELOG.md | 4 + Cargo.lock | 269 ++++++++++++++++++++++++++++++++++++ bin/node/src/main.rs | 14 +- crates/utils/Cargo.toml | 26 ++-- crates/utils/src/logging.rs | 73 +++++++++- 5 files changed, 371 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ac87a64a..c9f5ba05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Enhancements + +- Add an optional open-telemetry trace exporter (#659). + ### Changes - [BREAKING] Updated minimum Rust version to 1.84. 
diff --git a/Cargo.lock b/Cargo.lock index f08cd3ef..2d8e3495 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -574,6 +574,16 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -915,12 +925,34 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + [[package]] name = "futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.31" @@ -940,10 +972,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", + "futures-macro", "futures-sink", "futures-task", "pin-project-lite", "pin-utils", + "slab", ] [[package]] @@ -1925,12 +1959,16 @@ dependencies = [ "figment", "itertools 0.14.0", "miden-objects", + "opentelemetry", + 
"opentelemetry-otlp", + "opentelemetry_sdk", "rand", "serde", "thiserror 2.0.11", "tonic", "tracing", "tracing-forest", + "tracing-opentelemetry", "tracing-subscriber", "vergen", "vergen-gitcl", @@ -2249,6 +2287,78 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "opentelemetry" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab70038c28ed37b97d8ed414b6429d343a8bbf44c9f79ec854f3a643029ba6d7" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" +dependencies = [ + "async-trait", + "futures-core", + "http", + "opentelemetry", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "thiserror 1.0.69", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" +dependencies = [ + "async-trait", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "opentelemetry", + "percent-encoding", + "rand", + "serde_json", + 
"thiserror 1.0.69", + "tokio", + "tokio-stream", + "tracing", +] + [[package]] name = "overload" version = "0.1.1" @@ -2707,6 +2817,21 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rusqlite" version = "0.32.1" @@ -2774,6 +2899,59 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustls" +version = "0.23.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.19" @@ -2807,6 +2985,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "scoped-tls" version = "1.0.1" @@ -2819,6 +3006,29 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "0.9.0" @@ -3019,6 +3229,12 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "supports-color" version = "3.0.2" @@ -3258,6 +3474,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.17" @@ -3337,8 +3563,11 @@ dependencies = [ "percent-encoding", "pin-project", "prost", + "rustls-native-certs", + "rustls-pemfile", "socket2", "tokio", + "tokio-rustls", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -3517,6 +3746,24 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + [[package]] name = "tracing-serde" version = "0.2.0" @@ -3627,6 +3874,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.5.4" @@ -3825,6 +4078,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "winapi" version = "0.3.9" @@ -4292,6 +4555,12 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + [[package]] name = "zerovec" version = "0.10.4" diff --git 
a/bin/node/src/main.rs b/bin/node/src/main.rs index 998b5446..1697f61f 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -37,6 +37,9 @@ pub enum Command { #[arg(short, long, value_name = "FILE", default_value = NODE_CONFIG_FILE_PATH)] config: PathBuf, + + #[arg(long = "open-telemetry", default_value_t = false)] + open_telemetry: bool, }, /// Generates a genesis file and associated account files based on a specified genesis input @@ -82,12 +85,17 @@ pub enum StartCommand { #[tokio::main] async fn main() -> anyhow::Result<()> { - miden_node_utils::logging::setup_logging()?; - let cli = Cli::parse(); + // Open telemetry exporting is only valid for running the node. + let open_telemetry = match &cli.command { + Command::Start { open_telemetry, .. } => *open_telemetry, + _ => false, + }; + miden_node_utils::logging::setup_tracing(open_telemetry)?; + match &cli.command { - Command::Start { command, config } => match command { + Command::Start { command, config, .. } => match command { StartCommand::Node => { let config = load_config(config).context("Loading configuration file")?; start_node(config).await diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 78c921c5..c37684c8 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -19,17 +19,21 @@ workspace = true vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] -anyhow = { version = "1.0" } -figment = { version = "0.10", features = ["env", "toml"] } -itertools = { workspace = true } -miden-objects = { workspace = true } -rand = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tonic = { workspace = true } -tracing = { workspace = true } -tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-subscriber = { workspace = true } +anyhow = { version = "1.0" } +figment = { version = "0.10", features = ["env", "toml"] } +itertools = { workspace = true } +miden-objects = { workspace = 
true } +opentelemetry = "0.27" +opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } +opentelemetry_sdk = { version = "0.27", features = ["rt-tokio"] } +rand = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tonic = { workspace = true } +tracing = { workspace = true } +tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } +tracing-opentelemetry = "0.28" +tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. vergen = { "version" = "9.0", optional = true } diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index dc58d7e8..06968527 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,6 +1,19 @@ use anyhow::Result; +use opentelemetry::trace::TracerProvider as _; +use opentelemetry_otlp::WithTonicConfig; use tracing::subscriber::{self, Subscriber}; -use tracing_subscriber::EnvFilter; +use tracing_opentelemetry::OpenTelemetryLayer; +use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; + +/// Configures tracing and optionally enables an open-telemetry OTLP exporter. 
+/// +/// The open-telemetry configuration is controlled via environment variables as defined in the +/// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) +pub fn setup_tracing(enable_otel: bool) -> Result<()> { + let otel_layer = enable_otel.then_some(open_telemetry_layer()); + let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); + tracing::subscriber::set_global_default(subscriber).map_err(Into::into) +} pub fn setup_logging() -> Result<()> { subscriber::set_global_default(subscriber())?; @@ -8,6 +21,64 @@ pub fn setup_logging() -> Result<()> { Ok(()) } +fn open_telemetry_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber + Sync + Send, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + let exporter = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_tls_config(tonic::transport::ClientTlsConfig::new().with_native_roots()) + .build() + .unwrap(); + + let tracer = opentelemetry_sdk::trace::TracerProvider::builder() + .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + .build(); + + let tracer = tracer.tracer("tracing-otel-subscriber"); + OpenTelemetryLayer::new(tracer).boxed() +} + +#[cfg(not(feature = "tracing-forest"))] +fn stdout_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + use tracing_subscriber::fmt::format::FmtSpan; + + tracing_subscriber::fmt::layer() + .pretty() + .compact() + .with_level(true) + .with_file(true) + .with_line_number(true) + .with_target(true) + .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) + .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { + // axum logs rejections from built-in extracts on the trace level, so we enable this + // manually. 
+ "info,axum::rejection=trace".into() + })) + .boxed() +} + +#[cfg(feature = "tracing-forest")] +fn stdout_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + tracing_forest::ForestLayer::default() + .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { + // axum logs rejections from built-in extracts on the trace level, so we enable this + // manually. + "info,axum::rejection=trace".into() + })) + .boxed() +} + #[cfg(not(feature = "tracing-forest"))] pub fn subscriber() -> impl Subscriber + core::fmt::Debug { use tracing_subscriber::fmt::format::FmtSpan; From 4215341319cc1ab3d662d593e3d035c8b79bc011 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Mon, 3 Feb 2025 11:46:24 -0800 Subject: [PATCH 07/17] chore: fix typos --- crates/block-producer/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/block-producer/README.md b/crates/block-producer/README.md index ef6fb337..d4637779 100644 --- a/crates/block-producer/README.md +++ b/crates/block-producer/README.md @@ -1,9 +1,9 @@ # Miden block producer -Contains code definining the [Miden node's block-producer](/README.md#architecture) component. It is responsible for +Contains code defining the [Miden node's block-producer](/README.md#architecture) component. It is responsible for ordering transactions into blocks and submitting these for inclusion in the blockchain. -It serves a small [gRPC](htts://grpc.io) API which the node's RPC component uses to submit new transactions. In turn, +It serves a small [gRPC](https://grpc.io) API which the node's RPC component uses to submit new transactions. In turn, the `block-producer` uses the store's gRPC API to submit blocks and query chain state. For more information on the installation and operation of this component, please see the [node's readme](../../README.md). 
From 87b4a70e300f532497d83eb4cad1f886a6753383 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Tue, 4 Feb 2025 10:40:16 +0100 Subject: [PATCH 08/17] Use batch prover from miden-base (#659) * feat: Add tx-batch prover crate as dependency * feat: Implement Batch Inputs retrieval from store * feat: Replace `TransactionBatch` in tests with `ProvenBatch` * chore: Rebuild proto files * chore: Add changelog entry * chore: Import proto types as `proto::` * chore: Modify changelog entry * feat: Remove outdated `GetNoteAuthenticationInfoRequest` * feat: Handle errors in `get_batch_inputs` * feat: Handle errors during batch building * chore: Update usages of `AccountFile` * chore: More updates of `AccountFile` * feat: Use latest block num method on `InnerState` * chore: Pin to miden-base current next branch --- CHANGELOG.md | 4 + Cargo.lock | 98 ++-- Cargo.toml | 7 +- bin/faucet/src/client.rs | 4 +- bin/faucet/src/main.rs | 4 +- bin/node/src/commands/genesis/mod.rs | 8 +- crates/block-producer/Cargo.toml | 35 +- .../block-producer/src/batch_builder/batch.rs | 453 ------------------ .../block-producer/src/batch_builder/mod.rs | 69 ++- .../block-producer/src/block_builder/mod.rs | 23 +- .../src/block_builder/prover/block_witness.rs | 26 +- .../src/block_builder/prover/tests.rs | 103 ++-- .../block-producer/src/domain/transaction.rs | 9 + crates/block-producer/src/errors.rs | 28 +- .../block-producer/src/mempool/batch_graph.rs | 64 +-- crates/block-producer/src/mempool/mod.rs | 22 +- crates/block-producer/src/mempool/tests.rs | 29 +- crates/block-producer/src/store/mod.rs | 22 +- crates/block-producer/src/test_utils/batch.rs | 66 ++- crates/block-producer/src/test_utils/block.rs | 23 +- crates/block-producer/src/test_utils/mod.rs | 5 + .../src/test_utils/proven_tx.rs | 1 + crates/block-producer/src/test_utils/store.rs | 15 +- crates/proto/src/domain/batch.rs | 53 ++ crates/proto/src/domain/mod.rs | 1 + crates/proto/src/errors.rs | 16 +- 
crates/proto/src/generated/requests.rs | 17 +- crates/proto/src/generated/responses.rs | 22 +- crates/proto/src/generated/store.rs | 42 +- crates/rpc-proto/proto/requests.proto | 14 +- crates/rpc-proto/proto/responses.proto | 20 +- crates/rpc-proto/proto/store.proto | 4 +- crates/rpc/src/server/api.rs | 2 +- crates/store/src/errors.rs | 25 +- crates/store/src/server/api.rs | 85 ++-- crates/store/src/state.rs | 155 +++++- proto/requests.proto | 14 +- proto/responses.proto | 20 +- proto/store.proto | 4 +- 39 files changed, 752 insertions(+), 860 deletions(-) delete mode 100644 crates/block-producer/src/batch_builder/batch.rs create mode 100644 crates/proto/src/domain/batch.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index c9f5ba05..1637ec79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,10 @@ - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). +### Enhancements + +- Prove transaction batches using Rust batch prover reference implementation (#659). 
+ ## v0.7.2 (2025-01-29) ### Fixes diff --git a/Cargo.lock b/Cargo.lock index 2d8e3495..5e6116ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -160,9 +160,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", @@ -425,9 +425,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "camino" @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.10" +version = "1.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" dependencies = [ "jobserver", "libc", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.27" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" dependencies = [ "clap_builder", "clap_derive", @@ -546,9 +546,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" 
dependencies = [ "heck", "proc-macro2", @@ -677,9 +677,9 @@ dependencies = [ [[package]] name = "deadpool" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +checksum = "5ed5957ff93768adf7a65ab167a17835c3d2c3c50d084fe305174c112f468e2f" dependencies = [ "deadpool-runtime", "num_cpus", @@ -1782,9 +1782,8 @@ dependencies = [ [[package]] name = "miden-lib" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee8babd17ea380c6c5b948761ca63208b633b7130379ee2a57c6d3732d2f8bc" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "miden-assembly", "miden-objects", @@ -1874,6 +1873,7 @@ dependencies = [ "miden-processor", "miden-stdlib", "miden-tx", + "miden-tx-batch-prover", "pretty_assertions", "rand", "rand_chacha", @@ -1976,9 +1976,8 @@ dependencies = [ [[package]] name = "miden-objects" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe3f10d0e3787176f0803be2ecb4646f3a17fe10af45a50736c8d079a3c94d8" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2036,9 +2035,8 @@ dependencies = [ [[package]] name = "miden-tx" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4371509f1e4c25dfe26b7ffcffbb34aaa152c6eaad400f2624240a941baed2d0" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "async-trait", "miden-lib", @@ -2052,6 +2050,19 @@ dependencies = [ 
"winter-maybe-async", ] +[[package]] +name = "miden-tx-batch-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +dependencies = [ + "miden-core", + "miden-crypto", + "miden-objects", + "miden-processor", + "miden-tx", + "thiserror 2.0.11", +] + [[package]] name = "miden-verifier" version = "0.12.0" @@ -2067,9 +2078,9 @@ dependencies = [ [[package]] name = "miette" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" +checksum = "1a955165f87b37fd1862df2a59547ac542c77ef6d17c666f619d1ad22dd89484" dependencies = [ "cfg-if", "miette-derive", @@ -2079,9 +2090,9 @@ dependencies = [ [[package]] name = "miette-derive" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" +checksum = "bf45bf44ab49be92fd1227a3be6fc6f617f1a337c06af54981048574d8783147" dependencies = [ "proc-macro2", "quote", @@ -2450,27 +2461,27 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.8" +version = "1.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", @@ -3143,9 +3154,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -3203,12 +3214,11 @@ dependencies = [ [[package]] name = "string_cache" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +checksum = "938d512196766101d333398efde81bc1f37b00cb42c2f8350e5df639f040bbbe" dependencies = [ "new_debug_unreachable", - "once_cell", "parking_lot", "phf_shared", "precomputed-hash", @@ -3258,9 +3268,9 @@ checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "syn" -version = "2.0.96" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -3531,9 +3541,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap 2.7.1", "serde", @@ -3979,9 +3989,9 @@ dependencies = [ [[package]] name = "wait-timeout" 
-version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" dependencies = [ "libc", ] @@ -4342,9 +4352,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.25" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad699df48212c6cc6eb4435f35500ac6fd3b9913324f938aea302022ce19d310" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index bdfa58e3..bbb10141 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,17 +28,18 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { version = "0.7" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { version = "0.7" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { version = "0.7" } +miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = 
"e82dee03de7589ef3fb12b7fd901cef25ae5535d" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/bin/faucet/src/client.rs b/bin/faucet/src/client.rs index faf9b17b..cc3fa261 100644 --- a/bin/faucet/src/client.rs +++ b/bin/faucet/src/client.rs @@ -9,7 +9,7 @@ use miden_node_proto::generated::{ rpc::api_client::ApiClient, }; use miden_objects::{ - account::{Account, AccountData, AccountId, AuthSecretKey}, + account::{Account, AccountFile, AccountId, AuthSecretKey}, asset::FungibleAsset, block::{BlockHeader, BlockNumber}, crypto::{ @@ -61,7 +61,7 @@ impl FaucetClient { let (mut rpc_api, root_block_header, root_chain_mmr) = initialize_faucet_client(config).await?; - let faucet_account_data = AccountData::read(&config.faucet_account_path) + let faucet_account_data = AccountFile::read(&config.faucet_account_path) .context("Failed to load faucet account from file")?; let id = faucet_account_data.account.id(); diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index f7ea8c91..dc2d5338 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -19,7 +19,7 @@ use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; use miden_objects::{ - account::{AccountData, AccountStorageMode, AuthSecretKey}, + account::{AccountFile, AccountStorageMode, AuthSecretKey}, asset::TokenSymbol, crypto::dsa::rpo_falcon512::SecretKey, Felt, @@ -169,7 +169,7 @@ async fn main() -> anyhow::Result<()> { .context("Failed to create basic fungible faucet account")?; let account_data = - AccountData::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); + AccountFile::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); let output_path = current_dir.join(output_path); account_data diff --git a/bin/node/src/commands/genesis/mod.rs 
b/bin/node/src/commands/genesis/mod.rs index 08a65898..a8e95087 100644 --- a/bin/node/src/commands/genesis/mod.rs +++ b/bin/node/src/commands/genesis/mod.rs @@ -9,7 +9,7 @@ use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_store::genesis::GenesisState; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin}; use miden_objects::{ - account::{Account, AccountData, AccountIdAnchor, AuthSecretKey}, + account::{Account, AccountFile, AccountIdAnchor, AuthSecretKey}, asset::TokenSymbol, crypto::{dsa::rpo_falcon512::SecretKey, utils::Serializable}, Felt, ONE, @@ -134,7 +134,7 @@ fn create_accounts( ); faucet_count += 1; - (AccountData::new(account, Some(account_seed), auth_secret_key), name) + (AccountFile::new(account, Some(account_seed), auth_secret_key), name) }, }; @@ -182,7 +182,7 @@ mod tests { use figment::Jail; use miden_node_store::genesis::GenesisState; - use miden_objects::{account::AccountData, utils::serde::Deserializable}; + use miden_objects::{account::AccountFile, utils::serde::Deserializable}; use crate::DEFAULT_GENESIS_FILE_PATH; @@ -220,7 +220,7 @@ mod tests { assert!(a0_file_path.exists()); // deserialize account and genesis_state - let a0 = AccountData::read(a0_file_path).unwrap(); + let a0 = AccountFile::read(a0_file_path).unwrap(); // assert that the account has the corresponding storage mode assert!(a0.account.is_public()); diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index d01bfebe..ccecfab4 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -18,23 +18,24 @@ workspace = true tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] -async-trait = { version = "0.1" } -itertools = { workspace = true } -miden-lib = { workspace = true } -miden-node-proto = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-processor = { workspace = true } -miden-stdlib = { 
workspace = true } -miden-tx = { workspace = true } -rand = { version = "0.8" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } -url = { workspace = true } +async-trait = { version = "0.1" } +itertools = { workspace = true } +miden-lib = { workspace = true } +miden-node-proto = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +miden-processor = { workspace = true } +miden-stdlib = { workspace = true } +miden-tx = { workspace = true } +miden-tx-batch-prover = { workspace = true } +rand = { version = "0.8" } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/block-producer/src/batch_builder/batch.rs b/crates/block-producer/src/batch_builder/batch.rs deleted file mode 100644 index 33b27b05..00000000 --- a/crates/block-producer/src/batch_builder/batch.rs +++ /dev/null @@ -1,453 +0,0 @@ -use std::{ - borrow::Borrow, - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - mem, -}; - -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_node_utils::formatting::format_blake3_digest; -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::BatchNoteTree, - crypto::hash::blake::{Blake3Digest, Blake3_256}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote, ProvenTransaction, TransactionId}, - AccountDeltaError, Digest, -}; -use tracing::instrument; 
- -use crate::{errors::BuildBatchError, COMPONENT}; - -// BATCH ID -// ================================================================================================ - -/// Uniquely identifies a [`TransactionBatch`]. -#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd)] -pub struct BatchId(Blake3Digest<32>); - -impl BatchId { - /// Calculates a batch ID from the given set of transactions. - pub fn compute(txs: impl Iterator) -> Self - where - T: Borrow, - { - let mut buf = Vec::with_capacity(32 * txs.size_hint().0); - for tx in txs { - buf.extend_from_slice(&tx.borrow().as_bytes()); - } - Self(Blake3_256::hash(&buf)) - } -} - -impl std::fmt::Display for BatchId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(&format_blake3_digest(self.0)) - } -} - -// ACCOUNT UPDATE -// ================================================================================================ - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AccountUpdate { - pub init_state: Digest, - pub final_state: Digest, - pub transactions: Vec, - pub details: AccountUpdateDetails, -} - -impl AccountUpdate { - fn new(tx: &ProvenTransaction) -> Self { - Self { - init_state: tx.account_update().init_state_hash(), - final_state: tx.account_update().final_state_hash(), - transactions: vec![tx.id()], - details: tx.account_update().details().clone(), - } - } - - /// Merges the transaction's update into this account update. 
- fn merge_tx(&mut self, tx: &ProvenTransaction) -> Result<(), AccountDeltaError> { - assert!( - self.final_state == tx.account_update().init_state_hash(), - "Transacion's initial state does not match current account state" - ); - - self.final_state = tx.account_update().final_state_hash(); - self.transactions.push(tx.id()); - self.details = self.details.clone().merge(tx.account_update().details().clone())?; - - Ok(()) - } -} - -// TRANSACTION BATCH -// ================================================================================================ - -/// A batch of transactions that share a common proof. -/// -/// Note: Until recursive proofs are available in the Miden VM, we don't include the common proof. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TransactionBatch { - id: BatchId, - updated_accounts: BTreeMap, - input_notes: Vec, - output_notes_smt: BatchNoteTree, - output_notes: Vec, -} - -impl TransactionBatch { - // CONSTRUCTORS - // -------------------------------------------------------------------------------------------- - - /// Returns a new [TransactionBatch] built from the provided transactions. If a map of - /// unauthenticated notes found in the store is provided, it is used for transforming - /// unauthenticated notes into authenticated notes. - /// - /// The tx input takes an `IntoIterator` of a reference, which effectively allows for cheap - /// cloning of the iterator. Or put differently, we want something similar to `impl - /// Iterator + Clone` which this provides. - /// - /// # Errors - /// - /// Returns an error if: - /// - There are duplicated output notes or unauthenticated notes found across all transactions - /// in the batch. - /// - Hashes for corresponding input notes and output notes don't match. 
- #[instrument(target = COMPONENT, name = "new_batch", skip_all, err)] - pub fn new<'a, I>( - txs: impl IntoIterator, - found_unauthenticated_notes: NoteAuthenticationInfo, - ) -> Result - where - I: Iterator + Clone, - { - let tx_iter = txs.into_iter(); - let id = BatchId::compute(tx_iter.clone().map(ProvenTransaction::id)); - - // Populate batch output notes and updated accounts. - let mut output_notes = OutputNoteTracker::new(tx_iter.clone())?; - let mut updated_accounts = BTreeMap::::new(); - let mut unauthenticated_input_notes = BTreeSet::new(); - for tx in tx_iter.clone() { - // Merge account updates so that state transitions A->B->C become A->C. - match updated_accounts.entry(tx.account_id()) { - Entry::Vacant(vacant) => { - vacant.insert(AccountUpdate::new(tx)); - }, - Entry::Occupied(occupied) => { - occupied.into_mut().merge_tx(tx).map_err(|source| { - BuildBatchError::AccountUpdateError { account_id: tx.account_id(), source } - })?; - }, - }; - - // Check unauthenticated input notes for duplicates: - for note in tx.get_unauthenticated_notes() { - let id = note.id(); - if !unauthenticated_input_notes.insert(id) { - return Err(BuildBatchError::DuplicateUnauthenticatedNote(id)); - } - } - } - - // Populate batch produced nullifiers and match output notes with corresponding - // unauthenticated input notes in the same batch, which are removed from the unauthenticated - // input notes set. - // - // One thing to note: - // This still allows transaction `A` to consume an unauthenticated note `x` and output note - // `y` and for transaction `B` to consume an unauthenticated note `y` and output - // note `x` (i.e., have a circular dependency between transactions), but this is not - // a problem. - let mut input_notes = vec![]; - for tx in tx_iter { - for input_note in tx.input_notes().iter() { - // Header is presented only for unauthenticated input notes. 
- let input_note = match input_note.header() { - Some(input_note_header) => { - if output_notes.remove_note(input_note_header)? { - continue; - } - - // If an unauthenticated note was found in the store, transform it to an - // authenticated one (i.e. erase additional note details - // except the nullifier) - if found_unauthenticated_notes.contains_note(&input_note_header.id()) { - InputNoteCommitment::from(input_note.nullifier()) - } else { - input_note.clone() - } - }, - None => input_note.clone(), - }; - input_notes.push(input_note); - } - } - - let output_notes = output_notes.into_notes(); - - // Build the output notes SMT. - let output_notes_smt = BatchNoteTree::with_contiguous_leaves( - output_notes.iter().map(|note| (note.id(), note.metadata())), - ) - .expect("Unreachable: fails only if the output note list contains duplicates"); - - Ok(Self { - id, - updated_accounts, - input_notes, - output_notes_smt, - output_notes, - }) - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns the batch ID. - pub fn id(&self) -> BatchId { - self.id - } - - /// Returns an iterator over (`account_id`, `init_state_hash`) tuples for accounts that were - /// modified in this transaction batch. - #[cfg(test)] - pub fn account_initial_states(&self) -> impl Iterator + '_ { - self.updated_accounts - .iter() - .map(|(&account_id, update)| (account_id, update.init_state)) - } - - /// Returns an iterator over (`account_id`, details, `new_state_hash`) tuples for accounts that - /// were modified in this transaction batch. - pub fn updated_accounts(&self) -> impl Iterator + '_ { - self.updated_accounts.iter() - } - - /// Returns input notes list consumed by the transactions in this batch. Any unauthenticated - /// input notes which have matching output notes within this batch are not included in this - /// list. 
- pub fn input_notes(&self) -> &[InputNoteCommitment] { - &self.input_notes - } - - /// Returns an iterator over produced nullifiers for all consumed notes. - pub fn produced_nullifiers(&self) -> impl Iterator + '_ { - self.input_notes.iter().map(InputNoteCommitment::nullifier) - } - - /// Returns the root hash of the output notes SMT. - pub fn output_notes_root(&self) -> Digest { - self.output_notes_smt.root() - } - - /// Returns output notes list. - pub fn output_notes(&self) -> &Vec { - &self.output_notes - } -} - -#[derive(Debug)] -struct OutputNoteTracker { - output_notes: Vec>, - output_note_index: BTreeMap, -} - -impl OutputNoteTracker { - fn new<'a>(txs: impl Iterator) -> Result { - let mut output_notes = vec![]; - let mut output_note_index = BTreeMap::new(); - for tx in txs { - for note in tx.output_notes().iter() { - if output_note_index.insert(note.id(), output_notes.len()).is_some() { - return Err(BuildBatchError::DuplicateOutputNote(note.id())); - } - output_notes.push(Some(note.clone())); - } - } - - Ok(Self { output_notes, output_note_index }) - } - - pub fn remove_note(&mut self, input_note_header: &NoteHeader) -> Result { - let id = input_note_header.id(); - if let Some(note_index) = self.output_note_index.remove(&id) { - if let Some(output_note) = mem::take(&mut self.output_notes[note_index]) { - let input_hash = input_note_header.hash(); - let output_hash = output_note.hash(); - if output_hash != input_hash { - return Err(BuildBatchError::NoteHashesMismatch { - id, - input_hash, - output_hash, - }); - } - - return Ok(true); - } - } - - Ok(false) - } - - pub fn into_notes(self) -> Vec { - self.output_notes.into_iter().flatten().collect() - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use miden_objects::note::NoteInclusionProof; - use miden_processor::crypto::MerklePath; - - use super::*; - use crate::test_utils::{ - mock_proven_tx, - 
note::{mock_note, mock_output_note, mock_unauthenticated_note_commitment}, - }; - - #[test] - fn output_note_tracker_duplicate_output_notes() { - let mut txs = mock_proven_txs(); - - let result = OutputNoteTracker::new(txs.iter()); - assert!( - result.is_ok(), - "Creation of output note tracker was not expected to fail: {result:?}" - ); - - let duplicate_output_note = txs[1].output_notes().get_note(1).clone(); - - txs.push(mock_proven_tx( - 3, - vec![], - vec![duplicate_output_note.clone(), mock_output_note(8), mock_output_note(4)], - )); - - match OutputNoteTracker::new(txs.iter()) { - Err(BuildBatchError::DuplicateOutputNote(note_id)) => { - assert_eq!(note_id, duplicate_output_note.id()); - }, - res => panic!("Unexpected result: {res:?}"), - } - } - - #[test] - fn output_note_tracker_remove_in_place_consumed_note() { - let txs = mock_proven_txs(); - let mut tracker = OutputNoteTracker::new(txs.iter()).unwrap(); - - let note_to_remove = mock_note(4); - - assert!(tracker.remove_note(note_to_remove.header()).unwrap()); - assert!(!tracker.remove_note(note_to_remove.header()).unwrap()); - - // Check that output notes are in the expected order and consumed note was removed - assert_eq!( - tracker.into_notes(), - vec![ - mock_output_note(2), - mock_output_note(3), - mock_output_note(6), - mock_output_note(7), - mock_output_note(8), - ] - ); - } - - #[test] - fn duplicate_unauthenticated_notes() { - let mut txs = mock_proven_txs(); - let duplicate_note = mock_note(5); - txs.push(mock_proven_tx(4, vec![duplicate_note.clone()], vec![mock_output_note(9)])); - match TransactionBatch::new(&txs, NoteAuthenticationInfo::default()) { - Err(BuildBatchError::DuplicateUnauthenticatedNote(note_id)) => { - assert_eq!(note_id, duplicate_note.id()); - }, - res => panic!("Unexpected result: {res:?}"), - } - } - - #[test] - fn consume_notes_in_place() { - let mut txs = mock_proven_txs(); - let note_to_consume = mock_note(3); - txs.push(mock_proven_tx( - 3, - vec![mock_note(11), 
note_to_consume, mock_note(13)], - vec![mock_output_note(9), mock_output_note(10)], - )); - - let batch = TransactionBatch::new(&txs, NoteAuthenticationInfo::default()).unwrap(); - - // One of the unauthenticated notes must be removed from the batch due to the consumption - // of the corresponding output note - let expected_input_notes = vec![ - mock_unauthenticated_note_commitment(1), - mock_unauthenticated_note_commitment(5), - mock_unauthenticated_note_commitment(11), - mock_unauthenticated_note_commitment(13), - ]; - assert_eq!(batch.input_notes, expected_input_notes); - - // One of the output notes must be removed from the batch due to the consumption - // by the corresponding unauthenticated note - let expected_output_notes = vec![ - mock_output_note(2), - mock_output_note(4), - mock_output_note(6), - mock_output_note(7), - mock_output_note(8), - mock_output_note(9), - mock_output_note(10), - ]; - assert_eq!(batch.output_notes.len(), expected_output_notes.len()); - assert_eq!(batch.output_notes, expected_output_notes); - - // Ensure all nullifiers match the corresponding input notes' nullifiers - let expected_nullifiers: Vec<_> = - batch.input_notes().iter().map(InputNoteCommitment::nullifier).collect(); - let actual_nullifiers: Vec<_> = batch.produced_nullifiers().collect(); - assert_eq!(actual_nullifiers, expected_nullifiers); - } - - #[test] - fn convert_unauthenticated_note_to_authenticated() { - let txs = mock_proven_txs(); - let found_unauthenticated_notes = BTreeMap::from_iter([( - mock_note(5).id(), - NoteInclusionProof::new(0.into(), 0, MerklePath::default()).unwrap(), - )]); - let found_unauthenticated_notes = NoteAuthenticationInfo { - note_proofs: found_unauthenticated_notes, - block_proofs: Vec::default(), - }; - let batch = TransactionBatch::new(&txs, found_unauthenticated_notes).unwrap(); - - let expected_input_notes = - vec![mock_unauthenticated_note_commitment(1), mock_note(5).nullifier().into()]; - assert_eq!(batch.input_notes, 
expected_input_notes); - } - - // UTILITIES - // ============================================================================================= - - fn mock_proven_txs() -> Vec { - vec![ - mock_proven_tx( - 1, - vec![mock_note(1)], - vec![mock_output_note(2), mock_output_note(3), mock_output_note(4)], - ), - mock_proven_tx( - 2, - vec![mock_note(5)], - vec![mock_output_note(6), mock_output_note(7), mock_output_note(8)], - ), - ] - } -} diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index 68fae021..550dcb73 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -1,22 +1,21 @@ use std::{num::NonZeroUsize, ops::Range, time::Duration}; -use batch::BatchId; -use miden_node_proto::domain::note::NoteAuthenticationInfo; +use miden_node_proto::domain::batch::BatchInputs; +use miden_node_utils::formatting::format_array; +use miden_objects::{ + batch::{BatchId, ProposedBatch, ProvenBatch}, + MIN_PROOF_SECURITY_LEVEL, +}; +use miden_tx_batch_prover::LocalBatchProver; use rand::Rng; use tokio::{task::JoinSet, time}; use tracing::{debug, info, instrument, Span}; use crate::{ - domain::transaction::AuthenticatedTransaction, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, + domain::transaction::AuthenticatedTransaction, errors::BuildBatchError, mempool::SharedMempool, + store::StoreClient, COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, }; -pub mod batch; -pub use batch::TransactionBatch; -use miden_node_utils::formatting::format_array; - -use crate::errors::BuildBatchError; - // BATCH BUILDER // ================================================================================================ @@ -105,7 +104,7 @@ impl BatchBuilder { // BATCH WORKER // ================================================================================================ -type BatchResult = Result; +type BatchResult = Result; /// Represents a pool of batch 
provers. /// @@ -219,15 +218,19 @@ impl WorkerPool { async move { tracing::debug!("Begin proving batch."); - let inputs = store - .get_batch_inputs( - transactions - .iter() - .flat_map(AuthenticatedTransaction::unauthenticated_notes), - ) + let block_references = + transactions.iter().map(AuthenticatedTransaction::reference_block); + let unauthenticated_notes = transactions + .iter() + .flat_map(AuthenticatedTransaction::unauthenticated_notes); + + let batch_inputs = store + .get_batch_inputs(block_references, unauthenticated_notes) .await .map_err(|err| (id, BuildBatchError::FetchBatchInputsFailed(err)))?; - let batch = Self::build_batch(transactions, inputs).map_err(|err| (id, err))?; + + let batch = + Self::build_batch(transactions, batch_inputs).map_err(|err| (id, err))?; tokio::time::sleep(simulated_proof_time).await; if failed { @@ -250,19 +253,35 @@ impl WorkerPool { #[instrument(target = COMPONENT, skip_all, err, fields(batch_id))] fn build_batch( txs: Vec, - inputs: NoteAuthenticationInfo, - ) -> Result { + batch_inputs: BatchInputs, + ) -> Result { let num_txs = txs.len(); info!(target: COMPONENT, num_txs, "Building a transaction batch"); debug!(target: COMPONENT, txs = %format_array(txs.iter().map(|tx| tx.id().to_hex()))); - let txs = txs.iter().map(AuthenticatedTransaction::raw_proven_transaction); - let batch = TransactionBatch::new(txs, inputs)?; + let BatchInputs { + batch_reference_block_header, + note_proofs, + chain_mmr, + } = batch_inputs; + + let transactions = txs.iter().map(AuthenticatedTransaction::proven_transaction).collect(); + + let proposed_batch = + ProposedBatch::new(transactions, batch_reference_block_header, chain_mmr, note_proofs) + .map_err(BuildBatchError::ProposeBatchError)?; + + Span::current().record("batch_id", proposed_batch.id().to_string()); + info!(target: COMPONENT, "Proposed Batch built"); + + let proven_batch = LocalBatchProver::new(MIN_PROOF_SECURITY_LEVEL) + .prove(proposed_batch) + 
.map_err(BuildBatchError::ProveBatchError)?; - Span::current().record("batch_id", batch.id().to_string()); - info!(target: COMPONENT, "Transaction batch built"); + Span::current().record("batch_id", proven_batch.id().to_string()); + info!(target: COMPONENT, "Proven Batch built"); - Ok(batch) + Ok(proven_batch) } } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 32023a58..21790dff 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -3,6 +3,7 @@ use std::{collections::BTreeSet, ops::Range}; use miden_node_utils::formatting::format_array; use miden_objects::{ account::AccountId, + batch::ProvenBatch, block::Block, note::{NoteHeader, Nullifier}, transaction::{InputNoteCommitment, OutputNote}, @@ -12,8 +13,8 @@ use tokio::time::Duration; use tracing::{debug, info, instrument}; use crate::{ - batch_builder::batch::TransactionBatch, errors::BuildBlockError, mempool::SharedMempool, - store::StoreClient, COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; pub(crate) mod prover; @@ -94,34 +95,36 @@ impl BlockBuilder { } #[instrument(target = COMPONENT, skip_all, err)] - async fn build_block(&self, batches: &[TransactionBatch]) -> Result<(), BuildBlockError> { + async fn build_block(&self, batches: &[ProvenBatch]) -> Result<(), BuildBlockError> { info!( target: COMPONENT, num_batches = batches.len(), - batches = %format_array(batches.iter().map(TransactionBatch::id)), + batches = %format_array(batches.iter().map(ProvenBatch::id)), ); let updated_account_set: BTreeSet = batches .iter() - .flat_map(TransactionBatch::updated_accounts) + .flat_map(ProvenBatch::account_updates) .map(|(account_id, _)| *account_id) .collect(); let output_notes: Vec<_> = - batches.iter().map(TransactionBatch::output_notes).cloned().collect(); + batches.iter().map(|batch| 
batch.output_notes().to_vec()).collect(); let produced_nullifiers: Vec = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); // Populate set of output notes from all batches - let output_notes_set: BTreeSet<_> = - output_notes.iter().flat_map(|batch| batch.iter().map(OutputNote::id)).collect(); + let output_notes_set: BTreeSet<_> = output_notes + .iter() + .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) + .collect(); // Build a set of unauthenticated input notes for this block which do not have a matching // output note produced in this block let dangling_notes: BTreeSet<_> = batches .iter() - .flat_map(TransactionBatch::input_notes) + .flat_map(ProvenBatch::input_notes) .filter_map(InputNoteCommitment::header) .map(NoteHeader::id) .filter(|note_id| !output_notes_set.contains(note_id)) diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs index 9f453b59..a0972e54 100644 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ b/crates/block-producer/src/block_builder/prover/block_witness.rs @@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet}; use miden_objects::{ account::{delta::AccountUpdateDetails, AccountId}, + batch::{BatchAccountUpdate, ProvenBatch}, block::{BlockAccountUpdate, BlockHeader}, crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, note::Nullifier, @@ -11,7 +12,6 @@ use miden_objects::{ }; use crate::{ - batch_builder::batch::{AccountUpdate, TransactionBatch}, block::BlockInputs, errors::{BlockProverError, BuildBlockError}, }; @@ -33,7 +33,7 @@ pub struct BlockWitness { impl BlockWitness { pub fn new( mut block_inputs: BlockInputs, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> Result<(Self, Vec), BuildBlockError> { // This limit should be enforced by the mempool. 
assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); @@ -44,18 +44,19 @@ impl BlockWitness { .iter() .enumerate() .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| (batch_index, batch.output_notes_root())) + .map(|(batch_index, batch)| (batch_index, batch.output_notes_tree().root())) .collect(); // Order account updates by account ID and each update's initial state hash. // // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(TransactionBatch::updated_accounts) { + let mut updated_accounts = + BTreeMap::>::new(); + for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { updated_accounts .entry(*account_id) .or_default() - .insert(update.init_state, update.clone()); + .insert(update.initial_state_commitment(), update.clone()); } // Build account witnesses. @@ -84,12 +85,13 @@ impl BlockWitness { ) })?; - transactions.extend(update.transactions); - current_hash = update.final_state; + current_hash = update.final_state_commitment(); + let (update_transactions, update_details) = update.into_parts(); + transactions.extend(update_transactions); details = Some(match details { - None => update.details, - Some(details) => details.merge(update.details).map_err(|source| { + None => update_details, + Some(details) => details.merge(update_details).map_err(|source| { BuildBlockError::AccountUpdateError { account_id, source } })?, }); @@ -156,13 +158,13 @@ impl BlockWitness { /// done in MASM. 
fn validate_nullifiers( block_inputs: &BlockInputs, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> Result<(), BuildBlockError> { let produced_nullifiers_from_store: BTreeSet = block_inputs.nullifiers.keys().copied().collect(); let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); if produced_nullifiers_from_store == produced_nullifiers_from_batches { Ok(()) diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs index d688cdcf..f020faf6 100644 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ b/crates/block-producer/src/block_builder/prover/tests.rs @@ -6,6 +6,7 @@ use miden_objects::{ account::{ delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, }, + batch::ProvenBatch, block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, crypto::merkle::{ EmptySubtreeRoots, LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, @@ -21,9 +22,9 @@ use miden_objects::{ use self::block_witness::AccountUpdateWitness; use super::*; use crate::{ - batch_builder::batch::TransactionBatch, block::{AccountWitness, BlockInputs}, test_utils::{ + batch::TransactionBatchConstructor, block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, MockProvenTxBuilder, MockStoreSuccessBuilder, }, @@ -75,7 +76,7 @@ fn block_witness_validation_inconsistent_account_ids() { } }; - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account( account_id_2, @@ -84,7 +85,7 @@ fn block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { @@ -95,7 +96,7 @@ fn 
block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -146,26 +147,19 @@ fn block_witness_validation_inconsistent_account_hashes() { }; let batches = { - let batch_1 = TransactionBatch::new( - [&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()], - NoteAuthenticationInfo::default(), + let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( + account_id_1, + account_1_hash_batches, + Digest::default(), ) - .unwrap(); - let batch_2 = TransactionBatch::new( - [&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()], - NoteAuthenticationInfo::default(), + .build()]); + + let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( + account_id_2, + Digest::default(), + Digest::default(), ) - .unwrap(); + .build()]); vec![batch_1, batch_2] }; @@ -248,12 +242,8 @@ fn block_witness_multiple_batches_per_account() { }; let batches = { - let batch_1 = - TransactionBatch::new([&x_txs[0], &y_txs[1]], NoteAuthenticationInfo::default()) - .unwrap(); - let batch_2 = - TransactionBatch::new([&y_txs[0], &x_txs[1]], NoteAuthenticationInfo::default()) - .unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); + let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); vec![batch_1, batch_2] }; @@ -360,7 +350,7 @@ async fn compute_account_root_success() { .await .unwrap(); - let batches: Vec = { + let batches: Vec = { let txs: Vec<_> = account_ids .iter() .enumerate() @@ -374,8 +364,8 @@ async fn compute_account_root_success() { }) .collect(); - let batch_1 = TransactionBatch::new(&txs[..2], NoteAuthenticationInfo::default()).unwrap(); - let batch_2 = TransactionBatch::new(&txs[2..], 
NoteAuthenticationInfo::default()).unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); + let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); vec![batch_1, batch_2] }; @@ -510,7 +500,7 @@ async fn compute_note_root_empty_batches_success() { .await .unwrap(); - let batches: Vec = Vec::new(); + let batches: Vec = Vec::new(); let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); @@ -542,8 +532,8 @@ async fn compute_note_root_empty_notes_success() { .await .unwrap(); - let batches: Vec = { - let batch = TransactionBatch::new(vec![], NoteAuthenticationInfo::default()).unwrap(); + let batches: Vec = { + let batch = ProvenBatch::mocked_from_transactions(vec![]); vec![batch] }; @@ -620,7 +610,7 @@ async fn compute_note_root_success() { .await .unwrap(); - let batches: Vec = { + let batches: Vec = { let txs: Vec<_> = notes_created .iter() .zip(account_ids.iter()) @@ -632,8 +622,8 @@ async fn compute_note_root_success() { }) .collect(); - let batch_1 = TransactionBatch::new(&txs[..2], NoteAuthenticationInfo::default()).unwrap(); - let batch_2 = TransactionBatch::new(&txs[2..], NoteAuthenticationInfo::default()).unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); + let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); vec![batch_1, batch_2] }; @@ -686,17 +676,17 @@ async fn compute_note_root_success() { /// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. 
#[test] fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -713,7 +703,12 @@ fn block_witness_validation_inconsistent_nullifiers() { let accounts = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, hash)| { (account_id, AccountWitness { hash, proof: MerklePath::default() }) }) @@ -765,17 +760,17 @@ fn block_witness_validation_inconsistent_nullifiers() { /// in the transaction #[tokio::test] async fn compute_nullifier_root_empty_success() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -783,7 +778,12 @@ async fn compute_nullifier_root_empty_success() { let account_ids: Vec = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, _)| account_id) .collect(); @@ -819,17 
+819,17 @@ async fn compute_nullifier_root_empty_success() { /// present in the transaction #[tokio::test] async fn compute_nullifier_root_success() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -837,7 +837,12 @@ async fn compute_nullifier_root_success() { let account_ids: Vec = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, _)| account_id) .collect(); diff --git a/crates/block-producer/src/domain/transaction.rs b/crates/block-producer/src/domain/transaction.rs index 10c2f236..c249709e 100644 --- a/crates/block-producer/src/domain/transaction.rs +++ b/crates/block-producer/src/domain/transaction.rs @@ -101,6 +101,10 @@ impl AuthenticatedTransaction { self.inner.input_notes().num_notes() } + pub fn reference_block(&self) -> (BlockNumber, Digest) { + (self.inner.block_num(), self.inner.block_ref()) + } + /// Notes which were unauthenticate in the transaction __and__ which were /// not authenticated by the store inputs. 
pub fn unauthenticated_notes(&self) -> impl Iterator + '_ { @@ -111,6 +115,11 @@ impl AuthenticatedTransaction { .filter(|note_id| !self.notes_authenticated_by_store.contains(note_id)) } + pub fn proven_transaction(&self) -> Arc { + Arc::clone(&self.inner) + } + + #[cfg(test)] pub fn raw_proven_transaction(&self) -> &ProvenTransaction { &self.inner } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 785de16a..f61cf6dc 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -6,9 +6,10 @@ use miden_objects::{ crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, + AccountDeltaError, Digest, ProposedBatchError, }; use miden_processor::ExecutionError; +use miden_tx_batch_prover::errors::BatchProveError; use thiserror::Error; use tokio::task::JoinError; @@ -127,25 +128,6 @@ impl From for tonic::Status { /// Error encountered while building a batch. #[derive(Debug, Error)] pub enum BuildBatchError { - #[error("duplicated unauthenticated transaction input note ID in the batch: {0}")] - DuplicateUnauthenticatedNote(NoteId), - - #[error("duplicated transaction output note ID in the batch: {0}")] - DuplicateOutputNote(NoteId), - - #[error("note hashes mismatch for note {id}: (input: {input_hash}, output: {output_hash})")] - NoteHashesMismatch { - id: NoteId, - input_hash: Digest, - output_hash: Digest, - }, - - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] @@ -156,6 +138,12 @@ pub enum BuildBatchError { #[error("failed to fetch batch inputs from store")] FetchBatchInputsFailed(#[source] StoreError), + + #[error("failed to build proposed transaction batch")] + ProposeBatchError(#[source] ProposedBatchError), + + #[error("failed to prove proposed transaction batch")] + ProveBatchError(#[source] BatchProveError), } // Block prover errors diff --git a/crates/block-producer/src/mempool/batch_graph.rs b/crates/block-producer/src/mempool/batch_graph.rs index 73948f46..256408c1 100644 --- a/crates/block-producer/src/mempool/batch_graph.rs +++ b/crates/block-producer/src/mempool/batch_graph.rs @@ -1,12 +1,15 @@ use std::collections::{BTreeMap, BTreeSet}; -use miden_objects::transaction::TransactionId; +use miden_objects::{ + account::AccountId, + batch::{BatchId, ProvenBatch}, + transaction::TransactionId, +}; use super::{ graph::{DependencyGraph, GraphError}, BlockBudget, BudgetStatus, }; -use crate::batch_builder::batch::{BatchId, TransactionBatch}; // BATCH GRAPH // ================================================================================================ @@ -53,7 +56,7 @@ use crate::batch_builder::batch::{BatchId, TransactionBatch}; #[derive(Default, Debug, Clone, PartialEq)] pub struct BatchGraph { /// Tracks the interdependencies between batches. - inner: DependencyGraph, + inner: DependencyGraph, /// Maps each transaction to its batch, allowing for reverse lookups. 
/// @@ -97,12 +100,12 @@ impl BatchGraph { /// - any parent transactions are _not_ in the graph pub fn insert( &mut self, - transactions: Vec, + transactions: Vec<(TransactionId, AccountId)>, mut parents: BTreeSet, ) -> Result { let duplicates = transactions .iter() - .filter(|tx| self.transactions.contains_key(tx)) + .filter_map(|(tx, _)| self.transactions.contains_key(tx).then_some(tx)) .copied() .collect::>(); if !duplicates.is_empty() { @@ -111,7 +114,7 @@ impl BatchGraph { // Reverse lookup parent batch IDs. Take care to allow for parent transactions within this // batch i.e. internal dependencies. - for tx in &transactions { + for (tx, _) in &transactions { parents.remove(tx); } let parent_batches = parents @@ -124,13 +127,14 @@ impl BatchGraph { }) .collect::>()?; - let id = BatchId::compute(transactions.iter()); + let id = BatchId::from_ids(transactions.iter().copied()); self.inner.insert_pending(id, parent_batches)?; - for tx in transactions.iter().copied() { + for (tx, _) in transactions.iter().copied() { self.transactions.insert(tx, id); } - self.batches.insert(id, transactions); + + self.batches.insert(id, transactions.into_iter().map(|(tx, _)| tx).collect()); Ok(id) } @@ -231,7 +235,7 @@ impl BatchGraph { /// # Errors /// /// Returns an error if the batch is not in the graph or if it was already previously proven. - pub fn submit_proof(&mut self, batch: TransactionBatch) -> Result<(), GraphError> { + pub fn submit_proof(&mut self, batch: ProvenBatch) -> Result<(), GraphError> { self.inner.promote_pending(batch.id(), batch) } @@ -240,7 +244,7 @@ impl BatchGraph { /// /// Note that batch order should be maintained to allow for inter-batch dependencies to be /// correctly resolved. 
- pub fn select_block(&mut self, mut budget: BlockBudget) -> Vec { + pub fn select_block(&mut self, mut budget: BlockBudget) -> Vec { let mut batches = Vec::with_capacity(budget.batches); while let Some(batch_id) = self.inner.roots().first().copied() { @@ -289,14 +293,14 @@ mod tests { #[test] fn insert_rejects_duplicate_transactions() { let mut rng = Random::with_random_seed(); - let tx_dup = rng.draw_tx_id(); - let tx_non_dup = rng.draw_tx_id(); + let tx_dup = (rng.draw_tx_id(), rng.draw_account_id()); + let tx_non_dup = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); uut.insert(vec![tx_dup], BTreeSet::default()).unwrap(); let err = uut.insert(vec![tx_dup, tx_non_dup], BTreeSet::default()).unwrap_err(); - let expected = BatchInsertError::DuplicateTransactions([tx_dup].into()); + let expected = BatchInsertError::DuplicateTransactions([tx_dup.0].into()); assert_eq!(err, expected); } @@ -304,13 +308,13 @@ mod tests { #[test] fn insert_rejects_missing_parents() { let mut rng = Random::with_random_seed(); - let tx = rng.draw_tx_id(); - let missing = rng.draw_tx_id(); + let tx = (rng.draw_tx_id(), rng.draw_account_id()); + let missing = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); - let err = uut.insert(vec![tx], [missing].into()).unwrap_err(); - let expected = BatchInsertError::UnknownParentTransaction(missing); + let err = uut.insert(vec![tx], [missing.0].into()).unwrap_err(); + let expected = BatchInsertError::UnknownParentTransaction(missing.0); assert_eq!(err, expected); } @@ -319,11 +323,11 @@ mod tests { fn insert_with_internal_parent_succeeds() { // Ensure that a batch with internal dependencies can be inserted. 
let mut rng = Random::with_random_seed(); - let parent = rng.draw_tx_id(); - let child = rng.draw_tx_id(); + let parent = (rng.draw_tx_id(), rng.draw_account_id()); + let child = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); - uut.insert(vec![parent, child], [parent].into()).unwrap(); + uut.insert(vec![parent, child], [parent.0].into()).unwrap(); } // PURGE_SUBGRAPHS TESTS @@ -334,19 +338,25 @@ mod tests { // Ensure that purge_subgraphs returns both parent and child batches when the parent is // pruned. Further ensure that a disjoint batch is not pruned. let mut rng = Random::with_random_seed(); - let parent_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect::>(); - let child_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect::>(); - let disjoint_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect(); + let parent_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect::>(); + let child_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect::>(); + let disjoint_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect(); let mut uut = BatchGraph::default(); let parent_batch_id = uut.insert(parent_batch_txs.clone(), BTreeSet::default()).unwrap(); let child_batch_id = - uut.insert(child_batch_txs.clone(), [parent_batch_txs[0]].into()).unwrap(); + uut.insert(child_batch_txs.clone(), [parent_batch_txs[0].0].into()).unwrap(); uut.insert(disjoint_batch_txs, BTreeSet::default()).unwrap(); let result = uut.remove_batches([parent_batch_id].into()).unwrap(); - let expected = - [(parent_batch_id, parent_batch_txs), (child_batch_id, child_batch_txs)].into(); + let expected = [ + (parent_batch_id, parent_batch_txs.into_iter().map(|(tx, _)| tx).collect()), + (child_batch_id, child_batch_txs.into_iter().map(|(tx, _)| tx).collect()), + ] + .into(); assert_eq!(result, expected); } diff --git a/crates/block-producer/src/mempool/mod.rs 
b/crates/block-producer/src/mempool/mod.rs index 08e33232..3eaa4052 100644 --- a/crates/block-producer/src/mempool/mod.rs +++ b/crates/block-producer/src/mempool/mod.rs @@ -4,8 +4,10 @@ use batch_graph::BatchGraph; use graph::GraphError; use inflight_state::InflightState; use miden_objects::{ - block::BlockNumber, transaction::TransactionId, MAX_ACCOUNTS_PER_BATCH, - MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, + batch::{BatchId, ProvenBatch}, + block::BlockNumber, + transaction::TransactionId, + MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, }; use tokio::sync::Mutex; use tracing::instrument; @@ -13,10 +15,8 @@ use transaction_expiration::TransactionExpirations; use transaction_graph::TransactionGraph; use crate::{ - batch_builder::batch::{BatchId, TransactionBatch}, - domain::transaction::AuthenticatedTransaction, - errors::AddTransactionError, - COMPONENT, SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, + domain::transaction::AuthenticatedTransaction, errors::AddTransactionError, COMPONENT, + SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, }; mod batch_graph; @@ -114,7 +114,7 @@ impl BlockBudget { /// Returns [`BudgetStatus::Exceeded`] if the batch would exceed the remaining budget, /// otherwise returns [`BudgetStatus::Ok`]. #[must_use] - fn check_then_subtract(&mut self, _batch: &TransactionBatch) -> BudgetStatus { + fn check_then_subtract(&mut self, _batch: &ProvenBatch) -> BudgetStatus { if self.batches == 0 { BudgetStatus::Exceeded } else { @@ -233,7 +233,7 @@ impl Mempool { if batch.is_empty() { return None; } - let tx_ids = batch.iter().map(AuthenticatedTransaction::id).collect::>(); + let tx_ids = batch.iter().map(|tx| (tx.id(), tx.account_id())).collect::>(); let batch_id = self.batches.insert(tx_ids, parents).expect("Selected batch should insert"); @@ -268,7 +268,7 @@ impl Mempool { /// Marks a batch as proven if it exists. 
#[instrument(target = COMPONENT, skip_all, fields(batch=%batch.id()))] - pub fn batch_proved(&mut self, batch: TransactionBatch) { + pub fn batch_proved(&mut self, batch: ProvenBatch) { // Batch may have been removed as part of a parent batches failure. if !self.batches.contains(&batch.id()) { return; @@ -287,11 +287,11 @@ impl Mempool { /// /// Panics if there is already a block in flight. #[instrument(target = COMPONENT, skip_all)] - pub fn select_block(&mut self) -> (BlockNumber, Vec) { + pub fn select_block(&mut self) -> (BlockNumber, Vec) { assert!(self.block_in_progress.is_none(), "Cannot have two blocks inflight."); let batches = self.batches.select_block(self.block_budget); - self.block_in_progress = Some(batches.iter().map(TransactionBatch::id).collect()); + self.block_in_progress = Some(batches.iter().map(ProvenBatch::id).collect()); (self.chain_tip.child(), batches) } diff --git a/crates/block-producer/src/mempool/tests.rs b/crates/block-producer/src/mempool/tests.rs index 8e93892f..e7680736 100644 --- a/crates/block-producer/src/mempool/tests.rs +++ b/crates/block-producer/src/mempool/tests.rs @@ -1,9 +1,8 @@ -use miden_node_proto::domain::note::NoteAuthenticationInfo; use miden_objects::block::BlockNumber; use pretty_assertions::assert_eq; use super::*; -use crate::test_utils::MockProvenTxBuilder; +use crate::test_utils::{batch::TransactionBatchConstructor, MockProvenTxBuilder}; impl Mempool { fn for_tests() -> Self { @@ -48,10 +47,8 @@ fn children_of_failed_batches_are_ignored() { uut.batch_failed(child_batch_a); assert_eq!(uut, reference); - let proof = - TransactionBatch::new([txs[2].raw_proven_transaction()], NoteAuthenticationInfo::default()) - .unwrap(); - uut.batch_proved(proof); + let proven_batch = ProvenBatch::mocked_from_transactions([txs[2].raw_proven_transaction()]); + uut.batch_proved(proven_batch); assert_eq!(uut, reference); } @@ -95,13 +92,9 @@ fn block_commit_reverts_expired_txns() { // Force the tx into a pending block. 
uut.add_transaction(tx_to_commit.clone()).unwrap(); uut.select_batch().unwrap(); - uut.batch_proved( - TransactionBatch::new( - [tx_to_commit.raw_proven_transaction()], - NoteAuthenticationInfo::default(), - ) - .unwrap(), - ); + uut.batch_proved(ProvenBatch::mocked_from_transactions( + [tx_to_commit.raw_proven_transaction()], + )); let (block, _) = uut.select_block(); // A reverted transaction behaves as if it never existed, the current state is the expected // outcome, plus an extra committed block at the end. @@ -168,13 +161,9 @@ fn block_failure_reverts_its_transactions() { uut.add_transaction(reverted_txs[0].clone()).unwrap(); uut.select_batch().unwrap(); - uut.batch_proved( - TransactionBatch::new( - [reverted_txs[0].raw_proven_transaction()], - NoteAuthenticationInfo::default(), - ) - .unwrap(), - ); + uut.batch_proved(ProvenBatch::mocked_from_transactions([ + reverted_txs[0].raw_proven_transaction() + ])); // Block 1 will contain just the first batch. let (block_number, _) = uut.select_block(); diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 2608975b..1aa2638f 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -6,13 +6,13 @@ use std::{ use itertools::Itertools; use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, + domain::batch::BatchInputs, errors::{ConversionError, MissingFieldHelper}, generated::{ digest, requests::{ - ApplyBlockRequest, GetBlockHeaderByNumberRequest, GetBlockInputsRequest, - GetNoteAuthenticationInfoRequest, GetTransactionInputsRequest, + ApplyBlockRequest, GetBatchInputsRequest, GetBlockHeaderByNumberRequest, + GetBlockInputsRequest, GetTransactionInputsRequest, }, responses::{GetTransactionInputsResponse, NullifierTransactionInputRecord}, store::api_client as store_client, @@ -212,21 +212,17 @@ impl StoreClient { #[instrument(target = COMPONENT, skip_all, err)] pub async fn get_batch_inputs( &self, + block_references: impl 
Iterator + Send, notes: impl Iterator + Send, - ) -> Result { - let request = tonic::Request::new(GetNoteAuthenticationInfoRequest { + ) -> Result { + let request = tonic::Request::new(GetBatchInputsRequest { + reference_blocks: block_references.map(|(block_num, _)| block_num.as_u32()).collect(), note_ids: notes.map(digest::Digest::from).collect(), }); - let store_response = - self.inner.clone().get_note_authentication_info(request).await?.into_inner(); + let store_response = self.inner.clone().get_batch_inputs(request).await?.into_inner(); - let note_authentication_info = store_response - .proofs - .ok_or(GetTransactionInputsResponse::missing_field("proofs"))? - .try_into()?; - - Ok(note_authentication_info) + store_response.try_into().map_err(Into::into) } #[instrument(target = COMPONENT, skip_all, err)] diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 45346941..b4caffd2 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,8 +1,25 @@ -use miden_node_proto::domain::note::NoteAuthenticationInfo; +use std::collections::BTreeMap; -use crate::{batch_builder::TransactionBatch, test_utils::MockProvenTxBuilder}; +use miden_objects::{ + batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, + block::BlockNumber, + transaction::{InputNotes, ProvenTransaction}, +}; + +use crate::test_utils::MockProvenTxBuilder; pub trait TransactionBatchConstructor { + /// Builds a **mocked** [`ProvenBatch`] from the given transactions, which most likely violates + /// some of the rules of actual transaction batches. + /// + /// This builds a mocked version of a proven batch for testing purposes which can be useful if + /// the batch's details don't need to be correct (e.g. if something else is under test but + /// requires a transaction batch). 
If you need an actual valid [`ProvenBatch`], build a + /// [`ProposedBatch`](miden_objects::batch::ProposedBatch) first and convert (without proving) + /// or prove it into a [`ProvenBatch`]. + fn mocked_from_transactions<'tx>(txs: impl IntoIterator) + -> Self; + /// Returns a `TransactionBatch` with `notes_per_tx.len()` transactions, where the i'th /// transaction has `notes_per_tx[i]` notes created fn from_notes_created(starting_account_index: u32, notes_per_tx: &[u64]) -> Self; @@ -11,7 +28,46 @@ pub trait TransactionBatchConstructor { fn from_txs(starting_account_index: u32, num_txs_in_batch: u64) -> Self; } -impl TransactionBatchConstructor for TransactionBatch { +impl TransactionBatchConstructor for ProvenBatch { + fn mocked_from_transactions<'tx>( + txs: impl IntoIterator, + ) -> Self { + let mut account_updates = BTreeMap::new(); + + let txs: Vec<_> = txs.into_iter().collect(); + let mut input_notes = Vec::new(); + let mut output_notes = Vec::new(); + + for tx in &txs { + // Aggregate account updates. + account_updates + .entry(tx.account_id()) + .and_modify(|update: &mut BatchAccountUpdate| { + update.merge_proven_tx(tx).unwrap(); + }) + .or_insert_with(|| BatchAccountUpdate::from_transaction(tx)); + + // Consider all input notes of all transactions as inputs of the batch, which may not + // always be correct. + input_notes.extend(tx.input_notes().iter().cloned()); + // Consider all outputs notes of all transactions as outputs of the batch, which may not + // always be correct. 
+ output_notes.extend(tx.output_notes().iter().cloned()); + } + + ProvenBatch::new( + BatchId::from_transactions(txs.into_iter()), + account_updates, + InputNotes::new_unchecked(input_notes), + BatchNoteTree::with_contiguous_leaves( + output_notes.iter().map(|x| (x.id(), x.metadata())), + ) + .unwrap(), + output_notes, + BlockNumber::from(u32::MAX), + ) + } + fn from_notes_created(starting_account_index: u32, notes_per_tx: &[u64]) -> Self { let txs: Vec<_> = notes_per_tx .iter() @@ -26,7 +82,7 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(&txs, NoteAuthenticationInfo::default()).unwrap() + Self::mocked_from_transactions(&txs) } fn from_txs(starting_account_index: u32, num_txs_in_batch: u64) -> Self { @@ -38,6 +94,6 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(&txs, NoteAuthenticationInfo::default()).unwrap() + Self::mocked_from_transactions(&txs) } } diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 2a940b1a..532e314d 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,6 +1,7 @@ use std::iter; use miden_objects::{ + batch::ProvenBatch, block::{Block, BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, NoteBatch}, crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, @@ -10,7 +11,6 @@ use miden_objects::{ use super::MockStoreSuccess; use crate::{ - batch_builder::TransactionBatch, block::BlockInputs, block_builder::prover::{block_witness::BlockWitness, BlockProver}, }; @@ -19,7 +19,7 @@ use crate::{ /// batches to be applied pub async fn build_expected_block_header( store: &MockStoreSuccess, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> BlockHeader { let last_block_header = *store .block_headers @@ -32,11 +32,11 @@ pub async fn build_expected_block_header( // Compute new account root let updated_accounts: Vec<_> = - 
batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); + batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); let new_account_root = { let mut store_accounts = store.accounts.read().await.clone(); for (&account_id, update) in updated_accounts { - store_accounts.insert(account_id.into(), update.final_state.into()); + store_accounts.insert(account_id.into(), update.final_state_commitment().into()); } store_accounts.root() @@ -51,7 +51,8 @@ pub async fn build_expected_block_header( store_chain_mmr.peaks().hash_peaks() }; - let note_created_smt = note_created_smt_from_note_batches(block_output_notes(batches.iter())); + let note_created_smt = + note_created_smt_from_note_batches(block_output_notes(batches.iter()).iter()); // Build header BlockHeader::new( @@ -74,12 +75,12 @@ pub async fn build_expected_block_header( /// node pub async fn build_actual_block_header( store: &MockStoreSuccess, - batches: Vec, + batches: Vec, ) -> BlockHeader { let updated_accounts: Vec<_> = - batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); + batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); let produced_nullifiers: Vec = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); let block_inputs_from_store: BlockInputs = store .get_block_inputs( @@ -199,7 +200,7 @@ pub(crate) fn note_created_smt_from_note_batches<'a>( } pub(crate) fn block_output_notes<'a>( - batches: impl Iterator + Clone, -) -> impl Iterator + Clone { - batches.map(TransactionBatch::output_notes) + batches: impl Iterator + Clone, +) -> Vec> { + batches.map(|batch| batch.output_notes().to_vec()).collect() } diff --git a/crates/block-producer/src/test_utils/mod.rs b/crates/block-producer/src/test_utils/mod.rs index 28ba4349..97dfcc5f 100644 --- a/crates/block-producer/src/test_utils/mod.rs +++ b/crates/block-producer/src/test_utils/mod.rs @@ -3,6 +3,7 
@@ use std::sync::Arc; use miden_objects::{ account::AccountId, crypto::rand::{FeltRng, RpoRandomCoin}, + testing::account_id::AccountIdBuilder, transaction::TransactionId, Digest, }; @@ -48,6 +49,10 @@ impl Random { self.0.draw_word().into() } + pub fn draw_account_id(&mut self) -> AccountId { + AccountIdBuilder::new().build_with_rng(&mut self.0) + } + pub fn draw_digest(&mut self) -> Digest { self.0.draw_word().into() } diff --git a/crates/block-producer/src/test_utils/proven_tx.rs b/crates/block-producer/src/test_utils/proven_tx.rs index 3de0fd2c..4af9a0e5 100644 --- a/crates/block-producer/src/test_utils/proven_tx.rs +++ b/crates/block-producer/src/test_utils/proven_tx.rs @@ -135,6 +135,7 @@ impl MockProvenTxBuilder { self.account_id, self.initial_account_hash, self.final_account_hash, + BlockNumber::from(0), Digest::default(), self.expiration_block_num, ExecutionProof::new(Proof::new_dummy(), HashFunction::Blake3_192), diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index ac8e694a..ecbe1dc6 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -5,6 +5,7 @@ use std::{ use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ + batch::ProvenBatch, block::{Block, BlockHeader, BlockNumber, NoteBatch}, crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, note::{NoteId, NoteInclusionProof, Nullifier}, @@ -15,7 +16,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - batch_builder::TransactionBatch, block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, @@ -35,20 +35,23 @@ pub struct MockStoreSuccessBuilder { } impl MockStoreSuccessBuilder { - pub fn from_batches<'a>( - batches_iter: impl Iterator + Clone, - ) -> Self { + pub fn from_batches<'a>(batches_iter: impl Iterator + Clone) -> Self { let accounts_smt = { let accounts = batches_iter .clone() - 
.flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (account_id, update.initial_state_commitment())) + }) .map(|(account_id, hash)| (account_id.prefix().into(), hash.into())); SimpleSmt::::with_leaves(accounts).unwrap() }; Self { accounts: Some(accounts_smt), - notes: Some(block_output_notes(batches_iter).cloned().collect()), + notes: Some(block_output_notes(batches_iter)), produced_nullifiers: None, chain_mmr: None, block_num: None, diff --git a/crates/proto/src/domain/batch.rs b/crates/proto/src/domain/batch.rs new file mode 100644 index 00000000..2a29247e --- /dev/null +++ b/crates/proto/src/domain/batch.rs @@ -0,0 +1,53 @@ +use std::collections::BTreeMap; + +use miden_objects::{ + block::BlockHeader, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, +}; + +use crate::{ + errors::{ConversionError, MissingFieldHelper}, + generated::responses as proto, +}; + +/// Data required for a transaction batch. +#[derive(Clone, Debug)] +pub struct BatchInputs { + pub batch_reference_block_header: BlockHeader, + pub note_proofs: BTreeMap, + pub chain_mmr: ChainMmr, +} + +impl From for proto::GetBatchInputsResponse { + fn from(inputs: BatchInputs) -> Self { + Self { + batch_reference_block_header: Some(inputs.batch_reference_block_header.into()), + note_proofs: inputs.note_proofs.iter().map(Into::into).collect(), + chain_mmr: inputs.chain_mmr.to_bytes(), + } + } +} + +impl TryFrom for BatchInputs { + type Error = ConversionError; + + fn try_from(response: proto::GetBatchInputsResponse) -> Result { + let result = Self { + batch_reference_block_header: response + .batch_reference_block_header + .ok_or(proto::GetBatchInputsResponse::missing_field("block_header"))? 
+ .try_into()?, + note_proofs: response + .note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?, + chain_mmr: ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?, + }; + + Ok(result) + } +} diff --git a/crates/proto/src/domain/mod.rs b/crates/proto/src/domain/mod.rs index 83959535..2f7ee28d 100644 --- a/crates/proto/src/domain/mod.rs +++ b/crates/proto/src/domain/mod.rs @@ -1,4 +1,5 @@ pub mod account; +pub mod batch; pub mod block; pub mod digest; pub mod merkle; diff --git a/crates/proto/src/errors.rs b/crates/proto/src/errors.rs index 8af3c59e..39f72f3e 100644 --- a/crates/proto/src/errors.rs +++ b/crates/proto/src/errors.rs @@ -1,6 +1,9 @@ use std::{any::type_name, num::TryFromIntError}; -use miden_objects::crypto::merkle::{SmtLeafError, SmtProofError}; +use miden_objects::{ + crypto::merkle::{SmtLeafError, SmtProofError}, + utils::DeserializationError, +}; use thiserror::Error; #[derive(Debug, Error)] @@ -28,6 +31,17 @@ pub enum ConversionError { }, #[error("MMR error")] MmrError(#[from] miden_objects::crypto::merkle::MmrError), + #[error("failed to deserialize {entity}")] + DeserializationError { + entity: &'static str, + source: DeserializationError, + }, +} + +impl ConversionError { + pub fn deserialization_error(entity: &'static str, source: DeserializationError) -> Self { + Self::DeserializationError { entity, source } + } } pub trait MissingFieldHelper { diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index a38b5f65..c8e19bb2 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -92,6 +92,16 @@ pub struct GetBlockInputsRequest { #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, } +/// Returns the inputs for a transaction batch. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetBatchInputsRequest { + /// List of unauthenticated notes to be queried from the database. + #[prost(message, repeated, tag = "1")] + pub note_ids: ::prost::alloc::vec::Vec, + /// Set of block numbers referenced by transactions. + #[prost(fixed32, repeated, tag = "2")] + pub reference_blocks: ::prost::alloc::vec::Vec, +} /// Returns data required to validate a new transaction. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTransactionInputsRequest { @@ -123,13 +133,6 @@ pub struct GetNotesByIdRequest { #[prost(message, repeated, tag = "1")] pub note_ids: ::prost::alloc::vec::Vec, } -/// Returns a list of Note inclusion proofs for the specified Note IDs. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetNoteAuthenticationInfoRequest { - /// List of notes to be queried from the database. - #[prost(message, repeated, tag = "1")] - pub note_ids: ::prost::alloc::vec::Vec, -} /// Returns the latest state of an account with the specified ID. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetAccountDetailsRequest { diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index 5b526ae6..c3a8f5f2 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -128,6 +128,21 @@ pub struct GetBlockInputsResponse { super::note::NoteAuthenticationInfo, >, } +/// Represents the result of getting batch inputs. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetBatchInputsResponse { + /// The block header that the transaction batch should reference. + #[prost(message, optional, tag = "1")] + pub batch_reference_block_header: ::core::option::Option, + /// Proof of each _found_ unauthenticated note's inclusion in a block. 
+ #[prost(message, repeated, tag = "2")] + pub note_proofs: ::prost::alloc::vec::Vec, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the blocks referenced + /// by the transactions in the batch. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, +} /// An account returned as a response to the `GetTransactionInputs`. #[derive(Clone, PartialEq, ::prost::Message)] pub struct AccountTransactionInputRecord { @@ -178,13 +193,6 @@ pub struct GetNotesByIdResponse { #[prost(message, repeated, tag = "1")] pub notes: ::prost::alloc::vec::Vec, } -/// Represents the result of getting note authentication info. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetNoteAuthenticationInfoResponse { - /// Proofs of note inclusions in blocks and block inclusions in chain. - #[prost(message, optional, tag = "1")] - pub proofs: ::core::option::Option, -} /// Represents the result of getting account details. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetAccountDetailsResponse { diff --git a/crates/proto/src/generated/store.rs b/crates/proto/src/generated/store.rs index 815a6523..0d9b52f7 100644 --- a/crates/proto/src/generated/store.rs +++ b/crates/proto/src/generated/store.rs @@ -328,14 +328,14 @@ pub mod api_client { req.extensions_mut().insert(GrpcMethod::new("store.Api", "GetBlockInputs")); self.inner.unary(req, path, codec).await } - /// Returns a list of Note inclusion proofs for the specified Note IDs. - pub async fn get_note_authentication_info( + /// Returns the inputs for a transaction batch. 
+ pub async fn get_batch_inputs( &mut self, request: impl tonic::IntoRequest< - super::super::requests::GetNoteAuthenticationInfoRequest, + super::super::requests::GetBatchInputsRequest, >, ) -> std::result::Result< - tonic::Response, + tonic::Response, tonic::Status, > { self.inner @@ -347,12 +347,9 @@ pub mod api_client { ) })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Api/GetNoteAuthenticationInfo", - ); + let path = http::uri::PathAndQuery::from_static("/store.Api/GetBatchInputs"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Api", "GetNoteAuthenticationInfo")); + req.extensions_mut().insert(GrpcMethod::new("store.Api", "GetBatchInputs")); self.inner.unary(req, path, codec).await } /// Returns a list of notes matching the provided note IDs. @@ -565,14 +562,12 @@ pub mod api_server { tonic::Response, tonic::Status, >; - /// Returns a list of Note inclusion proofs for the specified Note IDs. - async fn get_note_authentication_info( + /// Returns the inputs for a transaction batch. + async fn get_batch_inputs( &self, - request: tonic::Request< - super::super::requests::GetNoteAuthenticationInfoRequest, - >, + request: tonic::Request, ) -> std::result::Result< - tonic::Response, + tonic::Response, tonic::Status, >; /// Returns a list of notes matching the provided note IDs. 
@@ -1140,15 +1135,15 @@ pub mod api_server { }; Box::pin(fut) } - "/store.Api/GetNoteAuthenticationInfo" => { + "/store.Api/GetBatchInputs" => { #[allow(non_camel_case_types)] - struct GetNoteAuthenticationInfoSvc(pub Arc); + struct GetBatchInputsSvc(pub Arc); impl< T: Api, > tonic::server::UnaryService< - super::super::requests::GetNoteAuthenticationInfoRequest, - > for GetNoteAuthenticationInfoSvc { - type Response = super::super::responses::GetNoteAuthenticationInfoResponse; + super::super::requests::GetBatchInputsRequest, + > for GetBatchInputsSvc { + type Response = super::super::responses::GetBatchInputsResponse; type Future = BoxFuture< tonic::Response, tonic::Status, @@ -1156,13 +1151,12 @@ pub mod api_server { fn call( &mut self, request: tonic::Request< - super::super::requests::GetNoteAuthenticationInfoRequest, + super::super::requests::GetBatchInputsRequest, >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_note_authentication_info(&inner, request) - .await + ::get_batch_inputs(&inner, request).await }; Box::pin(fut) } @@ -1173,7 +1167,7 @@ pub mod api_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { - let method = GetNoteAuthenticationInfoSvc(inner); + let method = GetBatchInputsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) .apply_compression_config( diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index 1230eac7..f2323c56 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -86,6 +86,14 @@ message GetBlockInputsRequest { repeated digest.Digest unauthenticated_notes = 3; } +// Returns the inputs for a transaction batch. +message GetBatchInputsRequest { + // List of unauthenticated notes to be queried from the database. 
+ repeated digest.Digest note_ids = 1; + // Set of block numbers referenced by transactions. + repeated fixed32 reference_blocks = 2; +} + // Returns data required to validate a new transaction. message GetTransactionInputsRequest { // ID of the account against which a transaction is executed. @@ -112,12 +120,6 @@ message GetNotesByIdRequest { repeated digest.Digest note_ids = 1; } -// Returns a list of Note inclusion proofs for the specified Note IDs. -message GetNoteAuthenticationInfoRequest { - // List of notes to be queried from the database. - repeated digest.Digest note_ids = 1; -} - // Returns the latest state of an account with the specified ID. message GetAccountDetailsRequest { // Account ID to get details. diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index 36e175d1..f1dfe5f9 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -128,6 +128,20 @@ message GetBlockInputsResponse { note.NoteAuthenticationInfo found_unauthenticated_notes = 5; } +// Represents the result of getting batch inputs. +message GetBatchInputsResponse { + // The block header that the transaction batch should reference. + block.BlockHeader batch_reference_block_header = 1; + + // Proof of each _found_ unauthenticated note's inclusion in a block. + repeated note.NoteInclusionInBlockProof note_proofs = 2; + + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the blocks referenced + // by the transactions in the batch. + bytes chain_mmr = 3; +} + // An account returned as a response to the `GetTransactionInputs`. message AccountTransactionInputRecord { // The account ID. @@ -173,12 +187,6 @@ message GetNotesByIdResponse { repeated note.Note notes = 1; } -// Represents the result of getting note authentication info. 
-message GetNoteAuthenticationInfoResponse { - // Proofs of note inclusions in blocks and block inclusions in chain. - note.NoteAuthenticationInfo proofs = 1; -} - // Represents the result of getting account details. message GetAccountDetailsResponse { // Account info (with details for public accounts). diff --git a/crates/rpc-proto/proto/store.proto b/crates/rpc-proto/proto/store.proto index 0562b8c5..7137121d 100644 --- a/crates/rpc-proto/proto/store.proto +++ b/crates/rpc-proto/proto/store.proto @@ -39,8 +39,8 @@ service Api { // Returns data required to prove the next block. rpc GetBlockInputs(requests.GetBlockInputsRequest) returns (responses.GetBlockInputsResponse) {} - // Returns a list of Note inclusion proofs for the specified Note IDs. - rpc GetNoteAuthenticationInfo(requests.GetNoteAuthenticationInfoRequest) returns (responses.GetNoteAuthenticationInfoResponse) {} + // Returns the inputs for a transaction batch. + rpc GetBatchInputs(requests.GetBatchInputsRequest) returns (responses.GetBatchInputsResponse) {} // Returns a list of notes matching the provided note IDs. 
rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {} diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index 976ed4fc..e9fe29a2 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -182,7 +182,7 @@ impl api_server::Api for RpcApi { let tx_verifier = TransactionVerifier::new(MIN_PROOF_SECURITY_LEVEL); - tx_verifier.verify(tx.clone()).map_err(|err| { + tx_verifier.verify(&tx).map_err(|err| { Status::invalid_argument(format!("Invalid proof for transaction {}: {err}", tx.id())) })?; diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 462341bf..2c6fa9e3 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -244,11 +244,11 @@ pub enum GetBlockInputsError { NoteInclusionMmr(#[from] MmrError), } -impl From for GetBlockInputsError { - fn from(value: GetNoteInclusionProofError) -> Self { +impl From for GetBlockInputsError { + fn from(value: GetNoteAuthenticationInfoError) -> Self { match value { - GetNoteInclusionProofError::DatabaseError(db_err) => db_err.into(), - GetNoteInclusionProofError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), + GetNoteAuthenticationInfoError::DatabaseError(db_err) => db_err.into(), + GetNoteAuthenticationInfoError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), } } } @@ -274,9 +274,24 @@ pub enum NoteSyncError { } #[derive(Error, Debug)] -pub enum GetNoteInclusionProofError { +pub enum GetNoteAuthenticationInfoError { #[error("database error")] DatabaseError(#[from] DatabaseError), #[error("Mmr error")] MmrError(#[from] MmrError), } + +#[derive(Error, Debug)] +pub enum GetBatchInputsError { + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), + #[error("set of blocks referenced by transactions is empty")] + TransactionBlockReferencesEmpty, + 
#[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] + TransactionBlockReferenceNewerThanLatestBlock { + highest_block_num: BlockNumber, + latest_block_num: BlockNumber, + }, +} diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 9cb19686..84a0ff33 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -1,28 +1,24 @@ -use std::{collections::BTreeSet, sync::Arc}; +use std::{collections::BTreeSet, convert::Infallible, sync::Arc}; use miden_node_proto::{ convert, - domain::{ - account::{AccountInfo, AccountProofRequest}, - note::NoteAuthenticationInfo, - }, + domain::account::{AccountInfo, AccountProofRequest}, errors::ConversionError, generated::{ self, account::AccountSummary, - note::NoteAuthenticationInfo as NoteAuthenticationInfoProto, requests::{ ApplyBlockRequest, CheckNullifiersByPrefixRequest, CheckNullifiersRequest, GetAccountDetailsRequest, GetAccountProofsRequest, GetAccountStateDeltaRequest, - GetBlockByNumberRequest, GetBlockHeaderByNumberRequest, GetBlockInputsRequest, - GetNoteAuthenticationInfoRequest, GetNotesByIdRequest, GetTransactionInputsRequest, + GetBatchInputsRequest, GetBlockByNumberRequest, GetBlockHeaderByNumberRequest, + GetBlockInputsRequest, GetNotesByIdRequest, GetTransactionInputsRequest, SyncNoteRequest, SyncStateRequest, }, responses::{ AccountTransactionInputRecord, ApplyBlockResponse, CheckNullifiersByPrefixResponse, CheckNullifiersResponse, GetAccountDetailsResponse, GetAccountProofsResponse, - GetAccountStateDeltaResponse, GetBlockByNumberResponse, GetBlockHeaderByNumberResponse, - GetBlockInputsResponse, GetNoteAuthenticationInfoResponse, GetNotesByIdResponse, + GetAccountStateDeltaResponse, GetBatchInputsResponse, GetBlockByNumberResponse, + GetBlockHeaderByNumberResponse, GetBlockInputsResponse, GetNotesByIdResponse, GetTransactionInputsResponse, NullifierTransactionInputRecord, 
NullifierUpdate, SyncNoteResponse, SyncStateResponse, }, @@ -279,42 +275,6 @@ impl api_server::Api for StoreApi { Ok(Response::new(GetNotesByIdResponse { notes })) } - /// Returns the inclusion proofs of the specified notes. - #[instrument( - target = COMPONENT, - name = "store:get_note_inclusion_proofs", - skip_all, - ret(level = "debug"), - err - )] - async fn get_note_authentication_info( - &self, - request: Request, - ) -> Result, Status> { - info!(target: COMPONENT, ?request); - - let note_ids = request.into_inner().note_ids; - - let note_ids: Vec = try_convert(note_ids) - .map_err(|err| Status::invalid_argument(format!("Invalid NoteId: {err}")))?; - - let note_ids = note_ids.into_iter().map(From::from).collect(); - - let NoteAuthenticationInfo { block_proofs, note_proofs } = self - .state - .get_note_authentication_info(note_ids) - .await - .map_err(internal_error)?; - - // Massage into shape required by protobuf - let note_proofs = note_proofs.iter().map(Into::into).collect(); - let block_proofs = block_proofs.into_iter().map(Into::into).collect(); - - Ok(Response::new(GetNoteAuthenticationInfoResponse { - proofs: Some(NoteAuthenticationInfoProto { note_proofs, block_proofs }), - })) - } - /// Returns details for public (public) account by id. #[instrument( target = COMPONENT, @@ -402,6 +362,39 @@ impl api_server::Api for StoreApi { .map_err(internal_error) } + /// Fetches the inputs for a transaction batch from the database. + /// + /// See [`State::get_batch_inputs`] for details. 
+ #[instrument( + target = COMPONENT, + name = "store:get_batch_inputs", + skip_all, + ret(level = "debug"), + err + )] + async fn get_batch_inputs( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + + let note_ids: Vec = try_convert(request.note_ids) + .map_err(|err| Status::invalid_argument(format!("Invalid NoteId: {err}")))?; + let note_ids = note_ids.into_iter().map(NoteId::from).collect(); + + let reference_blocks: Vec = + try_convert::<_, Infallible, _, _, _>(request.reference_blocks) + .expect("operation should be infallible"); + let reference_blocks = reference_blocks.into_iter().map(BlockNumber::from).collect(); + + self.state + .get_batch_inputs(reference_blocks, note_ids) + .await + .map(Into::into) + .map(Response::new) + .map_err(internal_error) + } + #[instrument( target = COMPONENT, name = "store:get_transaction_inputs", diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index f3ae2dc9..a6d07fc5 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -13,6 +13,7 @@ use miden_node_proto::{ convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, + batch::BatchInputs, block::BlockInclusionProof, note::NoteAuthenticationInfo, }, @@ -28,11 +29,12 @@ use miden_objects::{ crypto::{ hash::rpo::RpoDigest, merkle::{ - LeafIndex, Mmr, MmrDelta, MmrError, MmrPeaks, MmrProof, SimpleSmt, SmtProof, ValuePath, + LeafIndex, Mmr, MmrDelta, MmrError, MmrPeaks, MmrProof, PartialMmr, SimpleSmt, + SmtProof, ValuePath, }, }, note::{NoteId, Nullifier}, - transaction::OutputNote, + transaction::{ChainMmr, OutputNote}, utils::Serializable, AccountError, ACCOUNT_TREE_DEPTH, }; @@ -46,9 +48,9 @@ use crate::{ blocks::BlockStore, db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ - ApplyBlockError, DatabaseError, GetBlockHeaderError, GetBlockInputsError, - GetNoteInclusionProofError, InvalidBlockError, NoteSyncError, StateInitializationError, - 
StateSyncError, + ApplyBlockError, DatabaseError, GetBatchInputsError, GetBlockHeaderError, + GetBlockInputsError, GetNoteAuthenticationInfoError, InvalidBlockError, NoteSyncError, + StateInitializationError, StateSyncError, }, nullifier_tree::NullifierTree, COMPONENT, @@ -438,8 +440,8 @@ impl State { pub async fn get_note_authentication_info( &self, note_ids: BTreeSet, - ) -> Result { - // First we grab block-inclusion proofs for the known notes. These proofs only + ) -> Result { + // First we grab note inclusion proofs for the known notes. These proofs only // prove that the note was included in a given block. We then also need to prove that // each of those blocks is included in the chain. let note_proofs = self.db.select_note_inclusion_proofs(note_ids).await?; @@ -494,6 +496,145 @@ impl State { Ok(NoteAuthenticationInfo { block_proofs, note_proofs }) } + /// Fetches the inputs for a transaction batch from the database. + /// + /// ## Inputs + /// + /// The function takes as input: + /// - The tx reference blocks are the set of blocks referenced by transactions in the batch. + /// - The unauthenticated note ids are the set of IDs of unauthenticated notes consumed by all + /// transactions in the batch. For these notes, we attempt to find note inclusion proofs. Not + /// all notes will exist in the DB necessarily, as some notes can be created and consumed + /// within the same batch. + /// + /// ## Outputs + /// + /// The function will return: + /// - A block inclusion proof for all tx reference blocks and for all blocks which are + /// referenced by a note inclusion proof. + /// - Note inclusion proofs for all notes that were found in the DB. + /// - The block header that the batch should reference, i.e. the latest known block. 
+ pub async fn get_batch_inputs( + &self, + tx_reference_blocks: BTreeSet, + unauthenticated_note_ids: BTreeSet, + ) -> Result { + if tx_reference_blocks.is_empty() { + return Err(GetBatchInputsError::TransactionBlockReferencesEmpty); + } + + // First we grab note inclusion proofs for the known notes. These proofs only + // prove that the note was included in a given block. We then also need to prove that + // each of those blocks is included in the chain. + let note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_note_ids) + .await + .map_err(GetBatchInputsError::SelectNoteInclusionProofError)?; + + // The set of blocks that the notes are included in. + let note_blocks = note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to query without duplicates, which is: + // - all blocks for which we need to prove note inclusion. + // - all blocks referenced by transactions in the batch. + let mut blocks = tx_reference_blocks; + blocks.extend(note_blocks); + + // Grab the block merkle paths from the inner state. + // + // NOTE: Scoped block to automatically drop the mutex guard asap. + // + // We also avoid accessing the db in the block as this would delay + // dropping the guard. + let (batch_reference_block, partial_mmr) = { + let state = self.inner.read().await; + let latest_block_num = state.latest_block_num(); + + let highest_block_num = + *blocks.last().expect("we should have checked for empty block references"); + if highest_block_num > latest_block_num { + return Err(GetBatchInputsError::TransactionBlockReferenceNewerThanLatestBlock { + highest_block_num, + latest_block_num, + }); + } + + // Remove the latest block from the to-be-tracked blocks as it will be the reference + // block for the batch itself and thus added to the MMR within the batch kernel, so + // there is no need to prove its inclusion. 
+ blocks.remove(&latest_block_num); + + // Using latest block as the target forest means we take the state of the MMR one before + // the latest block. This is because the latest block will be used as the reference + // block of the batch and will be added to the MMR by the batch kernel. + let target_forest = latest_block_num.as_usize(); + let peaks = state + .chain_mmr + .peaks_at(target_forest) + .expect("target_forest should be smaller than forest of the chain mmr"); + let mut partial_mmr = PartialMmr::from_peaks(peaks); + + for block_num in blocks.iter().map(BlockNumber::as_usize) { + // SAFETY: We have ensured block nums are less than chain length. + let leaf = state + .chain_mmr + .get(block_num) + .expect("block num less than chain length should exist in chain mmr"); + let path = state + .chain_mmr + .open_at(block_num, target_forest) + .expect("block num and target forest should be valid for this mmr") + .merkle_path; + // SAFETY: We should be able to fill the partial MMR with data from the chain MMR + // without errors, otherwise it indicates the chain mmr is invalid. + partial_mmr + .track(block_num, leaf, &path) + .expect("filling partial mmr with data from mmr should succeed"); + } + + (latest_block_num, partial_mmr) + }; + + // TODO: Unnecessary conversion. We should change the select_block_headers function to take + // an impl Iterator instead to avoid this allocation. + let mut blocks: Vec<_> = blocks.into_iter().collect(); + // Fetch the reference block of the batch as part of this query, so we can avoid looking it + // up in a separate DB access. + blocks.push(batch_reference_block); + let mut headers = self + .db + .select_block_headers(blocks) + .await + .map_err(GetBatchInputsError::SelectBlockHeaderError)?; + + // Find and remove the batch reference block as we don't want to add it to the chain MMR. 
+ let header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == batch_reference_block).then_some(index) + }) + .expect("DB should have returned the header of the batch reference block"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. + let batch_reference_block_header = headers.swap_remove(header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block headers block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BatchInputs { + batch_reference_block_header, + note_proofs, + chain_mmr, + }) + } + /// Loads data to synchronize a client. /// /// The client's request contains a list of tag prefixes, this method will return the first diff --git a/proto/requests.proto b/proto/requests.proto index 1230eac7..f2323c56 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -86,6 +86,14 @@ message GetBlockInputsRequest { repeated digest.Digest unauthenticated_notes = 3; } +// Returns the inputs for a transaction batch. +message GetBatchInputsRequest { + // List of unauthenticated notes to be queried from the database. + repeated digest.Digest note_ids = 1; + // Set of block numbers referenced by transactions. + repeated fixed32 reference_blocks = 2; +} + // Returns data required to validate a new transaction. message GetTransactionInputsRequest { // ID of the account against which a transaction is executed. @@ -112,12 +120,6 @@ message GetNotesByIdRequest { repeated digest.Digest note_ids = 1; } -// Returns a list of Note inclusion proofs for the specified Note IDs. -message GetNoteAuthenticationInfoRequest { - // List of notes to be queried from the database. 
- repeated digest.Digest note_ids = 1; -} - // Returns the latest state of an account with the specified ID. message GetAccountDetailsRequest { // Account ID to get details. diff --git a/proto/responses.proto b/proto/responses.proto index 36e175d1..f1dfe5f9 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -128,6 +128,20 @@ message GetBlockInputsResponse { note.NoteAuthenticationInfo found_unauthenticated_notes = 5; } +// Represents the result of getting batch inputs. +message GetBatchInputsResponse { + // The block header that the transaction batch should reference. + block.BlockHeader batch_reference_block_header = 1; + + // Proof of each _found_ unauthenticated note's inclusion in a block. + repeated note.NoteInclusionInBlockProof note_proofs = 2; + + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the blocks referenced + // by the transactions in the batch. + bytes chain_mmr = 3; +} + // An account returned as a response to the `GetTransactionInputs`. message AccountTransactionInputRecord { // The account ID. @@ -173,12 +187,6 @@ message GetNotesByIdResponse { repeated note.Note notes = 1; } -// Represents the result of getting note authentication info. -message GetNoteAuthenticationInfoResponse { - // Proofs of note inclusions in blocks and block inclusions in chain. - note.NoteAuthenticationInfo proofs = 1; -} - // Represents the result of getting account details. message GetAccountDetailsResponse { // Account info (with details for public accounts). diff --git a/proto/store.proto b/proto/store.proto index 0562b8c5..7137121d 100644 --- a/proto/store.proto +++ b/proto/store.proto @@ -39,8 +39,8 @@ service Api { // Returns data required to prove the next block. rpc GetBlockInputs(requests.GetBlockInputsRequest) returns (responses.GetBlockInputsResponse) {} - // Returns a list of Note inclusion proofs for the specified Note IDs. 
- rpc GetNoteAuthenticationInfo(requests.GetNoteAuthenticationInfoRequest) returns (responses.GetNoteAuthenticationInfoResponse) {} + // Returns the inputs for a transaction batch. + rpc GetBatchInputs(requests.GetBatchInputsRequest) returns (responses.GetBatchInputsResponse) {} // Returns a list of notes matching the provided note IDs. rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {} From 018dde3cc92958a3111989def5c2dc7735cf6876 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:28:55 +0200 Subject: [PATCH 09/17] feat: remote tracing context (#669) --- CHANGELOG.md | 1 + Cargo.lock | 1 + crates/block-producer/src/server.rs | 15 ++-- crates/block-producer/src/store/mod.rs | 10 ++- crates/rpc/src/server/api.rs | 22 +++-- crates/store/src/server/mod.rs | 1 + crates/utils/Cargo.toml | 1 + crates/utils/src/errors.rs | 3 + crates/utils/src/lib.rs | 1 + crates/utils/src/tracing/grpc.rs | 116 +++++++++++++++++++++++++ crates/utils/src/tracing/mod.rs | 1 + 11 files changed, 158 insertions(+), 14 deletions(-) create mode 100644 crates/utils/src/tracing/grpc.rs create mode 100644 crates/utils/src/tracing/mod.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 1637ec79..65a7ef2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Enhancements - Add an optional open-telemetry trace exporter (#659). +- Support tracing across gRPC boundaries using remote tracing context (#669). 
### Changes diff --git a/Cargo.lock b/Cargo.lock index 5e6116ea..e9eac679 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1957,6 +1957,7 @@ version = "0.8.0" dependencies = [ "anyhow", "figment", + "http", "itertools 0.14.0", "miden-objects", "opentelemetry", diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 2eaf0e50..251d0b33 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -7,6 +7,7 @@ use miden_node_proto::generated::{ use miden_node_utils::{ errors::ApiError, formatting::{format_input_notes, format_output_notes}, + tracing::grpc::OtelInterceptor, }; use miden_objects::{ block::BlockNumber, transaction::ProvenTransaction, utils::serde::Deserializable, @@ -52,11 +53,14 @@ impl BlockProducer { pub async fn init(config: BlockProducerConfig) -> Result { info!(target: COMPONENT, %config, "Initializing server"); - let store = StoreClient::new( - store_client::ApiClient::connect(config.store_url.to_string()) - .await - .map_err(|err| ApiError::DatabaseConnectionFailed(err.to_string()))?, - ); + let channel = tonic::transport::Endpoint::try_from(config.store_url.to_string()) + .map_err(|err| ApiError::InvalidStoreUrl(err.to_string()))? 
+ .connect() + .await + .map_err(|err| ApiError::DatabaseConnectionFailed(err.to_string()))?; + + let store = store_client::ApiClient::with_interceptor(channel, OtelInterceptor); + let store = StoreClient::new(store); let latest_header = store .latest_header() @@ -208,6 +212,7 @@ impl BlockProducerRpcServer { async fn serve(self, listener: TcpListener) -> Result<(), tonic::transport::Error> { tonic::transport::Server::builder() + .trace_fn(miden_node_utils::tracing::grpc::block_producer_trace_fn) .add_service(api_server::ApiServer::new(self)) .serve_with_incoming(TcpListenerStream::new(listener)) .await diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 1aa2638f..2c4a36fe 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -19,7 +19,7 @@ use miden_node_proto::{ }, AccountState, }; -use miden_node_utils::formatting::format_opt; +use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, block::{Block, BlockHeader, BlockNumber}, @@ -29,7 +29,7 @@ use miden_objects::{ Digest, }; use miden_processor::crypto::RpoDigest; -use tonic::transport::Channel; +use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; @@ -121,17 +121,19 @@ impl TryFrom for TransactionInputs { // STORE CLIENT // ================================================================================================ +type InnerClient = store_client::ApiClient>; + /// Interface to the store's gRPC API. /// /// Essentially just a thin wrapper around the generated gRPC client which improves type safety. 
#[derive(Clone)] pub struct StoreClient { - inner: store_client::ApiClient, + inner: InnerClient, } impl StoreClient { /// TODO: this should probably take store connection string and create a connection internally - pub fn new(store: store_client::ApiClient) -> Self { + pub fn new(store: InnerClient) -> Self { Self { inner: store } } diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index e9fe29a2..fbb233eb 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -18,12 +18,14 @@ use miden_node_proto::{ }, try_convert, }; +use miden_node_utils::tracing::grpc::OtelInterceptor; use miden_objects::{ account::AccountId, crypto::hash::rpo::RpoDigest, transaction::ProvenTransaction, utils::serde::Deserializable, Digest, MAX_NUM_FOREIGN_ACCOUNTS, MIN_PROOF_SECURITY_LEVEL, }; use miden_tx::TransactionVerifier; use tonic::{ + service::interceptor::InterceptedService, transport::{Channel, Error}, Request, Response, Status, }; @@ -34,19 +36,29 @@ use crate::{config::RpcConfig, COMPONENT}; // RPC API // ================================================================================================ +type StoreClient = store_client::ApiClient>; +type BlockProducerClient = + block_producer_client::ApiClient>; + pub struct RpcApi { - store: store_client::ApiClient, - block_producer: block_producer_client::ApiClient, + store: StoreClient, + block_producer: BlockProducerClient, } impl RpcApi { pub(super) async fn from_config(config: &RpcConfig) -> Result { - let store = store_client::ApiClient::connect(config.store_url.to_string()).await?; + let channel = tonic::transport::Endpoint::try_from(config.store_url.to_string())? + .connect() + .await?; + let store = store_client::ApiClient::with_interceptor(channel, OtelInterceptor); info!(target: COMPONENT, store_endpoint = config.store_url.as_str(), "Store client initialized"); + let channel = tonic::transport::Endpoint::try_from(config.block_producer_url.to_string())? 
+ .connect() + .await?; let block_producer = - block_producer_client::ApiClient::connect(config.block_producer_url.to_string()) - .await?; + block_producer_client::ApiClient::with_interceptor(channel, OtelInterceptor); + info!( target: COMPONENT, block_producer_endpoint = config.block_producer_url.as_str(), diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index 6ef429d0..2b65a1dc 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -62,6 +62,7 @@ impl Store { /// Note: this blocks until the server dies. pub async fn serve(self) -> Result<(), ApiError> { tonic::transport::Server::builder() + .trace_fn(miden_node_utils::tracing::grpc::store_trace_fn) .add_service(self.api_service) .serve_with_incoming(TcpListenerStream::new(self.listener)) .await diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index c37684c8..b411cd29 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,6 +21,7 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } +http = "1.2" itertools = { workspace = true } miden-objects = { workspace = true } opentelemetry = "0.27" diff --git a/crates/utils/src/errors.rs b/crates/utils/src/errors.rs index 283da7c5..0bed649e 100644 --- a/crates/utils/src/errors.rs +++ b/crates/utils/src/errors.rs @@ -21,4 +21,7 @@ pub enum ApiError { #[error("connection to the database has failed: {0}")] DatabaseConnectionFailed(String), + + #[error("parsing store url failed: {0}")] + InvalidStoreUrl(String), } diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 275ee7e3..8c71955d 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -3,4 +3,5 @@ pub mod crypto; pub mod errors; pub mod formatting; pub mod logging; +pub mod tracing; pub mod version; diff --git a/crates/utils/src/tracing/grpc.rs b/crates/utils/src/tracing/grpc.rs new file mode 100644 index 
00000000..1e951af1 --- /dev/null +++ b/crates/utils/src/tracing/grpc.rs @@ -0,0 +1,116 @@ +use tracing_opentelemetry::OpenTelemetrySpanExt; + +/// A [`trace_fn`](tonic::transport::server::Server) implementation for the block producer which +/// adds open-telemetry information to the span. +/// +/// Creates an `info` span following the open-telemetry standard: `block-producer.rpc/{method}`. +/// Additionally also pulls in remote tracing context which allows the server trace to be connected +/// to the client's origin trace. +pub fn block_producer_trace_fn(request: &http::Request<()>) -> tracing::Span { + let span = if let Some("SubmitProvenTransaction") = request.uri().path().rsplit('/').next() { + tracing::info_span!("block-producer.rpc/SubmitProvenTransaction") + } else { + tracing::info_span!("block-producer.rpc/Unknown") + }; + + add_otel_span_attributes(span, request) +} + +/// A [`trace_fn`](tonic::transport::server::Server) implementation for the store which adds +/// open-telemetry information to the span. +/// +/// Creates an `info` span following the open-telemetry standard: `store.rpc/{method}`. Additionally +/// also pulls in remote tracing context which allows the server trace to be connected to the +/// client's origin trace. 
+pub fn store_trace_fn(request: &http::Request<()>) -> tracing::Span { + let span = match request.uri().path().rsplit('/').next() { + Some("ApplyBlock") => tracing::info_span!("store.rpc/ApplyBlock"), + Some("CheckNullifiers") => tracing::info_span!("store.rpc/CheckNullifiers"), + Some("CheckNullifiersByPrefix") => tracing::info_span!("store.rpc/CheckNullifiersByPrefix"), + Some("GetAccountDetails") => tracing::info_span!("store.rpc/GetAccountDetails"), + Some("GetAccountProofs") => tracing::info_span!("store.rpc/GetAccountProofs"), + Some("GetAccountStateDelta") => tracing::info_span!("store.rpc/GetAccountStateDelta"), + Some("GetBlockByNumber") => tracing::info_span!("store.rpc/GetBlockByNumber"), + Some("GetBlockHeaderByNumber") => tracing::info_span!("store.rpc/GetBlockHeaderByNumber"), + Some("GetBlockInputs") => tracing::info_span!("store.rpc/GetBlockInputs"), + Some("GetBatchInputs") => tracing::info_span!("store.rpc/GetBatchInputs"), + Some("GetNotesById") => tracing::info_span!("store.rpc/GetNotesById"), + Some("GetTransactionInputs") => tracing::info_span!("store.rpc/GetTransactionInputs"), + Some("SyncNotes") => tracing::info_span!("store.rpc/SyncNotes"), + Some("SyncState") => tracing::info_span!("store.rpc/SyncState"), + _ => tracing::info_span!("store.rpc/Unknown"), + }; + + add_otel_span_attributes(span, request) +} + +/// Adds remote tracing context to the span. +/// +/// Could be expanded in the future by adding in more open-telemetry properties. +fn add_otel_span_attributes(span: tracing::Span, request: &http::Request<()>) -> tracing::Span { + // Pull the open-telemetry parent context using the HTTP extractor. We could make a more + // generic gRPC extractor by utilising the gRPC metadata. However that + // (a) requires cloning headers, + // (b) we would have to write this ourselves, and + // (c) gRPC metadata is transferred using HTTP headers in any case. 
+    use tracing_opentelemetry::OpenTelemetrySpanExt;
+    let otel_ctx = opentelemetry::global::get_text_map_propagator(|propagator| {
+        propagator.extract(&MetadataExtractor(&tonic::metadata::MetadataMap::from_headers(
+            request.headers().clone(),
+        )))
+    });
+    span.set_parent(otel_ctx);
+
+    span
+}
+
+/// Tonic interceptor that injects the current open-telemetry tracing context into the outgoing
+#[derive(Copy, Clone)]
+pub struct OtelInterceptor;
+
+impl tonic::service::Interceptor for OtelInterceptor {
+    fn call(
+        &mut self,
+        mut request: tonic::Request<()>,
+    ) -> Result, tonic::Status> {
+        let ctx = tracing::Span::current().context();
+        opentelemetry::global::get_text_map_propagator(|propagator| {
+            propagator.inject_context(&ctx, &mut MetadataInjector(request.metadata_mut()));
+        });
+
+        Ok(request)
+    }
+}
+
+struct MetadataExtractor<'a>(&'a tonic::metadata::MetadataMap);
+impl opentelemetry::propagation::Extractor for MetadataExtractor<'_> {
+    /// Get a value for a key from the `MetadataMap`. If the value can't be converted to &str,
+    /// returns None
+    fn get(&self, key: &str) -> Option<&str> {
+        self.0.get(key).and_then(|metadata| metadata.to_str().ok())
+    }
+
+    /// Collect all the keys from the `MetadataMap`.
+    fn keys(&self) -> Vec<&str> {
+        self.0
+            .keys()
+            .map(|key| match key {
+                tonic::metadata::KeyRef::Ascii(v) => v.as_str(),
+                tonic::metadata::KeyRef::Binary(v) => v.as_str(),
+            })
+            .collect::>()
+    }
+}
+
+struct MetadataInjector<'a>(&'a mut tonic::metadata::MetadataMap);
+impl opentelemetry::propagation::Injector for MetadataInjector<'_> {
+    /// Set a key and value in the `MetadataMap`.
Does nothing if the key or value are not valid + /// inputs + fn set(&mut self, key: &str, value: String) { + if let Ok(key) = tonic::metadata::MetadataKey::from_bytes(key.as_bytes()) { + if let Ok(val) = tonic::metadata::MetadataValue::try_from(&value) { + self.0.insert(key, val); + } + } + } +} diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs new file mode 100644 index 00000000..773d491c --- /dev/null +++ b/crates/utils/src/tracing/mod.rs @@ -0,0 +1 @@ +pub mod grpc; From d8eb5bc38890687f67045026b444731b406afcde Mon Sep 17 00:00:00 2001 From: Varun Doshi <61531351+varun-doshi@users.noreply.github.com> Date: Fri, 7 Feb 2025 16:11:21 +0530 Subject: [PATCH 10/17] feat: use iterator in select_block_headers (#667) --- crates/store/src/db/mod.rs | 7 +++++-- crates/store/src/db/sql/mod.rs | 6 +++--- crates/store/src/state.rs | 13 ++++--------- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index ad7219dc..51a678e2 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -250,11 +250,14 @@ impl Db { /// Loads multiple block headers from the DB. #[instrument(target = COMPONENT, skip_all, ret(level = "debug"), err)] - pub async fn select_block_headers(&self, blocks: Vec) -> Result> { + pub async fn select_block_headers( + &self, + blocks: impl Iterator + Send + 'static, + ) -> Result> { self.pool .get() .await? - .interact(move |conn| sql::select_block_headers(conn, &blocks)) + .interact(move |conn| sql::select_block_headers(conn, blocks)) .await .map_err(|err| { DatabaseError::InteractError(format!( diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 62d0d92f..95e6dcdb 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -1059,11 +1059,11 @@ pub fn select_block_header_by_block_num( /// A vector of [`BlockHeader`] or an error. 
pub fn select_block_headers( conn: &mut Connection, - blocks: &[BlockNumber], + blocks: impl Iterator + Send, ) -> Result> { - let mut headers = Vec::with_capacity(blocks.len()); + let blocks: Vec = blocks.map(|b| b.as_u32().into()).collect(); - let blocks: Vec = blocks.iter().copied().map(|b| b.as_u32().into()).collect(); + let mut headers = Vec::with_capacity(blocks.len()); let mut stmt = conn .prepare_cached("SELECT block_header FROM block_headers WHERE block_num IN rarray(?1);")?; let mut rows = stmt.query(params![Rc::new(blocks)])?; diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index a6d07fc5..059c22be 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -450,9 +450,7 @@ impl State { let blocks = note_proofs .values() .map(|proof| proof.location().block_num()) - .collect::>() - .into_iter() - .collect::>(); + .collect::>(); // Grab the block merkle paths from the inner state. // @@ -479,7 +477,8 @@ impl State { (chain_length.into(), paths) }; - let headers = self.db.select_block_headers(blocks).await?; + let headers = self.db.select_block_headers(blocks.into_iter()).await?; + let headers = headers .into_iter() .map(|header| (header.block_num(), header)) @@ -596,15 +595,11 @@ impl State { (latest_block_num, partial_mmr) }; - // TODO: Unnecessary conversion. We should change the select_block_headers function to take - // an impl Iterator instead to avoid this allocation. - let mut blocks: Vec<_> = blocks.into_iter().collect(); // Fetch the reference block of the batch as part of this query, so we can avoid looking it // up in a separate DB access. 
- blocks.push(batch_reference_block); let mut headers = self .db - .select_block_headers(blocks) + .select_block_headers(blocks.into_iter().chain(std::iter::once(batch_reference_block))) .await .map_err(GetBatchInputsError::SelectBlockHeaderError)?; From 03622e74500ee709ef30b303121d2b94f8418da2 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Sat, 8 Feb 2025 11:40:15 +0200 Subject: [PATCH 11/17] fix: add faucet routes for favicon and background (#672) --- CHANGELOG.md | 4 ++++ bin/faucet/src/handlers.rs | 8 ++++++++ bin/faucet/src/main.rs | 4 +++- 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65a7ef2c..7cf6658c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Fixes + +- Faucet webpage is missing `background.png` and `favicon.ico` (#672). + ### Enhancements - Add an optional open-telemetry trace exporter (#659). diff --git a/bin/faucet/src/handlers.rs b/bin/faucet/src/handlers.rs index 5a6de2bc..171e7f5a 100644 --- a/bin/faucet/src/handlers.rs +++ b/bin/faucet/src/handlers.rs @@ -128,6 +128,14 @@ pub async fn get_index_css(state: State) -> Result) -> Result { + get_static_file(state, "background.png") +} + +pub async fn get_favicon(state: State) -> Result { + get_static_file(state, "favicon.ico") +} + /// Returns a static file bundled with the app state. 
/// /// # Panics diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index dc2d5338..5cc169a4 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -14,7 +14,7 @@ use axum::{ }; use clap::{Parser, Subcommand}; use client::initialize_faucet_client; -use handlers::{get_index_css, get_index_html, get_index_js}; +use handlers::{get_background, get_favicon, get_index_css, get_index_html, get_index_js}; use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; @@ -106,6 +106,8 @@ async fn main() -> anyhow::Result<()> { .route("/", get(get_index_html)) .route("/index.js", get(get_index_js)) .route("/index.css", get(get_index_css)) + .route("/background.png", get(get_background)) + .route("/favicon.ico", get(get_favicon)) .route("/get_metadata", get(get_metadata)) .route("/get_tokens", post(get_tokens)) .layer( From ebef2151f5883f76f8c36c5c835634e94d0aa8bf Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Tue, 11 Feb 2025 19:14:02 +0200 Subject: [PATCH 12/17] feat(block-producer): instrument block building (#675) --- CHANGELOG.md | 1 + Cargo.lock | 5 + crates/block-producer/Cargo.toml | 1 + .../block-producer/src/block_builder/mod.rs | 293 ++++++++++++++---- crates/block-producer/src/errors.rs | 4 +- crates/block-producer/src/mempool/mod.rs | 67 ++-- crates/block-producer/src/mempool/tests.rs | 23 +- crates/block-producer/src/store/mod.rs | 10 +- crates/store/src/server/api.rs | 28 +- crates/utils/Cargo.toml | 4 +- crates/utils/src/logging.rs | 5 + crates/utils/src/tracing/mod.rs | 5 + 12 files changed, 316 insertions(+), 130 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cf6658c..e0a88e64 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Add an optional open-telemetry trace exporter (#659). 
- Support tracing across gRPC boundaries using remote tracing context (#669). +- Instrument the block-producer's block building process (#676). ### Changes diff --git a/Cargo.lock b/Cargo.lock index e9eac679..8bdd6e95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -903,6 +903,7 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -971,10 +972,13 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", + "futures-io", "futures-macro", "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", "slab", @@ -1863,6 +1867,7 @@ version = "0.8.0" dependencies = [ "assert_matches", "async-trait", + "futures", "itertools 0.14.0", "miden-air", "miden-lib", diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index ccecfab4..dabf8d7b 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -19,6 +19,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] async-trait = { version = "0.1" } +futures = { version = "0.3" } itertools = { workspace = true } miden-lib = { workspace = true } miden-node-proto = { workspace = true } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 21790dff..1e7b2429 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,20 +1,24 @@ -use std::{collections::BTreeSet, ops::Range}; +use std::{ + collections::BTreeSet, + ops::{Add, Range}, +}; -use miden_node_utils::formatting::format_array; +use futures::FutureExt; +use miden_node_utils::tracing::{OpenTelemetrySpanExt, OtelStatus}; use miden_objects::{ account::AccountId, 
batch::ProvenBatch, - block::Block, - note::{NoteHeader, Nullifier}, + block::{Block, BlockNumber}, + note::{NoteHeader, NoteId, Nullifier}, transaction::{InputNoteCommitment, OutputNote}, }; use rand::Rng; use tokio::time::Duration; -use tracing::{debug, info, instrument}; +use tracing::{instrument, Span}; use crate::{ - errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, - SERVER_BLOCK_FREQUENCY, + block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, + COMPONENT, SERVER_BLOCK_FREQUENCY, }; pub(crate) mod prover; @@ -32,7 +36,7 @@ pub struct BlockBuilder { /// Simulated block failure rate as a percentage. /// /// Note: this _must_ be sign positive and less than 1.0. - pub failure_rate: f32, + pub failure_rate: f64, pub store: StoreClient, pub block_kernel: BlockProver, @@ -72,37 +76,160 @@ impl BlockBuilder { loop { interval.tick().await; - let (block_number, batches) = mempool.lock().await.select_block(); + self.build_block(&mempool).await; + } + } + + /// Run the block building stages and add open-telemetry trace information where applicable. + /// + /// A failure in any stage will result in that block being rolled back. + /// + /// ## Telemetry + /// + /// - Creates a new root span which means each block gets its own complete trace. + /// - Important telemetry fields are added to the root span with the `block.xxx` prefix. + /// - Each stage has its own child span and are free to add further field data. + /// - A failed stage will emit an error event, and both its own span and the root span will be + /// marked as errors. 
+ #[instrument(parent = None, target = COMPONENT, name = "block_builder.build_block", skip_all)] + async fn build_block(&self, mempool: &SharedMempool) { + use futures::TryFutureExt; + + Self::select_block(mempool) + .inspect(SelectedBlock::inject_telemetry) + .then(|selected| self.get_block_inputs(selected)) + .inspect_ok(BlockSummaryAndInputs::inject_telemetry) + .and_then(|inputs| self.prove_block(inputs)) + .inspect_ok(ProvenBlock::inject_telemetry) + // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot + // handle errors after it considers the process complete (which makes sense). + .and_then(|proven_block| async { self.inject_failure(proven_block) }) + .and_then(|proven_block| self.commit_block(mempool, proven_block)) + // Handle errors by propagating the error to the root span and rolling back the block. + .inspect_err(|err| Span::current().set_status(OtelStatus::Error { description: format!("{err:?}").into() })) + .or_else(|_err| self.rollback_block(mempool).never_error()) + // Error has been handled, this is just type manipulation to remove the result wrapper. + .unwrap_or_else(|_| ()) + .await; + } - let mut result = self.build_block(&batches).await; - let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); + #[instrument(target = COMPONENT, name = "block_builder.select_block", skip_all)] + async fn select_block(mempool: &SharedMempool) -> SelectedBlock { + let (block_number, batches) = mempool.lock().await.select_block(); + SelectedBlock { block_number, batches } + } - tokio::time::sleep(proving_duration).await; + #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] + async fn get_block_inputs( + &self, + selected_block: SelectedBlock, + ) -> Result { + let SelectedBlock { block_number: _, batches } = selected_block; + let summary = BlockSummary::summarize_batches(&batches); - // Randomly inject failures at the given rate. 
- // - // Note: Rng::gen rolls between [0, 1.0) for f32, so this works as expected. - if rand::thread_rng().gen::() < self.failure_rate { - result = Err(BuildBlockError::InjectedFailure); - } + let inputs = self + .store + .get_block_inputs( + summary.updated_accounts.iter().copied(), + summary.nullifiers.iter(), + summary.dangling_notes.iter(), + ) + .await + .map_err(BuildBlockError::GetBlockInputsFailed)?; - let mut mempool = mempool.lock().await; - match result { - Ok(_) => mempool.block_committed(block_number), - Err(_) => mempool.block_failed(block_number), - } + let missing_notes: Vec<_> = summary + .dangling_notes + .difference(&inputs.found_unauthenticated_notes.note_ids()) + .copied() + .collect(); + if !missing_notes.is_empty() { + return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); } + + Ok(BlockSummaryAndInputs { batches, summary, inputs }) } - #[instrument(target = COMPONENT, skip_all, err)] - async fn build_block(&self, batches: &[ProvenBatch]) -> Result<(), BuildBlockError> { - info!( - target: COMPONENT, - num_batches = batches.len(), - batches = %format_array(batches.iter().map(ProvenBatch::id)), - ); + #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + async fn prove_block( + &self, + preimage: BlockSummaryAndInputs, + ) -> Result { + let BlockSummaryAndInputs { batches, summary, inputs } = preimage; + + let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; + + let new_block_header = self.block_kernel.prove(block_header_witness)?; + + let block = Block::new( + new_block_header, + updated_accounts, + summary.output_notes, + summary.nullifiers, + )?; + + self.simulate_proving().await; + + Ok(ProvenBlock { block }) + } + + #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] + async fn commit_block( + &self, + mempool: &SharedMempool, + proven_block: ProvenBlock, + ) -> Result<(), BuildBlockError> { + self.store + 
.apply_block(&proven_block.block) + .await + .map_err(BuildBlockError::StoreApplyBlockFailed)?; + + mempool.lock().await.commit_block(); + + Ok(()) + } + + #[instrument(target = COMPONENT, name = "block_builder.rollback_block", skip_all)] + async fn rollback_block(&self, mempool: &SharedMempool) { + mempool.lock().await.rollback_block(); + } + + #[instrument(target = COMPONENT, name = "block_builder.simulate_proving", skip_all)] + async fn simulate_proving(&self) { + let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); + + Span::current().set_attribute("range.min_s", self.simulated_proof_time.start.as_secs_f64()); + Span::current().set_attribute("range.max_s", self.simulated_proof_time.end.as_secs_f64()); + Span::current().set_attribute("dice_roll_s", proving_duration.as_secs_f64()); - let updated_account_set: BTreeSet = batches + tokio::time::sleep(proving_duration).await; + } + + #[instrument(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] + fn inject_failure(&self, value: T) -> Result { + let roll = rand::thread_rng().gen::(); + + Span::current().set_attribute("failure_rate", self.failure_rate); + Span::current().set_attribute("dice_roll", roll); + + if roll < self.failure_rate { + Err(BuildBlockError::InjectedFailure) + } else { + Ok(value) + } + } +} + +struct BlockSummary { + updated_accounts: BTreeSet, + nullifiers: Vec, + output_notes: Vec>, + dangling_notes: BTreeSet, +} + +impl BlockSummary { + #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] + fn summarize_batches(batches: &[ProvenBatch]) -> Self { + let updated_accounts: BTreeSet = batches .iter() .flat_map(ProvenBatch::account_updates) .map(|(account_id, _)| *account_id) @@ -111,7 +238,7 @@ impl BlockBuilder { let output_notes: Vec<_> = batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - let produced_nullifiers: Vec = + let nullifiers: Vec = 
batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); // Populate set of output notes from all batches @@ -120,8 +247,8 @@ impl BlockBuilder { .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) .collect(); - // Build a set of unauthenticated input notes for this block which do not have a matching - // output note produced in this block + // Build a set of unauthenticated input notes for this block which do not have a + // matching output note produced in this block let dangling_notes: BTreeSet<_> = batches .iter() .flat_map(ProvenBatch::input_notes) @@ -130,47 +257,79 @@ impl BlockBuilder { .filter(|note_id| !output_notes_set.contains(note_id)) .collect(); - // Request information needed for block building from the store - let block_inputs = self - .store - .get_block_inputs( - updated_account_set.into_iter(), - produced_nullifiers.iter(), - dangling_notes.iter(), - ) - .await - .map_err(BuildBlockError::GetBlockInputsFailed)?; - - let missing_notes: Vec<_> = dangling_notes - .difference(&block_inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); + Self { + updated_accounts, + nullifiers, + output_notes, + dangling_notes, } + } +} - let (block_header_witness, updated_accounts) = BlockWitness::new(block_inputs, batches)?; +struct SelectedBlock { + block_number: BlockNumber, + batches: Vec, +} +struct BlockSummaryAndInputs { + batches: Vec, + summary: BlockSummary, + inputs: BlockInputs, +} +struct ProvenBlock { + block: Block, +} - let new_block_header = self.block_kernel.prove(block_header_witness)?; +impl SelectedBlock { + fn inject_telemetry(&self) { + let span = Span::current(); + span.set_attribute("block.number", i64::from(self.block_number.as_u32())); + span.set_attribute("block.batches.count", i64::from(self.batches.len() as u32)); + } +} - // TODO: return an error? 
- let block = - Block::new(new_block_header, updated_accounts, output_notes, produced_nullifiers) - .expect("invalid block components"); +impl BlockSummaryAndInputs { + fn inject_telemetry(&self) { + let span = Span::current(); - let block_hash = block.hash(); - let block_num = new_block_header.block_num(); + // SAFETY: We do not expect to have more than u32::MAX of any count per block. + span.set_attribute( + "block.updated_accounts.count", + i64::try_from(self.summary.updated_accounts.len()) + .expect("less than u32::MAX account updates"), + ); + span.set_attribute( + "block.output_notes.count", + i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len()))) + .expect("less than u32::MAX output notes"), + ); + span.set_attribute( + "block.nullifiers.count", + i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + ); + span.set_attribute( + "block.dangling_notes.count", + i64::try_from(self.summary.dangling_notes.len()) + .expect("less than u32::MAX dangling notes"), + ); + } +} - info!(target: COMPONENT, %block_num, %block_hash, "block built"); - debug!(target: COMPONENT, ?block); +impl ProvenBlock { + fn inject_telemetry(&self) { + let span = Span::current(); + let header = self.block.header(); - self.store - .apply_block(&block) - .await - .map_err(BuildBlockError::StoreApplyBlockFailed)?; + span.set_attribute("block.hash", header.hash().to_hex()); + span.set_attribute("block.sub_hash", header.sub_hash().to_hex()); + span.set_attribute("block.parent_hash", header.prev_hash().to_hex()); - info!(target: COMPONENT, %block_num, %block_hash, "block committed"); + span.set_attribute("block.protocol.version", i64::from(header.version())); - Ok(()) + span.set_attribute("block.commitments.kernel", header.kernel_root().to_hex()); + span.set_attribute("block.commitments.nullifier", header.nullifier_root().to_hex()); + span.set_attribute("block.commitments.account", header.account_root().to_hex()); + 
span.set_attribute("block.commitments.chain", header.chain_root().to_hex()); + span.set_attribute("block.commitments.note", header.note_root().to_hex()); + span.set_attribute("block.commitments.transaction", header.tx_hash().to_hex()); } } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index f61cf6dc..02bc0156 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -6,7 +6,7 @@ use miden_objects::{ crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, ProposedBatchError, + AccountDeltaError, BlockError, Digest, ProposedBatchError, }; use miden_processor::ExecutionError; use miden_tx_batch_prover::errors::BatchProveError; @@ -187,6 +187,8 @@ pub enum BuildBlockError { account_id: AccountId, source: AccountDeltaError, }, + #[error("block construction failed")] + BlockConstructionError(#[from] BlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/mempool/mod.rs b/crates/block-producer/src/mempool/mod.rs index 3eaa4052..a4fca161 100644 --- a/crates/block-producer/src/mempool/mod.rs +++ b/crates/block-producer/src/mempool/mod.rs @@ -9,7 +9,7 @@ use miden_objects::{ transaction::TransactionId, MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, }; -use tokio::sync::Mutex; +use tokio::sync::{Mutex, MutexGuard}; use tracing::instrument; use transaction_expiration::TransactionExpirations; use transaction_graph::TransactionGraph; @@ -127,7 +127,15 @@ impl BlockBudget { // MEMPOOL // ================================================================================================ -pub type SharedMempool = Arc>; +#[derive(Clone)] +pub struct SharedMempool(Arc>); + +impl SharedMempool { + #[instrument(target = COMPONENT, name = "mempool.lock", skip_all)] + pub async fn lock(&self) -> MutexGuard<'_, Mempool> { + self.0.lock().await + } +} #[derive(Clone, Debug, PartialEq)] pub struct Mempool { @@ -168,13 +176,13 @@ impl Mempool { state_retention: usize, expiration_slack: u32, ) -> SharedMempool { - Arc::new(Mutex::new(Self::new( + SharedMempool(Arc::new(Mutex::new(Self::new( chain_tip, batch_budget, block_budget, state_retention, expiration_slack, - ))) + )))) } fn new( @@ -205,7 +213,7 @@ impl Mempool { /// # Errors /// /// Returns an error if the transaction's initial conditions don't match the current state. - #[instrument(target = COMPONENT, skip_all, fields(tx=%transaction.id()))] + #[instrument(target = COMPONENT, name = "mempool.add_transaction", skip_all, fields(tx=%transaction.id()))] pub fn add_transaction( &mut self, transaction: AuthenticatedTransaction, @@ -227,7 +235,7 @@ impl Mempool { /// Transactions are returned in a valid execution ordering. /// /// Returns `None` if no transactions are available. 
- #[instrument(target = COMPONENT, skip_all)] + #[instrument(target = COMPONENT, name = "mempool.select_batch", skip_all)] pub fn select_batch(&mut self) -> Option<(BatchId, Vec)> { let (batch, parents) = self.transactions.select_batch(self.batch_budget); if batch.is_empty() { @@ -243,7 +251,7 @@ impl Mempool { /// Drops the failed batch and all of its descendants. /// /// Transactions are placed back in the queue. - #[instrument(target = COMPONENT, skip_all, fields(batch))] + #[instrument(target = COMPONENT, name = "mempool.batch_failed", skip_all, fields(batch_id=%batch))] pub fn batch_failed(&mut self, batch: BatchId) { // Batch may already have been removed as part of a parent batches failure. if !self.batches.contains(&batch) { @@ -267,7 +275,7 @@ impl Mempool { } /// Marks a batch as proven if it exists. - #[instrument(target = COMPONENT, skip_all, fields(batch=%batch.id()))] + #[instrument(target = COMPONENT, name = "mempool.batch_proved", skip_all, fields(batch_id=%batch.id()))] pub fn batch_proved(&mut self, batch: ProvenBatch) { // Batch may have been removed as part of a parent batches failure. if !self.batches.contains(&batch.id()) { @@ -286,7 +294,7 @@ impl Mempool { /// # Panics /// /// Panics if there is already a block in flight. - #[instrument(target = COMPONENT, skip_all)] + #[instrument(target = COMPONENT, name = "mempool.select_block", skip_all)] pub fn select_block(&mut self) -> (BlockNumber, Vec) { assert!(self.block_in_progress.is_none(), "Cannot have two blocks inflight."); @@ -296,15 +304,16 @@ impl Mempool { (self.chain_tip.child(), batches) } - /// Notify the pool that the block was successfully completed. + /// Notify the pool that the in flight block was successfully committed to the chain. + /// + /// The pool will mark the associated batches and transactions as committed, and prune stale + /// committed data, and purge transactions that are now considered expired. 
/// /// # Panics /// - /// Panics if blocks are completed out-of-order or if there is no block in flight. - #[instrument(target = COMPONENT, skip_all, fields(block_number))] - pub fn block_committed(&mut self, block_number: BlockNumber) { - assert_eq!(block_number, self.chain_tip.child(), "Blocks must be submitted sequentially"); - + /// Panics if there is no block in flight. + #[instrument(target = COMPONENT, name = "mempool.commit_block", skip_all)] + pub fn commit_block(&mut self) { // Remove committed batches and transactions from graphs. let batches = self.block_in_progress.take().expect("No block in progress to commit"); let transactions = @@ -321,21 +330,18 @@ impl Mempool { self.chain_tip = self.chain_tip.child(); // Revert expired transactions and their descendents. - let expired = self.expirations.get(block_number); - self.revert_transactions(expired.into_iter().collect()) - .expect("expired transactions must be part of the mempool"); + self.revert_expired_transactions(); } - /// Block and all of its contents and dependents are purged from the mempool. + /// Notify the pool that construction of the in flight block failed. + /// + /// The pool will purge the block and all of its contents from the pool. /// /// # Panics /// - /// Panics if there is no block in flight or if the block number does not match the current - /// inflight block. - #[instrument(target = COMPONENT, skip_all, fields(block_number))] - pub fn block_failed(&mut self, block_number: BlockNumber) { - assert_eq!(block_number, self.chain_tip.child(), "Blocks must be submitted sequentially"); - + /// Panics if there is no block in flight. + #[instrument(target = COMPONENT, name = "mempool.rollback_block", skip_all)] + pub fn rollback_block(&mut self) { let batches = self.block_in_progress.take().expect("No block in progress to be failed"); // Revert all transactions. This is the nuclear (but simplest) solution. 
@@ -358,6 +364,14 @@ impl Mempool { .expect("transactions from a block must be part of the mempool"); } + #[instrument(target = COMPONENT, name = "mempool.revert_expired_transactions", skip_all)] + fn revert_expired_transactions(&mut self) { + let expired = self.expirations.get(self.chain_tip); + + self.revert_transactions(expired.into_iter().collect()) + .expect("expired transactions must be part of the mempool"); + } + /// Reverts the given transactions and their descendents from the mempool. /// /// This includes removing them from the transaction and batch graphs, as well as cleaning up @@ -370,10 +384,13 @@ impl Mempool { /// /// Returns an error if any transaction was not in the transaction graph i.e. if the transaction /// is unknown. + #[instrument(target = COMPONENT, name = "mempool.revert_transactions", skip_all, fields(transactions.ids))] fn revert_transactions( &mut self, txs: Vec, ) -> Result<(), GraphError> { + tracing::Span::current().record("transactions.expired.ids", tracing::field::debug(&txs)); + // Revert all transactions and their descendents, and their associated batches. let reverted = self.transactions.remove_transactions(txs)?; let batches_reverted = self.batches.remove_batches_with_transactions(reverted.iter()); diff --git a/crates/block-producer/src/mempool/tests.rs b/crates/block-producer/src/mempool/tests.rs index e7680736..8c81286b 100644 --- a/crates/block-producer/src/mempool/tests.rs +++ b/crates/block-producer/src/mempool/tests.rs @@ -107,8 +107,8 @@ fn block_commit_reverts_expired_txns() { uut.add_transaction(tx_to_revert).unwrap(); // Commit the pending block which should revert the above tx. 
- uut.block_committed(block); - reference.block_committed(block); + uut.commit_block(); + reference.commit_block(); assert_eq!(uut, reference); } @@ -118,24 +118,15 @@ fn empty_block_commitment() { let mut uut = Mempool::for_tests(); for _ in 0..3 { - let (block, _) = uut.select_block(); - uut.block_committed(block); + let (_block, _) = uut.select_block(); + uut.commit_block(); } } -#[test] -#[should_panic] -fn blocks_must_be_committed_sequentially() { - let mut uut = Mempool::for_tests(); - - let (block, _) = uut.select_block(); - uut.block_committed(block + 1); -} - #[test] #[should_panic] fn block_commitment_is_rejected_if_no_block_is_in_flight() { - Mempool::for_tests().block_committed(BlockNumber::from(1)); + Mempool::for_tests().commit_block(); } #[test] @@ -166,7 +157,7 @@ fn block_failure_reverts_its_transactions() { ])); // Block 1 will contain just the first batch. - let (block_number, _) = uut.select_block(); + let (_number, _batches) = uut.select_block(); // Create another dependent batch. uut.add_transaction(reverted_txs[1].clone()).unwrap(); @@ -175,7 +166,7 @@ fn block_failure_reverts_its_transactions() { uut.add_transaction(reverted_txs[2].clone()).unwrap(); // Fail the block which should result in everything reverting. - uut.block_failed(block_number); + uut.rollback_block(); assert_eq!(uut, reference); } diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 2c4a36fe..3d8009cd 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -138,7 +138,7 @@ impl StoreClient { } /// Returns the latest block's header from the store. 
- #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.latest_header", skip_all, err)] pub async fn latest_header(&self) -> Result { let response = self .inner @@ -156,7 +156,7 @@ impl StoreClient { BlockHeader::try_from(response).map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_tx_inputs", skip_all, err)] pub async fn get_tx_inputs( &self, proven_tx: &ProvenTransaction, @@ -193,7 +193,7 @@ impl StoreClient { Ok(tx_inputs) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_block_inputs", skip_all, err)] pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, @@ -211,7 +211,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_batch_inputs", skip_all, err)] pub async fn get_batch_inputs( &self, block_references: impl Iterator + Send, @@ -227,7 +227,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { let request = tonic::Request::new(ApplyBlockRequest { block: block.to_bytes() }); diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 84a0ff33..24cde86a 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -56,7 +56,7 @@ impl api_server::Api for StoreApi { /// If the block number is not provided, block header for the latest block is returned. 
#[instrument( target = COMPONENT, - name = "store:get_block_header_by_number", + name = "store.server.get_block_header_by_number", skip_all, ret(level = "debug"), err @@ -88,7 +88,7 @@ impl api_server::Api for StoreApi { /// be verified against the latest root of the nullifier database. #[instrument( target = COMPONENT, - name = "store:check_nullifiers", + name = "store.server.check_nullifiers", skip_all, ret(level = "debug"), err @@ -112,7 +112,7 @@ impl api_server::Api for StoreApi { /// Currently the only supported prefix length is 16 bits. #[instrument( target = COMPONENT, - name = "store:check_nullifiers_by_prefix", + name = "store.server.check_nullifiers_by_prefix", skip_all, ret(level = "debug"), err @@ -145,7 +145,7 @@ impl api_server::Api for StoreApi { /// for the objects the client is interested in. #[instrument( target = COMPONENT, - name = "store:sync_state", + name = "store.server.sync_state", skip_all, ret(level = "debug"), err @@ -214,7 +214,7 @@ impl api_server::Api for StoreApi { /// Returns info which can be used by the client to sync note state. #[instrument( target = COMPONENT, - name = "store:sync_notes", + name = "store.server.sync_notes", skip_all, ret(level = "debug"), err @@ -246,7 +246,7 @@ impl api_server::Api for StoreApi { /// If the list is empty or no Note matched the requested NoteId and empty list is returned. #[instrument( target = COMPONENT, - name = "store:get_notes_by_id", + name = "store.server.get_notes_by_id", skip_all, ret(level = "debug"), err @@ -278,7 +278,7 @@ impl api_server::Api for StoreApi { /// Returns details for public (public) account by id. #[instrument( target = COMPONENT, - name = "store:get_account_details", + name = "store.server.get_account_details", skip_all, ret(level = "debug"), err @@ -302,7 +302,7 @@ impl api_server::Api for StoreApi { /// Updates the local DB by inserting a new block header and the related data. 
#[instrument( target = COMPONENT, - name = "store:apply_block", + name = "store.server.apply_block", skip_all, ret(level = "debug"), err @@ -338,7 +338,7 @@ impl api_server::Api for StoreApi { /// Returns data needed by the block producer to construct and prove the next block. #[instrument( target = COMPONENT, - name = "store:get_block_inputs", + name = "store.server.get_block_inputs", skip_all, ret(level = "debug"), err @@ -367,7 +367,7 @@ impl api_server::Api for StoreApi { /// See [`State::get_batch_inputs`] for details. #[instrument( target = COMPONENT, - name = "store:get_batch_inputs", + name = "store.server.get_batch_inputs", skip_all, ret(level = "debug"), err @@ -397,7 +397,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_transaction_inputs", + name = "store.server.get_transaction_inputs", skip_all, ret(level = "debug"), err @@ -445,7 +445,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_block_by_number", + name = "store.server.get_block_by_number", skip_all, ret(level = "debug"), err @@ -465,7 +465,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_account_proofs", + name = "store.server.get_account_proofs", skip_all, ret(level = "debug"), err @@ -503,7 +503,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_account_state_delta", + name = "store.server.get_account_state_delta", skip_all, ret(level = "debug"), err diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index b411cd29..2d0cc785 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -24,7 +24,7 @@ figment = { version = "0.10", features = ["env", "toml"] } http = "1.2" itertools = { workspace = true } miden-objects = { workspace = true } -opentelemetry = "0.27" +opentelemetry = { version = "0.27" } opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } opentelemetry_sdk = { version 
= "0.27", features = ["rt-tokio"] } rand = { workspace = true } @@ -33,7 +33,7 @@ thiserror = { workspace = true } tonic = { workspace = true } tracing = { workspace = true } tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-opentelemetry = "0.28" +tracing-opentelemetry = { version = "0.28" } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index 06968527..8d933046 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,6 +1,7 @@ use anyhow::Result; use opentelemetry::trace::TracerProvider as _; use opentelemetry_otlp::WithTonicConfig; +use opentelemetry_sdk::propagation::TraceContextPropagator; use tracing::subscriber::{self, Subscriber}; use tracing_opentelemetry::OpenTelemetryLayer; use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; @@ -10,6 +11,10 @@ use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; /// The open-telemetry configuration is controlled via environment variables as defined in the /// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) pub fn setup_tracing(enable_otel: bool) -> Result<()> { + if enable_otel { + opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); + } + let otel_layer = enable_otel.then_some(open_telemetry_layer()); let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); tracing::subscriber::set_global_default(subscriber).map_err(Into::into) diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs index 773d491c..292be68b 100644 --- a/crates/utils/src/tracing/mod.rs +++ b/crates/utils/src/tracing/mod.rs @@ -1 +1,6 @@ pub mod grpc; + +// Re-export useful traits for open-telemetry 
 traces. This avoids requiring other crates to
+// import that family of crates directly.
+pub use opentelemetry::trace::Status as OtelStatus;
+pub use tracing_opentelemetry::OpenTelemetrySpanExt;

From 26cf6be11b2cf9024c7875ce0a5801eca477e0c3 Mon Sep 17 00:00:00 2001
From: igamigo
Date: Fri, 14 Feb 2025 03:20:26 -0300
Subject: [PATCH 13/17] chore: bump `miden-base` versions (#689)

---
 Cargo.lock                                    |  8 ++++----
 Cargo.toml                                    |  8 ++++----
 crates/block-producer/src/test_utils/batch.rs |  3 +++
 crates/block-producer/src/test_utils/block.rs |  2 +-
 crates/block-producer/src/test_utils/store.rs |  3 ++-
 crates/store/src/db/sql/mod.rs                | 14 +++++++++-----
 crates/store/src/genesis.rs                   |  2 +-
 crates/store/src/state.rs                     |  2 +-
 8 files changed, 25 insertions(+), 17 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 8bdd6e95..90826a36 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1787,7 +1787,7 @@ dependencies = [
 [[package]]
 name = "miden-lib"
 version = "0.8.0"
-source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d"
+source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394"
 dependencies = [
  "miden-assembly",
  "miden-objects",
@@ -1983,7 +1983,7 @@
 [[package]]
 name = "miden-objects"
 version = "0.8.0"
-source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d"
+source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394"
 dependencies = [
  "getrandom 0.2.15",
  "miden-assembly",
@@ -2042,7 +2042,7 @@ dependencies = [
 [[package]]
 name = "miden-tx"
 version = "0.8.0"
-source = 
"git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "async-trait", "miden-lib", @@ -2059,7 +2059,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "miden-core", "miden-crypto", diff --git a/Cargo.toml b/Cargo.toml index bbb10141..2c05af39 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,18 +28,18 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-tx = { git = 
"https://github.com/0xPolygonMiden/miden-base", branch = "next" }
+miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" }
 prost = { version = "0.13" }
 rand = { version = "0.8" }
 thiserror = { version = "2.0", default-features = false }
diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs
index b4caffd2..37c2041f 100644
--- a/crates/block-producer/src/test_utils/batch.rs
+++ b/crates/block-producer/src/test_utils/batch.rs
@@ -4,6 +4,7 @@ use miden_objects::{
     batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch},
     block::BlockNumber,
     transaction::{InputNotes, ProvenTransaction},
+    Digest,
 };
 
 use crate::test_utils::MockProvenTxBuilder;
@@ -57,6 +58,8 @@ impl TransactionBatchConstructor for ProvenBatch {
 
         ProvenBatch::new(
             BatchId::from_transactions(txs.into_iter()),
+            Digest::default(),
+            BlockNumber::GENESIS,
             account_updates,
             InputNotes::new_unchecked(input_notes),
             BatchNoteTree::with_contiguous_leaves(
diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs
index 532e314d..03ea004f 100644
--- a/crates/block-producer/src/test_utils/block.rs
+++ b/crates/block-producer/src/test_utils/block.rs
@@ -131,7 +131,7 @@ impl MockBlockBuilder {
     pub fn account_updates(mut self, updated_accounts: Vec<BlockAccountUpdate>) -> Self {
         for update in &updated_accounts {
             self.store_accounts
-                .insert(update.account_id().into(), update.new_state_hash().into());
+                .insert(update.account_id().into(), update.final_state_commitment().into());
         }
 
         self.updated_accounts = Some(updated_accounts);
diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs
index ecbe1dc6..af404acd 100644
--- a/crates/block-producer/src/test_utils/store.rs
+++ b/crates/block-producer/src/test_utils/store.rs
@@ -199,7 +199,8 @@ impl MockStoreSuccess {
 
         // update accounts
         for update in block.updated_accounts() {
-            
locked_accounts.insert(update.account_id().into(), update.new_state_hash().into()); + locked_accounts + .insert(update.account_id().into(), update.final_state_commitment().into()); } let header = block.header(); debug_assert_eq!(locked_accounts.root(), header.account_root()); diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 95e6dcdb..7ade4267 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -422,10 +422,10 @@ pub fn upsert_accounts( AccountUpdateDetails::New(account) => { debug_assert_eq!(account_id, account.id()); - if account.hash() != update.new_state_hash() { + if account.hash() != update.final_state_commitment() { return Err(DatabaseError::AccountHashesMismatch { calculated: account.hash(), - expected: update.new_state_hash(), + expected: update.final_state_commitment(), }); } @@ -439,8 +439,12 @@ pub fn upsert_accounts( return Err(DatabaseError::AccountNotFoundInDb(account_id)); }; - let account = - apply_delta(account_id, &row.get_ref(0)?, delta, &update.new_state_hash())?; + let account = apply_delta( + account_id, + &row.get_ref(0)?, + delta, + &update.final_state_commitment(), + )?; (Some(Cow::Owned(account)), Some(Cow::Borrowed(delta))) }, @@ -448,7 +452,7 @@ pub fn upsert_accounts( let inserted = upsert_stmt.execute(params![ account_id.to_bytes(), - update.new_state_hash().to_bytes(), + update.final_state_commitment().to_bytes(), block_num.as_u32(), full_account.as_ref().map(|account| account.to_bytes()), ])?; diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index 2232cc2d..de128879 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -48,7 +48,7 @@ impl GenesisState { let account_smt: SimpleSmt = SimpleSmt::with_leaves(accounts.iter().map(|update| { - (update.account_id().prefix().into(), update.new_state_hash().into()) + (update.account_id().prefix().into(), update.final_state_commitment().into()) }))?; let header = BlockHeader::new( 
diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 059c22be..8b8bcf69 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -264,7 +264,7 @@ impl State { block.updated_accounts().iter().map(|update| { ( LeafIndex::new_max_depth(update.account_id().prefix().into()), - update.new_state_hash().into(), + update.final_state_commitment().into(), ) }), ); From 2a10b47f5a2e3d928296d85d8516050b8e560f4a Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Fri, 14 Feb 2025 08:25:27 +0200 Subject: [PATCH 14/17] chore: upgrade otel crates and filter otel traces (#690) --- CHANGELOG.md | 2 +- Cargo.lock | 27 ++++---- bin/faucet/src/main.rs | 29 +++++---- bin/node/src/main.rs | 9 +-- crates/test-macro/src/lib.rs | 4 +- crates/utils/Cargo.toml | 10 +-- crates/utils/src/logging.rs | 123 ++++++++++++++++++----------------- 7 files changed, 108 insertions(+), 96 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0a88e64..8a3e87b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ ### Enhancements -- Add an optional open-telemetry trace exporter (#659). +- Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). - Instrument the block-producer's block building process (#676). 
diff --git a/Cargo.lock b/Cargo.lock index 90826a36..a7eb2e6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2312,23 +2312,23 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "opentelemetry" -version = "0.27.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab70038c28ed37b97d8ed414b6429d343a8bbf44c9f79ec854f3a643029ba6d7" +checksum = "236e667b670a5cdf90c258f5a55794ec5ac5027e960c224bff8367a59e1e6426" dependencies = [ "futures-core", "futures-sink", "js-sys", "pin-project-lite", - "thiserror 1.0.69", + "thiserror 2.0.11", "tracing", ] [[package]] name = "opentelemetry-otlp" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" +checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" dependencies = [ "async-trait", "futures-core", @@ -2337,17 +2337,16 @@ dependencies = [ "opentelemetry-proto", "opentelemetry_sdk", "prost", - "thiserror 1.0.69", + "thiserror 2.0.11", "tokio", "tonic", - "tracing", ] [[package]] name = "opentelemetry-proto" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" +checksum = "56f8870d3024727e99212eb3bb1762ec16e255e3e6f58eeb3dc8db1aa226746d" dependencies = [ "opentelemetry", "opentelemetry_sdk", @@ -2357,9 +2356,9 @@ dependencies = [ [[package]] name = "opentelemetry_sdk" -version = "0.27.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" +checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" dependencies = [ "async-trait", "futures-channel", @@ -2370,7 +2369,7 @@ dependencies = [ "percent-encoding", "rand", "serde_json", - 
"thiserror 1.0.69", + "thiserror 2.0.11", "tokio", "tokio-stream", "tracing", @@ -3764,9 +3763,9 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" +checksum = "721f2d2569dce9f3dfbbddee5906941e953bfcdf736a62da3377f5751650cc36" dependencies = [ "js-sys", "once_cell", diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 5cc169a4..64e223d0 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -17,7 +17,9 @@ use client::initialize_faucet_client; use handlers::{get_background, get_favicon, get_index_css, get_index_html, get_index_js}; use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; -use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; +use miden_node_utils::{ + config::load_config, crypto::get_rpo_random_coin, logging::OpenTelemetry, version::LongVersion, +}; use miden_objects::{ account::{AccountFile, AccountStorageMode, AuthSecretKey}, asset::TokenSymbol, @@ -89,14 +91,15 @@ pub enum Command { #[tokio::main] async fn main() -> anyhow::Result<()> { - miden_node_utils::logging::setup_logging().context("Failed to initialize logging")?; + miden_node_utils::logging::setup_tracing(OpenTelemetry::Disabled) + .context("failed to initialize logging")?; let cli = Cli::parse(); match &cli.command { Command::Start { config } => { let config: FaucetConfig = - load_config(config).context("Failed to load configuration file")?; + load_config(config).context("failed to load configuration file")?; let faucet_state = FaucetState::new(config.clone()).await?; @@ -129,7 +132,7 @@ async fn main() -> anyhow::Result<()> { anyhow::anyhow!("Couldn't get any socket addrs for endpoint: {}", config.endpoint), )?; let listener = - TcpListener::bind(socket_addr).await.context("Failed to 
bind TCP listener")?; + TcpListener::bind(socket_addr).await.context("failed to bind TCP listener")?; info!(target: COMPONENT, endpoint = %config.endpoint, "Server started"); @@ -146,12 +149,12 @@ async fn main() -> anyhow::Result<()> { println!("Generating new faucet account. This may take a few minutes..."); let config: FaucetConfig = - load_config(config_path).context("Failed to load configuration file")?; + load_config(config_path).context("failed to load configuration file")?; let (_, root_block_header, _) = initialize_faucet_client(&config).await?; let current_dir = - std::env::current_dir().context("Failed to open current directory")?; + std::env::current_dir().context("failed to open current directory")?; let mut rng = ChaCha20Rng::from_seed(rand::random()); @@ -159,16 +162,16 @@ async fn main() -> anyhow::Result<()> { let (account, account_seed) = create_basic_fungible_faucet( rng.gen(), - (&root_block_header).try_into().context("Failed to create anchor block")?, + (&root_block_header).try_into().context("failed to create anchor block")?, TokenSymbol::try_from(token_symbol.as_str()) - .context("Failed to parse token symbol")?, + .context("failed to parse token symbol")?, *decimals, Felt::try_from(*max_supply) .expect("max supply value is greater than or equal to the field modulus"), AccountStorageMode::Public, AuthScheme::RpoFalcon512 { pub_key: secret.public_key() }, ) - .context("Failed to create basic fungible faucet account")?; + .context("failed to create basic fungible faucet account")?; let account_data = AccountFile::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); @@ -176,14 +179,14 @@ async fn main() -> anyhow::Result<()> { let output_path = current_dir.join(output_path); account_data .write(&output_path) - .context("Failed to write account data to file")?; + .context("failed to write account data to file")?; println!("Faucet account file successfully created at: {output_path:?}"); }, Command::Init { config_path, 
faucet_account_path } => { let current_dir = - std::env::current_dir().context("Failed to open current directory")?; + std::env::current_dir().context("failed to open current directory")?; let config_file_path = current_dir.join(config_path); @@ -193,10 +196,10 @@ async fn main() -> anyhow::Result<()> { }; let config_as_toml_string = - toml::to_string(&config).context("Failed to serialize default config")?; + toml::to_string(&config).context("failed to serialize default config")?; std::fs::write(&config_file_path, config_as_toml_string) - .context("Error writing config to file")?; + .context("error writing config to file")?; println!("Config file successfully created at: {config_file_path:?}"); }, diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 1697f61f..599e2a6f 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -6,7 +6,7 @@ use commands::{init::init_config_files, start::start_node}; use miden_node_block_producer::server::BlockProducer; use miden_node_rpc::server::Rpc; use miden_node_store::server::Store; -use miden_node_utils::{config::load_config, version::LongVersion}; +use miden_node_utils::{config::load_config, logging::OpenTelemetry, version::LongVersion}; mod commands; mod config; @@ -88,9 +88,10 @@ async fn main() -> anyhow::Result<()> { let cli = Cli::parse(); // Open telemetry exporting is only valid for running the node. - let open_telemetry = match &cli.command { - Command::Start { open_telemetry, .. } => *open_telemetry, - _ => false, + let open_telemetry = if let Command::Start { open_telemetry: true, .. 
} = &cli.command { + OpenTelemetry::Enabled + } else { + OpenTelemetry::Disabled }; miden_node_utils::logging::setup_tracing(open_telemetry)?; diff --git a/crates/test-macro/src/lib.rs b/crates/test-macro/src/lib.rs index e0c0e7db..8958a4d4 100644 --- a/crates/test-macro/src/lib.rs +++ b/crates/test-macro/src/lib.rs @@ -10,7 +10,9 @@ pub fn enable_logging(_attr: TokenStream, item: TokenStream) -> TokenStream { let stmts = function.block.stmts; let block: Block = parse_quote! {{ if ::std::env::args().any(|e| e == "--nocapture") { - let subscriber = ::tracing::subscriber::set_default(::miden_node_utils::logging::subscriber()); + ::miden_node_utils::logging::setup_tracing( + ::miden_node_utils::logging::OpenTelemetry::Disabled + ).expect("logging setup should succeed"); let span = ::tracing::span!(::tracing::Level::INFO, #name).entered(); #(#stmts)* diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 2d0cc785..73995dca 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,19 +21,19 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } -http = "1.2" +http = { version = "1.2" } itertools = { workspace = true } miden-objects = { workspace = true } -opentelemetry = { version = "0.27" } -opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } -opentelemetry_sdk = { version = "0.27", features = ["rt-tokio"] } +opentelemetry = { version = "0.28" } +opentelemetry-otlp = { version = "0.28", default-features = false, features = ["grpc-tonic", "tls-roots", "trace"] } +opentelemetry_sdk = { version = "0.28", features = ["rt-tokio"] } rand = { workspace = true } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } tonic = { workspace = true } tracing = { workspace = true } tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-opentelemetry = { version = "0.28" } 
+tracing-opentelemetry = { version = "0.29" } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index 8d933046..a8a220cc 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,29 +1,50 @@ +use std::str::FromStr; + use anyhow::Result; use opentelemetry::trace::TracerProvider as _; use opentelemetry_otlp::WithTonicConfig; use opentelemetry_sdk::propagation::TraceContextPropagator; -use tracing::subscriber::{self, Subscriber}; +use tracing::subscriber::Subscriber; use tracing_opentelemetry::OpenTelemetryLayer; -use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; +use tracing_subscriber::{ + layer::{Filter, SubscriberExt}, + Layer, Registry, +}; + +/// Configures [`setup_tracing`] to enable or disable the open-telemetry exporter. +#[derive(Clone, Copy)] +pub enum OpenTelemetry { + Enabled, + Disabled, +} -/// Configures tracing and optionally enables an open-telemetry OTLP exporter. +impl OpenTelemetry { + fn is_enabled(self) -> bool { + matches!(self, OpenTelemetry::Enabled) + } +} + +/// Initializes tracing to stdout and optionally an open-telemetry exporter. +/// +/// Trace filtering defaults to `INFO` and can be configured using the conventional `RUST_LOG` +/// environment variable. 
/// /// The open-telemetry configuration is controlled via environment variables as defined in the /// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) -pub fn setup_tracing(enable_otel: bool) -> Result<()> { - if enable_otel { +pub fn setup_tracing(otel: OpenTelemetry) -> Result<()> { + if otel.is_enabled() { opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); } - let otel_layer = enable_otel.then_some(open_telemetry_layer()); - let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); - tracing::subscriber::set_global_default(subscriber).map_err(Into::into) -} + // Note: open-telemetry requires a tokio-runtime, so this _must_ be lazily evaluated (aka not + // `then_some`) to avoid crashing sync callers (with OpenTelemetry::Disabled set). Examples of + // such callers are tests with logging enabled. + let otel_layer = otel.is_enabled().then(open_telemetry_layer); -pub fn setup_logging() -> Result<()> { - subscriber::set_global_default(subscriber())?; - - Ok(()) + let subscriber = Registry::default() + .with(stdout_layer().with_filter(env_or_default_filter())) + .with(otel_layer.with_filter(env_or_default_filter())); + tracing::subscriber::set_global_default(subscriber).map_err(Into::into) } fn open_telemetry_layer() -> Box + Send + Sync + 'static> @@ -37,8 +58,8 @@ where .build() .unwrap(); - let tracer = opentelemetry_sdk::trace::TracerProvider::builder() - .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + let tracer = opentelemetry_sdk::trace::SdkTracerProvider::builder() + .with_batch_exporter(exporter) .build(); let tracer = tracer.tracer("tracing-otel-subscriber"); @@ -61,11 +82,6 @@ where .with_line_number(true) .with_target(true) .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in 
extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) .boxed() } @@ -75,45 +91,36 @@ where S: Subscriber, for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, { - tracing_forest::ForestLayer::default() - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) - .boxed() -} - -#[cfg(not(feature = "tracing-forest"))] -pub fn subscriber() -> impl Subscriber + core::fmt::Debug { - use tracing_subscriber::fmt::format::FmtSpan; - - tracing_subscriber::fmt() - .pretty() - .compact() - .with_level(true) - .with_file(true) - .with_line_number(true) - .with_target(true) - .with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) - .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) - .finish() + tracing_forest::ForestLayer::default().boxed() } -#[cfg(feature = "tracing-forest")] -pub fn subscriber() -> impl Subscriber + core::fmt::Debug { - pub use tracing_forest::ForestLayer; - pub use tracing_subscriber::{layer::SubscriberExt, Registry}; +/// Creates a filter from the `RUST_LOG` env var with a default of `INFO` if unset. +/// +/// # Panics +/// +/// Panics if `RUST_LOG` fails to parse. +fn env_or_default_filter() -> Box + Send + Sync + 'static> { + use tracing::level_filters::LevelFilter; + use tracing_subscriber::{ + filter::{FilterExt, Targets}, + EnvFilter, + }; - Registry::default().with(ForestLayer::default()).with( - EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. 
- "info,axum::rejection=trace".into() - }), - ) + // `tracing` does not allow differentiating between invalid and missing env var so we manually + // do this instead. The alternative is to silently ignore parsing errors which I think is worse. + match std::env::var(EnvFilter::DEFAULT_ENV) { + Ok(rust_log) => FilterExt::boxed( + EnvFilter::from_str(&rust_log) + .expect("RUST_LOG should contain a valid filter configuration"), + ), + Err(std::env::VarError::NotUnicode(_)) => panic!("RUST_LOG contained non-unicode"), + Err(std::env::VarError::NotPresent) => { + // Default level is INFO, and additionally enable logs from axum extractor rejections. + FilterExt::boxed( + Targets::new() + .with_default(LevelFilter::INFO) + .with_target("axum::rejection", LevelFilter::TRACE), + ) + }, + } } From d57c8dd4df7a6ef62ae79b430427fbf5f407a425 Mon Sep 17 00:00:00 2001 From: Varun Doshi <61531351+varun-doshi@users.noreply.github.com> Date: Tue, 18 Feb 2025 19:37:12 +0530 Subject: [PATCH 15/17] feat(store): added BlockChain wrapper for Mmr (#668) --- crates/store/src/state.rs | 168 ++++++++++++++++++++++++++------------ 1 file changed, 118 insertions(+), 50 deletions(-) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 8b8bcf69..ccc566e9 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -96,17 +96,108 @@ pub struct TransactionInputs { pub found_unauthenticated_notes: BTreeSet, } +/// A [Merkle Mountain Range](Mmr) defining a chain of blocks. +#[derive(Debug, Clone)] +pub struct Blockchain(Mmr); + +impl Blockchain { + /// Returns a new Blockchain. + pub fn new(chain_mmr: Mmr) -> Self { + Self(chain_mmr) + } + + /// Returns the tip of the chain, i.e. the number of the latest block in the chain. + pub fn chain_tip(&self) -> BlockNumber { + let block_number: u32 = (self.0.forest() - 1) + .try_into() + .expect("chain_mmr always has, at least, the genesis block"); + + block_number.into() + } + + /// Returns the chain length. 
+ pub fn chain_length(&self) -> BlockNumber { + self.chain_tip().child() + } + + /// Returns the current peaks of the MMR. + pub fn peaks(&self) -> MmrPeaks { + self.0.peaks() + } + + /// Returns the peaks of the MMR at the state specified by `forest`. + /// + /// # Errors + /// + /// Returns an error if the specified `forest` value is not valid for this MMR. + pub fn peaks_at(&self, forest: usize) -> Result { + self.0.peaks_at(forest) + } + + /// Adds a block commitment to the MMR. The caller must ensure that this commitent is the one + /// for the next block in the chain. + pub fn push(&mut self, block_commitment: RpoDigest) { + self.0.add(block_commitment); + } + + /// Returns an [`MmrProof`] for the leaf at the specified position. + pub fn open(&self, pos: usize) -> Result { + self.0.open_at(pos, self.0.forest()) + } + + /// Returns a reference to the underlying [`Mmr`]. + pub fn as_mmr(&self) -> &Mmr { + &self.0 + } + + /// Returns the latest block number and partial mmr. + pub fn partial_mmr_from_blocks( + &self, + blocks: &BTreeSet, + latest_block_number: BlockNumber, + ) -> Result { + // Using latest block as the target forest means we take the state of the MMR one before + // the latest block. This is because the latest block will be used as the reference + // block of the batch and will be added to the MMR by the batch kernel. + let target_forest = latest_block_number.as_usize(); + let peaks = self + .peaks_at(target_forest) + .expect("target_forest should be smaller than forest of the chain mmr"); + // Grab the block merkle paths from the inner state. + let mut partial_mmr = PartialMmr::from_peaks(peaks); + + for block_num in blocks.iter().map(BlockNumber::as_usize) { + // SAFETY: We have ensured block nums are less than chain length. 
+ let leaf = self + .0 + .get(block_num) + .expect("block num less than chain length should exist in chain mmr"); + let path = self + .0 + .open_at(block_num, target_forest) + .expect("block num and target forest should be valid for this mmr") + .merkle_path; + // SAFETY: We should be able to fill the partial MMR with data from the chain MMR + // without errors, otherwise it indicates the chain mmr is invalid. + partial_mmr + .track(block_num, leaf, &path) + .expect("filling partial mmr with data from mmr should succeed"); + } + Ok(partial_mmr) + } +} + /// Container for state that needs to be updated atomically. struct InnerState { nullifier_tree: NullifierTree, - chain_mmr: Mmr, + blockchain: Blockchain, account_tree: SimpleSmt, } impl InnerState { /// Returns the latest block number. fn latest_block_num(&self) -> BlockNumber { - let block_number: u32 = (self.chain_mmr.forest() - 1) + let block_number: u32 = (self.blockchain.chain_tip().as_usize() - 1) .try_into() .expect("chain_mmr always has, at least, the genesis block"); @@ -144,7 +235,11 @@ impl State { let chain_mmr = load_mmr(&mut db).await?; let account_tree = load_accounts(&mut db).await?; - let inner = RwLock::new(InnerState { nullifier_tree, chain_mmr, account_tree }); + let inner = RwLock::new(InnerState { + nullifier_tree, + blockchain: Blockchain::new(chain_mmr), + account_tree, + }); let writer = Mutex::new(()); let db = Arc::new(db); @@ -245,7 +340,7 @@ impl State { // compute updates for the in-memory data structures // new_block.chain_root must be equal to the chain MMR root prior to the update - let peaks = inner.chain_mmr.peaks(); + let peaks = inner.blockchain.peaks(); if peaks.hash_peaks() != header.chain_root() { return Err(InvalidBlockError::NewBlockInvalidChainRoot.into()); } @@ -374,7 +469,7 @@ impl State { .account_tree .apply_mutations(account_tree_update) .expect("Unreachable: old account tree root must be checked before this step"); - inner.chain_mmr.add(block_hash); + 
inner.blockchain.push(block_hash); } info!(%block_hash, block_num = block_num.as_u32(), COMPONENT, "apply_block successful"); @@ -396,7 +491,7 @@ impl State { if let Some(header) = block_header { let mmr_proof = if include_mmr_proof { let inner = self.inner.read().await; - let mmr_proof = inner.chain_mmr.open(header.block_num().as_usize())?; + let mmr_proof = inner.blockchain.open(header.block_num().as_usize())?; Some(mmr_proof) } else { None @@ -460,12 +555,12 @@ impl State { // dropping the guard. let (chain_length, merkle_paths) = { let state = self.inner.read().await; - let chain_length = state.chain_mmr.forest(); + let chain_length = state.blockchain.chain_length().as_usize(); let paths = blocks .iter() .map(|&block_num| { - let proof = state.chain_mmr.open(block_num.as_usize())?.merkle_path; + let proof = state.blockchain.open(block_num.as_usize())?.merkle_path; Ok::<_, MmrError>((block_num, proof)) }) @@ -540,15 +635,12 @@ impl State { let mut blocks = tx_reference_blocks; blocks.extend(note_blocks); - // Grab the block merkle paths from the inner state. - // - // NOTE: Scoped block to automatically drop the mutex guard asap. - // - // We also avoid accessing the db in the block as this would delay - // dropping the guard. + // Scoped block to automatically drop the read lock guard as soon as we're done. + // We also avoid accessing the db in the block as this would delay dropping the guard. let (batch_reference_block, partial_mmr) = { - let state = self.inner.read().await; - let latest_block_num = state.latest_block_num(); + let inner_state = self.inner.blocking_read(); + + let latest_block_num = inner_state.blockchain.chain_tip(); let highest_block_num = *blocks.last().expect("we should have checked for empty block references"); @@ -564,35 +656,10 @@ impl State { // there is no need to prove its inclusion. blocks.remove(&latest_block_num); - // Using latest block as the target forest means we take the state of the MMR one before - // the latest block. 
This is because the latest block will be used as the reference - // block of the batch and will be added to the MMR by the batch kernel. - let target_forest = latest_block_num.as_usize(); - let peaks = state - .chain_mmr - .peaks_at(target_forest) - .expect("target_forest should be smaller than forest of the chain mmr"); - let mut partial_mmr = PartialMmr::from_peaks(peaks); - - for block_num in blocks.iter().map(BlockNumber::as_usize) { - // SAFETY: We have ensured block nums are less than chain length. - let leaf = state - .chain_mmr - .get(block_num) - .expect("block num less than chain length should exist in chain mmr"); - let path = state - .chain_mmr - .open_at(block_num, target_forest) - .expect("block num and target forest should be valid for this mmr") - .merkle_path; - // SAFETY: We should be able to fill the partial MMR with data from the chain MMR - // without errors, otherwise it indicates the chain mmr is invalid. - partial_mmr - .track(block_num, leaf, &path) - .expect("filling partial mmr with data from mmr should succeed"); - } - - (latest_block_num, partial_mmr) + ( + latest_block_num, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + ) }; // Fetch the reference block of the batch as part of this query, so we can avoid looking it @@ -679,7 +746,8 @@ impl State { let from_forest = (block_num + 1).as_usize(); let to_forest = state_sync.block_header.block_num().as_usize(); inner - .chain_mmr + .blockchain + .as_mmr() .get_delta(from_forest, to_forest) .map_err(StateSyncError::FailedToBuildMmrDelta)? 
}; @@ -708,7 +776,7 @@ impl State { let note_sync = self.db.get_note_sync(block_num, note_tags).await?; - let mmr_proof = inner.chain_mmr.open(note_sync.block_header.block_num().as_usize())?; + let mmr_proof = inner.blockchain.open(note_sync.block_header.block_num().as_usize())?; Ok((note_sync, mmr_proof)) } @@ -729,9 +797,9 @@ impl State { .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?; // sanity check - if inner.chain_mmr.forest() != latest.block_num().as_usize() + 1 { + if inner.blockchain.chain_tip() != latest.block_num() { return Err(GetBlockInputsError::IncorrectChainMmrForestNumber { - forest: inner.chain_mmr.forest(), + forest: inner.blockchain.chain_tip().as_usize(), block_num: latest.block_num(), }); } @@ -739,7 +807,7 @@ impl State { // using current block number gets us the peaks of the chain MMR as of one block ago; // this is done so that latest.chain_root matches the returned peaks let chain_peaks = - inner.chain_mmr.peaks_at(latest.block_num().as_usize()).map_err(|error| { + inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| { GetBlockInputsError::FailedToGetMmrPeaksForForest { forest: latest.block_num().as_usize(), error, From 22f53bdd3f559973f972a1137dc8da93b18593ac Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Wed, 19 Feb 2025 18:49:09 +1300 Subject: [PATCH 16/17] feat: OpenTelemetrySpanExt trait (#700) --- .../block-producer/src/block_builder/mod.rs | 32 +++---- crates/utils/src/tracing/mod.rs | 6 +- crates/utils/src/tracing/span_ext.rs | 84 +++++++++++++++++++ 3 files changed, 102 insertions(+), 20 deletions(-) create mode 100644 crates/utils/src/tracing/span_ext.rs diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 1e7b2429..bbdc23c5 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -4,7 +4,7 @@ use std::{ }; use futures::FutureExt; -use 
miden_node_utils::tracing::{OpenTelemetrySpanExt, OtelStatus}; +use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ account::AccountId, batch::ProvenBatch, @@ -106,7 +106,7 @@ impl BlockBuilder { .and_then(|proven_block| async { self.inject_failure(proven_block) }) .and_then(|proven_block| self.commit_block(mempool, proven_block)) // Handle errors by propagating the error to the root span and rolling back the block. - .inspect_err(|err| Span::current().set_status(OtelStatus::Error { description: format!("{err:?}").into() })) + .inspect_err(|err| Span::current().set_error(err)) .or_else(|_err| self.rollback_block(mempool).never_error()) // Error has been handled, this is just type manipulation to remove the result wrapper. .unwrap_or_else(|_| ()) @@ -197,9 +197,9 @@ impl BlockBuilder { async fn simulate_proving(&self) { let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); - Span::current().set_attribute("range.min_s", self.simulated_proof_time.start.as_secs_f64()); - Span::current().set_attribute("range.max_s", self.simulated_proof_time.end.as_secs_f64()); - Span::current().set_attribute("dice_roll_s", proving_duration.as_secs_f64()); + Span::current().set_attribute("range.min_s", self.simulated_proof_time.start); + Span::current().set_attribute("range.max_s", self.simulated_proof_time.end); + Span::current().set_attribute("dice_roll_s", proving_duration); tokio::time::sleep(proving_duration).await; } @@ -282,8 +282,8 @@ struct ProvenBlock { impl SelectedBlock { fn inject_telemetry(&self) { let span = Span::current(); - span.set_attribute("block.number", i64::from(self.block_number.as_u32())); - span.set_attribute("block.batches.count", i64::from(self.batches.len() as u32)); + span.set_attribute("block.number", self.block_number); + span.set_attribute("block.batches.count", self.batches.len() as u32); } } @@ -319,17 +319,17 @@ impl ProvenBlock { let span = Span::current(); let header = self.block.header(); - 
span.set_attribute("block.hash", header.hash().to_hex()); - span.set_attribute("block.sub_hash", header.sub_hash().to_hex()); - span.set_attribute("block.parent_hash", header.prev_hash().to_hex()); + span.set_attribute("block.hash", header.hash()); + span.set_attribute("block.sub_hash", header.sub_hash()); + span.set_attribute("block.parent_hash", header.prev_hash()); span.set_attribute("block.protocol.version", i64::from(header.version())); - span.set_attribute("block.commitments.kernel", header.kernel_root().to_hex()); - span.set_attribute("block.commitments.nullifier", header.nullifier_root().to_hex()); - span.set_attribute("block.commitments.account", header.account_root().to_hex()); - span.set_attribute("block.commitments.chain", header.chain_root().to_hex()); - span.set_attribute("block.commitments.note", header.note_root().to_hex()); - span.set_attribute("block.commitments.transaction", header.tx_hash().to_hex()); + span.set_attribute("block.commitments.kernel", header.kernel_root()); + span.set_attribute("block.commitments.nullifier", header.nullifier_root()); + span.set_attribute("block.commitments.account", header.account_root()); + span.set_attribute("block.commitments.chain", header.chain_root()); + span.set_attribute("block.commitments.note", header.note_root()); + span.set_attribute("block.commitments.transaction", header.tx_hash()); } } diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs index 292be68b..2a01208d 100644 --- a/crates/utils/src/tracing/mod.rs +++ b/crates/utils/src/tracing/mod.rs @@ -1,6 +1,4 @@ pub mod grpc; +mod span_ext; -// Re-export useful traits for open-telemetry traces. This avoids requiring other crates from -// importing that family of crates directly. 
-pub use opentelemetry::trace::Status as OtelStatus; -pub use tracing_opentelemetry::OpenTelemetrySpanExt; +pub use span_ext::{OpenTelemetrySpanExt, ToValue}; diff --git a/crates/utils/src/tracing/span_ext.rs b/crates/utils/src/tracing/span_ext.rs new file mode 100644 index 00000000..a21150ac --- /dev/null +++ b/crates/utils/src/tracing/span_ext.rs @@ -0,0 +1,84 @@ +use core::time::Duration; + +use miden_objects::{block::BlockNumber, Digest}; +use opentelemetry::{trace::Status, Key, Value}; + +/// Utility functions for converting types into [`opentelemetry::Value`]. +pub trait ToValue { + fn to_value(&self) -> Value; +} + +impl ToValue for Duration { + fn to_value(&self) -> Value { + self.as_secs_f64().into() + } +} + +impl ToValue for Digest { + fn to_value(&self) -> Value { + self.to_hex().into() + } +} + +impl ToValue for f64 { + fn to_value(&self) -> Value { + (*self).into() + } +} + +impl ToValue for BlockNumber { + fn to_value(&self) -> Value { + i64::from(self.as_u32()).into() + } +} + +impl ToValue for u32 { + fn to_value(&self) -> Value { + i64::from(*self).into() + } +} + +impl ToValue for i64 { + fn to_value(&self) -> Value { + (*self).into() + } +} + +/// Utility functions based on [`tracing_opentelemetry::OpenTelemetrySpanExt`]. +/// +/// This is a sealed trait. It and cannot be implemented outside of this module. +pub trait OpenTelemetrySpanExt: private::Sealed { + fn set_attribute(&self, key: impl Into, value: impl ToValue); + fn set_error(&self, err: &dyn std::error::Error); +} + +impl OpenTelemetrySpanExt for S +where + S: tracing_opentelemetry::OpenTelemetrySpanExt, +{ + /// Sets an attribute on `Span`. + /// + /// Implementations for `ToValue` should be added to this crate (miden-node-utils). + fn set_attribute(&self, key: impl Into, value: impl ToValue) { + tracing_opentelemetry::OpenTelemetrySpanExt::set_attribute(self, key, value.to_value()); + } + + /// Sets a status on `Span` based on an error. 
+ fn set_error(&self, err: &dyn std::error::Error) { + // Coalesce all sources into one string. + let mut description = format!("{err}"); + let current = err; + while let Some(cause) = current.source() { + description.push_str(format!("\nCaused by: {cause}").as_str()); + } + tracing_opentelemetry::OpenTelemetrySpanExt::set_status( + self, + Status::Error { description: description.into() }, + ); + } +} + +mod private { + pub trait Sealed {} + impl Sealed for S where S: tracing_opentelemetry::OpenTelemetrySpanExt {} +} From fea7a5947fdd543c83f2fe5d4c468638fa97a131 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Wed, 19 Feb 2025 11:30:12 +0200 Subject: [PATCH 17/17] fix(store): blocking in async fn get_batch_inputs (#705) --- crates/store/src/state.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index ccc566e9..b08348e3 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -197,11 +197,7 @@ struct InnerState { impl InnerState { /// Returns the latest block number. fn latest_block_num(&self) -> BlockNumber { - let block_number: u32 = (self.blockchain.chain_tip().as_usize() - 1) - .try_into() - .expect("chain_mmr always has, at least, the genesis block"); - - block_number.into() + self.blockchain.chain_tip() } } @@ -638,7 +634,7 @@ impl State { // Scoped block to automatically drop the read lock guard as soon as we're done. // We also avoid accessing the db in the block as this would delay dropping the guard. let (batch_reference_block, partial_mmr) = { - let inner_state = self.inner.blocking_read(); + let inner_state = self.inner.read().await; let latest_block_num = inner_state.blockchain.chain_tip();