diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d3ace52c..f4930468d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## 0.8.0 (TBD) + +### Changes + +- [BREAKING] Incremented minimum supported Rust version to 1.84. +- [BREAKING] Moved `generated` module from `miden-proving-service-client` crate to `tx_prover::generated` hierarchy (#1102). +- Added an endpoint to the `miden-proving-service` to update the workers (#1107). +- Renamed the protobuf file of the transaction prover to `tx_prover.proto` (#1110). +- [BREAKING] Renamed `AccountData` to `AccountFile` (#1116). +- Implemented the transaction batch prover in Rust (#1112). +- Added the `is_non_fungible_asset_issued` procedure to the `miden` library (#1125). +- [BREAKING] Refactored config file for `miden-proving-service` to be based on environment variables (#1120). +- Added block number as a public input to the transaction kernel. Updated prologue logic to validate that the global input block number is consistent with the commitment block number (#1126). +- Made `NoteFile` and `AccountFile` more consistent (#1133). + ## 0.7.2 (2025-01-28) - `miden-objects` crate only ### Changes diff --git a/Cargo.lock b/Cargo.lock index 0995ce608..66ce5c284 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -24,10 +24,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", - "zerocopy", + "zerocopy 0.7.35", ] [[package]] @@ -186,27 +186,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", -] - -[[package]] -name = "atomic" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d818003e740b63afc82337e3160717f4f63078720a810b7b903e70a5d1d2994" -dependencies = [ - "bytemuck", + "syn 2.0.98", ] [[package]] @@ -404,9 +395,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "bytemuck" @@ -422,9 +413,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "cast" @@ -434,9 +425,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.10" +version = "1.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum =
"13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +checksum = "c7777341816418c02e033934a09f20dc0ccaf65a5201ef8a450ae0105a573fda" dependencies = [ "jobserver", "libc", @@ -504,12 +495,12 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.27" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" dependencies = [ "clap_builder", - "clap_derive 4.5.24", + "clap_derive 4.5.28", ] [[package]] @@ -539,14 +530,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -566,9 +557,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "cmake" -version = "0.1.52" +version = "0.1.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" dependencies = [ "cc", ] @@ -603,9 +594,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] @@ -628,7 +619,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.27", + "clap 4.5.28", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -723,12 +714,6 @@ dependencies = [ "libc", ] -[[package]] -name = "data-encoding" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e60eed09d8c01d3cee5b7d30acb059b76614c918fa0f992e0dd6eeb10daad6f" - [[package]] name = "debugid" version = "0.8.0" @@ -789,7 +774,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -839,7 +824,7 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -864,20 +849,6 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" -[[package]] -name = "figment" -version = "0.10.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" -dependencies = [ - "atomic", - "pear", - "serde", - "toml", - "uncased", - "version_check", -] - [[package]] name = "findshlibs" version = "0.10.2" @@ -999,7 +970,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1070,10 +1041,22 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] 
+[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets 0.52.6", +] + [[package]] name = "gimli" version = "0.31.1" @@ -1212,9 +1195,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -1224,9 +1207,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.5.2" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", @@ -1423,7 +1406,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1491,12 +1474,6 @@ dependencies = [ "str_stack", ] -[[package]] -name = "inlinable_string" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" - [[package]] name = "ipnet" version = "2.11.0" @@ -1707,7 +1684,7 @@ dependencies = [ "proc-macro2", "quote", "regex-syntax 0.8.5", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -1734,9 +1711,9 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.5" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" dependencies = [ "hashbrown 0.15.2", ] @@ -1824,7 +1801,7 @@ dependencies = [ "miden-objects", "miden-processor", "miden-tx", - "rand_chacha", + "rand_chacha 0.3.1", "serde", "serde_json", ] @@ -1851,17 +1828,17 @@ dependencies = [ [[package]] name = "miden-crypto" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06bf3ad2a85f3f8f0da73b6357c77e482b1ceb36cacda8a2d85caae3bd1f702" +checksum = "1945918276152bd9b8e8434643ad24d4968e075b68a5ed03927b53ac75490a79" dependencies = [ "blake3", "cc", "glob", "num", "num-complex", - "rand", - "rand_core", + "rand 0.8.5", + "rand_core 0.6.4", "sha3", "thiserror 2.0.11", "winter-crypto", @@ -1880,7 +1857,7 @@ dependencies = [ [[package]] name = "miden-lib" -version = "0.7.0" +version = "0.8.0" dependencies = [ "miden-assembly", "miden-objects", @@ -1914,7 +1891,7 @@ dependencies = [ "supports-color", "supports-hyperlinks", "supports-unicode", - "syn 2.0.96", + "syn 2.0.98", "terminal_size 0.3.0", "textwrap", "thiserror 2.0.11", @@ -1930,17 +1907,17 @@ checksum = "86a905f3ea65634dd4d1041a4f0fd0a3e77aa4118341d265af1a94339182222f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "miden-objects" -version = "0.7.2" +version = "0.8.0" dependencies = [ "anyhow", "assert_matches", "criterion", - "getrandom", + "getrandom 0.2.15", "log", "miden-assembly", "miden-core", @@ -1949,7 +1926,7 @@ 
dependencies = [ "miden-processor", "miden-verifier", "pprof", - "rand", + "rand 0.8.5", "rand_xoshiro", "rstest", "semver 1.0.25", @@ -1988,13 +1965,12 @@ dependencies = [ [[package]] name = "miden-proving-service" -version = "0.7.0" +version = "0.8.0" dependencies = [ "async-trait", "axum", "bytes", - "clap 4.5.27", - "figment", + "clap 4.5.28", "miden-lib", "miden-objects", "miden-tx", @@ -2017,7 +1993,6 @@ dependencies = [ "thiserror 2.0.11", "tokio", "tokio-stream", - "toml", "tonic", "tonic-build", "tonic-health", @@ -2030,10 +2005,10 @@ dependencies = [ [[package]] name = "miden-proving-service-client" -version = "0.7.0" +version = "0.8.0" dependencies = [ "async-trait", - "getrandom", + "getrandom 0.2.15", "miden-objects", "miden-tx", "miette", @@ -2058,7 +2033,7 @@ dependencies = [ [[package]] name = "miden-tx" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "assert_matches", @@ -2070,12 +2045,28 @@ dependencies = [ "miden-prover", "miden-tx", "miden-verifier", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "thiserror 2.0.11", "winter-maybe-async", ] +[[package]] +name = "miden-tx-batch-prover" +version = "0.8.0" +dependencies = [ + "anyhow", + "miden-core", + "miden-crypto", + "miden-lib", + "miden-objects", + "miden-processor", + "miden-tx", + "rand 0.8.5", + "thiserror 2.0.11", + "winterfell", +] + [[package]] name = "miden-verifier" version = "0.12.0" @@ -2091,9 +2082,9 @@ dependencies = [ [[package]] name = "miette" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" +checksum = "1a955165f87b37fd1862df2a59547ac542c77ef6d17c666f619d1ad22dd89484" dependencies = [ "backtrace", "backtrace-ext", @@ -2111,13 +2102,13 @@ dependencies = [ [[package]] name = "miette-derive" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" +checksum = "bf45bf44ab49be92fd1227a3be6fc6f617f1a337c06af54981048574d8783147" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2142,7 +2133,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] @@ -2154,9 +2145,9 @@ checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" [[package]] name = "native-tls" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +checksum = "0dab59f8e050d5df8e4dd87d9206fb6f65a483e20ac9fda365ade4fab353196c" dependencies = [ "libc", "log", @@ -2249,7 +2240,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2314,9 +2305,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "oorandom" @@ -2326,9 +2317,9 @@ checksum = 
"b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "openssl" -version = "0.10.68" +version = "0.10.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6" dependencies = [ "bitflags 2.8.0", "cfg-if", @@ -2347,7 +2338,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2358,9 +2349,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-sys" -version = "0.9.104" +version = "0.9.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc" dependencies = [ "cc", "libc", @@ -2432,7 +2423,7 @@ dependencies = [ "glob", "opentelemetry", "percent-encoding", - "rand", + "rand 0.8.5", "serde_json", "thiserror 1.0.69", "tokio", @@ -2487,29 +2478,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "pear" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" -dependencies = [ - "inlinable_string", - "pear_codegen", - "yansi", -] - -[[package]] -name = "pear_codegen" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" -dependencies = [ - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn 2.0.96", -] - [[package]] name = "percent-encoding" version = "2.3.1" @@ -2528,31 +2496,31 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2648,7 +2616,7 @@ dependencies = [ "pingora-runtime", "pingora-timeout", "prometheus", - "rand", + "rand 0.8.5", "regex", "serde", "serde_yaml", @@ -2733,7 +2701,7 @@ dependencies = [ "pingora-http", "pingora-ketama", "pingora-runtime", - "rand", + "rand 0.9.0", "tokio", ] @@ -2746,7 +2714,7 @@ dependencies = [ "arrayvec", "hashbrown 0.15.2", "parking_lot", - "rand", + "rand 0.9.0", ] [[package]] @@ -2794,7 +2762,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "31a7c445ca224630961045684201e3cf8da9af0b01f286ed54ff8b2403aaabff" dependencies = [ "once_cell", - "rand", + "rand 0.8.5", "thread_local", "tokio", ] @@ -2875,7 +2843,7 @@ version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ - "zerocopy", + "zerocopy 0.7.35", ] [[package]] @@ -2891,7 +2859,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" dependencies = [ "proc-macro2", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -2936,19 +2904,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "proc-macro2-diagnostics" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.96", - "version_check", - "yansi", -] - [[package]] name = "prometheus" version = "0.13.4" @@ -2990,7 +2945,7 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.96", + "syn 2.0.98", "tempfile", ] @@ -3004,14 +2959,14 @@ dependencies = [ "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "prost-reflect" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e92b959d24e05a3e2da1d0beb55b48bc8a97059b8336ea617780bd6addbbfb5a" +checksum = "a7b318f733603136dcc61aa9e77c928d67f87d2436c34ec052ba3f1b5ca219de" dependencies = [ "logos", "miette", @@ -3087,8 +3042,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.0", + "zerocopy 0.8.17", ] [[package]] @@ -3098,7 +3064,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.0", ] [[package]] @@ -3107,7 +3083,17 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", +] + +[[package]] +name = "rand_core" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff" +dependencies = [ + "getrandom 0.3.1", + "zerocopy 0.8.17", ] [[package]] @@ -3116,7 +3102,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3154,7 +3140,7 @@ 
version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom", + "getrandom 0.2.15", "libredox", "thiserror 1.0.69", ] @@ -3270,7 +3256,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom", + "getrandom 0.2.15", "libc", "spin", "untrusted", @@ -3325,7 +3311,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.96", + "syn 2.0.98", "unicode-ident", ] @@ -3378,9 +3364,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.21" +version = "0.23.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" +checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" dependencies = [ "once_cell", "rustls-pki-types", @@ -3400,9 +3386,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" [[package]] name = "rustls-webpki" @@ -3423,7 +3409,7 @@ checksum = "a44822b10c095e574869de2b891e40c724fef42cadaea040d1cd3bdbb13d36a5" dependencies = [ "backtrace", "crossbeam-channel", - "rand", + "rand 0.8.5", "trackable 0.2.24", ] @@ -3436,7 +3422,7 @@ dependencies = [ "crossbeam-channel", "hostname", "percent-encoding", - "rand", + "rand 0.8.5", "rustracing", "thrift_codec", "trackable 0.2.24", @@ -3450,9 +3436,9 @@ checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "same-file" @@ -3548,14 +3534,14 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] name = "serde_json" -version = "1.0.137" +version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" +checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "indexmap 2.7.1", "itoa", @@ -3620,11 +3606,11 @@ dependencies = [ [[package]] name = "sfv" -version = "0.9.4" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27daf6ed3fc7ffd5ea3ce9f684fe351c47e50f2fdbb6236e2bad0b440dbe408" +checksum = "3fa1f336066b758b7c9df34ed049c0e693a426afe2b27ff7d5b14f410ab1a132" dependencies = [ - "data-encoding", + "base64", "indexmap 2.7.1", "rust_decimal", ] @@ -3665,9 +3651,9 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -3720,12 +3706,11 @@ checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb" [[package]] name = "string_cache" 
-version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +checksum = "938d512196766101d333398efde81bc1f37b00cb42c2f8350e5df639f040bbbe" dependencies = [ "new_debug_unreachable", - "once_cell", "parking_lot", "phf_shared", "precomputed-hash", @@ -3771,7 +3756,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -3803,9 +3788,9 @@ checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "symbolic-common" -version = "12.13.3" +version = "12.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13a4dfe4bbeef59c1f32fc7524ae7c95b9e1de5e79a43ce1604e181081d71b0c" +checksum = "b6189977df1d6ec30c920647919d76f29fb8d8f25e8952e835b0fcda25e8f792" dependencies = [ "debugid", "memmap2", @@ -3815,9 +3800,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.13.3" +version = "12.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cf6a95abff97de4d7ff3473f33cacd38f1ddccad5c1feab435d6760300e3b6" +checksum = "d234917f7986498e7f62061438cee724bafb483fe84cfbe2486f68dce48240d7" dependencies = [ "rustc-demangle", "symbolic-common", @@ -3836,9 +3821,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.96" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -3862,7 +3847,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -3894,13 +3879,13 @@ checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" [[package]] name = "tempfile" -version = "3.15.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" dependencies = [ "cfg-if", "fastrand", - "getrandom", + "getrandom 0.3.1", "once_cell", "rustix", "windows-sys 0.59.0", @@ -3983,7 +3968,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -3994,7 +3979,7 @@ checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4072,7 +4057,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4134,9 +4119,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -4155,9 +4140,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap 2.7.1", "serde", @@ -4207,7 +4192,7 @@ dependencies = [ "prost-build", "prost-types", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4245,9 +4230,9 @@ dependencies = [ [[package]] name = "tonic-web-wasm-client" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef5ca6e7bdd0042c440d36b6df97c1436f1d45871ce18298091f114004b1beb4" +checksum = "c191f6613df48874158b6af303313eadf25d1b7a534216b62a1f049d77cd2711" dependencies = [ "base64", "byteorder", @@ -4279,7 +4264,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand", + "rand 0.8.5", "slab", "tokio", "tokio-util", @@ -4352,7 +4337,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4482,15 +4467,6 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" -[[package]] -name = "uncased" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" -dependencies = [ - "version_check", -] - [[package]] name = "unicase" version = "2.8.1" @@ -4499,9 +4475,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11cd88e12b17c6494200a9c1b683a04fcac9573ed74cd1b62aeb2727c5592243" +checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" [[package]] name = "unicode-linebreak" @@ -4564,11 +4540,11 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.12.1" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b" +checksum = "ced87ca4be083373936a67f8de945faa23b6b42384bd5b64434850802c6dccd0" dependencies = [ - "getrandom", + "getrandom 0.3.1", ] [[package]] @@ -4623,6 +4599,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -4645,7 +4630,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "wasm-bindgen-shared", ] @@ -4680,7 +4665,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4789,7 +4774,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4800,7 +4785,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -4983,9 +4968,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.24" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] @@ -5042,7 +5027,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ce144fde121b98523bb8a6c15a311773e1d534d33c1cb47f5580bba9cff8e7" dependencies = [ "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] @@ -5066,7 +5051,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "226e4c455f6eb72f64ac6eeb7642df25e21ff2280a4f6b09db75392ad6b390ef" dependencies = [ - "rand", + "rand 0.8.5", "winter-utils", ] @@ -5092,6 +5077,26 @@ dependencies = [ "winter-utils", ] +[[package]] +name = "winterfell" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6bdcd01333bbf4a349d8d13f269281524bd6d1a36ae3a853187f0665bf1cfd4" +dependencies = [ + "winter-air", + "winter-prover", + "winter-verifier", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags 2.8.0", +] + [[package]] name = "write16" version = "1.0.0" @@ -5113,12 +5118,6 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - [[package]] name = "yoke" version = "0.7.5" @@ -5139,7 +5138,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "synstructure", ] @@ -5150,7 +5149,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "byteorder", - "zerocopy-derive", + "zerocopy-derive 0.7.35", +] + +[[package]] +name = "zerocopy" +version = "0.8.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713" +dependencies = [ + "zerocopy-derive 0.8.17", ] [[package]] @@ -5161,7 +5169,18 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", ] [[package]] @@ -5181,7 +5200,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", "synstructure", ] @@ -5210,7 +5229,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.96", + "syn 2.0.98", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 834db8127..1328ea6d2 100644 --- a/Cargo.toml +++ 
b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "bin/bench-tx", "bin/proving-service", + "crates/miden-tx-batch-prover", "crates/miden-lib", "crates/miden-objects", "crates/miden-proving-service-client", @@ -11,7 +12,7 @@ members = [ [workspace.package] edition = "2021" -rust-version = "1.82" +rust-version = "1.84" license = "MIT" authors = ["Miden contributors"] homepage = "https://polygon.technology/polygon-miden" @@ -36,11 +37,11 @@ lto = true assembly = { package = "miden-assembly", version = "0.12", default-features = false } assert_matches = { version = "1.5", default-features = false } miden-crypto = { version = "0.13", default-features = false } -miden-lib = { path = "crates/miden-lib", version = "0.7", default-features = false } -miden-objects = { path = "crates/miden-objects", version = "0.7", default-features = false } +miden-lib = { path = "crates/miden-lib", version = "0.8", default-features = false } +miden-objects = { path = "crates/miden-objects", version = "0.8", default-features = false } miden-prover = { version = "0.12", default-features = false } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { path = "crates/miden-tx", version = "0.7", default-features = false } +miden-tx = { path = "crates/miden-tx", version = "0.8", default-features = false } miden-verifier = { version = "0.12", default-features = false } rand = { version = "0.8", default-features = false } thiserror = { version = "2.0", default-features = false } diff --git a/README.md b/README.md index 2893c5a78..27b1a5575 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xPolygonMiden/miden-base/blob/main/LICENSE) [![test](https://github.com/0xPolygonMiden/miden-base/actions/workflows/test.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-base/actions/workflows/test.yml) [![build](https://github.com/0xPolygonMiden/miden-base/actions/workflows/build.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-base/actions/workflows/build.yml) -[![RUST_VERSION](https://img.shields.io/badge/rustc-1.82+-lightgray.svg)](https://www.rust-lang.org/tools/install) +[![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![GitHub Release](https://img.shields.io/github/release/0xPolygonMiden/miden-base)](https://github.com/0xPolygonMiden/miden-base/releases/) Description and core structures for the Miden Rollup protocol. @@ -23,7 +23,7 @@ If you want to join the technical discussion or learn more about the project, pl ## Status and features -Polygon Miden is currently on release v0.7. This is an early version of the protocol and its components. We expect to keep making changes (including breaking changes) to all components. +Polygon Miden is currently on release v0.8. This is an early version of the protocol and its components. We expect to keep making changes (including breaking changes) to all components. 
### Feature highlights diff --git a/bin/proving-service/.env b/bin/proving-service/.env new file mode 100644 index 000000000..10d674063 --- /dev/null +++ b/bin/proving-service/.env @@ -0,0 +1,13 @@ +MPS_HOST="0.0.0.0" +MPS_PORT="8082" +MPS_WORKERS_UPDATE_PORT="8083" +MPS_TIMEOUT_SECS="100" +MPS_CONNECTION_TIMEOUT_SECS="10" +MPS_MAX_QUEUE_ITEMS="10" +MPS_MAX_RETRIES_PER_REQUEST="1" +MPS_MAX_REQ_PER_SEC="5" +MPS_AVAILABLE_WORKERS_POLLING_INTERVAL_MS="20" +MPS_HEALTH_CHECK_INTERVAL_SECS="1" +MPS_PROMETHEUS_HOST="127.0.0.1" +MPS_PROMETHEUS_PORT="6192" +RUST_LOG="info" diff --git a/bin/proving-service/Cargo.toml b/bin/proving-service/Cargo.toml index 1f678626d..96f6a7af0 100644 --- a/bin/proving-service/Cargo.toml +++ b/bin/proving-service/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-proving-service" -version = "0.7.0" +version = "0.8.0" description = "Miden rollup proving service" readme = "README.md" keywords = ["miden", "proving", "service"] @@ -21,10 +21,9 @@ concurrent = ["miden-tx/concurrent"] [dependencies] async-trait = "0.1" -axum = {version = "0.7" } +axum = { version = "0.7" } bytes = "1.0" -clap = { version = "4.5", features = ["derive"] } -figment = { version = "0.10", features = ["toml", "env"] } +clap = { version = "4.5", features = ["derive", "env"] } miden-lib = { workspace = true, default-features = false } miden-objects = { workspace = true, default-features = false, features = ["std"] } miden-tx = { workspace = true, default-features = false, features = ["std"] } @@ -43,9 +42,8 @@ serde = { version = "1.0", features = ["derive"] } serde_qs = { version = "0.13" } tokio = { version = "1.38", features = ["full"] } tokio-stream = { version = "0.1", features = [ "net" ]} -toml = { version = "0.8" } thiserror = { workspace = true } -tonic = { version = "0.12", default-features = false, features = ["prost", "codegen", "transport"] } +tonic = { version = "0.12", default-features = false, features = ["codegen", "prost", "transport"] } tonic-health = { version = "0.12" } tonic-web = { version = "0.12" } tracing = { version = "0.1" } diff --git a/bin/proving-service/README.md b/bin/proving-service/README.md index eeba9021d..73c342935 100644 --- a/bin/proving-service/README.md +++ b/bin/proving-service/README.md @@ -28,64 +28,49 @@ This will spawn a worker using the hosts and ports defined in the command option ## Proxy -First, you need to create a configuration file for the proxy with: +To start the proxy service, you will need to run: ```bash -miden-proving-service init -``` - -This will create the `miden-proving-service.toml` file in your current directory. This file will hold the configuration for the proxy. You can modify the configuration by changing the host and ports of the services, the maximum size of the queue, among other options. 
An example configuration is: - -```toml -# Host of the proxy server -host = "0.0.0.0" -# Port of the proxy server -port = 8082 -# Timeout for a new request to be completed -timeout_secs = 100 -# Timeout for establishing a connection to the worker -connection_timeout_secs = 10 -# Maximum amount of items that a queue can handle -max_queue_items = 10 -# Maximum amount of retries that a request can take -max_retries_per_request = 1 -# Maximum amount of requests that a given IP address can make per second -max_req_per_sec = 5 -# Time to wait before checking the availability of workers -available_workers_polling_time_ms = 20 -# Interval to check the health of the workers -health_check_interval_secs = 1 -# Host of the metrics server -prometheus_host = "127.0.0.1" -# Port of the metrics server -prometheus_port = 6192 +miden-proving-service start-proxy [worker1] [worker2] ... [workerN] ``` -Then, to start the proxy service, you will need to run: +For example: ```bash -miden-proving-service start-proxy [worker1] [worker2] ... [workerN] +miden-proving-service start-proxy 0.0.0.0:8084 0.0.0.0:8085 ``` This command will start the proxy using the workers passed as arguments. The workers should be in the format `host:port`. If no workers are passed, the proxy will start without any workers and will not be able to handle any requests until one is added through the `miden-proving-service add-workers` command. -At the moment, when a worker added to the proxy stops working and can not connect to it for a request, the connection is marked as retriable meaning that the proxy will try reaching another worker. The number of retries is configurable via the `max_retries_per_request` value in the configuration file. +You can customize the proxy service by setting environment variables. The full list of available options can be found by running `miden-proving-service start-proxy --help`. + +An example `.env` file is provided in the crate's root directory. To use the variables from a file in any Unix-like operating system, you can run `source <path-to-env-file>`. + +At the moment, when a worker added to the proxy stops working and the proxy cannot connect to it for a request, the connection is marked as retriable, meaning that the proxy will try reaching another worker. The number of retries is configurable via the `MPS_MAX_RETRIES_PER_REQUEST` environment variable.
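+ +For example, to override the proxy port and the retry limit for a single run and then start the proxy with two workers (the variable names come from the example `.env` file), you could run something like: + +```bash +export MPS_PORT="8082" +export MPS_MAX_RETRIES_PER_REQUEST="2" +miden-proving-service start-proxy 0.0.0.0:8084 0.0.0.0:8085 +```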
## Updating workers on a running proxy To update the workers on a running proxy, two commands are provided: `add-workers` and `remove-workers`. These commands will update the workers on the proxy and will not require a restart. To use these commands, you will need to run: ```bash -miden-proving-service add-worker [worker1] [worker2] ... [workerN] -miden-proving-service remove-worker [worker1] [worker2] ... [workerN] +miden-proving-service add-workers --proxy-host <HOST> --proxy-update-workers-port <PORT> [worker1] [worker2] ... [workerN] +miden-proving-service remove-workers --proxy-host <HOST> --proxy-update-workers-port <PORT> [worker1] [worker2] ... [workerN] ``` For example: ```bash # To add 0.0.0.0:8085 and 200.58.70.4:50051 to the workers list: -miden-proving-service add-workers 0.0.0.0:8085 200.58.70.4:50051 +miden-proving-service add-workers --proxy-host 0.0.0.0 --proxy-update-workers-port 8083 0.0.0.0:8085 200.58.70.4:50051 # To remove 158.12.12.3:8080 and 122.122.6.6:50051 from the workers list: -miden-proving-service remove-workers 158.12.12.3:8080 122.122.6.6:50051 +miden-proving-service remove-workers --proxy-host 0.0.0.0 --proxy-update-workers-port 8083 158.12.12.3:8080 122.122.6.6:50051 +``` + +The `--proxy-host` and `--proxy-update-workers-port` flags are required to specify the proxy's host and the port where the proxy is listening for updates. The workers are passed as arguments in the format `host:port`. Both flags can also be set via the environment variables `MPS_HOST` and `MPS_WORKERS_UPDATE_PORT`, respectively. For example: + +```bash +export MPS_HOST="0.0.0.0" +export MPS_WORKERS_UPDATE_PORT="8083" +miden-proving-service add-workers 0.0.0.0:8085 ``` Note that, in order to update the workers, the proxy must be running on the same computer as the one where the command is executed, because the proxy checks that the client address is localhost to avoid security issues. @@ -120,7 +105,7 @@ If Docker is not an option, Jaeger can also be set up directly on your machine o ## Metrics -The proxy includes a service that exposes metrics to be consumed by [Prometheus](https://prometheus.io/docs/introduction/overview/). This service is always enabled and uses the host and port defined in the `miden-proving-service.toml` file. +The proxy includes a service that exposes metrics to be consumed by [Prometheus](https://prometheus.io/docs/introduction/overview/). This service is always enabled and uses the host and port defined in the `.env` file through the `MPS_PROMETHEUS_HOST` and `MPS_PROMETHEUS_PORT` variables. The metrics architecture works by having the proxy expose metrics at an endpoint (`/metrics`) in a format Prometheus can read. Prometheus periodically scrapes this endpoint, adds timestamps to the metrics, and stores them in its time-series database. Then, we can use tools like Grafana to query Prometheus and visualize these metrics in configurable dashboards. diff --git a/bin/proving-service/build.rs b/bin/proving-service/build.rs index 264d3dd2d..536ac1a1f 100644 --- a/bin/proving-service/build.rs +++ b/bin/proving-service/build.rs @@ -33,10 +33,11 @@ fn main() -> miette::Result<()> { // HELPER FUNCTIONS // ================================================================================================ -/// Copies all api.proto file from the root proto directory to the proto directory of this crate. +/// Copies the tx_prover.proto file from the root proto directory to the proto directory of this +/// crate. fn copy_proto_files() -> miette::Result<()> { - let src_file = format!("{REPO_PROTO_DIR}/api.proto"); - let dest_file = format!("{CRATE_PROTO_DIR}/api.proto"); + let src_file = format!("{REPO_PROTO_DIR}/tx_prover.proto"); + let dest_file = format!("{CRATE_PROTO_DIR}/tx_prover.proto"); fs::remove_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; fs::create_dir_all(CRATE_PROTO_DIR).into_diagnostic()?; @@ -50,14 +51,14 @@ fn compile_tonic_server_proto() -> miette::Result<()> { PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set")); let dst_dir = crate_root.join("src").join("generated"); - // Remove `api.rs` if it exists.
- fs::remove_file(dst_dir.join("api.rs")).into_diagnostic().ok(); + // Remove `tx_prover.rs` if it exists. + fs::remove_file(dst_dir.join("tx_prover.rs")).into_diagnostic().ok(); let out_dir = env::var("OUT_DIR").into_diagnostic()?; let file_descriptor_path = PathBuf::from(out_dir).join("file_descriptor_set.bin"); let proto_dir: PathBuf = CRATE_PROTO_DIR.into(); - let protos = &[proto_dir.join("api.proto")]; + let protos = &[proto_dir.join("tx_prover.proto")]; let includes = &[proto_dir]; let file_descriptors = protox::compile(protos, includes)?; diff --git a/bin/proving-service/proto/api.proto b/bin/proving-service/proto/tx_prover.proto similarity index 94% rename from bin/proving-service/proto/api.proto rename to bin/proving-service/proto/tx_prover.proto index 4555b326f..de39c685c 100644 --- a/bin/proving-service/proto/api.proto +++ b/bin/proving-service/proto/tx_prover.proto @@ -1,6 +1,6 @@ // Specification of the user facing gRPC API. syntax = "proto3"; -package api; +package tx_prover; service Api { rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {} diff --git a/bin/proving-service/src/commands/init.rs b/bin/proving-service/src/commands/init.rs deleted file mode 100644 index 81e2bda56..000000000 --- a/bin/proving-service/src/commands/init.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::{fs::File, io::Write}; - -use clap::Parser; - -use crate::{commands::ProxyConfig, utils::PROVING_SERVICE_CONFIG_FILE_NAME}; - -/// Creates a config file for the proxy. -#[derive(Debug, Parser)] -pub struct Init; - -impl Init { - /// Creates a config file for the proxy. - /// - /// This method will create a new config file names - /// [PROVING_SERVICE_CONFIG_FILE_NAME] in the current working directory with - /// default values. - pub fn execute(&self) -> Result<(), String> { - let mut current_dir = std::env::current_dir().map_err(|err| err.to_string())?; - current_dir.push(PROVING_SERVICE_CONFIG_FILE_NAME); - - if current_dir.exists() { - return Err(format!( - "The file \"{}\" already exists in the working directory.", - PROVING_SERVICE_CONFIG_FILE_NAME - ) - .to_string()); - } - - let cli_config = ProxyConfig::default(); - - let config_as_toml_string = toml::to_string_pretty(&cli_config) - .map_err(|err| format!("Error formatting config: {err}"))?; - - let mut file_handle = File::options() - .write(true) - .create_new(true) - .open(¤t_dir) - .map_err(|err| format!("Error opening the file: {err}"))?; - - file_handle - .write(config_as_toml_string.as_bytes()) - .map_err(|err| format!("Error writing to file: {err}"))?; - - println!("Config file successfully created at: {:?}", current_dir); - - Ok(()) - } -} diff --git a/bin/proving-service/src/commands/mod.rs b/bin/proving-service/src/commands/mod.rs index 42d6c11ba..52fe80b12 100644 --- a/bin/proving-service/src/commands/mod.rs +++ b/bin/proving-service/src/commands/mod.rs @@ -1,84 +1,62 @@ use clap::Parser; -use figment::{ - providers::{Format, Toml}, - Figment, -}; -use init::Init; use proxy::StartProxy; -use serde::{Deserialize, Serialize}; use tracing::instrument; use update_workers::{AddWorkers, RemoveWorkers, UpdateWorkers}; use worker::StartWorker; -use crate::utils::{MIDEN_PROVING_SERVICE, PROVING_SERVICE_CONFIG_FILE_NAME}; +use crate::utils::MIDEN_PROVING_SERVICE; -pub mod init; pub mod proxy; pub mod update_workers; pub mod worker; -/// Configuration of the proxy. -/// -/// It is stored in a TOML file, which will be created by the `init` command. -/// It allows manual modification of the configuration file. 
-#[derive(Debug, Serialize, Deserialize)] -pub struct ProxyConfig { - /// Host of the proxy. - pub host: String, - /// Port of the proxy. - pub port: u16, - /// Maximum time in seconds to complete the entire request. - pub timeout_secs: u64, +#[derive(Debug, Parser)] +pub(crate) struct ProxyConfig { + /// Interval in milliseconds at which the system polls for available workers to assign new + /// tasks. + #[clap(long, default_value = "20", env = "MPS_AVAILABLE_WORKERS_POLLING_INTERVAL_MS")] + pub(crate) available_workers_polling_interval_ms: u64, /// Maximum time in seconds to establish a connection. - pub connection_timeout_secs: u64, + #[clap(long, default_value = "10", env = "MPS_CONNECTION_TIMEOUT_SECS")] + pub(crate) connection_timeout_secs: u64, + /// Health check interval in seconds. + #[clap(long, default_value = "10", env = "MPS_HEALTH_CHECK_INTERVAL_SECS")] + pub(crate) health_check_interval_secs: u64, + /// Host of the proxy. + #[clap(long, default_value = "0.0.0.0", env = "MPS_HOST")] + pub(crate) host: String, /// Maximum number of items in the queue. - pub max_queue_items: usize, - /// Maximum number of retries per request. - pub max_retries_per_request: usize, + #[clap(long, default_value = "10", env = "MPS_MAX_QUEUE_ITEMS")] + pub(crate) max_queue_items: usize, /// Maximum number of requests per second per IP address. - pub max_req_per_sec: isize, - /// Time in milliseconds to poll available workers. - pub available_workers_polling_time_ms: u64, - /// Health check interval in seconds. - pub health_check_interval_secs: u64, - /// Prometheus metrics host. - pub prometheus_host: String, - /// Prometheus metrics port. - pub prometheus_port: u16, -} - -impl Default for ProxyConfig { - fn default() -> Self { - Self { - host: "0.0.0.0".into(), - port: 8082, - timeout_secs: 100, - connection_timeout_secs: 10, - max_queue_items: 10, - max_retries_per_request: 1, - max_req_per_sec: 5, - available_workers_polling_time_ms: 20, - health_check_interval_secs: 1, - prometheus_host: "127.0.0.1".into(), - prometheus_port: 6192, - } - } + #[clap(long, default_value = "5", env = "MPS_MAX_REQ_PER_SEC")] + pub(crate) max_req_per_sec: isize, + /// Maximum number of retries per request. + #[clap(long, default_value = "1", env = "MPS_MAX_RETRIES_PER_REQUEST")] + pub(crate) max_retries_per_request: usize, + /// Metrics configurations. + #[clap(flatten)] + pub(crate) metrics_config: MetricsConfig, + /// Port of the proxy. + #[clap(long, default_value = "8082", env = "MPS_PORT")] + pub(crate) port: u16, + /// Maximum time in seconds allowed for a request to complete. Once exceeded, the request is + /// aborted. + #[clap(long, default_value = "100", env = "MPS_TIMEOUT_SECS")] + pub(crate) timeout_secs: u64, + /// Worker update service port. + #[clap(long, default_value = "8083", env = "MPS_WORKERS_UPDATE_PORT")] + pub(crate) workers_update_port: u16, } -impl ProxyConfig { - /// Loads config file from current directory and default filename and returns it - /// - /// This function will look for the configuration file with the name defined at the - /// [PROVING_SERVICE_CONFIG_FILE_NAME] constant in the current directory.
- pub(crate) fn load_config_from_file() -> Result<ProxyConfig, String> { - let mut current_dir = std::env::current_dir().map_err(|err| err.to_string())?; - current_dir.push(PROVING_SERVICE_CONFIG_FILE_NAME); - let config_path = current_dir.as_path(); - - Figment::from(Toml::file(config_path)) - .extract() - .map_err(|err| format!("Failed to load {} config file: {err}", config_path.display())) - } +#[derive(Debug, Parser)] +pub(crate) struct MetricsConfig { + /// Prometheus metrics host. + #[clap(long, default_value = "0.0.0.0", env = "MPS_PROMETHEUS_HOST")] + pub(crate) prometheus_host: String, + /// Prometheus metrics port. + #[clap(long, default_value = "9090", env = "MPS_PROMETHEUS_PORT")] + pub(crate) prometheus_port: u16, } /// Root CLI struct @@ -97,23 +75,17 @@ pub struct Cli { /// CLI actions #[derive(Debug, Parser)] pub enum Command { - /// Creates a config file for the proxy. - /// - /// This method will create a new config file in the current working directory with default - /// values. The file will be named as defined in the - /// [PROVING_SERVICE_CONFIG_FILE_NAME] constant. - Init(Init), /// Starts the workers with the configuration defined in the command. StartWorker(StartWorker), - /// Starts the proxy defined in the config file. + /// Starts the proxy. StartProxy(StartProxy), /// Adds workers to the proxy. /// - /// This method will make a request to the proxy defined in the config file to add workers. + /// This command will make a request to the proxy to add the specified workers. AddWorkers(AddWorkers), /// Removes workers from the proxy. /// - /// This method will make a request to the proxy defined in the config file to remove workers. + /// This command will make a request to the proxy to remove the specified workers. RemoveWorkers(RemoveWorkers), } @@ -125,10 +97,6 @@ impl Cli { // For the `StartWorker` command, we need to create a new runtime and run the worker Command::StartWorker(worker_init) => worker_init.execute().await, Command::StartProxy(proxy_init) => proxy_init.execute().await, - Command::Init(init) => { - // Init does not require async, so run directly - init.execute() - }, Command::AddWorkers(update_workers) => { let update_workers: UpdateWorkers = update_workers.clone().into(); update_workers.execute().await diff --git a/bin/proving-service/src/commands/proxy.rs b/bin/proving-service/src/commands/proxy.rs index 60d246148..40e62bba7 100644 --- a/bin/proving-service/src/commands/proxy.rs +++ b/bin/proving-service/src/commands/proxy.rs @@ -4,17 +4,19 @@ use pingora::{ lb::Backend, prelude::{background_service, Opt}, server::Server, + services::listening::Service, }; use pingora_proxy::http_proxy_service; use tracing::warn; +use super::ProxyConfig; use crate::{ - error::TxProverServiceError, - proxy::{LoadBalancer, LoadBalancerState}, + error::ProvingServiceError, + proxy::{update_workers::LoadBalancerUpdateService, LoadBalancer, LoadBalancerState}, utils::MIDEN_PROVING_SERVICE, }; -/// Starts the proxy defined in the config file. +/// Starts the proxy. /// /// Example: `miden-proving-service start-proxy 0.0.0.0:8080 127.0.0.1:9090` #[derive(Debug, Parser)] @@ -24,17 +26,20 @@ pub struct StartProxy { /// Example: `127.0.0.1:8080 192.168.1.1:9090` #[clap(value_name = "WORKERS")] workers: Vec<String>, + /// Proxy configurations. + #[clap(flatten)] + proxy_config: ProxyConfig, } impl StartProxy { - /// Starts the proxy defined in the config file. + /// Starts the proxy using the configuration defined in the command.
     ///
-    /// This method will first read the config file to get the parameters for the proxy. It will
-    /// then start a proxy with each worker passed as command argument as a backend.
+    /// This method will start a proxy with each worker passed as a command argument as a backend,
+    /// using the configuration passed as command options or the equivalent environment
+    /// variables.
     ///
     /// # Errors
     /// Returns an error in the following cases:
-    /// - The config file cannot be read.
     /// - The backend cannot be created.
     /// - The Pingora configuration fails.
     /// - The server cannot be started.
@@ -43,32 +48,40 @@ impl StartProxy {
         let mut server = Server::new(Some(Opt::default())).map_err(|err| err.to_string())?;
         server.bootstrap();
 
-        let proxy_config = super::ProxyConfig::load_config_from_file()?;
+        println!("Starting proxy with workers: {:?}", self.workers);
 
         let workers = self
             .workers
             .iter()
-            .map(|worker| Backend::new(worker).map_err(TxProverServiceError::BackendCreationFailed))
-            .collect::<Result<Vec<Backend>, TxProverServiceError>>()?;
+            .map(|worker| Backend::new(worker).map_err(ProvingServiceError::BackendCreationFailed))
+            .collect::<Result<Vec<Backend>, ProvingServiceError>>()?;
 
         if workers.is_empty() {
             warn!("Starting the proxy without any workers");
         }
 
-        let worker_lb = LoadBalancerState::new(workers, &proxy_config).await?;
+        let worker_lb = LoadBalancerState::new(workers, &self.proxy_config).await?;
 
         let health_check_service = background_service("health_check", worker_lb);
 
+        let worker_lb = health_check_service.task();
+
+        let updater_service = LoadBalancerUpdateService::new(worker_lb.clone());
+
+        let mut update_workers_service =
+            Service::new("update_workers".to_string(), updater_service);
+        update_workers_service.add_tcp(
+            format!("{}:{}", self.proxy_config.host.clone(), self.proxy_config.workers_update_port)
+                .as_str(),
+        );
+
         // Set up the load balancer
         let mut lb = http_proxy_service(&server.configuration, LoadBalancer(worker_lb));
 
-        let proxy_host = proxy_config.host;
-        let proxy_port = proxy_config.port.to_string();
-        lb.add_tcp(format!("{}:{}", proxy_host, proxy_port).as_str());
+        lb.add_tcp(format!("{}:{}", &self.proxy_config.host, self.proxy_config.port).as_str());
 
         let logic = lb
             .app_logic_mut()
-            .ok_or(TxProverServiceError::PingoraConfigFailed("app logic not found".to_string()))?;
+            .ok_or(ProvingServiceError::PingoraConfigFailed("app logic not found".to_string()))?;
         let mut http_server_options = HttpServerOptions::default();
 
         // Enable HTTP/2 for plaintext
@@ -79,11 +92,17 @@ impl StartProxy {
         let mut prometheus_service_http =
            pingora::services::listening::Service::prometheus_http_service();
         prometheus_service_http.add_tcp(
-            format!("{}:{}", proxy_config.prometheus_host, proxy_config.prometheus_port).as_str(),
+            format!(
+                "{}:{}",
+                self.proxy_config.metrics_config.prometheus_host,
+                self.proxy_config.metrics_config.prometheus_port
+            )
+            .as_str(),
         );
 
         server.add_service(prometheus_service_http);
         server.add_service(health_check_service);
+        server.add_service(update_workers_service);
         server.add_service(lb);
         tokio::task::spawn_blocking(|| server.run_forever())
             .await
diff --git a/bin/proving-service/src/commands/update_workers.rs b/bin/proving-service/src/commands/update_workers.rs
index 6bde5a532..dfe5c3dcf 100644
--- a/bin/proving-service/src/commands/update_workers.rs
+++ b/bin/proving-service/src/commands/update_workers.rs
@@ -2,15 +2,23 @@ use clap::Parser;
 use reqwest::Client;
 use serde::{Deserialize, Serialize};
 
-use crate::commands::ProxyConfig;
-
 // ADD WORKERS
 // ================================================================================================
 
 /// Add workers to the proxy
 #[derive(Debug, Parser, Clone, Serialize, Deserialize)]
 pub struct AddWorkers {
+    /// Workers to be added to the proxy.
+    ///
+    /// The workers are passed as host:port strings.
+    #[clap(value_name = "WORKERS")]
     workers: Vec<String>,
+    /// Host of the proxy.
+    #[clap(long, default_value = "0.0.0.0", env = "MPS_HOST")]
+    proxy_host: String,
+    /// Port of the proxy endpoint to update workers.
+    #[clap(long, default_value = "8083", env = "MPS_WORKERS_UPDATE_PORT")]
+    proxy_update_workers_port: u64,
 }
 
 // REMOVE WORKERS
@@ -19,7 +27,16 @@ pub struct AddWorkers {
 /// Remove workers from the proxy
 #[derive(Debug, Parser, Clone, Serialize, Deserialize)]
 pub struct RemoveWorkers {
+    /// Workers to be removed from the proxy.
+    ///
+    /// The workers are passed as host:port strings.
     workers: Vec<String>,
+    /// Host of the proxy.
+    #[clap(long, default_value = "0.0.0.0", env = "MPS_HOST")]
+    proxy_host: String,
+    /// Port of the proxy endpoint to update workers.
+    #[clap(long, default_value = "8083", env = "MPS_WORKERS_UPDATE_PORT")]
+    proxy_update_workers_port: u64,
 }
 
 // UPDATE WORKERS
@@ -37,22 +54,21 @@ pub enum Action {
 pub struct UpdateWorkers {
     pub action: Action,
     pub workers: Vec<String>,
+    pub proxy_host: String,
+    pub proxy_update_workers_port: u64,
 }
 
 impl UpdateWorkers {
-    /// Makes a requests to the proxy to update the workers.
+    /// Makes a request to the update-workers endpoint to update the workers.
     ///
     /// It works by sending a GET request to the proxy with the query parameters. The query
     /// parameters are serialized from the struct fields.
     ///
-    /// This method will work only if the proxy is running and the user is in the same computer as
-    /// the proxy, since the proxy checks for the source IP address and checks that the sender is
-    /// localhost.
+    /// It uses the proxy host and port defined in the env vars or passed as parameters.
     ///
     /// The request will return the new number of workers in the X-Worker-Count header.
     ///
     /// # Errors
-    /// - If a tokio runtime cannot be created.
     /// - If the query parameters cannot be serialized.
     /// - If the request fails.
     /// - If the status code is not successful.
@@ -64,11 +80,11 @@
 
         println!("Action: {:?}, with workers: {:?}", self.action, self.workers);
 
-        // Get the proxy url from the configuration file.
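For orientation, a hedged sketch of the client side of the protocol documented above: serialize the struct fields into query parameters with `serde_qs`, issue a GET request against the update-workers endpoint, and read the new worker count back from the `X-Worker-Count` header. The host, port, and payload shape below are illustrative; the real command builds an HTTP/2 client and uses the `Action` enum rather than a string:

```rust
use reqwest::Client;
use serde::Serialize;

// Illustrative stand-in for the UpdateWorkers query payload.
#[derive(Serialize)]
struct UpdateWorkersDemo {
    action: String,
    workers: Vec<String>,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let update = UpdateWorkersDemo {
        action: "Add".to_string(),
        workers: vec!["127.0.0.1:8080".to_string()],
    };

    // Serialize the struct fields into query parameters.
    let query_params = serde_qs::to_string(&update)?;
    let url = format!("http://0.0.0.0:8083?{query_params}");

    // Send the GET request and read the new worker count from the response header.
    let response = Client::new().get(url).send().await?;
    let worker_count = response
        .headers()
        .get("X-Worker-Count")
        .and_then(|value| value.to_str().ok())
        .unwrap_or("unknown");
    println!("workers after update: {worker_count}");
    Ok(())
}
```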
- let proxy_config = ProxyConfig::load_config_from_file()?; - // Create the full URL - let url = format!("http://{}:{}?{}", proxy_config.host, proxy_config.port, query_params); + let url = format!( + "http://{}:{}?{}", + self.proxy_host, self.proxy_update_workers_port, query_params + ); // Create an HTTP/2 client let client = Client::builder() @@ -106,6 +122,8 @@ impl From for UpdateWorkers { UpdateWorkers { action: Action::Remove, workers: remove_workers.workers, + proxy_host: remove_workers.proxy_host, + proxy_update_workers_port: remove_workers.proxy_update_workers_port, } } } @@ -115,6 +133,8 @@ impl From for UpdateWorkers { UpdateWorkers { action: Action::Add, workers: add_workers.workers, + proxy_host: add_workers.proxy_host, + proxy_update_workers_port: add_workers.proxy_update_workers_port, } } } diff --git a/bin/proving-service/src/error.rs b/bin/proving-service/src/error.rs index d69136213..019bb3ef6 100644 --- a/bin/proving-service/src/error.rs +++ b/bin/proving-service/src/error.rs @@ -5,7 +5,7 @@ use thiserror::Error; // ================================================================================================ #[derive(Debug, Error)] -pub enum TxProverServiceError { +pub enum ProvingServiceError { #[error("invalid uri {1}")] InvalidURI(#[source] InvalidUri, String), #[error("failed to connect to worker {1}")] @@ -14,10 +14,12 @@ pub enum TxProverServiceError { BackendCreationFailed(#[source] Box), #[error("failed to setup pingora: {0}")] PingoraConfigFailed(String), + #[error("failed to parse int: {0}")] + ParseError(#[from] std::num::ParseIntError), } -impl From for String { - fn from(err: TxProverServiceError) -> Self { +impl From for String { + fn from(err: ProvingServiceError) -> Self { err.to_string() } } diff --git a/bin/proving-service/src/generated/mod.rs b/bin/proving-service/src/generated/mod.rs index 78397c954..86e50a776 100644 --- a/bin/proving-service/src/generated/mod.rs +++ b/bin/proving-service/src/generated/mod.rs @@ -2,9 +2,9 @@ use miden_objects::transaction::ProvenTransaction; use miden_tx::utils::{Deserializable, DeserializationError, Serializable}; #[rustfmt::skip] -pub mod api; +pub mod tx_prover; -pub use api::*; +pub use tx_prover::*; // CONVERSIONS // ================================================================================================ diff --git a/bin/proving-service/src/generated/api.rs b/bin/proving-service/src/generated/tx_prover.rs similarity index 97% rename from bin/proving-service/src/generated/api.rs rename to bin/proving-service/src/generated/tx_prover.rs index fa143409e..73c46a4f5 100644 --- a/bin/proving-service/src/generated/api.rs +++ b/bin/proving-service/src/generated/tx_prover.rs @@ -116,9 +116,12 @@ pub mod api_client { ) })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction"); + let path = http::uri::PathAndQuery::from_static( + "/tx_prover.Api/ProveTransaction", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction")); + req.extensions_mut() + .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction")); self.inner.unary(req, path, codec).await } } @@ -220,7 +223,7 @@ pub mod api_server { } fn call(&mut self, req: http::Request) -> Self::Future { match req.uri().path() { - "/api.Api/ProveTransaction" => { + "/tx_prover.Api/ProveTransaction" => { #[allow(non_camel_case_types)] struct ProveTransactionSvc(pub Arc); impl< @@ -298,7 +301,7 @@ pub mod api_server { } } /// Generated 
gRPC service name
-    pub const SERVICE_NAME: &str = "api.Api";
+    pub const SERVICE_NAME: &str = "tx_prover.Api";
     impl tonic::server::NamedService for ApiServer {
         const NAME: &'static str = SERVICE_NAME;
     }
 }
diff --git a/bin/proving-service/src/proxy/health_check.rs b/bin/proving-service/src/proxy/health_check.rs
new file mode 100644
index 000000000..b4a916040
--- /dev/null
+++ b/bin/proving-service/src/proxy/health_check.rs
@@ -0,0 +1,86 @@
+use std::time::Duration;
+
+use axum::async_trait;
+use pingora::{prelude::sleep, server::ShutdownWatch, services::background::BackgroundService};
+use tonic::transport::Channel;
+use tonic_health::pb::health_client::HealthClient;
+use tracing::debug_span;
+
+use super::{
+    metrics::{WORKER_COUNT, WORKER_UNHEALTHY},
+    LoadBalancerState,
+};
+use crate::error::ProvingServiceError;
+
+/// Implement the BackgroundService trait for the LoadBalancer
+///
+/// A [BackgroundService] can be run as part of a Pingora application to add supporting logic that
+/// exists outside of the request/response lifecycle.
+///
+/// We use this implementation to periodically check the health of the workers and update the list
+/// of available workers.
+#[async_trait]
+impl BackgroundService for LoadBalancerState {
+    /// Starts the health check background service.
+    ///
+    /// This function is called when the Pingora server tries to start all the services. The
+    /// background service can return at any time or wait for the `shutdown` signal.
+    ///
+    /// The health check background service will periodically check the health of the workers
+    /// using the gRPC health check protocol. If a worker is not healthy, it will be removed from
+    /// the list of available workers.
+    ///
+    /// # Errors
+    /// - If the worker has an invalid URI.
+    async fn start(&self, _shutdown: ShutdownWatch) {
+        Box::pin(async move {
+            loop {
+                // Create a new span to perform the health check
+                let span = debug_span!("proxy:health_check");
+                let _guard = span.enter();
+
+                let mut workers = self.workers.write().await;
+                let initial_workers_len = workers.len();
+
+                // Perform health checks on workers and retain healthy ones
+                let healthy_workers = self.check_workers_health(workers.iter_mut()).await;
+
+                // Update the worker list with healthy workers
+                *workers = healthy_workers;
+
+                // Update the worker count and unhealthy worker count metrics
+                WORKER_COUNT.set(workers.len() as i64);
+                let unhealthy_workers = initial_workers_len - workers.len();
+                WORKER_UNHEALTHY.inc_by(unhealthy_workers as u64);
+
+                // Sleep for the defined interval before the next health check
+                sleep(self.health_check_interval).await;
+            }
+        })
+        .await;
+    }
+}
+
+// HELPERS
+// ================================================================================================
+
+/// Create a gRPC [HealthClient] for the given worker address.
+///
+/// # Errors
+/// - [ProvingServiceError::InvalidURI] if the worker address is invalid.
+/// - [ProvingServiceError::ConnectionFailed] if the connection to the worker fails.
+pub async fn create_health_check_client(
+    address: String,
+    connection_timeout: Duration,
+    total_timeout: Duration,
+) -> Result<HealthClient<Channel>, ProvingServiceError> {
+    let channel = Channel::from_shared(format!("http://{}", address))
+        .map_err(|err| ProvingServiceError::InvalidURI(err, address.clone()))?
+ .connect_timeout(connection_timeout) + .timeout(total_timeout) + .connect() + .await + .map_err(|err| ProvingServiceError::ConnectionFailed(err, address))?; + + Ok(HealthClient::new(channel)) +} diff --git a/bin/proving-service/src/proxy/mod.rs b/bin/proving-service/src/proxy/mod.rs index 7e770322d..ccd72c145 100644 --- a/bin/proving-service/src/proxy/mod.rs +++ b/bin/proving-service/src/proxy/mod.rs @@ -1,7 +1,5 @@ use std::{ collections::VecDeque, - future::Future, - pin::Pin, sync::{Arc, LazyLock}, time::{Duration, Instant}, }; @@ -11,22 +9,20 @@ use bytes::Bytes; use metrics::{ QUEUE_LATENCY, QUEUE_SIZE, RATE_LIMITED_REQUESTS, RATE_LIMIT_VIOLATIONS, REQUEST_COUNT, REQUEST_FAILURE_COUNT, REQUEST_LATENCY, REQUEST_RETRIES, WORKER_BUSY, WORKER_COUNT, - WORKER_REQUEST_COUNT, WORKER_UNHEALTHY, + WORKER_REQUEST_COUNT, }; use pingora::{ http::ResponseHeader, lb::Backend, prelude::*, protocols::Digest, - server::ShutdownWatch, - services::background::BackgroundService, upstreams::peer::{Peer, ALPN}, }; use pingora_core::{upstreams::peer::HttpPeer, Result}; use pingora_limits::rate::Rate; use pingora_proxy::{ProxyHttp, Session}; -use tokio::{sync::RwLock, time::sleep}; -use tracing::{debug_span, error, info, info_span, warn, Span}; +use tokio::sync::RwLock; +use tracing::{error, info, info_span, warn, Span}; use uuid::Uuid; use worker::Worker; @@ -35,19 +31,18 @@ use crate::{ update_workers::{Action, UpdateWorkers}, ProxyConfig, }, - error::TxProverServiceError, + error::ProvingServiceError, utils::{ create_queue_full_response, create_response_with_error_message, - create_too_many_requests_response, create_workers_updated_response, MIDEN_PROVING_SERVICE, + create_too_many_requests_response, MIDEN_PROVING_SERVICE, }, }; +mod health_check; pub mod metrics; +pub(crate) mod update_workers; mod worker; -/// Localhost address -const LOCALHOST_ADDR: &str = "127.0.0.1"; - // LOAD BALANCER STATE // ================================================================================================ @@ -60,8 +55,8 @@ pub struct LoadBalancerState { max_queue_items: usize, max_retries_per_request: usize, max_req_per_sec: isize, - available_workers_polling_time: Duration, - health_check_frequency: Duration, + available_workers_polling_interval: Duration, + health_check_interval: Duration, } impl LoadBalancerState { @@ -74,7 +69,7 @@ impl LoadBalancerState { pub async fn new( initial_workers: Vec, config: &ProxyConfig, - ) -> core::result::Result { + ) -> core::result::Result { let mut workers: Vec = Vec::with_capacity(initial_workers.len()); let connection_timeout = Duration::from_secs(config.connection_timeout_secs); @@ -96,10 +91,10 @@ impl LoadBalancerState { max_queue_items: config.max_queue_items, max_retries_per_request: config.max_retries_per_request, max_req_per_sec: config.max_req_per_sec, - available_workers_polling_time: Duration::from_millis( - config.available_workers_polling_time_ms, + available_workers_polling_interval: Duration::from_millis( + config.available_workers_polling_interval_ms, ), - health_check_frequency: Duration::from_secs(config.health_check_interval_secs), + health_check_interval: Duration::from_secs(config.health_check_interval_secs), }) } @@ -142,7 +137,7 @@ impl LoadBalancerState { pub async fn update_workers( &self, update_workers: UpdateWorkers, - ) -> std::result::Result<(), TxProverServiceError> { + ) -> std::result::Result<(), ProvingServiceError> { let mut workers = self.workers.write().await; info!("Current workers: {:?}", workers); @@ -151,7 +146,7 @@ impl 
LoadBalancerState { .iter() .map(|worker| Backend::new(worker)) .collect::, _>>() - .map_err(TxProverServiceError::BackendCreationFailed)?; + .map_err(ProvingServiceError::BackendCreationFailed)?; let mut native_workers = Vec::new(); @@ -191,66 +186,6 @@ impl LoadBalancerState { self.workers.read().await.iter().filter(|w| !w.is_available()).count() } - /// Handles the update workers request. - /// - /// # Behavior - /// - Reads the HTTP request from the session. - /// - If query parameters are present, attempts to parse them as an `UpdateWorkers` object. - /// - If the parsing fails, returns an error response. - /// - If successful, updates the list of workers by calling `update_workers`. - /// - If the update is successful, returns the count of available workers. - /// - /// # Errors - /// - If the HTTP request cannot be read. - /// - If the query parameters cannot be parsed. - /// - If the workers cannot be updated. - /// - If the response cannot be created. - pub async fn handle_update_workers_request( - &self, - session: &mut Session, - ) -> Option> { - let http_session = session.as_downstream_mut(); - - // Attempt to read the HTTP request - if let Err(err) = http_session.read_request().await { - let error_message = format!("Failed to read request: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - } - - // Extract and parse query parameters, if there are not any, return early to continue - // processing the request as a regular proving request. - let query_params = match http_session.req_header().as_ref().uri.query() { - Some(params) => params, - None => { - return None; - }, - }; - - // Parse the query parameters - let update_workers: Result = serde_qs::from_str(query_params); - let update_workers = match update_workers { - Ok(workers) => workers, - Err(err) => { - let error_message = format!("Failed to parse query parameters: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - }, - }; - - // Update workers and handle potential errors - if let Err(err) = self.update_workers(update_workers).await { - let error_message = format!("Failed to update workers: {}", err); - error!("{}", error_message); - return Some(create_response_with_error_message(session, error_message).await); - } - - // Successfully updated workers - info!("Workers updated successfully"); - let workers_count = self.num_workers().await; - Some(create_workers_updated_response(session, workers_count).await) - } - /// Check the health of the workers and returns a list of healthy workers. /// /// Performs a health check on each worker using the gRPC health check protocol. 
If a worker @@ -425,7 +360,7 @@ impl ProxyHttp for LoadBalancer { Some(addr) => addr.to_string(), None => { return create_response_with_error_message( - session, + session.as_downstream_mut(), "No socket address".to_string(), ) .await; @@ -434,13 +369,6 @@ impl ProxyHttp for LoadBalancer { info!("Client address: {:?}", client_addr); - // Special handling for localhost - if client_addr.contains(LOCALHOST_ADDR) { - if let Some(response) = self.0.handle_update_workers_request(session).await { - return response; - } - } - // Increment the request count REQUEST_COUNT.inc(); @@ -507,7 +435,7 @@ impl ProxyHttp for LoadBalancer { break; } info!("All workers are busy"); - tokio::time::sleep(self.0.available_workers_polling_time).await; + tokio::time::sleep(self.0.available_workers_polling_interval).await; } // Remove the request from the queue @@ -746,57 +674,3 @@ impl ProxyHttp for ProxyHttpDefaultImpl { unimplemented!("This is a dummy implementation, should not be called") } } - -/// Implement the BackgroundService trait for the LoadBalancer -/// -/// A [BackgroundService] can be run as part of a Pingora application to add supporting logic that -/// exists outside of the request/response lifecycle. -/// -/// We use this implementation to periodically check the health of the workers and update the list -/// of available workers. -impl BackgroundService for LoadBalancerState { - /// Starts the health check background service. - /// - /// This function is called when the Pingora server tries to start all the services. The - /// background service can return at anytime or wait for the `shutdown` signal. - /// - /// The health check background service will periodically check the health of the workers - /// using the gRPC health check protocol. If a worker is not healthy, it will be removed from - /// the list of available workers. - /// - /// # Errors - /// - If the worker has an invalid URI. 
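As a usage sketch for the `create_health_check_client` helper introduced in the new `health_check.rs` module above: open a channel to a worker and issue a standard gRPC health probe. The address and timeouts here are illustrative, and an empty service name queries the server's overall status per the gRPC health checking protocol:

```rust
use std::time::Duration;

use tonic_health::pb::HealthCheckRequest;

async fn probe_worker() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = create_health_check_client(
        "127.0.0.1:8080".to_string(),
        Duration::from_secs(10),  // connection timeout
        Duration::from_secs(100), // total request timeout
    )
    .await?;

    // `status` is an i32; 1 corresponds to `ServingStatus::Serving`.
    let status = client
        .check(HealthCheckRequest { service: String::new() })
        .await?
        .into_inner()
        .status;
    println!("worker health status: {status}");
    Ok(())
}
```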
- fn start<'life0, 'async_trait>( - &'life0 self, - _shutdown: ShutdownWatch, - ) -> Pin + ::core::marker::Send + 'async_trait>> - where - 'life0: 'async_trait, - Self: 'async_trait, - { - Box::pin(async move { - loop { - // Create a new spawn to perform the health check - let span = debug_span!("proxy:health_check"); - let _guard = span.enter(); - - let mut workers = self.workers.write().await; - let initial_workers_len = workers.len(); - - // Perform health checks on workers and retain healthy ones - let healthy_workers = self.check_workers_health(workers.iter_mut()).await; - - // Update the worker list with healthy workers - *workers = healthy_workers; - - // Update the worker count and worker unhealhy count metrics - WORKER_COUNT.set(workers.len() as i64); - let unhealthy_workers = initial_workers_len - workers.len(); - WORKER_UNHEALTHY.inc_by(unhealthy_workers as u64); - - // Sleep for the defined interval before the next health check - sleep(self.health_check_frequency).await; - } - }) - } -} diff --git a/bin/proving-service/src/proxy/update_workers.rs b/bin/proving-service/src/proxy/update_workers.rs new file mode 100644 index 000000000..cd19ecb99 --- /dev/null +++ b/bin/proving-service/src/proxy/update_workers.rs @@ -0,0 +1,155 @@ +use core::fmt; +use std::sync::Arc; + +use axum::async_trait; +use pingora::{ + apps::{HttpServerApp, HttpServerOptions}, + http::ResponseHeader, + protocols::{http::ServerSession, Stream}, + server::ShutdownWatch, +}; +use tracing::{error, info}; + +use super::LoadBalancerState; +use crate::{ + commands::update_workers::UpdateWorkers, + utils::{create_response_with_error_message, MIDEN_PROVING_SERVICE}, +}; + +/// The Load Balancer Updater Service. +/// +/// This service is responsible for updating the list of workers in the load balancer. +pub(crate) struct LoadBalancerUpdateService { + lb_state: Arc, + server_opts: HttpServerOptions, +} + +/// Manually implement Debug for LoadBalancerUpdateService. +/// [HttpServerOptions] does not implement Debug, so we cannot derive Debug for +/// [LoadBalancerUpdateService], which is needed for the tracing instrumentation. +impl fmt::Debug for LoadBalancerUpdateService { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("LBUpdaterService").field("lb_state", &self.lb_state).finish() + } +} + +impl LoadBalancerUpdateService { + pub(crate) fn new(lb_state: Arc) -> Self { + let mut server_opts = HttpServerOptions::default(); + server_opts.h2c = true; + + Self { lb_state, server_opts } + } +} + +#[async_trait] +impl HttpServerApp for LoadBalancerUpdateService { + /// Handles the update workers request. + /// + /// # Behavior + /// - Reads the HTTP request from the session. + /// - If query parameters are present, attempts to parse them as an `UpdateWorkers` object. + /// - If the parsing fails, returns an error response. + /// - If successful, updates the list of workers by calling `update_workers`. + /// - If the update is successful, returns the count of available workers. + /// + /// # Errors + /// - If the HTTP request cannot be read. + /// - If the query parameters cannot be parsed. + /// - If the workers cannot be updated. + /// - If the response cannot be created. 
+    #[tracing::instrument(target = MIDEN_PROVING_SERVICE, name = "lb_updater_service:process_new_http", skip(http))]
+    async fn process_new_http(
+        self: &Arc<Self>,
+        mut http: ServerSession,
+        _shutdown: &ShutdownWatch,
+    ) -> Option<Stream> {
+        match http.read_request().await {
+            Ok(res) => {
+                if !res {
+                    error!("Failed to read request header");
+                    create_response_with_error_message(
+                        &mut http,
+                        "Failed to read request header".to_string(),
+                    )
+                    .await
+                    .ok();
+                    return None;
+                }
+            },
+            Err(e) => {
+                error!("HTTP server fails to read from downstream: {e}");
+                create_response_with_error_message(
+                    &mut http,
+                    format!("HTTP server fails to read from downstream: {e}"),
+                )
+                .await
+                .ok();
+                return None;
+            },
+        }
+
+        info!("Received a new request to update workers");
+
+        // Extract and parse the query parameters; if there aren't any, return early.
+        let query_params = match http.req_header().as_ref().uri.query() {
+            Some(params) => params,
+            None => {
+                let error_message = "No query parameters provided".to_string();
+                error!("{}", error_message);
+                create_response_with_error_message(&mut http, error_message).await.ok();
+                return None;
+            },
+        };
+
+        let update_workers: Result<UpdateWorkers, serde_qs::Error> = serde_qs::from_str(query_params);
+        let update_workers = match update_workers {
+            Ok(workers) => workers,
+            Err(err) => {
+                let error_message = format!("Failed to parse query parameters: {}", err);
+                error!("{}", error_message);
+                create_response_with_error_message(&mut http, error_message).await.ok();
+                return None;
+            },
+        };
+
+        // Update workers and handle potential errors
+        if let Err(err) = self.lb_state.update_workers(update_workers).await {
+            let error_message = format!("Failed to update workers: {}", err);
+            error!("{}", error_message);
+            create_response_with_error_message(&mut http, error_message).await.ok();
+            return None;
+        }
+
+        create_workers_updated_response(&mut http, self.lb_state.num_workers().await)
+            .await
+            .ok();
+
+        info!("Successfully updated workers");
+
+        None
+    }
+
+    /// Provides the HTTP server options used to override default behavior. This function will be
+    /// called every time a new connection is processed.
+    fn server_options(&self) -> Option<&HttpServerOptions> {
+        Some(&self.server_opts)
+    }
+}
+
+// HELPERS
+// ================================================================================================
+
+/// Create a 200 response for updated workers
+///
+/// It will set the X-Worker-Count header to the number of workers.
+async fn create_workers_updated_response(
+    session: &mut ServerSession,
+    workers: usize,
+) -> pingora_core::Result<bool> {
+    let mut header = ResponseHeader::build(200, None)?;
+    header.insert_header("X-Worker-Count", workers.to_string())?;
+    session.set_keepalive(None);
+    session.write_response_header(Box::new(header)).await?;
+    Ok(true)
+}
diff --git a/bin/proving-service/src/proxy/worker.rs b/bin/proving-service/src/proxy/worker.rs
index 0d8cf3e4b..7d2d8b14f 100644
--- a/bin/proving-service/src/proxy/worker.rs
+++ b/bin/proving-service/src/proxy/worker.rs
@@ -7,7 +7,8 @@ use tonic_health::pb::{
 };
 use tracing::error;
 
-use crate::{error::TxProverServiceError, utils::create_health_check_client};
+use super::health_check::create_health_check_client;
+use crate::error::ProvingServiceError;
 
 // WORKER
 // ================================================================================================
@@ -27,13 +28,13 @@ impl Worker {
     /// Creates a new worker and a gRPC health check client for the given worker address.
/// /// # Errors - /// - Returns [TxProverServiceError::InvalidURI] if the worker address is invalid. - /// - Returns [TxProverServiceError::ConnectionFailed] if the connection to the worker fails. + /// - Returns [ProvingServiceError::InvalidURI] if the worker address is invalid. + /// - Returns [ProvingServiceError::ConnectionFailed] if the connection to the worker fails. pub async fn new( worker: Backend, connection_timeout: Duration, total_timeout: Duration, - ) -> Result { + ) -> Result { let health_check_client = create_health_check_client(worker.addr.to_string(), connection_timeout, total_timeout) .await?; diff --git a/bin/proving-service/src/utils.rs b/bin/proving-service/src/utils.rs index 915fbcc84..c28e429a8 100644 --- a/bin/proving-service/src/utils.rs +++ b/bin/proving-service/src/utils.rs @@ -1,5 +1,3 @@ -use std::time::Duration; - use opentelemetry::{trace::TracerProvider as _, KeyValue}; use opentelemetry_sdk::{ runtime, @@ -10,21 +8,16 @@ use opentelemetry_semantic_conventions::{ resource::{SERVICE_NAME, SERVICE_VERSION}, SCHEMA_URL, }; -use pingora::{http::ResponseHeader, Error, ErrorType}; +use pingora::{http::ResponseHeader, protocols::http::ServerSession, Error, ErrorType}; use pingora_proxy::Session; -use tonic::transport::Channel; -use tonic_health::pb::health_client::HealthClient; use tracing_subscriber::{layer::SubscriberExt, Registry}; -use crate::{error::TxProverServiceError, proxy::metrics::QUEUE_DROP_COUNT}; +use crate::proxy::metrics::QUEUE_DROP_COUNT; pub const MIDEN_PROVING_SERVICE: &str = "miden-proving-service"; const RESOURCE_EXHAUSTED_CODE: u16 = 8; -/// Name of the configuration file -pub const PROVING_SERVICE_CONFIG_FILE_NAME: &str = "miden-proving-service.toml"; - /// Initializes and configures the global tracing and telemetry system for the CLI, worker and /// proxy services. /// @@ -137,51 +130,16 @@ pub async fn create_too_many_requests_response( Ok(true) } -/// Create a 200 response for updated workers -/// -/// It will set the X-Worker-Count header to the number of workers. -pub async fn create_workers_updated_response( - session: &mut Session, - workers: usize, -) -> pingora_core::Result { - let mut header = ResponseHeader::build(200, None)?; - header.insert_header("X-Worker-Count", workers.to_string())?; - session.set_keepalive(None); - session.write_response_header(Box::new(header), true).await?; - Ok(true) -} - /// Create a 400 response with an error message /// /// It will set the X-Error-Message header to the error message. pub async fn create_response_with_error_message( - session: &mut Session, + session: &mut ServerSession, error_msg: String, ) -> pingora_core::Result { let mut header = ResponseHeader::build(400, None)?; header.insert_header("X-Error-Message", error_msg)?; session.set_keepalive(None); - session.write_response_header(Box::new(header), true).await?; + session.write_response_header(Box::new(header)).await?; Ok(true) } - -/// Create a gRPC [HealthClient] for the given worker address. -/// -/// # Errors -/// - [TxProverServiceError::InvalidURI] if the worker address is invalid. -/// - [TxProverServiceError::ConnectionFailed] if the connection to the worker fails. -pub async fn create_health_check_client( - address: String, - connection_timeout: Duration, - total_timeout: Duration, -) -> Result, TxProverServiceError> { - let channel = Channel::from_shared(format!("http://{}", address)) - .map_err(|err| TxProverServiceError::InvalidURI(err, address.clone()))? 
- .connect_timeout(connection_timeout) - .timeout(total_timeout) - .connect() - .await - .map_err(|err| TxProverServiceError::ConnectionFailed(err, address))?; - - Ok(HealthClient::new(channel)) -} diff --git a/crates/miden-lib/Cargo.toml b/crates/miden-lib/Cargo.toml index dd5461cec..bd77f8d18 100644 --- a/crates/miden-lib/Cargo.toml +++ b/crates/miden-lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-lib" -version = "0.7.0" +version = "0.8.0" description = "Standard library of the Miden rollup" readme = "README.md" categories = ["no-std"] diff --git a/crates/miden-lib/asm/kernels/transaction/api.masm b/crates/miden-lib/asm/kernels/transaction/api.masm index e3c0fca1d..43f547743 100644 --- a/crates/miden-lib/asm/kernels/transaction/api.masm +++ b/crates/miden-lib/asm/kernels/transaction/api.masm @@ -23,8 +23,11 @@ use.kernel::tx # For faucets the FAUCET_STORAGE_DATA_SLOT storage slot is reserved and can not be used with set_account_item const.ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED=0x00020000 -# The get_fungible_faucet_total_issuance procedure can only be called on a fungible faucet -const.ERR_ACCOUNT_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET=0x00020001 +# The faucet_get_total_fungible_asset_issuance procedure can only be called on a fungible faucet +const.ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET=0x00020001 + +# The faucet_is_non_fungible_asset_issued procedure can only be called on a non-fungible faucet +const.ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET=0x0002005C # Provided kernel procedure offset is out of bounds const.ERR_KERNEL_PROCEDURE_OFFSET_OUT_OF_BOUNDS=0x00020003 @@ -610,7 +613,7 @@ end export.faucet_get_total_fungible_asset_issuance # assert that we are executing a transaction against a fungible faucet (access checks) exec.account::get_id swap drop exec.account::is_fungible_faucet - assert.err=ERR_ACCOUNT_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET + assert.err=ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET # => [pad(16)] # get the total issuance @@ -622,22 +625,34 @@ export.faucet_get_total_fungible_asset_issuance # => [total_issuance, pad(15)] end -#! Returns a boolean indicating whether the non-fungible asset is issued. +#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by +#! this faucet. #! #! Inputs: [ASSET, pad(12)] #! Outputs: [is_issued, pad(15)] #! #! Where: -#! - ASSET is the non-fungible asset of interest. +#! - ASSET is the non-fungible asset that is being checked. #! - is_issued is a boolean indicating whether the non-fungible asset has been issued. #! #! Panics if: #! - the ASSET is a fungible asset. +#! - the ASSET is not associated with the faucet the transaction is being executed against. #! #! 
Invocation: dynexec export.faucet_is_non_fungible_asset_issued - # TODO: implement this procedure - push.0 drop + # assert that we are executing a transaction against a non-fungible faucet (access checks) + exec.account::get_id swap drop exec.account::is_non_fungible_faucet + assert.err=ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET + # => [ASSET, pad(12)] + + # get the issuance flag + exec.faucet::is_non_fungible_asset_issued + # => [is_issued, pad(16)] + + # truncate the stack + swap drop + # => [is_issued, pad(15)] end ### NOTE ######################################## diff --git a/crates/miden-lib/asm/kernels/transaction/lib/account.masm b/crates/miden-lib/asm/kernels/transaction/lib/account.masm index 6d0d6dd67..5189a1ab6 100644 --- a/crates/miden-lib/asm/kernels/transaction/lib/account.masm +++ b/crates/miden-lib/asm/kernels/transaction/lib/account.masm @@ -246,13 +246,14 @@ export.incr_nonce emit.ACCOUNT_AFTER_INCREMENT_NONCE_EVENT end -#! Returns the account ID. +#! Returns the id of the current account. #! #! Inputs: [] -#! Outputs: [acct_id] +#! Outputs: [curr_acct_id_prefix, curr_acct_id_suffix] #! #! Where: -#! - acct_id is the account ID. +#! - curr_acct_id_{prefix,suffix} are the prefix and suffix felts of the account ID of the currently +#! accessing account. export.memory::get_account_id->get_id #! Returns the account nonce. @@ -573,11 +574,11 @@ end #! Panics if: #! - the requested storage slot type is not map. export.get_map_item - # check if storage slot type is map + # get the storage slot type dup exec.get_storage_slot_type # => [slot_type, index, KEY] - # check if type == map + # check if storage slot type is map exec.constants::get_storage_slot_type_map eq assert.err=ERR_ACCOUNT_READING_MAP_VALUE_FROM_NON_MAP_SLOT # => [index, KEY] @@ -617,7 +618,7 @@ end export.set_map_item.12 # store index for later dup loc_store.0 - # => [index, KEY, NEW_VALUE, ...] + # => [index, KEY, NEW_VALUE, OLD_ROOT, ...] # check if storage type is map dup exec.get_storage_slot_type @@ -673,7 +674,7 @@ end export.get_storage_slot_type # check that index is in bounds dup exec.memory::get_num_storage_slots lt assert.err=ERR_STORAGE_SLOT_INDEX_OUT_OF_BOUNDS - # => [index, V'] + # => [index] # get account storage slots section offset exec.memory::get_acct_storage_slots_section_ptr diff --git a/crates/miden-lib/asm/kernels/transaction/lib/asset_vault.masm b/crates/miden-lib/asm/kernels/transaction/lib/asset_vault.masm index 203a5902b..0e600b350 100644 --- a/crates/miden-lib/asm/kernels/transaction/lib/asset_vault.masm +++ b/crates/miden-lib/asm/kernels/transaction/lib/asset_vault.masm @@ -431,7 +431,7 @@ end #! Where: #! - ASSET is the non-fungible asset for which the vault key is built. #! - ASSET_KEY is the vault key of the non-fungible asset. 
-proc.build_non_fungible_asset_vault_key +export.build_non_fungible_asset_vault_key # create the asset key from the non-fungible asset by swapping hash0 with the faucet id # => [faucet_id_prefix, hash2, hash1, hash0] swap.3 diff --git a/crates/miden-lib/asm/kernels/transaction/lib/faucet.masm b/crates/miden-lib/asm/kernels/transaction/lib/faucet.masm index f5fcb6817..296d28c6a 100644 --- a/crates/miden-lib/asm/kernels/transaction/lib/faucet.masm +++ b/crates/miden-lib/asm/kernels/transaction/lib/faucet.masm @@ -145,29 +145,25 @@ end proc.mint_non_fungible_asset # assert that the asset is associated with the faucet the transaction is being executed against # and that the asset is valid - exec.account::get_id swap drop - exec.asset::validate_non_fungible_asset_origin + exec.account::get_id swap drop exec.asset::validate_non_fungible_asset_origin # => [ASSET] # fetch the root of the SMT containing the non-fungible assets - dupw exec.account::get_faucet_storage_data_slot exec.account::get_item - # => [SMT_ROOT, ASSET, ASSET] + dupw exec.account::get_faucet_storage_data_slot dup movdn.5 exec.account::get_item + # => [SMT_ROOT, ASSET, faucet_storage_data_slot, ASSET] # prepare stack for insert of non-fungible asset into tracking SMT - swapw dupw - # => [ASSET, ASSET, SMT_ROOT, ASSET] + swapw dupw exec.asset_vault::build_non_fungible_asset_vault_key movup.12 + # => [faucet_storage_data_slot, ASSET_KEY, ASSET, SMT_ROOT, ASSET] # insert the non-fungible asset into the tracking SMT - exec.smt::set - # => [OLD_VAL, SMT_ROOT', ASSET] + exec.account::set_map_item dropw + # => [OLD_VAL, ASSET] - # assert the `OLD_VAL` is EMPTY_WORD, indicating that the non-fungible asset did not already - # exist we only need to check ASSET[1] as this is always set to the faucet_id and can not be 0. - drop drop eq.0 assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED drop - # => [SMT_ROOT', ASSET] - - # update the root of the SMT containing the non-fungible assets - exec.account::get_faucet_storage_data_slot exec.account::set_item dropw + # Assert the `OLD_VAL` is an EMPTY_WORD, indicating that the non-fungible asset has not been + # issued yet. We only need to check OLD_VAL[3] as this is always set to the faucet_id_prefix + # and can not be 0. + eq.0 assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED drop drop drop # => [ASSET] # add the non-fungible asset to the input vault for asset preservation checks @@ -192,7 +188,7 @@ end #! transaction via a note or the accounts vault. 
proc.burn_non_fungible_asset # assert that we are executing a transaction against the non-fungible faucet (access checks) - exec.account::get_id exec.account::is_non_fungible_faucet + exec.account::get_id swap drop exec.account::is_non_fungible_faucet assert.err=ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET # => [ASSET] @@ -201,24 +197,20 @@ proc.burn_non_fungible_asset # => [ASSET, ASSET] # fetch the root of the SMT containing the non-fungible assets - exec.account::get_faucet_storage_data_slot exec.account::get_item - # => [SMT_ROOT, ASSET, ASSET] + exec.account::get_faucet_storage_data_slot dup movdn.5 exec.account::get_item + # => [SMT_ROOT, ASSET, faucet_storage_data_slot, ASSET] # prepare stack for removal of non-fungible asset from tracking SMT - swapw padw - # => [EMPTY_WORD, ASSET, SMT_ROOT, ASSET] + swapw exec.asset_vault::build_non_fungible_asset_vault_key padw swapw movup.12 + # => [faucet_storage_data_slot, ASSET_KEY, EMPTY_WORD, SMT_ROOT, ASSET] # remove the non-fungible asset from the tracking SMT - exec.smt::set - # => [OLD_VAL, SMT_ROOT', ASSET] - - # assert the `OLD_VAL` is not EMPTY_WORD, indicating that the non-fungible asset exists. - # we only need to check ASSET[1] as this is always set to the faucet_id and can not be 0. - drop drop eq.0 not assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND drop - # => [SMT_ROOT', ASSET] + exec.account::set_map_item dropw + # => [OLD_VAL, ASSET] - # update the root of the SMT containing the non-fungible assets - exec.account::get_faucet_storage_data_slot exec.account::set_item dropw + # Assert the `OLD_VAL` is not an EMPTY_WORD, indicating that the non-fungible asset exists. We + # only need to check OLD_VAL[3] as this is always set to the faucet_id_prefix and can not be 0. + eq.0 not assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND drop drop drop # => [ASSET] # remove the non-fungible asset from the input vault for asset preservation checks @@ -226,6 +218,45 @@ proc.burn_non_fungible_asset # => [ASSET] end +#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by +#! this faucet. +#! +#! Inputs: [ASSET] +#! Outputs: [is_issued] +#! +#! Where: +#! - ASSET is the non-fungible asset that is being checked. +#! - is_issued is a boolean indicating whether the non-fungible asset has been issued. +#! +#! Panics if: +#! - the ASSET is a fungible asset. +#! - the ASSET is not associated with the faucet the transaction is being executed against. +export.is_non_fungible_asset_issued + # assert that the asset is associated with the faucet the transaction is being executed against + # and that the asset is valid + exec.account::get_id swap drop exec.asset::validate_non_fungible_asset_origin + # => [ASSET] + + # get the asset vault key from the asset + exec.asset_vault::build_non_fungible_asset_vault_key + # => [ASSET_KEY] + + # get the storage index where faucet's assets map is stored + exec.account::get_faucet_storage_data_slot + # => [map_slot_index, ASSET_KEY] + + # get the non-fungible asset stored by the computed account key + exec.account::get_map_item + # => [STORED_ASSET] + + # Check whether the `STORED_ASSET` is an EMPTY_WORD, indicating that the non-fungible asset has + # not been issued yet. We only need to check STORED_ASSET[3] as this is always set to the + # faucet_id_prefix and can not be 0 (in reversed stack order it will be top stack element). + # Equality of the STORED_ASSET[3] to zero will become a flag that this asset is not issued. 
+ neq.0 movdn.3 drop drop drop + # => [is_issued] +end + # PUBLIC INTERFACE # ================================================================================================== diff --git a/crates/miden-lib/asm/kernels/transaction/lib/memory.masm b/crates/miden-lib/asm/kernels/transaction/lib/memory.masm index fd11e6d01..ec7be8704 100644 --- a/crates/miden-lib/asm/kernels/transaction/lib/memory.masm +++ b/crates/miden-lib/asm/kernels/transaction/lib/memory.masm @@ -43,25 +43,25 @@ const.TX_EXPIRATION_BLOCK_NUM_PTR=28 # GLOBAL INPUTS # ------------------------------------------------------------------------------------------------- -# The memory address at which the global inputs section begins +# The memory address at which the global inputs section begins. const.GLOBAL_INPUTS_SECTION_OFFSET=400 -# The memory address at which the latest known block hash is stored +# The memory address at which the latest known block hash is stored. const.BLK_HASH_PTR=400 # The memory address at which the account ID felts are stored. const.ACCT_ID_PTR=404 -# The memory address at which the initial account hash is stored +# The memory address at which the initial account hash is stored. const.INIT_ACCT_HASH_PTR=408 -# The memory address at which the input notes commitment is stored +# The memory address at which the input notes commitment is stored. const.INPUT_NOTES_COMMITMENT_PTR=412 -# The memory address at which the initial nonce is stored +# The memory address at which the initial nonce is stored. const.INIT_NONCE_PTR=416 -# The memory address at which the transaction script mast root is stored +# The memory address at which the transaction script mast root is stored. const.TX_SCRIPT_ROOT_PTR=420 # GLOBAL BLOCK DATA @@ -123,14 +123,14 @@ const.KERNEL_PROCEDURES_PTR=1604 # ------------------------------------------------------------------------------------------------- # The largest memory address which can be used to load the foreign account data. -# It is computed as `2048 * 64 * 4` -- this is the memory address where the data block of the 64th +# It is computed as `2048 * 64 * 4` -- this is the memory address where the data block of the 64th # account starts. const.MAX_FOREIGN_ACCOUNT_PTR=524288 # The memory address at which the native account data is stored. const.NATIVE_ACCOUNT_DATA_PTR=8192 -# The length of the memory interval that the account data occupies. +# The length of the memory interval that the account data occupies. const.ACCOUNT_DATA_LENGTH=8192 # The offsets at which the account data is stored relative to the start of the account data segment. @@ -241,7 +241,7 @@ end #! Outputs: [input_vault_root_ptr] #! #! Where: -#! - input_vault_root_ptr is a pointer to the memory address at which the input vault root is +#! - input_vault_root_ptr is a pointer to the memory address at which the input vault root is #! stored. export.get_input_vault_root_ptr push.INPUT_VAULT_ROOT_PTR @@ -649,7 +649,7 @@ export.get_native_account_data_ptr push.NATIVE_ACCOUNT_DATA_PTR end -#! Returns the length of the memory interval that the account data occupies. +#! Returns the length of the memory interval that the account data occupies. #! #! Inputs: [] #! Outputs: [acct_data_length] @@ -666,7 +666,7 @@ end #! Outputs: [max_foreign_acct_ptr] #! #! Where: -#! - max_foreign_acct_ptr is the largest memory address which can be used to load the foreign +#! - max_foreign_acct_ptr is the largest memory address which can be used to load the foreign #! account data. 
export.get_max_foreign_account_ptr push.MAX_FOREIGN_ACCOUNT_PTR @@ -1063,7 +1063,7 @@ export.set_input_note_id mem_storew end -#! Computes a pointer to the memory address at which the nullifier associated a note with `idx` is +#! Computes a pointer to the memory address at which the nullifier associated a note with `idx` is #! stored. #! #! Inputs: [idx] @@ -1221,7 +1221,7 @@ export.set_input_note_num_assets mem_store end -#! Returns a pointer to the start of the assets segment for the input note located at the specified +#! Returns a pointer to the start of the assets segment for the input note located at the specified #! memory address. #! #! Inputs: [note_ptr] @@ -1449,7 +1449,7 @@ end #! Outputs: [kernel_procedures_ptr] #! #! Where: -#! - kernel_procedures_ptr is the memory address where the hashes of the kernel procedures are +#! - kernel_procedures_ptr is the memory address where the hashes of the kernel procedures are #! stored. export.get_kernel_procedures_ptr push.KERNEL_PROCEDURES_PTR diff --git a/crates/miden-lib/asm/kernels/transaction/lib/prologue.masm b/crates/miden-lib/asm/kernels/transaction/lib/prologue.masm index 2fff6b96a..f52dc4baf 100644 --- a/crates/miden-lib/asm/kernels/transaction/lib/prologue.masm +++ b/crates/miden-lib/asm/kernels/transaction/lib/prologue.masm @@ -19,6 +19,9 @@ const.MAX_BLOCK_NUM=0xFFFFFFFF # The provided global inputs do not match the block hash commitment const.ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_HASH_COMMITMENT=0x00020034 +# The provided global inputs do not match the block number commitment +const.ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT=0x00020134 + # New account must have an empty vault const.ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY=0x00020035 @@ -66,22 +69,23 @@ const.ERR_PROLOGUE_NEW_ACCOUNT_NONCE_MUST_BE_ZERO=0x0002005B #! Saves global inputs to memory. #! -#! Inputs: [BLOCK_HASH, account_id_prefix, account_id_suffix, INITIAL_ACCOUNT_HASH, INPUT_NOTES_COMMITMENT] -#! Outputs: [] +#! Inputs: [BLOCK_HASH, INITIAL_ACCOUNT_HASH, INPUT_NOTES_COMMITMENT, account_id_prefix, account_id_suffix, block_num] +#! Outputs: [block_num] #! #! Where: #! - BLOCK_HASH is the reference block for the transaction execution. -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID of the account +#! - block_num is the number of the reference block. +#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID of the account #! that the transaction is being executed against. -#! - INITIAL_ACCOUNT_HASH is the account state prior to the transaction, EMPTY_WORD for new +#! - INITIAL_ACCOUNT_HASH is the account state prior to the transaction, EMPTY_WORD for new #! accounts. -#! - INPUT_NOTES_COMMITMENT is the commitment to the input notes. See the +#! - INPUT_NOTES_COMMITMENT is the commitment to the input notes. See the #! `tx_get_input_notes_commitment` kernel procedure for details. proc.process_global_inputs exec.memory::set_block_hash dropw - exec.memory::set_global_acct_id exec.memory::set_init_acct_hash dropw exec.memory::set_nullifier_commitment dropw + exec.memory::set_global_acct_id end # KERNEL DATA @@ -102,7 +106,7 @@ end #! Outputs: #! Operand stack: [] #! Advice stack: [] -#! +#! #! Where: #! - kernel_version is the index of the desired kernel in the array of all kernels available for the #! current transaction. 
@@ -154,7 +158,7 @@ proc.process_kernel_data # AS => [] # get the hash of the kernel which will be used in the current transaction - exec.memory::get_kernel_procedures_ptr add + exec.memory::get_kernel_procedures_ptr add # OS => [kernel_ptr] # AS => [] @@ -196,7 +200,7 @@ proc.process_kernel_data # assert that the precomputed hash matches the computed one movup.4 drop assert_eqw # OS => [] - # AS => [] + # AS => [] end # BLOCK DATA @@ -205,7 +209,7 @@ end #! Saves block data to memory and verifies that it matches the BLOCK_HASH public input. #! #! Inputs: -#! Operand stack: [] +#! Operand stack: [block_num] #! Advice stack: [ #! PREVIOUS_BLOCK_HASH, #! CHAIN_MMR_HASH, @@ -224,7 +228,7 @@ end #! - PREVIOUS_BLOCK_HASH is the hash of the previous block. #! - CHAIN_MMR_HASH is the sequential hash of the reference MMR. #! - ACCOUNT_ROOT is the root of the tree with latest account states for all accounts. -#! - NULLIFIER_ROOT is the root of the tree with nullifiers of all notes that have ever been +#! - NULLIFIER_ROOT is the root of the tree with nullifiers of all notes that have ever been #! consumed. #! - TX_HASH is the commitment to a set of IDs of transactions which affected accounts in the block. #! - KERNEL_ROOT is the accumulative hash from all kernel hashes. @@ -235,7 +239,7 @@ end #! - NOTE_ROOT is the root of the tree with all notes created in the block. proc.process_block_data exec.memory::get_block_data_ptr - # => [block_data_ptr] + # => [block_data_ptr, block_num] # read block data and compute its subhash. See `Advice stack` above for details. padw padw padw @@ -244,24 +248,27 @@ proc.process_block_data adv_pipe hperm adv_pipe hperm exec.rpo::squeeze_digest - # => [DIG, block_data_ptr'] + # => [DIG, block_data_ptr', block_num] # store the note root in memory padw adv_loadw dupw exec.memory::set_note_root dropw - # => [NOTE_ROOT, DIG, block_data_ptr'] + # => [NOTE_ROOT, DIG, block_data_ptr', block_num] # merge the note root with the block data digest hmerge - # => [BLOCK_HASH, block_data_ptr'] + # => [BLOCK_HASH, block_data_ptr', block_num] # assert that the block hash matches the hash in global inputs exec.memory::get_block_hash assert_eqw.err=ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_HASH_COMMITMENT - # => [block_data_ptr'] + # => [block_data_ptr', block_num] drop - # => [] + # => [block_num] + + exec.memory::get_blk_num + assert_eq.err=ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT end # CHAIN DATA @@ -357,7 +364,7 @@ proc.validate_new_account if.true # assert the fungible faucet reserved slot is initialized correctly (EMPTY_WORD) # TODO: Switch to standard library implementation when available (miden-vm/#1483) - exec.is_empty_word_dropped not + exec.is_empty_word_dropped not assertz.err=ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY # => [] @@ -366,13 +373,13 @@ proc.validate_new_account # => [slot_type] # assert the fungible faucet reserved slot type == value - exec.constants::get_storage_slot_type_value eq + exec.constants::get_storage_slot_type_value eq assert.err=ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE # => [] else # assert the non-fungible faucet reserved slot is initialized correctly (root of # empty SMT) - exec.constants::get_empty_smt_root + exec.constants::get_empty_smt_root assert_eqw.err=ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPY_SMT # => [] @@ -381,7 +388,7 @@ proc.validate_new_account # => [slot_type] # assert the non-fungible faucet reserved slot type == map - 
exec.constants::get_storage_slot_type_map eq + exec.constants::get_storage_slot_type_map eq assert.err=ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE # => [] end @@ -421,7 +428,7 @@ end #! This procedure will: #! - Read the account data from the advice stack. #! - Save it to memory. -#! - For new accounts, signaled by having a INITIAL_ACCOUNT_HASH set to EMPTY_WORD as a global +#! - For new accounts, signaled by having a INITIAL_ACCOUNT_HASH set to EMPTY_WORD as a global #! input, validate the account's id and initial state. #! - For existing accounts, verify the INITIAL_ACCOUNT_HASH commitment matches the provided data, #! and the account nonce is not zero. @@ -439,7 +446,7 @@ end #! Advice stack: [] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the account that the +#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the account that the #! transaction is being executed against. #! - account_nonce is the account's nonce. #! - ACCOUNT_VAULT_ROOT is the account's vault root. @@ -482,7 +489,7 @@ proc.process_account_data exec.account::save_account_storage_data # => [ACCT_HASH] - # set the new account code commitment to the initial account code root + # set the new account code commitment to the initial account code root # this is used for managing code commitment updates exec.memory::get_acct_code_commitment exec.memory::set_new_acct_code_commitment @@ -579,7 +586,7 @@ proc.authenticate_note.8 # => [COMPUTED_BLOCK_HASH, PERM, mem_ptr', BLOCK_HASH, NOTE_HASH] # assert the computed block hash matches - movup.8 drop movupw.2 + movup.8 drop movupw.2 assert_eqw.err=ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR # => [PERM, NOTE_HASH] @@ -616,7 +623,7 @@ end #! INPUTS_HASH, #! ASSETS_HASH, #! ] -#! Outputs: +#! Outputs: #! Operand stack: [NULLIFIER] #! Advice stack: [] #! @@ -648,8 +655,8 @@ end #! - The note's ARGS are not authenticated, these are optional arguments the user can provide when #! consuming the note. #! - The note's metadata is authenticated, so the data is returned in the stack. The value is used -#! to compute the NOTE_HASH as `hash(NOTE_ID || NOTE_METADATA)`, which is the leaf value of the -#! note's tree in the contained in the block header. The NOTE_HASH is either verified by this +#! to compute the NOTE_HASH as `hash(NOTE_ID || NOTE_METADATA)`, which is the leaf value of the +#! note's tree in the contained in the block header. The NOTE_HASH is either verified by this #! kernel, or delayed to be verified by another kernel (e.g. block or batch kernels). #! #! Inputs: @@ -691,7 +698,7 @@ proc.process_note_assets adv_push.1 # => [assets_count, note_ptr] - dup exec.constants::get_max_assets_per_note lte + dup exec.constants::get_max_assets_per_note lte assert.err=ERR_PROLOGUE_NUMBER_OF_NOTE_ASSETS_EXCEEDS_LIMIT # => [assets_count, note_ptr] @@ -855,13 +862,13 @@ end #! NOTE_ROOT, #! )? #! ] -#! Outputs: +#! Outputs: #! Operand stack: [PERM, PERM, PERM] #! Advice stack: [] #! #! Where: #! - idx is the index of the input note. -#! - HASHER_CAPACITY is the state of the hasher capacity word, with the commitment to the previous +#! - HASHER_CAPACITY is the state of the hasher capacity word, with the commitment to the previous #! notes. #! - SERIAL_NUMBER is the note's serial. #! - SCRIPT_ROOT is the note's script root. @@ -954,10 +961,10 @@ end #! Inputs: #! Operand stack: [] #! Advice stack: [num_notes], -#! Advice map: { -#! INPUT_NOTES_COMMITMENT: [NOTE_DATA] +#! 
Advice map: { +#! INPUT_NOTES_COMMITMENT: [NOTE_DATA] #! } -#! Outputs: +#! Outputs: #! Operand stack: [] #! Advice stack: [] #! @@ -973,7 +980,7 @@ proc.process_input_notes_data # assert the number of input notes is within limits; since max number of input notes is # expected to be smaller than 2^32, we can use a more efficient u32 comparison dup - exec.constants::get_max_num_input_notes + exec.constants::get_max_num_input_notes u32assert2.err=ERR_PROLOGUE_NUMBER_OF_INPUT_NOTES_EXCEEDS_LIMIT u32lte assert.err=ERR_PROLOGUE_NUMBER_OF_INPUT_NOTES_EXCEEDS_LIMIT # => [num_notes] @@ -1086,9 +1093,9 @@ end #! Inputs: #! Operand stack: [ #! BLOCK_HASH, -#! account_id_prefix, account_id_suffix, #! INITIAL_ACCOUNT_HASH, #! INPUT_NOTES_COMMITMENT, +#! account_id_prefix, account_id_suffix, block_num, #! ] #! Advice stack: [ #! PREVIOUS_BLOCK_HASH, @@ -1122,16 +1129,16 @@ end #! #! Where: #! - BLOCK_HASH is the reference block for the transaction execution. -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account that the transaction +#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account that the transaction #! is being executed against. -#! - INITIAL_ACCOUNT_HASH is the account state prior to the transaction, EMPTY_WORD for new +#! - INITIAL_ACCOUNT_HASH is the account state prior to the transaction, EMPTY_WORD for new #! accounts. #! - INPUT_NOTES_COMMITMENT, see `transaction::api::get_input_notes_commitment`. #! - KERNEL_ROOT is the accumulative hash from all kernel hashes. #! - PREVIOUS_BLOCK_HASH is the hash of the previous block. #! - CHAIN_MMR_HASH is the sequential hash of the reference MMR. #! - ACCOUNT_ROOT is the root of the tree with latest account states for all accounts. -#! - NULLIFIER_ROOT is the root of the tree with nullifiers of all notes that have ever been +#! - NULLIFIER_ROOT is the root of the tree with nullifiers of all notes that have ever been #! consumed. #! - TX_HASH is the commitment to a set of IDs of transactions which affected accounts in the block. #! - PROOF_HASH is the hash of the block's stark proof. @@ -1158,7 +1165,7 @@ end #! - any of the input notes do note exist in the note db. export.prepare_transaction exec.process_global_inputs - # => [] + # => [block_num] exec.process_block_data exec.process_kernel_data diff --git a/crates/miden-lib/asm/miden/faucet.masm b/crates/miden-lib/asm/miden/faucet.masm index f031fbc61..d43a8aeed 100644 --- a/crates/miden-lib/asm/miden/faucet.masm +++ b/crates/miden-lib/asm/miden/faucet.masm @@ -97,3 +97,34 @@ export.get_total_issuance swapdw dropw dropw swapw dropw movdn.3 drop drop drop # => [total_issuance] end + +#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by +#! this faucet. +#! +#! Inputs: [ASSET] +#! Outputs: [is_issued] +#! +#! Where: +#! - ASSET is the non-fungible asset that is being checked. +#! - is_issued is a boolean indicating whether the non-fungible asset has been issued. +#! +#! Panics if: +#! - the ASSET is a fungible asset. +#! - the ASSET is not associated with the faucet the transaction is being executed against. +#! +#! 
Invocation: exec
+export.is_non_fungible_asset_issued
+    exec.kernel_proc_offsets::faucet_is_non_fungible_asset_issued_offset
+    # => [offset, ASSET]
+
+    # pad the stack
+    push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw
+    # => [offset, ASSET, pad(11)]
+
+    syscall.exec_kernel_proc
+    # => [is_issued, pad(15)]
+
+    # clean the stack
+    swapdw dropw dropw swapw dropw movdn.3 drop drop drop
+    # => [is_issued]
+end
diff --git a/crates/miden-lib/build.rs b/crates/miden-lib/build.rs
index 5f0b5de38..5d2c3eff9 100644
--- a/crates/miden-lib/build.rs
+++ b/crates/miden-lib/build.rs
@@ -75,7 +75,11 @@ fn main() -> Result<()> {
     )?;
 
     // compile account components
-    compile_account_components(&target_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), assembler)?;
+    compile_account_components(
+        &source_dir.join(ASM_ACCOUNT_COMPONENTS_DIR),
+        &target_dir.join(ASM_ACCOUNT_COMPONENTS_DIR),
+        assembler,
+    )?;
 
     generate_kernel_error_constants(&source_dir)?;
 
@@ -233,7 +237,7 @@ fn parse_proc_offsets(filename: impl AsRef) -> Result Result<()> {
@@ -283,33 +287,37 @@ fn compile_note_scripts(source_dir: &Path, target_dir: &Path, assembler: Assembl
     Ok(())
 }
 
-// COMPILE DEFAULT ACCOUNT COMPONENTS
+// COMPILE ACCOUNT COMPONENTS
 // ================================================================================================
 
-const BASIC_WALLET_CODE: &str = "
-    export.::miden::contracts::wallets::basic::receive_asset
-    export.::miden::contracts::wallets::basic::create_note
-    export.::miden::contracts::wallets::basic::move_asset_to_note
-";
-
-const RPO_FALCON_AUTH_CODE: &str = "
-    export.::miden::contracts::auth::basic::auth_tx_rpo_falcon512
-";
-
-const BASIC_FUNGIBLE_FAUCET_CODE: &str = "
-    export.::miden::contracts::faucets::basic_fungible::distribute
-    export.::miden::contracts::faucets::basic_fungible::burn
-";
-
-/// Compiles the default account components into a MASL library and stores the complied files in
-/// `target_dir`.
-fn compile_account_components(target_dir: &Path, assembler: Assembler) -> Result<()> {
-    for (component_name, component_code) in [
-        ("basic_wallet", BASIC_WALLET_CODE),
-        ("rpo_falcon_512", RPO_FALCON_AUTH_CODE),
-        ("basic_fungible_faucet", BASIC_FUNGIBLE_FAUCET_CODE),
-    ] {
-        let component_library = assembler.clone().assemble_library([component_code])?;
+/// Compiles the account components in `source_dir` into MASL libraries and stores the compiled
+/// files in `target_dir`.
+fn compile_account_components(
+    source_dir: &Path,
+    target_dir: &Path,
+    assembler: Assembler,
+) -> Result<()> {
+    if !target_dir.exists() {
+        fs::create_dir_all(target_dir).unwrap();
+    }
+
+    for masm_file_path in get_masm_files(source_dir).unwrap() {
+        let component_name = masm_file_path
+            .file_stem()
+            .expect("masm file should have a file stem")
+            .to_str()
+            .expect("file stem should be valid UTF-8")
+            .to_owned();
+
+        // Read the source code to string instead of passing it to assemble_library directly since
+        // that would attempt to interpret the path as a LibraryPath which would fail.
+ let component_source_code = fs::read_to_string(masm_file_path) + .expect("reading the component's MASM source code should succeed"); + + let component_library = assembler + .clone() + .assemble_library([component_source_code]) + .expect("library assembly should succeed"); let component_file_path = target_dir.join(component_name).with_extension(Library::LIBRARY_EXTENSION); component_library.write_to_file(component_file_path).into_diagnostic()?; diff --git a/crates/miden-lib/masm_doc_comment_fmt.md b/crates/miden-lib/masm_doc_comment_fmt.md index e450d8c41..d480d2e50 100644 --- a/crates/miden-lib/masm_doc_comment_fmt.md +++ b/crates/miden-lib/masm_doc_comment_fmt.md @@ -97,14 +97,14 @@ It is strongly not recommended to use a single-letter names for variables, with ### Inputs -Inputs block could contain three components: operand stack, advice stack and advice map. Description of the each container should be offseted with two spaces relative to the start of the `Inputs` word. Each name of the container should be separated from its value by the colon (e.g. `Operand stack: [value_1]`). +Inputs block could contain three components: operand stack, advice stack and advice map. Description of the each container should be offset with two spaces relative to the start of the `Inputs` word. Each name of the container should be separated from its value by the colon (e.g. `Operand stack: [value_1]`). Operand stack and advice stack should be presented as an array containing some data. The lines which exceed 100 symbols should be formatted differently, it could be done in two different ways: 1. The line should be broken, and the end of the line should be moved to the new line with an offset such that the first symbol of the first element on the second line should be directly above the first symbol of the first element on the first line (see the value of the `FOREIGN_ACCOUNT_ID` in the example in `Formats` section). -2. The exceeded array should be formatted in a column, forming a Word or some other number of related elements on each line. Each new line should be offseted with two spaces relative to the name of the container (see example below). +2. The exceeded array should be formatted in a column, forming a Word or some other number of related elements on each line. Each new line should be offset with two spaces relative to the name of the container (see example below). Example: @@ -121,7 +121,7 @@ Example: To show that some internal value array could have dynamic length, additional brackets should be used (see the `[VALUE_B]` in the advice stack in the example in `Formats` section). -In case some inputs are presented on the stack only if some condition is satisfied, such inputs should be placed in the "optional" box: inside the parentheses with a question mark at the end. Opening and closing brackets should be placed on a new line with the same offset as the other inputs, and values inside the brackets should be offseted by two spaces. +In case some inputs are presented on the stack only if some condition is satisfied, such inputs should be placed in the "optional" box: inside the parentheses with a question mark at the end. Opening and closing brackets should be placed on a new line with the same offset as the other inputs, and values inside the brackets should be offset by two spaces. Example: @@ -194,7 +194,7 @@ Example: In case the values are provided only through the operand stack, a short version of the inputs and outputs should be used. 
In that case only `Inputs` and `Outputs` components are used, representing the values on the operand stack.
 
-Input values array should be offseted by one space to be inline with the output values array (see the example).
+Input values array should be offset by one space to be in line with the output values array (see the example).
 
 Example:
diff --git a/crates/miden-lib/src/account/auth/mod.rs b/crates/miden-lib/src/account/auth/mod.rs
index ce93fafed..8a25fe578 100644
--- a/crates/miden-lib/src/account/auth/mod.rs
+++ b/crates/miden-lib/src/account/auth/mod.rs
@@ -8,11 +8,16 @@ use crate::account::components::rpo_falcon_512_library;
 /// An [`AccountComponent`] implementing the RpoFalcon512 signature scheme for authentication of
 /// transactions.
 ///
-/// Its exported procedures are:
+/// It reexports the procedures from `miden::contracts::auth::basic`. When linking against this
+/// component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be available to the
+/// assembler, which is the case when using [`TransactionKernel::assembler()`][kasm]. The procedures
+/// of this component are:
 /// - `auth_tx_rpo_falcon512`, which can be used to verify a signature provided via the advice stack
 ///   to authenticate a transaction.
 ///
 /// This component supports all account types.
+///
+/// [kasm]: crate::transaction::TransactionKernel::assembler
 pub struct RpoFalcon512 {
     public_key: PublicKey,
 }
diff --git a/crates/miden-lib/src/account/faucets/mod.rs b/crates/miden-lib/src/account/faucets/mod.rs
index 0238e2ef4..1c0388107 100644
--- a/crates/miden-lib/src/account/faucets/mod.rs
+++ b/crates/miden-lib/src/account/faucets/mod.rs
@@ -15,7 +15,10 @@ use crate::account::{auth::RpoFalcon512, components::basic_fungible_faucet_libra
 /// An [`AccountComponent`] implementing a basic fungible faucet.
 ///
-/// Its exported procedures are:
+/// It reexports the procedures from `miden::contracts::faucets::basic_fungible`. When linking
+/// against this component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be
+/// available to the assembler, which is the case when using
+/// [`TransactionKernel::assembler()`][kasm]. The procedures of this component are:
 /// - `distribute`, which mints an asset and creates a note for the provided recipient.
 /// - `burn`, which burns the provided asset.
 ///
@@ -24,6 +27,8 @@ use crate::account::{auth::RpoFalcon512, components::basic_fungible_faucet_libra
 /// authentication.
 ///
 /// This component supports accounts of type [`AccountType::FungibleFaucet`].
+///
+/// [kasm]: crate::transaction::TransactionKernel::assembler
 pub struct BasicFungibleFaucet {
     symbol: TokenSymbol,
     decimals: u8,
diff --git a/crates/miden-lib/src/account/wallets/mod.rs b/crates/miden-lib/src/account/wallets/mod.rs
index 2d89e2530..9d8e0e33d 100644
--- a/crates/miden-lib/src/account/wallets/mod.rs
+++ b/crates/miden-lib/src/account/wallets/mod.rs
@@ -15,7 +15,10 @@ use crate::account::{auth::RpoFalcon512, components::basic_wallet_library};
 /// An [`AccountComponent`] implementing a basic wallet.
 ///
-/// Its exported procedures are:
+/// It reexports the procedures from `miden::contracts::wallets::basic`. When linking against this
+/// component, the `miden` library (i.e. [`MidenLib`](crate::MidenLib)) must be available to the
+/// assembler, which is the case when using [`TransactionKernel::assembler()`][kasm]. The
+/// procedures of this component are:
 /// - `receive_asset`, which can be used to add an asset to the account.
/// - `create_note`, which can be used to create a new note without any assets attached to it. /// - `move_asset_to_note`, which can be used to remove the specified asset from the account and add @@ -25,6 +28,8 @@ use crate::account::{auth::RpoFalcon512, components::basic_wallet_library}; /// providing authentication. /// /// This component supports all account types. +/// +/// [kasm]: crate::transaction::TransactionKernel::assembler pub struct BasicWallet; impl From for AccountComponent { diff --git a/crates/miden-lib/src/errors/tx_kernel_errors.rs b/crates/miden-lib/src/errors/tx_kernel_errors.rs index e3e9441b0..c97fabeb7 100644 --- a/crates/miden-lib/src/errors/tx_kernel_errors.rs +++ b/crates/miden-lib/src/errors/tx_kernel_errors.rs @@ -30,17 +30,18 @@ pub const ERR_ACCOUNT_SETTING_VALUE_ITEM_ON_NON_VALUE_SLOT: u32 = 0x00020009; pub const ERR_ACCOUNT_STORAGE_COMMITMENT_MISMATCH: u32 = 0x00020012; pub const ERR_ACCOUNT_TOO_MANY_PROCEDURES: u32 = 0x00020010; pub const ERR_ACCOUNT_TOO_MANY_STORAGE_SLOTS: u32 = 0x00020011; -pub const ERR_ACCOUNT_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET: u32 = 0x00020001; pub const ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME: u32 = 0x00020029; pub const ERR_FAUCET_BURN_CANNOT_EXCEED_EXISTING_TOTAL_SUPPLY: u32 = 0x0002002B; pub const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET: u32 = 0x0002002D; pub const ERR_FAUCET_INVALID_STORAGE_OFFSET: u32 = 0x0002000E; +pub const ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET: u32 = 0x0002005C; pub const ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT: u32 = 0x0002002A; pub const ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED: u32 = 0x0002002C; pub const ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND: u32 = 0x0002002E; pub const ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED: u32 = 0x00020000; +pub const ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET: u32 = 0x00020001; pub const ERR_FOREIGN_ACCOUNT_ID_EQUALS_NATIVE_ACCT_ID: u32 = 0x00020016; pub const ERR_FOREIGN_ACCOUNT_ID_IS_ZERO: u32 = 0x00020014; @@ -83,6 +84,7 @@ pub const ERR_P2ID_WRONG_NUMBER_OF_INPUTS: u32 = 0x00020050; pub const ERR_PROLOGUE_EXISTING_ACCOUNT_MUST_HAVE_NON_ZERO_NONCE: u32 = 0x0002003B; pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_HASH_COMMITMENT: u32 = 0x00020034; +pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT: u32 = 0x00020134; pub const ERR_PROLOGUE_INPUT_NOTES_COMMITMENT_MISMATCH: u32 = 0x00020041; pub const ERR_PROLOGUE_MISMATCH_OF_ACCOUNT_IDS_FROM_GLOBAL_INPUTS_AND_ADVICE_PROVIDER: u32 = 0x0002003C; pub const ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR: u32 = 0x0002003D; @@ -115,7 +117,7 @@ pub const ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS: u32 = 0x0002001C; pub const ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND: u32 = 0x0002001F; pub const ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID: u32 = 0x0002001E; -pub const TX_KERNEL_ERRORS: [(u32, &str); 90] = [ +pub const TX_KERNEL_ERRORS: [(u32, &str); 92] = [ (ERR_ACCOUNT_ANCHOR_BLOCK_HASH_MUST_NOT_BE_EMPTY, "Anchor block hash must not be empty"), (ERR_ACCOUNT_CODE_COMMITMENT_MISMATCH, "Computed account code commitment does not match recorded account code commitment"), (ERR_ACCOUNT_CODE_IS_NOT_UPDATABLE, "Account code must be updatable for it to be possible to set new code"), @@ -136,17 +138,18 @@ pub const TX_KERNEL_ERRORS: [(u32, &str); 90] = [ 
(ERR_ACCOUNT_STORAGE_COMMITMENT_MISMATCH, "Computed account storage commitment does not match recorded account storage commitment"), (ERR_ACCOUNT_TOO_MANY_PROCEDURES, "Number of account procedures exceeds the maximum limit of 256"), (ERR_ACCOUNT_TOO_MANY_STORAGE_SLOTS, "Number of account storage slots exceeds the maximum limit of 255"), - (ERR_ACCOUNT_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET, "The get_fungible_faucet_total_issuance procedure can only be called on a fungible faucet"), (ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME, "Total number of assets in the account and all involved notes must stay the same"), (ERR_FAUCET_BURN_CANNOT_EXCEED_EXISTING_TOTAL_SUPPLY, "Asset amount to burn can not exceed the existing total supply"), (ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET, "The burn_non_fungible_asset procedure can only be called on a non-fungible faucet"), (ERR_FAUCET_INVALID_STORAGE_OFFSET, "Storage offset is invalid for a faucet account (0 is prohibited as it is the reserved data slot for faucets)"), + (ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET, "The faucet_is_non_fungible_asset_issued procedure can only be called on a non-fungible faucet"), (ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT, "Asset mint operation would cause the new total supply to exceed the maximum allowed asset amount"), (ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED, "Failed to mint new non-fungible asset because it was already issued"), (ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND, "Failed to burn non-existent non-fungible asset in the vault"), (ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED, "For faucets the FAUCET_STORAGE_DATA_SLOT storage slot is reserved and can not be used with set_account_item"), + (ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET, "The faucet_get_total_fungible_asset_issuance procedure can only be called on a fungible faucet"), (ERR_FOREIGN_ACCOUNT_ID_EQUALS_NATIVE_ACCT_ID, "Provided foreign account ID is equal to the native account ID."), (ERR_FOREIGN_ACCOUNT_ID_IS_ZERO, "ID of the provided foreign account equals zero."), @@ -189,6 +192,7 @@ pub const TX_KERNEL_ERRORS: [(u32, &str); 90] = [ (ERR_PROLOGUE_EXISTING_ACCOUNT_MUST_HAVE_NON_ZERO_NONCE, "Existing accounts must have a non-zero nonce"), (ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_HASH_COMMITMENT, "The provided global inputs do not match the block hash commitment"), + (ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT, "The provided global inputs do not match the block number commitment"), (ERR_PROLOGUE_INPUT_NOTES_COMMITMENT_MISMATCH, "Note commitment computed from the input note data does not match given note commitment"), (ERR_PROLOGUE_MISMATCH_OF_ACCOUNT_IDS_FROM_GLOBAL_INPUTS_AND_ADVICE_PROVIDER, "Account IDs provided via global inputs and advice provider do not match"), (ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR, "Reference block MMR and note's authentication MMR must match"), diff --git a/crates/miden-lib/src/transaction/memory.rs b/crates/miden-lib/src/transaction/memory.rs index a92e76d2d..6e0ca413c 100644 --- a/crates/miden-lib/src/transaction/memory.rs +++ b/crates/miden-lib/src/transaction/memory.rs @@ -259,6 +259,7 @@ pub const NATIVE_ACCT_STORAGE_SLOTS_SECTION_PTR: MemoryAddress = /// The size of the memory segment allocated to each note. 
pub const NOTE_MEM_SIZE: MemoryAddress = 2048; +#[allow(clippy::empty_line_after_outer_attr)] #[rustfmt::skip] // INPUT NOTES DATA // ------------------------------------------------------------------------------------------------ diff --git a/crates/miden-lib/src/transaction/mod.rs b/crates/miden-lib/src/transaction/mod.rs index 275007d13..6810144e0 100644 --- a/crates/miden-lib/src/transaction/mod.rs +++ b/crates/miden-lib/src/transaction/mod.rs @@ -99,6 +99,7 @@ impl TransactionKernel { account.init_hash(), tx_inputs.input_notes().commitment(), tx_inputs.block_header().hash(), + tx_inputs.block_header().block_num(), ); let mut advice_inputs = init_advice_inputs.unwrap_or_default(); @@ -131,14 +132,15 @@ impl TransactionKernel { /// ```text /// [ /// BLOCK_HASH, - /// acct_id, /// INITIAL_ACCOUNT_HASH, /// INPUT_NOTES_COMMITMENT, + /// acct_id_prefix, acct_id_suffix, block_num /// ] /// ``` /// /// Where: /// - BLOCK_HASH, reference block for the transaction execution. + /// - block_num, number of the reference block. /// - acct_id, the account that the transaction is being executed against. /// - INITIAL_ACCOUNT_HASH, account state prior to the transaction, EMPTY_WORD for new accounts. /// - INPUT_NOTES_COMMITMENT, see `transaction::api::get_input_notes_commitment`. @@ -147,13 +149,15 @@ impl TransactionKernel { init_acct_hash: Digest, input_notes_hash: Digest, block_hash: Digest, + block_num: BlockNumber, ) -> StackInputs { // Note: Must be kept in sync with the transaction's kernel prepare_transaction procedure let mut inputs: Vec = Vec::with_capacity(14); - inputs.extend(input_notes_hash); - inputs.extend_from_slice(init_acct_hash.as_elements()); + inputs.push(Felt::from(block_num)); inputs.push(account_id.suffix()); inputs.push(account_id.prefix().as_felt()); + inputs.extend(input_notes_hash); + inputs.extend_from_slice(init_acct_hash.as_elements()); inputs.extend_from_slice(block_hash.as_elements()); StackInputs::new(inputs) .map_err(|e| e.to_string()) diff --git a/crates/miden-lib/src/transaction/procedures/kernel_v0.rs b/crates/miden-lib/src/transaction/procedures/kernel_v0.rs index 29d0e29a9..940da26a6 100644 --- a/crates/miden-lib/src/transaction/procedures/kernel_v0.rs +++ b/crates/miden-lib/src/transaction/procedures/kernel_v0.rs @@ -40,13 +40,13 @@ pub const KERNEL0_PROCEDURES: [Digest; 35] = [ // account_has_non_fungible_asset digest!("0x653ab7a20e5af62c0022850726fef4f5fd6468f9de4cfc43b8fb6b9ff12e6b32"), // faucet_mint_asset - digest!("0x499a9fa3f670529c79c0eaafb07170ce13e003c2b08dda2dc4c2c12b3d96b9af"), + digest!("0x2b755f5f43f1f8957225c7d4c2130a37a4ba15c1323703efe82d3ed676051535"), // faucet_burn_asset - digest!("0xa56c96b989d852fffad0b4ca17de4e15e5865b0e76ea0a40f03959c175bde175"), + digest!("0xdfa2005118b0bcefe94082b25b09bb3b47f525a026dffad60664d9bb679ca140"), // faucet_get_total_fungible_asset_issuance digest!("0x7c46ed8cc84a0c5439285f715d1c867eb71131e9f0b1bbd65acea9dddc35bd96"), // faucet_is_non_fungible_asset_issued - digest!("0x2ebb03e088454d8da766957f00c81c2a4c31b74e3f20285716b3f505c7394bc4"), + digest!("0xab87249b2584dc957876f0f2bfc697ad6a9a4d6710e751dc51272f1fbecce5b4"), // note_get_assets_info digest!("0x34e4f1ea83eb4342ab8f5acec89962b2ab4b56d9c631e807d8e4dc8efd270bf2"), // note_add_asset diff --git a/crates/miden-objects/Cargo.toml b/crates/miden-objects/Cargo.toml index 0a6085846..08587ef18 100644 --- a/crates/miden-objects/Cargo.toml +++ b/crates/miden-objects/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miden-objects" -version = "0.7.2" +version = "0.8.0" 
description = "Core components of the Miden rollup" readme = "README.md" categories = ["no-std"] diff --git a/crates/miden-objects/src/account/data.rs b/crates/miden-objects/src/account/file.rs similarity index 69% rename from crates/miden-objects/src/account/data.rs rename to crates/miden-objects/src/account/file.rs index 2f8789558..c174d0f23 100644 --- a/crates/miden-objects/src/account/data.rs +++ b/crates/miden-objects/src/account/file.rs @@ -15,22 +15,24 @@ use super::{ Account, AuthSecretKey, Word, }; -// ACCOUNT DATA +const MAGIC: &str = "acct"; + +// ACCOUNT FILE // ================================================================================================ -/// Account data contains a complete description of an account, including the [Account] struct as +/// Account file contains a complete description of an account, including the [Account] struct as /// well as account seed and account authentication info. /// /// The intent of this struct is to provide an easy way to serialize and deserialize all /// account-related data as a single unit (e.g., to/from files). #[derive(Debug, Clone)] -pub struct AccountData { +pub struct AccountFile { pub account: Account, pub account_seed: Option, pub auth_secret_key: AuthSecretKey, } -impl AccountData { +impl AccountFile { pub fn new(account: Account, account_seed: Option, auth: AuthSecretKey) -> Self { Self { account, @@ -38,15 +40,16 @@ impl AccountData { auth_secret_key: auth, } } +} - #[cfg(feature = "std")] - /// Serialises and writes binary AccountData to specified file +#[cfg(feature = "std")] +impl AccountFile { + /// Serializes and writes binary [AccountFile] to specified file pub fn write(&self, filepath: impl AsRef) -> io::Result<()> { fs::write(filepath, self.to_bytes()) } - #[cfg(feature = "std")] - /// Reads from file and tries to deserialise an AccountData + /// Reads from file and tries to deserialize an [AccountFile] pub fn read(filepath: impl AsRef) -> io::Result { let mut file = File::open(filepath)?; let mut buffer = Vec::new(); @@ -54,16 +57,17 @@ impl AccountData { file.read_to_end(&mut buffer)?; let mut reader = SliceReader::new(&buffer); - Ok(AccountData::read_from(&mut reader).map_err(|_| io::ErrorKind::InvalidData)?) + Ok(AccountFile::read_from(&mut reader).map_err(|_| io::ErrorKind::InvalidData)?) 
} } // SERIALIZATION // ================================================================================================ -impl Serializable for AccountData { +impl Serializable for AccountFile { fn write_into(&self, target: &mut W) { - let AccountData { + target.write_bytes(MAGIC.as_bytes()); + let AccountFile { account, account_seed, auth_secret_key: auth, @@ -75,8 +79,14 @@ impl Serializable for AccountData { } } -impl Deserializable for AccountData { +impl Deserializable for AccountFile { fn read_from(source: &mut R) -> Result { + let magic_value = source.read_string(4)?; + if magic_value != MAGIC { + return Err(DeserializationError::InvalidValue(format!( + "invalid account file marker: {magic_value}" + ))); + } let account = Account::read_from(source)?; let account_seed = >::read_from(source)?; let auth_secret_key = AuthSecretKey::read_from(source)?; @@ -102,14 +112,14 @@ mod tests { #[cfg(feature = "std")] use tempfile::tempdir; - use super::AccountData; + use super::AccountFile; use crate::{ account::{storage, Account, AccountCode, AccountId, AuthSecretKey, Felt, Word}, asset::AssetVault, testing::account_id::ACCOUNT_ID_REGULAR_ACCOUNT_IMMUTABLE_CODE_ON_CHAIN, }; - fn build_account_data() -> AccountData { + fn build_account_file() -> AccountFile { let id = AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_IMMUTABLE_CODE_ON_CHAIN).unwrap(); let code = AccountCode::mock(); @@ -121,19 +131,19 @@ mod tests { let account_seed = Some(Word::default()); let auth_secret_key = AuthSecretKey::RpoFalcon512(SecretKey::new()); - AccountData::new(account, account_seed, auth_secret_key) + AccountFile::new(account, account_seed, auth_secret_key) } #[test] fn test_serde() { - let account_data = build_account_data(); - let serialized = account_data.to_bytes(); - let deserialized = AccountData::read_from_bytes(&serialized).unwrap(); - assert_eq!(deserialized.account, account_data.account); - assert_eq!(deserialized.account_seed, account_data.account_seed); + let account_file = build_account_file(); + let serialized = account_file.to_bytes(); + let deserialized = AccountFile::read_from_bytes(&serialized).unwrap(); + assert_eq!(deserialized.account, account_file.account); + assert_eq!(deserialized.account_seed, account_file.account_seed); assert_eq!( deserialized.auth_secret_key.to_bytes(), - account_data.auth_secret_key.to_bytes() + account_file.auth_secret_key.to_bytes() ); } @@ -141,17 +151,17 @@ mod tests { #[test] fn test_serde_file() { let dir = tempdir().unwrap(); - let filepath = dir.path().join("account_data.mac"); + let filepath = dir.path().join("account_file.mac"); - let account_data = build_account_data(); - account_data.write(filepath.as_path()).unwrap(); - let deserialized = AccountData::read(filepath.as_path()).unwrap(); + let account_file = build_account_file(); + account_file.write(filepath.as_path()).unwrap(); + let deserialized = AccountFile::read(filepath.as_path()).unwrap(); - assert_eq!(deserialized.account, account_data.account); - assert_eq!(deserialized.account_seed, account_data.account_seed); + assert_eq!(deserialized.account, account_file.account); + assert_eq!(deserialized.account_seed, account_file.account_seed); assert_eq!( deserialized.auth_secret_key.to_bytes(), - account_data.auth_secret_key.to_bytes() + account_file.auth_secret_key.to_bytes() ); } } diff --git a/crates/miden-objects/src/account/mod.rs b/crates/miden-objects/src/account/mod.rs index 3bb2f2fee..2893733c5 100644 --- a/crates/miden-objects/src/account/mod.rs +++ b/crates/miden-objects/src/account/mod.rs @@ 
-39,8 +39,8 @@ pub use storage::{AccountStorage, AccountStorageHeader, StorageMap, StorageSlot, mod header; pub use header::AccountHeader; -mod data; -pub use data::AccountData; +mod file; +pub use file::AccountFile; // ACCOUNT // ================================================================================================ diff --git a/crates/miden-objects/src/batch/account_update.rs b/crates/miden-objects/src/batch/account_update.rs new file mode 100644 index 000000000..0dab76cd5 --- /dev/null +++ b/crates/miden-objects/src/batch/account_update.rs @@ -0,0 +1,161 @@ +use alloc::vec::Vec; + +use vm_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; +use vm_processor::{DeserializationError, Digest}; + +use crate::{ + account::{delta::AccountUpdateDetails, AccountId}, + errors::BatchAccountUpdateError, + transaction::{ProvenTransaction, TransactionId}, +}; + +// BATCH ACCOUNT UPDATE +// ================================================================================================ + +/// Represents the changes made to an account resulting from executing a batch of transactions. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BatchAccountUpdate { + /// ID of the updated account. + account_id: AccountId, + + /// Commitment to the state of the account before this update is applied. + /// + /// Equal to `Digest::default()` for new accounts. + initial_state_commitment: Digest, + + /// Commitment to the state of the account after this update is applied. + final_state_commitment: Digest, + + /// IDs of all transactions that updated the account. + transactions: Vec, + + /// A set of changes which can be applied to the previous account state (i.e. `initial_state`) + /// to get the new account state. For private accounts, this is set to + /// [`AccountUpdateDetails::Private`]. + details: AccountUpdateDetails, +} + +impl BatchAccountUpdate { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a [`BatchAccountUpdate`] by cloning the update and other details from the provided + /// [`ProvenTransaction`]. + pub fn from_transaction(transaction: &ProvenTransaction) -> Self { + Self { + account_id: transaction.account_id(), + initial_state_commitment: transaction.account_update().init_state_hash(), + final_state_commitment: transaction.account_update().final_state_hash(), + transactions: vec![transaction.id()], + details: transaction.account_update().details().clone(), + } + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the ID of the updated account. + pub fn account_id(&self) -> AccountId { + self.account_id + } + + /// Returns a commitment to the state of the account before this update is applied. + /// + /// This is equal to [`Digest::default()`] for new accounts. + pub fn initial_state_commitment(&self) -> Digest { + self.initial_state_commitment + } + + /// Returns a commitment to the state of the account after this update is applied. + pub fn final_state_commitment(&self) -> Digest { + self.final_state_commitment + } + + /// Returns a slice of [`TransactionId`]s that updated this account's state. + pub fn transactions(&self) -> &[TransactionId] { + &self.transactions + } + + /// Returns the contained [`AccountUpdateDetails`]. + /// + /// This update can be used to build the new account state from the previous account state. 
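+    ///
+    /// A sketch of the intended flow (transactions `tx_a` and `tx_b` are hypothetical, and the
+    /// example is not compiled as a doctest):
+    ///
+    /// ```ignore
+    /// // Fold two transactions against the same account into one update, then read the
+    /// // merged details to move the account from its pre-batch state to the final state.
+    /// let mut update = BatchAccountUpdate::from_transaction(&tx_a);
+    /// update.merge_proven_tx(&tx_b)?;
+    /// let details = update.details();
+    /// ```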
+    pub fn details(&self) -> &AccountUpdateDetails {
+        &self.details
+    }
+
+    /// Returns `true` if the account update details are for a private account.
+    pub fn is_private(&self) -> bool {
+        self.details.is_private()
+    }
+
+    // MUTATORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Merges the transaction's update into this account update.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if:
+    /// - The account ID of the merging transaction does not match the account ID of the existing
+    ///   update.
+    /// - The merging transaction's initial state commitment does not match the final state
+    ///   commitment of the current update.
+    /// - The underlying [`AccountUpdateDetails::merge`] fails.
+    pub fn merge_proven_tx(
+        &mut self,
+        tx: &ProvenTransaction,
+    ) -> Result<(), BatchAccountUpdateError> {
+        if self.account_id != tx.account_id() {
+            return Err(BatchAccountUpdateError::AccountUpdateIdMismatch {
+                transaction: tx.id(),
+                expected_account_id: self.account_id,
+                actual_account_id: tx.account_id(),
+            });
+        }
+
+        if self.final_state_commitment != tx.account_update().init_state_hash() {
+            return Err(BatchAccountUpdateError::AccountUpdateInitialStateMismatch(tx.id()));
+        }
+
+        self.details = self.details.clone().merge(tx.account_update().details().clone()).map_err(
+            |source_err| BatchAccountUpdateError::TransactionUpdateMergeError(tx.id(), source_err),
+        )?;
+        self.final_state_commitment = tx.account_update().final_state_hash();
+        self.transactions.push(tx.id());
+
+        Ok(())
+    }
+
+    // CONVERSIONS
+    // --------------------------------------------------------------------------------------------
+
+    /// Consumes the update and returns the non-[`Copy`] parts.
+    pub fn into_parts(self) -> (Vec<TransactionId>, AccountUpdateDetails) {
+        (self.transactions, self.details)
+    }
+}
+
+// SERIALIZATION
+// ================================================================================================
+
+impl Serializable for BatchAccountUpdate {
+    fn write_into<W: ByteWriter>(&self, target: &mut W) {
+        self.account_id.write_into(target);
+        self.initial_state_commitment.write_into(target);
+        self.final_state_commitment.write_into(target);
+        self.transactions.write_into(target);
+        self.details.write_into(target);
+    }
+}
+
+impl Deserializable for BatchAccountUpdate {
+    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
+        Ok(Self {
+            account_id: AccountId::read_from(source)?,
+            initial_state_commitment: Digest::read_from(source)?,
+            final_state_commitment: Digest::read_from(source)?,
+            transactions: <Vec<TransactionId>>::read_from(source)?,
+            details: AccountUpdateDetails::read_from(source)?,
+        })
+    }
+}
diff --git a/crates/miden-objects/src/batch/batch_id.rs b/crates/miden-objects/src/batch/batch_id.rs
new file mode 100644
index 000000000..ae7100eab
--- /dev/null
+++ b/crates/miden-objects/src/batch/batch_id.rs
@@ -0,0 +1,65 @@
+use alloc::{string::String, vec::Vec};
+
+use vm_core::{Felt, ZERO};
+use vm_processor::Digest;
+
+use crate::{
+    account::AccountId,
+    transaction::{ProvenTransaction, TransactionId},
+    Hasher,
+};
+
+// BATCH ID
+// ================================================================================================
+
+/// Uniquely identifies a batch of transactions, i.e. both
+/// [`ProposedBatch`](crate::batch::ProposedBatch) and [`ProvenBatch`](crate::batch::ProvenBatch).
+///
+/// This is a sequential hash of the tuple `(TRANSACTION_ID || [account_id_prefix,
+/// account_id_suffix, 0, 0])` of all transactions and the accounts they are executed against in
+/// the batch.
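+///
+/// For example (a sketch of the layout, not normative): for two transactions `t1` and `t2`,
+/// the hashed felt sequence is `t1.id elements || [t1_account_prefix, t1_account_suffix, 0, 0]
+/// || t2.id elements || [t2_account_prefix, t2_account_suffix, 0, 0]`.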
+#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd)] +pub struct BatchId(Digest); + +impl BatchId { + /// Calculates a batch ID from the given set of transactions. + pub fn from_transactions<'tx, T>(txs: T) -> Self + where + T: Iterator, + { + Self::from_ids(txs.map(|tx| (tx.id(), tx.account_id()))) + } + + /// Calculates a batch ID from the given transaction ID and account ID tuple. + pub fn from_ids(iter: impl Iterator) -> Self { + let mut elements: Vec = Vec::new(); + for (tx_id, account_id) in iter { + elements.extend_from_slice(tx_id.as_elements()); + let [account_id_prefix, account_id_suffix] = <[Felt; 2]>::from(account_id); + elements.extend_from_slice(&[account_id_prefix, account_id_suffix, ZERO, ZERO]); + } + + Self(Hasher::hash_elements(&elements)) + } + + /// Returns the elements representation of this batch ID. + pub fn as_elements(&self) -> &[Felt] { + self.0.as_elements() + } + + /// Returns the byte representation of this batch ID. + pub fn as_bytes(&self) -> [u8; 32] { + self.0.as_bytes() + } + + /// Returns a big-endian, hex-encoded string. + pub fn to_hex(&self) -> String { + self.0.to_hex() + } +} + +impl core::fmt::Display for BatchId { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.to_hex()) + } +} diff --git a/crates/miden-objects/src/batch/mod.rs b/crates/miden-objects/src/batch/mod.rs index 23c43becc..dd0c0d433 100644 --- a/crates/miden-objects/src/batch/mod.rs +++ b/crates/miden-objects/src/batch/mod.rs @@ -1,2 +1,14 @@ mod note_tree; pub use note_tree::BatchNoteTree; + +mod batch_id; +pub use batch_id::BatchId; + +mod account_update; +pub use account_update::BatchAccountUpdate; + +mod proven_batch; +pub use proven_batch::ProvenBatch; + +mod proposed_batch; +pub use proposed_batch::ProposedBatch; diff --git a/crates/miden-objects/src/batch/note_tree.rs b/crates/miden-objects/src/batch/note_tree.rs index a0d0b5536..e6e98de31 100644 --- a/crates/miden-objects/src/batch/note_tree.rs +++ b/crates/miden-objects/src/batch/note_tree.rs @@ -35,4 +35,9 @@ impl BatchNoteTree { pub fn root(&self) -> RpoDigest { self.0.root() } + + /// Returns the number of non-empty leaves in this tree. + pub fn num_leaves(&self) -> usize { + self.0.num_leaves() + } } diff --git a/crates/miden-objects/src/batch/proposed_batch.rs b/crates/miden-objects/src/batch/proposed_batch.rs new file mode 100644 index 000000000..39aba4bd1 --- /dev/null +++ b/crates/miden-objects/src/batch/proposed_batch.rs @@ -0,0 +1,508 @@ +use alloc::{ + collections::{btree_map::Entry, BTreeMap, BTreeSet}, + sync::Arc, + vec::Vec, +}; + +use crate::{ + account::AccountId, + batch::{BatchAccountUpdate, BatchId, BatchNoteTree}, + block::{BlockHeader, BlockNumber}, + errors::ProposedBatchError, + note::{NoteHeader, NoteId, NoteInclusionProof}, + transaction::{ + ChainMmr, InputNoteCommitment, InputNotes, OutputNote, ProvenTransaction, TransactionId, + }, + MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, +}; + +/// A proposed batch of transactions with all necessary data to validate it. +/// +/// See [`ProposedBatch::new`] for what a proposed batch expects and guarantees. +/// +/// This type is fairly large, so consider boxing it. +#[derive(Debug, Clone)] +pub struct ProposedBatch { + /// The transactions of this batch. + transactions: Vec>, + /// The header is boxed as it has a large stack size. 
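+    /// (The boxing suggestion applies to the containing `ProposedBatch` as a whole, per the
+    /// struct-level note above; the header itself is stored inline in this field.)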
+    block_header: BlockHeader,
+    /// The chain MMR used to authenticate:
+    /// - all unauthenticated notes that can be authenticated,
+    /// - all block hashes referenced by the transactions in the batch.
+    chain_mmr: ChainMmr,
+    /// The note inclusion proofs for unauthenticated notes that were consumed in the batch and
+    /// can be authenticated.
+    unauthenticated_note_proofs: BTreeMap<NoteId, NoteInclusionProof>,
+    /// The ID of the batch, which is a cryptographic commitment to the transactions in the batch.
+    id: BatchId,
+    /// A map from account IDs updated in this batch to the aggregated update from all
+    /// transactions that touched the account.
+    account_updates: BTreeMap<AccountId, BatchAccountUpdate>,
+    /// The block number at which the batch will expire. This is the minimum of all transactions'
+    /// expiration block numbers.
+    batch_expiration_block_num: BlockNumber,
+    /// The input note commitment of the transaction batch. This consists of all authenticated
+    /// notes that transactions in the batch consume, as well as unauthenticated notes whose
+    /// authentication is delayed to the block kernel.
+    input_notes: InputNotes<InputNoteCommitment>,
+    /// The SMT over the output notes of this batch.
+    output_notes_tree: BatchNoteTree,
+    /// The output notes of this batch. This consists of all notes created by transactions in the
+    /// batch that are not consumed within the same batch.
+    output_notes: Vec<OutputNote>,
+}
+
+impl ProposedBatch {
+    // CONSTRUCTORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Creates a new [`ProposedBatch`] from the provided parts.
+    ///
+    /// # Inputs
+    ///
+    /// - The given transactions must be correctly ordered. That is, if two transactions A and B
+    ///   update the same account in this order, meaning A's initial account state commitment
+    ///   matches the account state before any transactions are executed and B's initial account
+    ///   state commitment matches the final account state commitment of A, then A must come before
+    ///   B.
+    /// - The chain MMR should contain all block headers
+    ///   - that are referenced by note inclusion proofs in `unauthenticated_note_proofs`.
+    ///   - that are referenced by a transaction in the batch.
+    /// - The `unauthenticated_note_proofs` should contain [`NoteInclusionProof`]s for any
+    ///   unauthenticated note consumed by the transactions in the batch which can be
+    ///   authenticated. This means it is not required that every unauthenticated note has an entry
+    ///   in this map, for two reasons:
+    ///   - Unauthenticated note authentication can be delayed to the block kernel.
+    ///   - Another transaction in the batch creates an output note matching an unauthenticated
+    ///     input note, in which case inclusion in the chain does not need to be proven.
+    /// - The block header's block number must be greater or equal to the highest block number
+    ///   referenced by any transaction. This is not verified explicitly, but will implicitly cause
+    ///   an error while validating that each reference block of a transaction is in the chain MMR.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if:
+    ///
+    /// - The number of input notes exceeds [`MAX_INPUT_NOTES_PER_BATCH`].
+    ///   - Note that unauthenticated notes that are created in the same batch do not count. Any
+    ///     other input notes, unauthenticated or not, do count.
+    /// - The number of output notes exceeds [`MAX_OUTPUT_NOTES_PER_BATCH`].
+    ///   - Note that output notes that are consumed in the same batch as unauthenticated input
+    ///     notes do not count.
+    /// - Any note is consumed more than once.
+ /// - Any note is created more than once. + /// - The number of account updates exceeds [`MAX_ACCOUNTS_PER_BATCH`]. + /// - Note that any number of transactions against the same account count as one update. + /// - The chain MMRs chain length does not match the block header's block number. This means the + /// chain MMR should not contain the block header itself as it is added to the MMR in the + /// batch kernel. + /// - The chain MMRs hashed peaks do not match the block header's chain root. + /// - The reference block of any transaction is not in the chain MMR. + /// - The note inclusion proof for an unauthenticated note fails to verify. + /// - The block referenced by a note inclusion proof for an unauthenticated note is missing from + /// the chain MMR. + /// - The transactions in the proposed batch which update the same account are not correctly + /// ordered. + /// - The provided list of transactions is empty. An empty batch is pointless and would + /// potentially result in the same [`BatchId`] for two empty batches which would mean batch + /// IDs are no longer unique. + /// - There are duplicate transactions. + pub fn new( + transactions: Vec>, + block_header: BlockHeader, + chain_mmr: ChainMmr, + unauthenticated_note_proofs: BTreeMap, + ) -> Result { + // Check for empty or duplicate transactions. + // -------------------------------------------------------------------------------------------- + + if transactions.is_empty() { + return Err(ProposedBatchError::EmptyTransactionBatch); + } + + let mut transaction_set = BTreeSet::new(); + for tx in transactions.iter() { + if !transaction_set.insert(tx.id()) { + return Err(ProposedBatchError::DuplicateTransaction { transaction_id: tx.id() }); + } + } + + // Verify block header and chain MMR match. + // -------------------------------------------------------------------------------------------- + + if chain_mmr.chain_length() != block_header.block_num() { + return Err(ProposedBatchError::InconsistentChainLength { + expected: block_header.block_num(), + actual: chain_mmr.chain_length(), + }); + } + + let hashed_peaks = chain_mmr.peaks().hash_peaks(); + if hashed_peaks != block_header.chain_root() { + return Err(ProposedBatchError::InconsistentChainRoot { + expected: block_header.chain_root(), + actual: hashed_peaks, + }); + } + + // Verify all block references from the transactions are in the chain. + // -------------------------------------------------------------------------------------------- + + // Aggregate block references into a set since the chain MMR does not index by hash. + let mut block_references = + BTreeSet::from_iter(chain_mmr.block_headers().map(BlockHeader::hash)); + // Insert the block referenced by the batch to consider it authenticated. We can assume this + // because the block kernel will verify the block hash as it is a public input to the batch + // kernel. + block_references.insert(block_header.hash()); + + for tx in transactions.iter() { + if !block_references.contains(&tx.block_ref()) { + return Err(ProposedBatchError::MissingTransactionBlockReference { + block_reference: tx.block_ref(), + transaction_id: tx.id(), + }); + } + } + + // Aggregate individual tx-level account updates into a batch-level account update - one per + // account. + // -------------------------------------------------------------------------------------------- + + // Populate batch output notes and updated accounts. 
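+        // As a concrete (hypothetical) example: if tx_1 moves the account from state
+        // commitment A to B and tx_2 moves it from B to C, the loop below folds both into a
+        // single BatchAccountUpdate recording the transition A -> C along with both
+        // transaction IDs.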
+ let mut account_updates = BTreeMap::::new(); + let mut batch_expiration_block_num = BlockNumber::from(u32::MAX); + for tx in transactions.iter() { + // Merge account updates so that state transitions A->B->C become A->C. + match account_updates.entry(tx.account_id()) { + Entry::Vacant(vacant) => { + let batch_account_update = BatchAccountUpdate::from_transaction(tx); + vacant.insert(batch_account_update); + }, + Entry::Occupied(occupied) => { + // This returns an error if the transactions are not correctly ordered, e.g. if + // B comes before A. + occupied.into_mut().merge_proven_tx(tx).map_err(|source| { + ProposedBatchError::AccountUpdateError { + account_id: tx.account_id(), + source, + } + })?; + }, + }; + + // The expiration block of the batch is the minimum of all transaction's expiration + // block. + batch_expiration_block_num = batch_expiration_block_num.min(tx.expiration_block_num()); + } + + if account_updates.len() > MAX_ACCOUNTS_PER_BATCH { + return Err(ProposedBatchError::TooManyAccountUpdates(account_updates.len())); + } + + // Check for duplicates in input notes. + // -------------------------------------------------------------------------------------------- + + // Check for duplicate input notes both within a transaction and across transactions. + // This also includes authenticated notes, as the transaction kernel doesn't check for + // duplicates. + let mut input_note_map = BTreeMap::new(); + + for tx in transactions.iter() { + for note in tx.input_notes() { + let nullifier = note.nullifier(); + if let Some(first_transaction_id) = input_note_map.insert(nullifier, tx.id()) { + return Err(ProposedBatchError::DuplicateInputNote { + note_nullifier: nullifier, + first_transaction_id, + second_transaction_id: tx.id(), + }); + } + } + } + + // Create input and output note set of the batch. + // -------------------------------------------------------------------------------------------- + + // Check for duplicate output notes and remove all output notes from the batch output note + // set that are consumed by transactions. + let mut output_notes = BatchOutputNoteTracker::new(transactions.iter().map(AsRef::as_ref))?; + let mut input_notes = vec![]; + + for tx in transactions.iter() { + for input_note in tx.input_notes().iter() { + // Header is present only for unauthenticated input notes. + let input_note = match input_note.header() { + Some(input_note_header) => { + if output_notes.remove_note(input_note_header)? { + // If a transaction consumes an unauthenticated note that is also + // created in this batch, it is removed from the set of output notes. + // We `continue` so that the input note is not added to the set of input + // notes of the batch. That way the note appears in neither input nor + // output set. + continue; + } + + // If an inclusion proof for an unauthenticated note is provided and the + // proof is valid, it means the note is part of the chain and we can mark it + // as authenticated by erasing the note header. + if let Some(proof) = + unauthenticated_note_proofs.get(&input_note_header.id()) + { + let note_block_header = chain_mmr + .get_block(proof.location().block_num()) + .ok_or_else(|| { + ProposedBatchError::UnauthenticatedInputNoteBlockNotInChainMmr { + block_number: proof.location().block_num(), + note_id: input_note_header.id(), + } + })?; + + authenticate_unauthenticated_note( + input_note_header, + proof, + note_block_header, + )?; + + // Erase the note header from the input note. 
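+                            // Building the commitment from the nullifier alone drops the
+                            // note header; an input note without a header is treated as
+                            // authenticated downstream.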
+ InputNoteCommitment::from(input_note.nullifier()) + } else { + input_note.clone() + } + }, + None => input_note.clone(), + }; + input_notes.push(input_note); + } + } + + let output_notes = output_notes.into_notes(); + + if input_notes.len() > MAX_INPUT_NOTES_PER_BATCH { + return Err(ProposedBatchError::TooManyInputNotes(input_notes.len())); + } + // SAFETY: This is safe as we have checked for duplicates and the max number of input notes + // in a batch. + let input_notes = InputNotes::new_unchecked(input_notes); + + if output_notes.len() > MAX_OUTPUT_NOTES_PER_BATCH { + return Err(ProposedBatchError::TooManyOutputNotes(output_notes.len())); + } + + // Build the output notes SMT. + // -------------------------------------------------------------------------------------------- + + // SAFETY: We can `expect` here because: + // - the batch output note tracker already returns an error for duplicate output notes, + // - we have checked that the number of output notes is <= 2^BATCH_NOTE_TREE_DEPTH. + let output_notes_tree = BatchNoteTree::with_contiguous_leaves( + output_notes.iter().map(|note| (note.id(), note.metadata())), + ) + .expect("there should be no duplicate notes and there should be <= 2^BATCH_NOTE_TREE_DEPTH notes"); + + // Compute batch ID. + // -------------------------------------------------------------------------------------------- + + let id = BatchId::from_transactions(transactions.iter().map(AsRef::as_ref)); + + Ok(Self { + id, + transactions, + block_header, + chain_mmr, + unauthenticated_note_proofs, + account_updates, + batch_expiration_block_num, + input_notes, + output_notes, + output_notes_tree, + }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns a slice of the [`ProvenTransaction`]s in the batch. + pub fn transactions(&self) -> &[Arc] { + &self.transactions + } + + /// Returns the map of account IDs mapped to their [`BatchAccountUpdate`]s. + /// + /// If an account was updated by multiple transactions, the [`BatchAccountUpdate`] is the result + /// of merging the individual updates. + /// + /// For example, suppose an account's state before this batch is `A` and the batch contains two + /// transactions that updated it. Applying the first transaction results in intermediate state + /// `B`, and applying the second one results in state `C`. Then the returned update represents + /// the state transition from `A` to `C`. + pub fn account_updates(&self) -> &BTreeMap { + &self.account_updates + } + + /// The ID of this batch. See [`BatchId`] for details on how it is computed. + pub fn id(&self) -> BatchId { + self.id + } + + /// Returns the block number at which the batch will expire. + pub fn batch_expiration_block_num(&self) -> BlockNumber { + self.batch_expiration_block_num + } + + /// Returns the [`InputNotes`] of this batch. + pub fn input_notes(&self) -> &InputNotes { + &self.input_notes + } + + /// Returns the output notes of the batch. + /// + /// This is the aggregation of all output notes by the transactions in the batch, except the + /// ones that were consumed within the batch itself. + pub fn output_notes(&self) -> &[OutputNote] { + &self.output_notes + } + + /// Returns the [`BatchNoteTree`] representing the output notes of the batch. + pub fn output_notes_tree(&self) -> &BatchNoteTree { + &self.output_notes_tree + } + + /// Consumes the proposed batch and returns its underlying parts. 
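+    ///
+    /// A sketch of destructuring the returned tuple (field order follows the signature below;
+    /// not compiled as a doctest):
+    ///
+    /// ```ignore
+    /// let (txs, block_header, chain_mmr, note_proofs, batch_id, account_updates,
+    ///     input_notes, output_notes_tree, output_notes, expiration_block_num) =
+    ///     proposed_batch.into_parts();
+    /// ```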
+ #[allow(clippy::type_complexity)] + pub fn into_parts( + self, + ) -> ( + Vec>, + BlockHeader, + ChainMmr, + BTreeMap, + BatchId, + BTreeMap, + InputNotes, + BatchNoteTree, + Vec, + BlockNumber, + ) { + ( + self.transactions, + self.block_header, + self.chain_mmr, + self.unauthenticated_note_proofs, + self.id, + self.account_updates, + self.input_notes, + self.output_notes_tree, + self.output_notes, + self.batch_expiration_block_num, + ) + } +} + +// BATCH OUTPUT NOTE TRACKER +// ================================================================================================ + +/// A helper struct to track output notes. +/// Its main purpose is to check for duplicates and allow for removal of output notes that are +/// consumed in the same batch, so are not output notes of the batch. +/// +/// The approach for this is that the output note set is initialized to the union of all output +/// notes of the transactions in the batch. +/// Then (outside of this struct) all input notes of transactions in the batch which are also output +/// notes can be removed, as they are considered consumed within the batch and will not be visible +/// as created or consumed notes for the batch. +#[derive(Debug)] +struct BatchOutputNoteTracker { + /// An index from [`NoteId`]s to the transaction that creates the note and the note itself. + /// The transaction ID is tracked to produce better errors when a duplicate note is + /// encountered. + output_notes: BTreeMap, +} + +impl BatchOutputNoteTracker { + /// Constructs a new output note tracker from the given transactions. + /// + /// # Errors + /// + /// Returns an error if: + /// - any output note is created more than once (by the same or different transactions). + fn new<'a>( + txs: impl Iterator, + ) -> Result { + let mut output_notes = BTreeMap::new(); + for tx in txs { + for note in tx.output_notes().iter() { + if let Some((first_transaction_id, _)) = + output_notes.insert(note.id(), (tx.id(), note.clone())) + { + return Err(ProposedBatchError::DuplicateOutputNote { + note_id: note.id(), + first_transaction_id, + second_transaction_id: tx.id(), + }); + } + } + } + + Ok(Self { output_notes }) + } + + /// Attempts to remove the given input note from the output note set. + /// + /// Returns `true` if the given note existed in the output note set and was removed from it, + /// `false` otherwise. + /// + /// # Errors + /// + /// Returns an error if: + /// - the given note has a corresponding note in the output note set with the same [`NoteId`] + /// but their hashes differ (i.e. their metadata is different). + pub fn remove_note( + &mut self, + input_note_header: &NoteHeader, + ) -> Result { + let id = input_note_header.id(); + if let Some((_, output_note)) = self.output_notes.remove(&id) { + // Check if the notes with the same ID have differing hashes. + // This could happen if the metadata of the notes is different, which we consider an + // error. + let input_hash = input_note_header.hash(); + let output_hash = output_note.hash(); + if output_hash != input_hash { + return Err(ProposedBatchError::NoteHashesMismatch { id, input_hash, output_hash }); + } + + return Ok(true); + } + + Ok(false) + } + + /// Consumes the tracker and returns a [`Vec`] of output notes sorted by [`NoteId`]. 
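+    ///
+    /// The sort order falls out of the underlying `BTreeMap`, which iterates its entries in
+    /// ascending key order.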
+ pub fn into_notes(self) -> Vec { + self.output_notes.into_iter().map(|(_, (_, output_note))| output_note).collect() + } +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Validates whether the provided header of an unauthenticated note belongs to the note tree of the +/// specified block header. +fn authenticate_unauthenticated_note( + note_header: &NoteHeader, + proof: &NoteInclusionProof, + block_header: &BlockHeader, +) -> Result<(), ProposedBatchError> { + let note_index = proof.location().node_index_in_block().into(); + let note_hash = note_header.hash(); + proof + .note_path() + .verify(note_index, note_hash, &block_header.note_root()) + .map_err(|source| ProposedBatchError::UnauthenticatedNoteAuthenticationFailed { + note_id: note_header.id(), + block_num: proof.location().block_num(), + source, + }) +} diff --git a/crates/miden-objects/src/batch/proven_batch.rs b/crates/miden-objects/src/batch/proven_batch.rs new file mode 100644 index 000000000..162247100 --- /dev/null +++ b/crates/miden-objects/src/batch/proven_batch.rs @@ -0,0 +1,93 @@ +use alloc::{collections::BTreeMap, vec::Vec}; + +use crate::{ + account::AccountId, + batch::{BatchAccountUpdate, BatchId, BatchNoteTree}, + block::BlockNumber, + note::Nullifier, + transaction::{InputNoteCommitment, InputNotes, OutputNote}, +}; + +/// A transaction batch with an execution proof. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ProvenBatch { + id: BatchId, + account_updates: BTreeMap, + input_notes: InputNotes, + output_notes_smt: BatchNoteTree, + output_notes: Vec, + batch_expiration_block_num: BlockNumber, +} + +impl ProvenBatch { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`ProvenBatch`] from the provided parts. + pub fn new( + id: BatchId, + account_updates: BTreeMap, + input_notes: InputNotes, + output_notes_smt: BatchNoteTree, + output_notes: Vec, + batch_expiration_block_num: BlockNumber, + ) -> Self { + Self { + id, + account_updates, + input_notes, + output_notes_smt, + output_notes, + batch_expiration_block_num, + } + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// The ID of this batch. See [`BatchId`] for details on how it is computed. + pub fn id(&self) -> BatchId { + self.id + } + + /// Returns the block number at which the batch will expire. + pub fn batch_expiration_block_num(&self) -> BlockNumber { + self.batch_expiration_block_num + } + + /// Returns the map of account IDs mapped to their [`BatchAccountUpdate`]s. + /// + /// If an account was updated by multiple transactions, the [`BatchAccountUpdate`] is the result + /// of merging the individual updates. + /// + /// For example, suppose an account's state before this batch is `A` and the batch contains two + /// transactions that updated it. Applying the first transaction results in intermediate state + /// `B`, and applying the second one results in state `C`. Then the returned update represents + /// the state transition from `A` to `C`. + pub fn account_updates(&self) -> &BTreeMap { + &self.account_updates + } + + /// Returns the [`InputNotes`] of this batch. + pub fn input_notes(&self) -> &InputNotes { + &self.input_notes + } + + /// Returns an iterator over the nullifiers produced in this batch. 
+ pub fn produced_nullifiers(&self) -> impl Iterator + use<'_> { + self.input_notes.iter().map(InputNoteCommitment::nullifier) + } + + /// Returns the output notes of the batch. + /// + /// This is the aggregation of all output notes by the transactions in the batch, except the + /// ones that were consumed within the batch itself. + pub fn output_notes(&self) -> &[OutputNote] { + &self.output_notes + } + + /// Returns the [`BatchNoteTree`] representing the output notes of the batch. + pub fn output_notes_tree(&self) -> &BatchNoteTree { + &self.output_notes_smt + } +} diff --git a/crates/miden-objects/src/block/block_number.rs b/crates/miden-objects/src/block/block_number.rs index 851ed4f00..656af8049 100644 --- a/crates/miden-objects/src/block/block_number.rs +++ b/crates/miden-objects/src/block/block_number.rs @@ -40,11 +40,6 @@ impl BlockNumber { BlockNumber((epoch as u32) << BlockNumber::EPOCH_LENGTH_EXPONENT) } - /// Creates a `BlockNumber` from a `usize`. - pub fn from_usize(value: usize) -> Self { - BlockNumber(value as u32) - } - /// Returns the epoch to which this block number belongs. pub const fn block_epoch(&self) -> u16 { (self.0 >> BlockNumber::EPOCH_LENGTH_EXPONENT) as u16 diff --git a/crates/miden-objects/src/block/mod.rs b/crates/miden-objects/src/block/mod.rs index 500c24a0e..f80cede62 100644 --- a/crates/miden-objects/src/block/mod.rs +++ b/crates/miden-objects/src/block/mod.rs @@ -7,8 +7,10 @@ use super::{ mod header; pub use header::BlockHeader; + mod block_number; pub use block_number::BlockNumber; + mod note_tree; pub use note_tree::{BlockNoteIndex, BlockNoteTree}; @@ -231,8 +233,8 @@ pub fn compute_tx_hash( ) -> Digest { let mut elements = vec![]; for (transaction_id, account_id) in updated_accounts { - let account_id_felts: [Felt; 2] = account_id.into(); - elements.extend_from_slice(&[account_id_felts[0], account_id_felts[1], ZERO, ZERO]); + let [account_id_prefix, account_id_suffix] = <[Felt; 2]>::from(account_id); + elements.extend_from_slice(&[account_id_prefix, account_id_suffix, ZERO, ZERO]); elements.extend_from_slice(transaction_id.as_elements()); } diff --git a/crates/miden-objects/src/errors.rs b/crates/miden-objects/src/errors.rs index 38da1fa1d..a47752c69 100644 --- a/crates/miden-objects/src/errors.rs +++ b/crates/miden-objects/src/errors.rs @@ -22,7 +22,9 @@ use crate::{ }, block::BlockNumber, note::{NoteAssets, NoteExecutionHint, NoteTag, NoteType, Nullifier}, - ACCOUNT_UPDATE_MAX_SIZE, MAX_INPUTS_PER_NOTE, MAX_INPUT_NOTES_PER_TX, MAX_OUTPUT_NOTES_PER_TX, + transaction::TransactionId, + ACCOUNT_UPDATE_MAX_SIZE, MAX_ACCOUNTS_PER_BATCH, MAX_INPUTS_PER_NOTE, + MAX_INPUT_NOTES_PER_BATCH, MAX_INPUT_NOTES_PER_TX, MAX_OUTPUT_NOTES_PER_TX, }; // ACCOUNT COMPONENT TEMPLATE ERROR @@ -182,6 +184,23 @@ pub enum AccountDeltaError { NotAFungibleFaucetId(AccountId), } +// BATCH ACCOUNT UPDATE ERROR +// ================================================================================================ + +#[derive(Debug, Error)] +pub enum BatchAccountUpdateError { + #[error("account update for account {expected_account_id} cannot be merged with update from transaction {transaction} which was executed against account {actual_account_id}")] + AccountUpdateIdMismatch { + transaction: TransactionId, + expected_account_id: AccountId, + actual_account_id: AccountId, + }, + #[error("final state commitment in account update from transaction {0} does not match initial state of current update")] + AccountUpdateInitialStateMismatch(TransactionId), + #[error("failed to merge account 
delta from transaction {0}")] + TransactionUpdateMergeError(TransactionId, #[source] AccountDeltaError), +} + // ASSET ERROR // ================================================================================================ @@ -429,6 +448,90 @@ pub enum ProvenTransactionError { }, } +// BATCH ERROR +// ================================================================================================ + +#[derive(Debug, Error)] +pub enum ProposedBatchError { + #[error( + "transaction batch has {0} input notes but at most {MAX_INPUT_NOTES_PER_BATCH} are allowed" + )] + TooManyInputNotes(usize), + + #[error( + "transaction batch has {0} output notes but at most {MAX_OUTPUT_NOTES_PER_BATCH} are allowed" + )] + TooManyOutputNotes(usize), + + #[error( + "transaction batch has {0} account updates but at most {MAX_ACCOUNTS_PER_BATCH} are allowed" + )] + TooManyAccountUpdates(usize), + + #[error("transaction batch must contain at least one transaction")] + EmptyTransactionBatch, + + #[error("transaction {transaction_id} appears twice in the proposed batch input")] + DuplicateTransaction { transaction_id: TransactionId }, + + #[error("transaction {second_transaction_id} consumes the note with nullifier {note_nullifier} that is also consumed by another transaction {first_transaction_id} in the batch")] + DuplicateInputNote { + note_nullifier: Nullifier, + first_transaction_id: TransactionId, + second_transaction_id: TransactionId, + }, + + #[error("transaction {second_transaction_id} creates the note with id {note_id} that is also created by another transaction {first_transaction_id} in the batch")] + DuplicateOutputNote { + note_id: NoteId, + first_transaction_id: TransactionId, + second_transaction_id: TransactionId, + }, + + #[error("note hashes mismatch for note {id}: (input: {input_hash}, output: {output_hash})")] + NoteHashesMismatch { + id: NoteId, + input_hash: Digest, + output_hash: Digest, + }, + + #[error("failed to merge transaction delta into account {account_id}")] + AccountUpdateError { + account_id: AccountId, + source: BatchAccountUpdateError, + }, + + #[error("unable to prove unauthenticated note inclusion because block {block_number} in which note with id {note_id} was created is not in chain mmr")] + UnauthenticatedInputNoteBlockNotInChainMmr { + block_number: BlockNumber, + note_id: NoteId, + }, + + #[error( + "unable to prove unauthenticated note inclusion of note {note_id} in block {block_num}" + )] + UnauthenticatedNoteAuthenticationFailed { + note_id: NoteId, + block_num: BlockNumber, + source: MerkleError, + }, + + #[error("chain mmr has length {actual} which does not match block number {expected} ")] + InconsistentChainLength { + expected: BlockNumber, + actual: BlockNumber, + }, + + #[error("chain mmr has root {actual} which does not match block header's root {expected}")] + InconsistentChainRoot { expected: Digest, actual: Digest }, + + #[error("block {block_reference} referenced by transaction {transaction_id} is not in the chain mmr")] + MissingTransactionBlockReference { + block_reference: Digest, + transaction_id: TransactionId, + }, +} + // BLOCK VALIDATION ERROR // ================================================================================================ diff --git a/crates/miden-objects/src/lib.rs b/crates/miden-objects/src/lib.rs index 069a43230..c0753ca92 100644 --- a/crates/miden-objects/src/lib.rs +++ b/crates/miden-objects/src/lib.rs @@ -24,9 +24,9 @@ mod errors; pub use constants::*; pub use errors::{ - AccountDeltaError, AccountError, AccountIdError, 
AssetError, AssetVaultError, BlockError, - ChainMmrError, NoteError, ProvenTransactionError, TransactionInputError, - TransactionOutputError, TransactionScriptError, + AccountDeltaError, AccountError, AccountIdError, AssetError, AssetVaultError, + BatchAccountUpdateError, BlockError, ChainMmrError, NoteError, ProposedBatchError, + ProvenTransactionError, TransactionInputError, TransactionOutputError, TransactionScriptError, }; pub use miden_crypto::hash::rpo::{Rpo256 as Hasher, RpoDigest as Digest}; pub use vm_core::{Felt, FieldElement, StarkField, Word, EMPTY_WORD, ONE, WORD_SIZE, ZERO}; diff --git a/crates/miden-objects/src/note/file.rs b/crates/miden-objects/src/note/file.rs index f2dde3491..f124cec4c 100644 --- a/crates/miden-objects/src/note/file.rs +++ b/crates/miden-objects/src/note/file.rs @@ -1,9 +1,21 @@ +#[cfg(feature = "std")] +use std::{ + fs::{self, File}, + io::{self, Read}, + path::Path, + vec::Vec, +}; + +#[cfg(feature = "std")] +use vm_core::utils::SliceReader; use vm_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; use vm_processor::DeserializationError; use super::{Note, NoteDetails, NoteId, NoteInclusionProof, NoteTag}; use crate::block::BlockNumber; +const MAGIC: &str = "note"; + // NOTE FILE // ================================================================================================ @@ -28,6 +40,25 @@ pub enum NoteFile { NoteWithProof(Note, NoteInclusionProof), } +#[cfg(feature = "std")] +impl NoteFile { + /// Serializes and writes binary [NoteFile] to specified file + pub fn write(&self, filepath: impl AsRef) -> io::Result<()> { + fs::write(filepath, self.to_bytes()) + } + + /// Reads from file and tries to deserialize an [NoteFile] + pub fn read(filepath: impl AsRef) -> io::Result { + let mut file = File::open(filepath)?; + let mut buffer = Vec::new(); + + file.read_to_end(&mut buffer)?; + let mut reader = SliceReader::new(&buffer); + + Ok(NoteFile::read_from(&mut reader).map_err(|_| io::ErrorKind::InvalidData)?) + } +} + impl From for NoteFile { fn from(details: NoteDetails) -> Self { NoteFile::NoteDetails { @@ -49,7 +80,7 @@ impl From for NoteFile { impl Serializable for NoteFile { fn write_into(&self, target: &mut W) { - target.write_bytes("note".as_bytes()); + target.write_bytes(MAGIC.as_bytes()); match self { NoteFile::NoteId(note_id) => { target.write_u8(0); @@ -73,9 +104,9 @@ impl Serializable for NoteFile { impl Deserializable for NoteFile { fn read_from(source: &mut R) -> Result { let magic_value = source.read_string(4)?; - if magic_value != "note" { + if magic_value != MAGIC { return Err(DeserializationError::InvalidValue(format!( - "Invalid note file marker: {magic_value}" + "invalid note file marker: {magic_value}" ))); } match source.read_u8()? 
diff --git a/crates/miden-objects/src/testing/account.rs b/crates/miden-objects/src/testing/account.rs
index 3fb905249..de2ddc3af 100644
--- a/crates/miden-objects/src/testing/account.rs
+++ b/crates/miden-objects/src/testing/account.rs
@@ -13,7 +13,7 @@ use crate::{
         },
         storage::FAUCET_STORAGE_DATA_SLOT,
     },
-    Felt, Word, ZERO,
+    Felt, ZERO,
 };
 
 // MOCK ACCOUNT
@@ -73,7 +73,8 @@ impl Account {
         true => vec![],
         false => {
             let asset = NonFungibleAsset::mock(&constants::NON_FUNGIBLE_ASSET_DATA_2);
-            vec![(Word::from(asset).into(), asset.into())]
+            let vault_key = asset.vault_key();
+            vec![(vault_key.into(), asset.into())]
         },
     };
 
diff --git a/crates/miden-objects/src/testing/chain_mmr.rs b/crates/miden-objects/src/testing/chain_mmr.rs
new file mode 100644
index 000000000..833d7fe8e
--- /dev/null
+++ b/crates/miden-objects/src/testing/chain_mmr.rs
@@ -0,0 +1,46 @@
+use miden_crypto::merkle::{Mmr, PartialMmr};
+
+use crate::{block::BlockHeader, transaction::ChainMmr, ChainMmrError};
+
+impl ChainMmr {
+    /// Converts the [`Mmr`] into a [`ChainMmr`] by selectively copying all leaves that are in the
+    /// given `blocks` iterator.
+    ///
+    /// This tracks all blocks in the given iterator in the [`ChainMmr`] except for the block whose
+    /// block number equals [`Mmr::forest`], which is the current chain length.
+    ///
+    /// # Panics
+    ///
+    /// Since this function is only available in test scenarios, it panics when one of the given
+    /// blocks does not exist in the provided mmr.
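+    ///
+    /// # Example
+    ///
+    /// A minimal sketch; the setup of `mmr` and `headers` is assumed here rather than taken from
+    /// this PR:
+    ///
+    /// ```ignore
+    /// // Assume `mmr` holds leaves for blocks 0..=3 (forest = 4) and `headers` holds the
+    /// // `BlockHeader`s for blocks 0..=2. Block 3 is the reference block and is excluded.
+    /// let chain_mmr = ChainMmr::from_mmr(&mmr, headers.clone())?;
+    /// // Every header passed below the target forest is now tracked in the chain MMR.
+    /// assert_eq!(chain_mmr.block_headers().count(), headers.len());
+    /// ```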
+    pub fn from_mmr<I>(
+        mmr: &Mmr,
+        blocks: impl IntoIterator<Item = BlockHeader, IntoIter = I> + Clone,
+    ) -> Result<Self, ChainMmrError>
+    where
+        I: Iterator<Item = BlockHeader>,
+    {
+        // We do not include the latest block as it is used as the reference block and is added to
+        // the MMR by the transaction or batch kernel.
+
+        let target_forest = mmr.forest() - 1;
+        let peaks = mmr
+            .peaks_at(target_forest)
+            .expect("target_forest should be smaller than forest of the mmr");
+        let mut partial_mmr = PartialMmr::from_peaks(peaks);
+
+        for block_num in blocks
+            .clone()
+            .into_iter()
+            .map(|header| header.block_num().as_usize())
+            .filter(|block_num| *block_num < target_forest)
+        {
+            let leaf = mmr.get(block_num).expect("error: block num does not exist");
+            let path =
+                mmr.open_at(block_num, target_forest).expect("error: block proof").merkle_path;
+            partial_mmr.track(block_num, leaf, &path).expect("error: partial mmr track");
+        }
+
+        ChainMmr::new(partial_mmr, blocks)
+    }
+}
diff --git a/crates/miden-objects/src/testing/mod.rs b/crates/miden-objects/src/testing/mod.rs
index d84a91f76..1e58e5508 100644
--- a/crates/miden-objects/src/testing/mod.rs
+++ b/crates/miden-objects/src/testing/mod.rs
@@ -11,6 +11,7 @@ pub mod account_component;
 pub mod account_id;
 pub mod asset;
 pub mod block;
+pub mod chain_mmr;
 pub mod constants;
 pub mod note;
 pub mod storage;
diff --git a/crates/miden-objects/src/transaction/chain_mmr.rs b/crates/miden-objects/src/transaction/chain_mmr.rs
index 431193d3b..764a1c42a 100644
--- a/crates/miden-objects/src/transaction/chain_mmr.rs
+++ b/crates/miden-objects/src/transaction/chain_mmr.rs
@@ -1,4 +1,4 @@
-use alloc::{collections::BTreeMap, vec::Vec};
+use alloc::collections::BTreeMap;
 
 use vm_core::utils::{Deserializable, Serializable};
 
@@ -43,11 +43,13 @@ impl ChainMmr {
     /// partial MMR.
     /// - The same block appears more than once in the provided list of block headers.
     /// - The partial MMR does not track authentication paths for any of the specified blocks.
-    pub fn new(mmr: PartialMmr, blocks: Vec<BlockHeader>) -> Result<Self, ChainMmrError> {
+    pub fn new(
+        mmr: PartialMmr,
+        blocks: impl IntoIterator<Item = BlockHeader>,
+    ) -> Result<Self, ChainMmrError> {
         let chain_length = mmr.forest();
-
         let mut block_map = BTreeMap::new();
-        for block in blocks.into_iter() {
+        for block in blocks {
             if block.block_num().as_usize() >= chain_length {
                 return Err(ChainMmrError::block_num_too_big(chain_length, block.block_num()));
             }
@@ -67,6 +69,11 @@ impl ChainMmr {
     // PUBLIC ACCESSORS
     // --------------------------------------------------------------------------------------------
 
+    /// Returns the underlying [`PartialMmr`].
+    pub fn mmr(&self) -> &PartialMmr {
+        &self.mmr
+    }
+
     /// Returns peaks of this MMR.
     pub fn peaks(&self) -> MmrPeaks {
         self.mmr.peaks()
@@ -91,6 +98,11 @@ impl ChainMmr {
         self.blocks.get(&block_num)
     }
 
+    /// Returns an iterator over the block headers in this chain MMR.
+    pub fn block_headers(&self) -> impl Iterator<Item = &BlockHeader> {
+        self.blocks.values()
+    }
+
     // DATA MUTATORS
     // --------------------------------------------------------------------------------------------
 
diff --git a/crates/miden-objects/src/transaction/inputs.rs b/crates/miden-objects/src/transaction/inputs.rs
index 93f9dda5c..d4a770101 100644
--- a/crates/miden-objects/src/transaction/inputs.rs
+++ b/crates/miden-objects/src/transaction/inputs.rs
@@ -206,6 +206,19 @@ impl InputNotes {
         Ok(Self { notes, commitment })
     }
 
+    /// Returns new [`InputNotes`] instantiated from the provided vector of notes without checking
+    /// their validity.
+    ///
+    /// This is exposed for use in transaction batches, but should generally not be used.
+    ///
+    /// # Warning
+    ///
+    /// This does not run the checks from [`InputNotes::new`], so the latter should be preferred.
+    pub fn new_unchecked(notes: Vec<T>) -> Self {
+        let commitment = build_input_note_commitment(&notes);
+        Self { notes, commitment }
+    }
+
     // PUBLIC ACCESSORS
     // --------------------------------------------------------------------------------------------
 
diff --git a/crates/miden-objects/src/transaction/proven_tx.rs b/crates/miden-objects/src/transaction/proven_tx.rs
index 55989b985..76ea35c98 100644
--- a/crates/miden-objects/src/transaction/proven_tx.rs
+++ b/crates/miden-objects/src/transaction/proven_tx.rs
@@ -34,6 +34,12 @@ pub struct ProvenTransaction {
     /// while for public notes this will also contain full note details.
     output_notes: OutputNotes,
 
+    /// [`BlockNumber`] of the transaction's reference block.
+    ///
+    /// This is not needed for proving the transaction, but it is useful for the node to look up
+    /// the block.
+    block_num: BlockNumber,
+
     /// The block hash of the last known block at the time the transaction was executed.
     block_ref: Digest,
 
@@ -75,6 +81,11 @@ impl ProvenTransaction {
         &self.proof
     }
 
+    /// Returns the number of the reference block the transaction was executed against.
+    pub fn block_num(&self) -> BlockNumber {
+        self.block_num
+    }
+
     /// Returns the block reference the transaction was executed against.
     pub fn block_ref(&self) -> Digest {
         self.block_ref
 
@@ -153,6 +164,7 @@ impl Serializable for ProvenTransaction {
         self.account_update.write_into(target);
         self.input_notes.write_into(target);
         self.output_notes.write_into(target);
+        self.block_num.write_into(target);
         self.block_ref.write_into(target);
         self.expiration_block_num.write_into(target);
         self.proof.write_into(target);
@@ -166,6 +178,7 @@ impl Deserializable for ProvenTransaction {
         let input_notes = <InputNotes<InputNoteCommitment>>::read_from(source)?;
         let output_notes = OutputNotes::read_from(source)?;
+        let block_num = BlockNumber::read_from(source)?;
         let block_ref = Digest::read_from(source)?;
         let expiration_block_num = BlockNumber::read_from(source)?;
         let proof = ExecutionProof::read_from(source)?;
@@ -182,6 +195,7 @@ impl Deserializable for ProvenTransaction {
             account_update,
             input_notes,
             output_notes,
+            block_num,
             block_ref,
             expiration_block_num,
             proof,
@@ -217,6 +231,9 @@ pub struct ProvenTransactionBuilder {
     /// List of [OutputNote]s of all notes created by the transaction.
     output_notes: Vec<OutputNote>,
 
+    /// [`BlockNumber`] of the transaction's reference block.
+    block_num: BlockNumber,
+
     /// Block [Digest] of the transaction's reference block.
     block_ref: Digest,
 
@@ -236,6 +253,7 @@ impl ProvenTransactionBuilder {
         account_id: AccountId,
         initial_account_hash: Digest,
         final_account_hash: Digest,
+        block_num: BlockNumber,
         block_ref: Digest,
         expiration_block_num: BlockNumber,
         proof: ExecutionProof,
@@ -247,6 +265,7 @@ impl ProvenTransactionBuilder {
             account_update_details: AccountUpdateDetails::Private,
             input_notes: Vec::new(),
             output_notes: Vec::new(),
+            block_num,
             block_ref,
             expiration_block_num,
             proof,
@@ -310,6 +329,7 @@ impl ProvenTransactionBuilder {
             account_update,
             input_notes,
             output_notes,
+            block_num: self.block_num,
             block_ref: self.block_ref,
             expiration_block_num: self.expiration_block_num,
             proof: self.proof,
diff --git a/crates/miden-proving-service-client/Cargo.toml b/crates/miden-proving-service-client/Cargo.toml
index 6f4208802..77441dba9 100644
--- a/crates/miden-proving-service-client/Cargo.toml
+++ b/crates/miden-proving-service-client/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "miden-proving-service-client"
-version = "0.7.0"
+version = "0.8.0"
 description = "Client library for the Miden rollup proving service"
 readme = "README.md"
 keywords = ["miden", "proving", "service"]
diff --git a/crates/miden-proving-service-client/build.rs b/crates/miden-proving-service-client/build.rs
index ad10d95b7..fdee85e76 100644
--- a/crates/miden-proving-service-client/build.rs
+++ b/crates/miden-proving-service-client/build.rs
@@ -34,10 +34,11 @@ fn main() -> miette::Result<()> {
 // HELPER FUNCTIONS
 // ================================================================================================
 
-/// Copies all api.proto file from the root proto directory to the proto directory of this crate.
+/// Copies the tx_prover.proto file from the root proto directory to the proto directory of this
+/// crate.
 fn copy_proto_files() -> miette::Result<()> {
-    let src_file = format!("{REPO_PROTO_DIR}/api.proto");
-    let dest_file = format!("{CRATE_PROTO_DIR}/api.proto");
+    let src_file = format!("{REPO_PROTO_DIR}/tx_prover.proto");
+    let dest_file = format!("{CRATE_PROTO_DIR}/tx_prover.proto");
 
     fs::remove_dir_all(CRATE_PROTO_DIR).into_diagnostic()?;
     fs::create_dir_all(CRATE_PROTO_DIR).into_diagnostic()?;
@@ -49,16 +50,16 @@ fn compile_tonic_client_proto() -> miette::Result<()> {
     let crate_root =
         PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR should be set"));
-    let dst_dir = crate_root.join("src").join("generated");
+    let dst_dir = crate_root.join("src").join("tx_prover").join("generated");
 
-    // Remove `api.rs` if it exists.
-    fs::remove_file(dst_dir.join("api.rs")).into_diagnostic().ok();
+    // Remove `tx_prover.rs` if it exists.
+    fs::remove_file(dst_dir.join("tx_prover.rs")).into_diagnostic().ok();
 
     let out_dir = env::var("OUT_DIR").into_diagnostic()?;
     let file_descriptor_path = PathBuf::from(out_dir).join("file_descriptor_set.bin");
 
     let proto_dir: PathBuf = CRATE_PROTO_DIR.into();
-    let protos = &[proto_dir.join("api.proto")];
+    let protos = &[proto_dir.join("tx_prover.proto")];
     let includes = &[proto_dir];
 
     let file_descriptors = protox::compile(protos, includes)?;
@@ -70,9 +71,9 @@ fn compile_tonic_client_proto() -> miette::Result<()> {
     build_tonic_client(&file_descriptor_path, &std_path, protos, includes, false)?;
     build_tonic_client(&file_descriptor_path, &nostd_path, protos, includes, true)?;
 
-    // Replace `std` references with `core` and `alloc` in `api.rs`.
+    // Replace `std` references with `core` and `alloc` in `tx_prover.rs`.
     // (Only for nostd version)
-    let nostd_file_path = nostd_path.join("api.rs");
+    let nostd_file_path = nostd_path.join("tx_prover.rs");
     let file_content = fs::read_to_string(&nostd_file_path).into_diagnostic()?;
     let updated_content = file_content
         .replace("std::result", "core::result")
diff --git a/crates/miden-proving-service-client/proto/api.proto b/crates/miden-proving-service-client/proto/tx_prover.proto
similarity index 94%
rename from crates/miden-proving-service-client/proto/api.proto
rename to crates/miden-proving-service-client/proto/tx_prover.proto
index 4555b326f..de39c685c 100644
--- a/crates/miden-proving-service-client/proto/api.proto
+++ b/crates/miden-proving-service-client/proto/tx_prover.proto
@@ -1,6 +1,6 @@
 // Specification of the user facing gRPC API.
 syntax = "proto3";
 
-package api;
+package tx_prover;
 
 service Api {
     rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {}
diff --git a/crates/miden-proving-service-client/src/generated/nostd/mod.rs b/crates/miden-proving-service-client/src/generated/nostd/mod.rs
deleted file mode 100644
index 1b28e9b38..000000000
--- a/crates/miden-proving-service-client/src/generated/nostd/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-#[rustfmt::skip]
-pub mod api;
diff --git a/crates/miden-proving-service-client/src/generated/std/mod.rs b/crates/miden-proving-service-client/src/generated/std/mod.rs
deleted file mode 100644
index 1b28e9b38..000000000
--- a/crates/miden-proving-service-client/src/generated/std/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-#[rustfmt::skip]
-pub mod api;
diff --git a/crates/miden-proving-service-client/src/lib.rs b/crates/miden-proving-service-client/src/lib.rs
index a12fd0251..3b18eea80 100644
--- a/crates/miden-proving-service-client/src/lib.rs
+++ b/crates/miden-proving-service-client/src/lib.rs
@@ -13,15 +13,10 @@ extern crate std;
 use thiserror::Error;
 
 #[cfg(feature = "tx-prover")]
-pub mod generated;
-
-#[cfg(feature = "tx-prover")]
-mod prover;
-#[cfg(feature = "tx-prover")]
-pub use prover::RemoteTransactionProver;
+pub mod tx_prover;
 
 /// Protobuf definition for the Miden proving service
-pub const SERVICE_PROTO: &str = include_str!("../proto/api.proto");
+pub const TX_PROVER_PROTO: &str = include_str!("../proto/tx_prover.proto");
 
 /// ERRORS
 /// ===============================================================================================
diff --git a/crates/miden-proving-service-client/src/generated/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs
similarity index 94%
rename from crates/miden-proving-service-client/src/generated/mod.rs
rename to crates/miden-proving-service-client/src/tx_prover/generated/mod.rs
index 11bbbfa4c..8f4f2baf6 100644
--- a/crates/miden-proving-service-client/src/generated/mod.rs
+++ b/crates/miden-proving-service-client/src/tx_prover/generated/mod.rs
@@ -7,12 +7,12 @@ compile_error!("The `std` feature cannot be used when targeting `wasm32`.");
 #[cfg(feature = "std")]
 mod std;
 #[cfg(feature = "std")]
-pub use std::api::*;
+pub use std::tx_prover::*;
 
 #[cfg(not(feature = "std"))]
 mod nostd;
 #[cfg(not(feature = "std"))]
-pub use nostd::api::*;
+pub use nostd::tx_prover::*;
 
 // CONVERSIONS
 // ================================================================================================
diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs
new file mode 100644
index 000000000..18134780a
--- /dev/null
+++ b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/mod.rs
@@ -0,0 +1,2 @@
+#[rustfmt::skip]
+pub mod tx_prover;
diff --git a/crates/miden-proving-service-client/src/generated/nostd/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs
similarity index 94%
rename from crates/miden-proving-service-client/src/generated/nostd/api.rs
rename to crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs
index 8c84619ac..73a6ed46e 100644
--- a/crates/miden-proving-service-client/src/generated/nostd/api.rs
+++ b/crates/miden-proving-service-client/src/tx_prover/generated/nostd/tx_prover.rs
@@ -105,9 +105,12 @@ pub mod api_client {
                 )
             })?;
             let codec = tonic::codec::ProstCodec::default();
-            let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction");
+            let path = http::uri::PathAndQuery::from_static(
+                "/tx_prover.Api/ProveTransaction",
+            );
             let mut req = request.into_request();
-            req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction"));
+            req.extensions_mut()
+                .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction"));
             self.inner.unary(req, path, codec).await
         }
     }
diff --git a/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs
new file mode 100644
index 000000000..18134780a
--- /dev/null
+++ b/crates/miden-proving-service-client/src/tx_prover/generated/std/mod.rs
@@ -0,0 +1,2 @@
+#[rustfmt::skip]
+pub mod tx_prover;
diff --git a/crates/miden-proving-service-client/src/generated/std/api.rs b/crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs
similarity index 94%
rename from crates/miden-proving-service-client/src/generated/std/api.rs
rename to crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs
index 6abe53d07..717b7bb1a 100644
--- a/crates/miden-proving-service-client/src/generated/std/api.rs
+++ b/crates/miden-proving-service-client/src/tx_prover/generated/std/tx_prover.rs
@@ -116,9 +116,12 @@ pub mod api_client {
                 )
             })?;
             let codec = tonic::codec::ProstCodec::default();
-            let path = http::uri::PathAndQuery::from_static("/api.Api/ProveTransaction");
+            let path = http::uri::PathAndQuery::from_static(
+                "/tx_prover.Api/ProveTransaction",
+            );
             let mut req = request.into_request();
-            req.extensions_mut().insert(GrpcMethod::new("api.Api", "ProveTransaction"));
+            req.extensions_mut()
+                .insert(GrpcMethod::new("tx_prover.Api", "ProveTransaction"));
             self.inner.unary(req, path, codec).await
         }
     }
diff --git a/crates/miden-proving-service-client/src/prover.rs b/crates/miden-proving-service-client/src/tx_prover/mod.rs
similarity index 95%
rename from crates/miden-proving-service-client/src/prover.rs
rename to crates/miden-proving-service-client/src/tx_prover/mod.rs
index 58385cbda..b96bdb4ad 100644
--- a/crates/miden-proving-service-client/src/prover.rs
+++ b/crates/miden-proving-service-client/src/tx_prover/mod.rs
@@ -1,12 +1,15 @@
+pub mod generated;
+
 use alloc::{
     boxed::Box,
     string::{String, ToString},
 };
 
+use generated::api_client::ApiClient;
 use miden_objects::transaction::{ProvenTransaction, TransactionWitness};
 use miden_tx::{utils::sync::RwLock, TransactionProver, TransactionProverError};
 
-use crate::{generated::api_client::ApiClient, RemoteProverError};
+use crate::RemoteProverError;
 
 // REMOTE TRANSACTION PROVER
 // ================================================================================================
@@ -84,7 +87,7 @@ impl TransactionProver for RemoteTransactionProver {
             .ok_or_else(|| TransactionProverError::other("client should be connected"))?
             .clone();
 
-        let request = tonic::Request::new(crate::generated::ProveTransactionRequest {
+        let request = tonic::Request::new(generated::ProveTransactionRequest {
             transaction_witness: tx_witness.to_bytes(),
         });
 
diff --git a/crates/miden-tx-batch-prover/Cargo.toml b/crates/miden-tx-batch-prover/Cargo.toml
new file mode 100644
index 000000000..240db4fce
--- /dev/null
+++ b/crates/miden-tx-batch-prover/Cargo.toml
@@ -0,0 +1,41 @@
+[package]
+name = "miden-tx-batch-prover"
+version = "0.8.0"
+description = "Miden rollup transaction batch executor and prover"
+readme = "README.md"
+categories = ["no-std"]
+keywords = ["miden", "batch", "prover"]
+license.workspace = true
+authors.workspace = true
+homepage.workspace = true
+repository.workspace = true
+rust-version.workspace = true
+edition.workspace = true
+
+[lib]
+bench = false
+
+[features]
+default = ["std"]
+std = [
+    "miden-objects/std",
+    "miden-tx/std",
+    "miden-crypto/std",
+    "vm-core/std",
+    "vm-processor/std",
+]
+
+[dependencies]
+miden-crypto = { workspace = true }
+miden-tx = { workspace = true }
+miden-objects = { workspace = true }
+thiserror = { workspace = true }
+vm-core = { workspace = true }
+vm-processor = { workspace = true }
+
+[dev-dependencies]
+anyhow = { version = "1.0", features = ["std", "backtrace"] }
+miden-lib = { workspace = true, features = ["std", "testing"] }
+miden-tx = { workspace = true, features = ["std", "testing"] }
+rand = { workspace = true, features = ["small_rng"] }
+winterfell = { version = "0.11" }
diff --git a/crates/miden-tx-batch-prover/README.md b/crates/miden-tx-batch-prover/README.md
new file mode 100644
index 000000000..85d4babb7
--- /dev/null
+++ b/crates/miden-tx-batch-prover/README.md
@@ -0,0 +1,7 @@
+# Miden Transaction Batch Prover
+
+This crate contains tools for executing and proving Miden transaction batches.
+
+## License
+
+This project is [MIT licensed](../LICENSE).
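+
+## Usage
+
+A minimal sketch of the intended flow. The `proposed_batch` value and the `96`-bit proof
+security level are assumptions for illustration; building a `ProposedBatch` requires proven
+transactions, a reference block header, a chain MMR, and note inclusion proofs (see
+`miden-objects`):
+
+```rust,ignore
+use miden_tx_batch_prover::LocalBatchProver;
+
+// `proposed_batch: ProposedBatch` is assumed to have been assembled elsewhere.
+let prover = LocalBatchProver::new(96);
+// Verifies each transaction proof in the batch and assembles a `ProvenBatch`.
+let proven_batch = prover.prove(proposed_batch)?;
+println!("proved batch {:?}", proven_batch.id());
+```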
diff --git a/crates/miden-tx-batch-prover/src/errors.rs b/crates/miden-tx-batch-prover/src/errors.rs
new file mode 100644
index 000000000..e0500f239
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/errors.rs
@@ -0,0 +1,12 @@
+use miden_objects::transaction::TransactionId;
+use miden_tx::TransactionVerifierError;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum BatchProveError {
+    #[error("failed to verify transaction {transaction_id} in transaction batch")]
+    TransactionVerificationFailed {
+        transaction_id: TransactionId,
+        source: TransactionVerifierError,
+    },
+}
diff --git a/crates/miden-tx-batch-prover/src/lib.rs b/crates/miden-tx-batch-prover/src/lib.rs
new file mode 100644
index 000000000..aa7d26f38
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/lib.rs
@@ -0,0 +1,18 @@
+#![no_std]
+
+#[cfg_attr(test, macro_use)]
+extern crate alloc;
+
+#[cfg(feature = "std")]
+extern crate std;
+
+mod local_batch_prover;
+pub use local_batch_prover::LocalBatchProver;
+
+pub mod errors;
+
+#[cfg(test)]
+pub mod testing;
+
+#[cfg(test)]
+mod tests;
diff --git a/crates/miden-tx-batch-prover/src/local_batch_prover.rs b/crates/miden-tx-batch-prover/src/local_batch_prover.rs
new file mode 100644
index 000000000..be3efb5bd
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/local_batch_prover.rs
@@ -0,0 +1,58 @@
+use miden_objects::batch::{ProposedBatch, ProvenBatch};
+use miden_tx::TransactionVerifier;
+
+use crate::errors::BatchProveError;
+
+// LOCAL BATCH PROVER
+// ================================================================================================
+
+/// A local prover for transaction batches, proving the transactions in a [`ProposedBatch`] and
+/// returning a [`ProvenBatch`].
+pub struct LocalBatchProver {
+    proof_security_level: u32,
+}
+
+impl LocalBatchProver {
+    /// Creates a new [`LocalBatchProver`] instance.
+    pub fn new(proof_security_level: u32) -> Self {
+        Self { proof_security_level }
+    }
+
+    /// Attempts to prove the [`ProposedBatch`] into a [`ProvenBatch`].
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if:
+    /// - a proof of any transaction in the batch fails to verify.
+    pub fn prove(&self, proposed_batch: ProposedBatch) -> Result<ProvenBatch, BatchProveError> {
+        let (
+            transactions,
+            _block_header,
+            _block_chain,
+            _authenticatable_unauthenticated_notes,
+            id,
+            updated_accounts,
+            input_notes,
+            output_notes_smt,
+            output_notes,
+            batch_expiration_block_num,
+        ) = proposed_batch.into_parts();
+
+        let verifier = TransactionVerifier::new(self.proof_security_level);
+
+        for tx in transactions {
+            verifier.verify(&tx).map_err(|source| {
+                BatchProveError::TransactionVerificationFailed { transaction_id: tx.id(), source }
+            })?;
+        }
+
+        Ok(ProvenBatch::new(
+            id,
+            updated_accounts,
+            input_notes,
+            output_notes_smt,
+            output_notes,
+            batch_expiration_block_num,
+        ))
+    }
+}
diff --git a/crates/miden-tx-batch-prover/src/testing/mod.rs b/crates/miden-tx-batch-prover/src/testing/mod.rs
new file mode 100644
index 000000000..8f5eeaba3
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/testing/mod.rs
@@ -0,0 +1,2 @@
+mod proven_tx_builder;
+pub(crate) use proven_tx_builder::MockProvenTxBuilder;
diff --git a/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs b/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs
new file mode 100644
index 000000000..5a4b30def
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/testing/proven_tx_builder.rs
@@ -0,0 +1,111 @@
+use alloc::vec::Vec;
+
+use anyhow::Context;
+use miden_crypto::merkle::MerklePath;
+use miden_objects::{
+    account::AccountId,
+    block::BlockNumber,
+    note::{Note, NoteInclusionProof, Nullifier},
+    transaction::{InputNote, OutputNote, ProvenTransaction, ProvenTransactionBuilder},
+    vm::ExecutionProof,
+};
+use vm_processor::Digest;
+use winterfell::Proof;
+
+/// A builder to build mocked [`ProvenTransaction`]s.
+pub struct MockProvenTxBuilder {
+    account_id: AccountId,
+    initial_account_commitment: Digest,
+    final_account_commitment: Digest,
+    block_reference: Option<Digest>,
+    expiration_block_num: BlockNumber,
+    output_notes: Option<Vec<OutputNote>>,
+    input_notes: Option<Vec<InputNote>>,
+    nullifiers: Option<Vec<Nullifier>>,
+}
+
+impl MockProvenTxBuilder {
+    /// Creates a new builder for a transaction executed against the given account with its initial
+    /// and final state commitment.
+    pub fn with_account(
+        account_id: AccountId,
+        initial_account_commitment: Digest,
+        final_account_commitment: Digest,
+    ) -> Self {
+        Self {
+            account_id,
+            initial_account_commitment,
+            final_account_commitment,
+            block_reference: None,
+            expiration_block_num: BlockNumber::from(u32::MAX),
+            output_notes: None,
+            input_notes: None,
+            nullifiers: None,
+        }
+    }
+
+    /// Adds authenticated notes to the transaction.
+    #[must_use]
+    pub fn authenticated_notes(mut self, notes: Vec<Note>) -> Self {
+        let mock_proof =
+            NoteInclusionProof::new(BlockNumber::from(0), 0, MerklePath::new(vec![])).unwrap();
+        self.input_notes = Some(
+            notes
+                .into_iter()
+                .map(|note| InputNote::authenticated(note, mock_proof.clone()))
+                .collect(),
+        );
+
+        self
+    }
+
+    /// Adds unauthenticated notes to the transaction.
+    #[must_use]
+    pub fn unauthenticated_notes(mut self, notes: Vec<Note>) -> Self {
+        self.input_notes = Some(notes.into_iter().map(InputNote::unauthenticated).collect());
+
+        self
+    }
+
+    /// Sets the transaction's expiration block number.
+    #[must_use]
+    pub fn expiration_block_num(mut self, expiration_block_num: BlockNumber) -> Self {
+        self.expiration_block_num = expiration_block_num;
+
+        self
+    }
+
+    /// Adds notes to the transaction's output notes.
+    #[must_use]
+    pub fn output_notes(mut self, notes: Vec<OutputNote>) -> Self {
+        self.output_notes = Some(notes);
+
+        self
+    }
+
+    /// Sets the transaction's block reference.
+    #[must_use]
+    pub fn block_reference(mut self, block_reference: Digest) -> Self {
+        self.block_reference = Some(block_reference);
+
+        self
+    }
+
+    /// Builds the [`ProvenTransaction`] and returns potential errors.
+    pub fn build(self) -> anyhow::Result<ProvenTransaction> {
+        ProvenTransactionBuilder::new(
+            self.account_id,
+            self.initial_account_commitment,
+            self.final_account_commitment,
+            BlockNumber::from(0),
+            self.block_reference.unwrap_or_default(),
+            self.expiration_block_num,
+            ExecutionProof::new(Proof::new_dummy(), Default::default()),
+        )
+        .add_input_notes(self.input_notes.unwrap_or_default())
+        .add_input_notes(self.nullifiers.unwrap_or_default())
+        .add_output_notes(self.output_notes.unwrap_or_default())
+        .build()
+        .context("failed to build proven transaction")
+    }
+}
diff --git a/crates/miden-tx-batch-prover/src/tests/mod.rs b/crates/miden-tx-batch-prover/src/tests/mod.rs
new file mode 100644
index 000000000..19fd2c568
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/tests/mod.rs
@@ -0,0 +1 @@
+mod proposed_batch;
diff --git a/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs b/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs
new file mode 100644
index 000000000..7adc23c22
--- /dev/null
+++ b/crates/miden-tx-batch-prover/src/tests/proposed_batch.rs
@@ -0,0 +1,591 @@
+use alloc::sync::Arc;
+use std::collections::BTreeMap;
+
+use anyhow::Context;
+use miden_crypto::merkle::MerkleError;
+use miden_lib::transaction::TransactionKernel;
+use miden_objects::{
+    account::{Account, AccountId},
+    batch::ProposedBatch,
+    block::BlockNumber,
+    note::{Note, NoteType},
+    testing::{account_id::AccountIdBuilder, note::NoteBuilder},
+    transaction::{ChainMmr, InputNote, InputNoteCommitment, OutputNote},
+    BatchAccountUpdateError, ProposedBatchError,
+};
+use miden_tx::testing::{Auth, MockChain};
+use rand::{rngs::SmallRng, SeedableRng};
+use vm_core::assert_matches;
+use vm_processor::Digest;
+
+use crate::testing::MockProvenTxBuilder;
+
+fn mock_account_id(num: u8) -> AccountId {
+    AccountIdBuilder::new().build_with_rng(&mut SmallRng::from_seed([num; 32]))
+}
+
+pub fn mock_note(num: u8) -> Note {
+    let sender = mock_account_id(num);
+    NoteBuilder::new(sender, SmallRng::from_seed([num; 32]))
+        .build(&TransactionKernel::assembler().with_debug_mode(true))
+        .unwrap()
+}
+
+pub fn mock_output_note(num: u8) -> OutputNote {
+    OutputNote::Full(mock_note(num))
+}
+
+struct TestSetup {
+    chain: MockChain,
+    account1: Account,
+    account2: Account,
+}
+
+fn setup_chain() -> TestSetup {
+    let mut chain = MockChain::new();
+    let account1 = chain.add_new_wallet(Auth::NoAuth);
+    let account2 = chain.add_new_wallet(Auth::NoAuth);
+    chain.seal_block(None);
+
+    TestSetup { chain, account1, account2 }
+}
+
+/// Tests that proposing an empty transaction batch returns an error.
+#[test]
+fn empty_transaction_batch() -> anyhow::Result<()> {
+    let TestSetup { chain, .. } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let error = ProposedBatch::new(vec![], block1, chain.chain(), BTreeMap::default()).unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::EmptyTransactionBatch);
+
+    Ok(())
+}
+
+/// Tests that a note created and consumed in the same batch is erased from the input and
+/// output note commitments.
+#[test]
+fn note_created_and_consumed_in_same_batch() -> anyhow::Result<()> {
+    let TestSetup { mut chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+    let block2 = chain.seal_block(None);
+
+    let note = mock_note(40);
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .output_notes(vec![OutputNote::Full(note.clone())])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note.clone()])
+        .build()?;
+
+    let batch = ProposedBatch::new(
+        [tx1, tx2].into_iter().map(Arc::new).collect(),
+        block2.header(),
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    assert_eq!(batch.input_notes().num_notes(), 0);
+    assert_eq!(batch.output_notes().len(), 0);
+    assert_eq!(batch.output_notes_tree().num_leaves(), 0);
+
+    Ok(())
+}
+
+/// Tests that an error is returned if the same unauthenticated input note appears multiple
+/// times in different transactions.
+#[test]
+fn duplicate_unauthenticated_input_notes() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let note = mock_note(50);
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note.clone()])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note.clone()])
+        .build()?;
+
+    let error = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::DuplicateInputNote {
+        note_nullifier,
+        first_transaction_id,
+        second_transaction_id
+    } if note_nullifier == note.nullifier() &&
+        first_transaction_id == tx1.id() &&
+        second_transaction_id == tx2.id()
+    );
+
+    Ok(())
+}
+
+/// Tests that an error is returned if the same authenticated input note appears multiple
+/// times in different transactions.
+#[test]
+fn duplicate_authenticated_input_notes() -> anyhow::Result<()> {
+    let TestSetup { mut chain, account1, account2 } = setup_chain();
+    let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?;
+    let block1 = chain.block_header(1);
+    let block2 = chain.seal_block(None);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .authenticated_notes(vec![note.clone()])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .authenticated_notes(vec![note.clone()])
+        .build()?;
+
+    let error = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block2.header(),
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::DuplicateInputNote {
+        note_nullifier,
+        first_transaction_id,
+        second_transaction_id
+    } if note_nullifier == note.nullifier() &&
+        first_transaction_id == tx1.id() &&
+        second_transaction_id == tx2.id()
+    );
+
+    Ok(())
+}
+
+/// Tests that an error is returned if the same input note appears multiple times in different
+/// transactions as an unauthenticated or authenticated note.
+#[test]
+fn duplicate_mixed_input_notes() -> anyhow::Result<()> {
+    let TestSetup { mut chain, account1, account2 } = setup_chain();
+    let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?;
+    let block1 = chain.block_header(1);
+    let block2 = chain.seal_block(None);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note.clone()])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .authenticated_notes(vec![note.clone()])
+        .build()?;
+
+    let error = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block2.header(),
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::DuplicateInputNote {
+        note_nullifier,
+        first_transaction_id,
+        second_transaction_id
+    } if note_nullifier == note.nullifier() &&
+        first_transaction_id == tx1.id() &&
+        second_transaction_id == tx2.id()
+    );
+
+    Ok(())
+}
+
+/// Tests that an error is returned if the same output note appears multiple times in different
+/// transactions.
+#[test]
+fn duplicate_output_notes() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let note0 = mock_output_note(50);
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .output_notes(vec![note0.clone()])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .output_notes(vec![note0.clone()])
+        .build()?;
+
+    let error = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::DuplicateOutputNote {
+        note_id,
+        first_transaction_id,
+        second_transaction_id
+    } if note_id == note0.id() &&
+        first_transaction_id == tx1.id() &&
+        second_transaction_id == tx2.id());
+
+    Ok(())
+}
+
+/// Tests that an unauthenticated input note for which a proof exists is converted into an
+/// authenticated one and becomes part of the batch's input note commitment.
+#[test]
+fn unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> {
+    let TestSetup { mut chain, account1, account2 } = setup_chain();
+    let note0 = chain.add_p2id_note(account2.id(), account1.id(), &[], NoteType::Private, None)?;
+    let note1 = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?;
+    // The just created notes will be provable against block2.
+    let block2 = chain.seal_block(None);
+    let block3 = chain.seal_block(None);
+    let block4 = chain.seal_block(None);
+
+    // Consume the authenticated note as an unauthenticated one in the transaction.
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block3.hash())
+        .unauthenticated_notes(vec![note1.clone()])
+        .build()?;
+
+    let input_note0 = chain.available_notes_map().get(&note0.id()).expect("note not found");
+    let note_inclusion_proof0 = input_note0.proof().expect("note should be of type authenticated");
+
+    let input_note1 = chain.available_notes_map().get(&note1.id()).expect("note not found");
+    let note_inclusion_proof1 = input_note1.proof().expect("note should be of type authenticated");
+
+    // The chain MMR will contain all blocks in the mock chain, in particular block2 which both
+    // note inclusion proofs need for verification.
+    let chain_mmr = chain.chain();
+
+    // Case 1: Error: A wrong proof is passed.
+    // ----------------------------------------------------------------------------------------
+
+    let error = ProposedBatch::new(
+        [tx1.clone()].into_iter().map(Arc::new).collect(),
+        block4.header(),
+        chain_mmr.clone(),
+        BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof0.clone())]),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::UnauthenticatedNoteAuthenticationFailed {
+        note_id,
+        block_num,
+        source: MerkleError::ConflictingRoots { .. },
+    } if note_id == note1.id() &&
+        block_num == block2.header().block_num()
+    );
+
+    // Case 2: Error: The block referenced by the (valid) note inclusion proof is missing.
+    // ----------------------------------------------------------------------------------------
+
+    // Make a clone of the chain mmr where block2 is missing.
+    let mut mmr = chain_mmr.mmr().clone();
+    mmr.untrack(block2.header().block_num().as_usize());
+    let blocks = chain_mmr
+        .block_headers()
+        .filter(|header| header.block_num() != block2.header().block_num())
+        .copied();
+
+    let error = ProposedBatch::new(
+        [tx1.clone()].into_iter().map(Arc::new).collect(),
+        block4.header(),
+        ChainMmr::new(mmr, blocks).context("failed to build chain mmr with missing block")?,
+        BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof1.clone())]),
+    )
+    .unwrap_err();
+
+    assert_matches!(
+        error,
+        ProposedBatchError::UnauthenticatedInputNoteBlockNotInChainMmr {
+            block_number,
+            note_id
+        } if block_number == note_inclusion_proof1.location().block_num() &&
+            note_id == input_note1.id()
+    );
+
+    // Case 3: Success: The correct proof is passed.
+    // ----------------------------------------------------------------------------------------
+
+    let batch = ProposedBatch::new(
+        [tx1].into_iter().map(Arc::new).collect(),
+        block4.header(),
+        chain_mmr,
+        BTreeMap::from_iter([(input_note1.id(), note_inclusion_proof1.clone())]),
+    )?;
+
+    // We expect the unauthenticated input note to have become an authenticated one,
+    // meaning it is part of the input note commitment.
+    assert_eq!(batch.input_notes().num_notes(), 1);
+    assert!(batch
+        .input_notes()
+        .iter()
+        .any(|commitment| commitment == &InputNoteCommitment::from(input_note1)));
+    assert_eq!(batch.output_notes().len(), 0);
+
+    Ok(())
+}
+
+/// Tests that an authenticated input note that is also created in the same batch does not error
+/// and is instead marked as consumed.
+/// - This requires a nullifier collision on the input and output note which is very unlikely in
+///   practice.
+/// - This makes the created note unspendable as its nullifier is added to the nullifier tree.
+/// - The batch kernel cannot return an error in this case as it can't detect this condition due
+///   to only having the nullifier for authenticated input notes _but_ not having the nullifier
+///   for private output notes.
+/// - We test this to ensure the kernel does something reasonable in this case and that it is not
+///   an attack vector.
+#[test]
+fn authenticated_note_created_in_same_batch() -> anyhow::Result<()> {
+    let TestSetup { mut chain, account1, account2 } = setup_chain();
+    let note = chain.add_p2id_note(account1.id(), account2.id(), &[], NoteType::Private, None)?;
+    let block1 = chain.block_header(1);
+    let block2 = chain.seal_block(None);
+
+    let note0 = mock_note(50);
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .output_notes(vec![OutputNote::Full(note0.clone())])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .authenticated_notes(vec![note.clone()])
+        .build()?;
+
+    let batch = ProposedBatch::new(
+        [tx1, tx2].into_iter().map(Arc::new).collect(),
+        block2.header(),
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    assert_eq!(batch.input_notes().num_notes(), 1);
+    assert_eq!(batch.output_notes().len(), 1);
+    assert_eq!(batch.output_notes_tree().num_leaves(), 1);
+
+    Ok(())
+}
+
+/// Tests that multiple transactions against the same account
+/// 1) can be correctly executed when in the right order,
+/// 2) and that an error is returned if they are incorrectly ordered.
+#[test]
+fn multiple_transactions_against_same_account() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, .. } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    // Use some random hash as the initial state commitment of tx1.
+    let initial_state_commitment = Digest::default();
+    let tx1 =
+        MockProvenTxBuilder::with_account(account1.id(), initial_state_commitment, account1.hash())
+            .block_reference(block1.hash())
+            .output_notes(vec![mock_output_note(0)])
+            .build()?;
+
+    // Use some random hash as the final state commitment of tx2.
+    let final_state_commitment = mock_note(10).hash();
+    let tx2 =
+        MockProvenTxBuilder::with_account(account1.id(), account1.hash(), final_state_commitment)
+            .block_reference(block1.hash())
+            .build()?;
+
+    // Success: Transactions are correctly ordered.
+    let batch = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    assert_eq!(batch.account_updates().len(), 1);
+    // Assert that the initial state commitment from tx1 is used and the final state commitment
+    // from tx2.
+    assert_eq!(
+        batch.account_updates().get(&account1.id()).unwrap().initial_state_commitment(),
+        initial_state_commitment
+    );
+    assert_eq!(
+        batch.account_updates().get(&account1.id()).unwrap().final_state_commitment(),
+        final_state_commitment
+    );
+
+    // Error: Transactions are incorrectly ordered.
+    let error = ProposedBatch::new(
+        [tx2.clone(), tx1.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(
+        error,
+        ProposedBatchError::AccountUpdateError {
+            source: BatchAccountUpdateError::AccountUpdateInitialStateMismatch(tx_id),
+            ..
+        } if tx_id == tx1.id()
+    );
+
+    Ok(())
+}
+
+/// Tests that the input and output notes commitment is correctly computed.
+/// - Notes created and consumed in the same batch are erased from these commitments.
+/// - The input note commitment is sorted by the order in which the notes appeared in the batch.
+/// - The output note commitment is sorted by [`NoteId`].
+#[test]
+fn input_and_output_notes_commitment() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let note0 = mock_output_note(50);
+    let note1 = mock_note(60);
+    let note2 = mock_output_note(70);
+    let note3 = mock_output_note(80);
+    let note4 = mock_note(90);
+    let note5 = mock_note(100);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note1.clone(), note5.clone()])
+        .output_notes(vec![note0.clone()])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note4.clone()])
+        .output_notes(vec![OutputNote::Full(note1.clone()), note2.clone(), note3.clone()])
+        .build()?;
+
+    let batch = ProposedBatch::new(
+        [tx1.clone(), tx2.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    // We expect note1 to be erased from the input/output notes as it is created and consumed
+    // in the batch.
+    let mut expected_output_notes = [note0, note2, note3];
+    // We expect a vector sorted by NoteId.
+    expected_output_notes.sort_unstable_by_key(OutputNote::id);
+
+    assert_eq!(batch.output_notes().len(), 3);
+    assert_eq!(batch.output_notes(), expected_output_notes);
+
+    assert_eq!(batch.output_notes_tree().num_leaves(), 3);
+
+    // Input notes are sorted by the order in which they appeared in the batch.
+    assert_eq!(batch.input_notes().num_notes(), 2);
+    assert_eq!(
+        batch.input_notes().clone().into_vec(),
+        &[
+            InputNoteCommitment::from(&InputNote::unauthenticated(note5)),
+            InputNoteCommitment::from(&InputNote::unauthenticated(note4)),
+        ]
+    );
+
+    Ok(())
+}
+
+/// Tests that the expiration block number of a batch is the minimum of all contained
+/// transactions.
+#[test]
+fn batch_expiration() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .expiration_block_num(BlockNumber::from(35))
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .expiration_block_num(BlockNumber::from(30))
+        .build()?;
+
+    let batch = ProposedBatch::new(
+        [tx1, tx2].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    assert_eq!(batch.batch_expiration_block_num(), BlockNumber::from(30));
+
+    Ok(())
+}
+
+/// Tests that passing duplicate transactions in a batch returns an error.
+#[test]
+fn duplicate_transaction() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, .. } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .expiration_block_num(BlockNumber::from(35))
+        .build()?;
+
+    let error = ProposedBatch::new(
+        [tx1.clone(), tx1.clone()].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )
+    .unwrap_err();
+
+    assert_matches!(error, ProposedBatchError::DuplicateTransaction { transaction_id } if transaction_id == tx1.id());
+
+    Ok(())
+}
+
+/// Tests that transactions with a circular dependency between notes are accepted:
+/// TX 1: Inputs [X] -> Outputs [Y]
+/// TX 2: Inputs [Y] -> Outputs [X]
+#[test]
+fn circular_note_dependency() -> anyhow::Result<()> {
+    let TestSetup { chain, account1, account2 } = setup_chain();
+    let block1 = chain.block_header(1);
+
+    let note_x = mock_note(20);
+    let note_y = mock_note(30);
+
+    let tx1 = MockProvenTxBuilder::with_account(account1.id(), Digest::default(), account1.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note_x.clone()])
+        .output_notes(vec![OutputNote::Full(note_y.clone())])
+        .build()?;
+    let tx2 = MockProvenTxBuilder::with_account(account2.id(), Digest::default(), account2.hash())
+        .block_reference(block1.hash())
+        .unauthenticated_notes(vec![note_y.clone()])
+        .output_notes(vec![OutputNote::Full(note_x.clone())])
+        .build()?;
+
+    let batch = ProposedBatch::new(
+        [tx1, tx2].into_iter().map(Arc::new).collect(),
+        block1,
+        chain.chain(),
+        BTreeMap::default(),
+    )?;
+
+    assert_eq!(batch.input_notes().num_notes(), 0);
+    assert_eq!(batch.output_notes().len(), 0);
+
+    Ok(())
+}
diff --git a/crates/miden-tx/Cargo.toml b/crates/miden-tx/Cargo.toml
index b56327019..606a41705 100644
--- a/crates/miden-tx/Cargo.toml
+++ b/crates/miden-tx/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "miden-tx"
-version = "0.7.0"
+version = "0.8.0"
 description = "Miden rollup transaction compiler, executor, and prover"
 readme = "README.md"
 categories = ["no-std"]
diff --git a/crates/miden-tx/src/prover/mod.rs b/crates/miden-tx/src/prover/mod.rs
index e6445e665..c1c5ed14c 100644
--- a/crates/miden-tx/src/prover/mod.rs
+++ b/crates/miden-tx/src/prover/mod.rs
@@ -95,6 +95,7 @@ impl TransactionProver for LocalTransactionProver {
         let account = tx_inputs.account();
         let input_notes = tx_inputs.input_notes();
+        let block_num = tx_inputs.block_header().block_num();
         let block_hash = tx_inputs.block_header().hash();
 
         // execute and prove
@@ -137,6 +138,7 @@ impl TransactionProver for LocalTransactionProver {
             account.id(),
             account.init_hash(),
             tx_outputs.account.hash(),
+            block_num,
             block_hash,
             tx_outputs.expiration_block_num,
             proof,
diff --git a/crates/miden-tx/src/testing/mock_chain/mod.rs b/crates/miden-tx/src/testing/mock_chain/mod.rs
index 4e873aa29..c346cb1bc 100644
--- a/crates/miden-tx/src/testing/mock_chain/mod.rs
+++ b/crates/miden-tx/src/testing/mock_chain/mod.rs
@@ -17,7 +17,7 @@ use miden_objects::{
     },
     crypto::{
         dsa::rpo_falcon512::SecretKey,
-        merkle::{Mmr, MmrError, PartialMmr, Smt},
+        merkle::{Mmr, Smt},
     },
     note::{Note, NoteId, NoteInclusionProof, NoteType, Nullifier},
     testing::account_code::DEFAULT_AUTH_SCRIPT,
@@ -633,8 +633,8 @@ impl MockChain {
             input_notes.push(InputNote::Unauthenticated { note: note.clone() })
         }
 
-        let block_headers: Vec<BlockHeader> = block_headers_map.values().cloned().collect();
-        let mmr = mmr_to_chain_mmr(&self.chain, &block_headers).unwrap();
+        let block_headers = block_headers_map.values().cloned();
+        let mmr = ChainMmr::from_mmr(&self.chain, block_headers).unwrap();
 
         TransactionInputs::new(
             account,
@@ -782,8 +782,11 @@ impl MockChain {
     /// Gets the latest [ChainMmr].
     pub fn chain(&self) -> ChainMmr {
-        let block_headers: Vec<BlockHeader> = self.blocks.iter().map(|b| b.header()).collect();
-        mmr_to_chain_mmr(&self.chain, &block_headers).unwrap()
+        // We cannot pass the latest block as that would violate the condition in the transaction
+        // inputs that the chain length of the mmr must match the number of the reference block.
+        let block_headers = self.blocks.iter().map(|b| b.header()).take(self.blocks.len() - 1);
+
+        ChainMmr::from_mmr(&self.chain, block_headers).unwrap()
     }
 
     /// Gets a reference to [BlockHeader] with `block_number`.
@@ -801,6 +804,11 @@ impl MockChain {
         self.available_notes.values().cloned().collect()
     }
 
+    /// Returns the map of note IDs to consumable input notes.
+    pub fn available_notes_map(&self) -> &BTreeMap<NoteId, InputNote> {
+        &self.available_notes
+    }
+
     /// Get the reference to the accounts hash tree.
     pub fn accounts(&self) -> &SimpleSmt<ACCOUNT_TREE_DEPTH> {
         &self.accounts
@@ -816,20 +824,3 @@ enum AccountState {
     New,
     Exists,
 }
-
-// HELPER FUNCTIONS
-// ================================================================================================
-
-/// Converts the MMR into partial MMR by copying all leaves from MMR to partial MMR.
-fn mmr_to_chain_mmr(mmr: &Mmr, blocks: &[BlockHeader]) -> Result<ChainMmr, MmrError> {
-    let target_forest = mmr.forest() - 1;
-    let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks_at(target_forest)?);
-
-    for i in 0..target_forest {
-        let node = mmr.get(i)?;
-        let path = mmr.open_at(i, target_forest)?.merkle_path;
-        partial_mmr.track(i, node, &path)?;
-    }
-
-    Ok(ChainMmr::new(partial_mmr, blocks.to_vec()).unwrap())
-}
diff --git a/crates/miden-tx/src/tests/kernel_tests/test_faucet.rs b/crates/miden-tx/src/tests/kernel_tests/test_faucet.rs
index 6a091f447..6e1dd6bc6 100644
--- a/crates/miden-tx/src/tests/kernel_tests/test_faucet.rs
+++ b/crates/miden-tx/src/tests/kernel_tests/test_faucet.rs
@@ -60,13 +60,13 @@ fn test_mint_fungible_asset_succeeds() {

            # assert the correct asset is returned
            push.{FUNGIBLE_ASSET_AMOUNT}.0.{suffix}.{prefix}
-           assert_eqw
+           assert_eqw.err=9998

            # assert the input vault has been updated
            exec.memory::get_input_vault_root_ptr
            push.{suffix}.{prefix}
            exec.asset_vault::get_balance
-           push.{FUNGIBLE_ASSET_AMOUNT} assert_eq
+           push.{FUNGIBLE_ASSET_AMOUNT} assert_eq.err=9999
        end
        ",
        prefix = faucet_id.prefix().as_felt(),
@@ -174,8 +174,6 @@ fn test_mint_fungible_asset_fails_saturate_max_amount() {
 // NON-FUNGIBLE FAUCET MINT TESTS
 // ================================================================================================

-// TODO: reenable once storage map support is implemented
-#[ignore]
 #[test]
 fn test_mint_non_fungible_asset_succeeds() {
     let tx_context = TransactionContextBuilder::with_non_fungible_faucet(
@@ -186,6 +184,7 @@ fn test_mint_non_fungible_asset_succeeds() {
     .build();

     let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA);
+    let asset_vault_key = non_fungible_asset.vault_key();

     let code = format!(
         "
@@ -195,34 +194,36 @@
        use.kernel::asset_vault
        use.kernel::memory
        use.kernel::prologue
-       use.miden::faucet
+       use.test::account->test_account

        begin
            # mint asset
            exec.prologue::prepare_transaction
            push.{non_fungible_asset}
-           exec.faucet::mint
+           call.test_account::mint

            # assert the correct asset is returned
            push.{non_fungible_asset}
-           assert_eqw
+           assert_eqw.err=9997

            # assert the input vault has been updated.
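+           # (the mint call above should have added the minted asset to the input vault,
+           # which the following check verifies)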
exec.memory::get_input_vault_root_ptr push.{non_fungible_asset} exec.asset_vault::has_non_fungible_asset - assert + assert.err=9998 # assert the non-fungible asset has been added to the faucet smt push.{FAUCET_STORAGE_DATA_SLOT} exec.account::get_item - push.{non_fungible_asset} + push.{asset_vault_key} exec.smt::get push.{non_fungible_asset} - assert_eqw + assert_eqw.err=9999 + dropw end ", - non_fungible_asset = prepare_word(&non_fungible_asset.into()) + non_fungible_asset = prepare_word(&non_fungible_asset.into()), + asset_vault_key = prepare_word(&asset_vault_key), ); tx_context.execute_code(&code).unwrap(); @@ -335,9 +336,9 @@ fn test_burn_fungible_asset_succeeds() { exec.prologue::prepare_transaction push.{FUNGIBLE_ASSET_AMOUNT}.0.{suffix}.{prefix} call.account::burn - # assert the correct asset is returned + # assert the correct asset is returned push.{FUNGIBLE_ASSET_AMOUNT}.0.{suffix}.{prefix} - assert_eqw + assert_eqw.err=9998 # assert the input vault has been updated exec.memory::get_input_vault_root_ptr @@ -345,7 +346,7 @@ fn test_burn_fungible_asset_succeeds() { push.{suffix}.{prefix} exec.asset_vault::get_balance - push.{final_input_vault_asset_amount} assert_eq + push.{final_input_vault_asset_amount} assert_eq.err=9999 end ", prefix = faucet_id.prefix().as_felt(), @@ -461,8 +462,6 @@ fn test_burn_fungible_asset_insufficient_input_amount() { // NON-FUNGIBLE FAUCET BURN TESTS // ================================================================================================ -// TODO: reenable once storage map support is implemented -#[ignore] #[test] fn test_burn_non_fungible_asset_succeeds() { let tx_context = TransactionContextBuilder::with_non_fungible_faucet( @@ -472,7 +471,8 @@ fn test_burn_non_fungible_asset_succeeds() { ) .build(); - let non_fungible_asset_burnt = NonFungibleAsset::mock(&[1, 2, 3]); + let non_fungible_asset_burnt = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); + let burnt_asset_vault_key = non_fungible_asset_burnt.vault_key(); let code = format!( " @@ -482,34 +482,41 @@ fn test_burn_non_fungible_asset_succeeds() { use.kernel::asset_vault use.kernel::memory use.kernel::prologue - use.miden::faucet + use.test::account->test_account begin - # burn asset exec.prologue::prepare_transaction + + # add existing non-fungible asset to the vault + exec.memory::get_input_vault_root_ptr push.{non_fungible_asset} + exec.asset_vault::add_non_fungible_asset dropw + + # burn asset push.{non_fungible_asset} - exec.faucet::burn + call.test_account::burn # assert the correct asset is returned push.{non_fungible_asset} - assert_eqw + assert_eqw.err=9997 # assert the input vault has been updated. 
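+           # (after the burn, the asset must no longer be present in the input vault,
+           # which the following negated check verifies)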
exec.memory::get_input_vault_root_ptr push.{non_fungible_asset} exec.asset_vault::has_non_fungible_asset - not assert + not assert.err=9998 # assert the non-fungible asset has been removed from the faucet smt push.{FAUCET_STORAGE_DATA_SLOT} exec.account::get_item - push.{non_fungible_asset} + push.{burnt_asset_vault_key} exec.smt::get padw - assert_eqw + assert_eqw.err=9999 + dropw end ", - non_fungible_asset = prepare_word(&non_fungible_asset_burnt.into()) + non_fungible_asset = prepare_word(&non_fungible_asset_burnt.into()), + burnt_asset_vault_key = prepare_word(&burnt_asset_vault_key), ); tx_context.execute_code(&code).unwrap(); @@ -528,11 +535,6 @@ fn test_burn_non_fungible_asset_fails_does_not_exist() { let code = format!( " - use.std::collections::smt - - #use.kernel::account - use.kernel::asset_vault - use.kernel::memory use.kernel::prologue use.test::account @@ -559,10 +561,6 @@ fn test_burn_non_fungible_asset_fails_not_faucet_account() { let code = format!( " - use.std::collections::smt - - use.kernel::asset_vault - use.kernel::memory use.kernel::prologue use.test::account @@ -598,10 +596,6 @@ fn test_burn_non_fungible_asset_fails_inconsistent_faucet_id() { let code = format!( " - use.std::collections::smt - - use.kernel::asset_vault - use.kernel::memory use.kernel::prologue use.test::account @@ -620,6 +614,53 @@ fn test_burn_non_fungible_asset_fails_inconsistent_faucet_id() { assert_execution_error!(process, ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND); } +// IS NON FUNGIBLE ASSET ISSUED TESTS +// ================================================================================================ + +#[test] +fn test_is_non_fungible_asset_issued_succeeds() { + // NON_FUNGIBLE_ASSET_DATA_2 is "issued" during the mock faucet creation, so it is already in + // the map of issued assets. 
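+    // (issuance status is tracked in the faucet's storage map at FAUCET_STORAGE_DATA_SLOT,
+    // keyed by the asset's vault key; see the mint/burn tests above.)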
+ let tx_context = TransactionContextBuilder::with_non_fungible_faucet( + NonFungibleAsset::mock_issuer().into(), + ONE, + false, + ) + .build(); + + let non_fungible_asset_1 = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA); + let non_fungible_asset_2 = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); + + let code = format!( + " + use.kernel::prologue + use.miden::faucet + + begin + exec.prologue::prepare_transaction + + # check that NON_FUNGIBLE_ASSET_DATA_2 is already issued + push.{non_fungible_asset_2} + exec.faucet::is_non_fungible_asset_issued + + # use error code 9998 to assert that NON_FUNGIBLE_ASSET_DATA_2 is issued + eq.1 assert.err=9998 + + # check that NON_FUNGIBLE_ASSET_DATA was not issued yet + push.{non_fungible_asset_1} + exec.faucet::is_non_fungible_asset_issued + + # use error code 9999 to assert that NON_FUNGIBLE_ASSET_DATA is not issued + eq.0 assert.err=9999 + end + ", + non_fungible_asset_1 = prepare_word(&non_fungible_asset_1.into()), + non_fungible_asset_2 = prepare_word(&non_fungible_asset_2.into()), + ); + + tx_context.execute_code(&code).unwrap(); +} + // GET TOTAL ISSUANCE TESTS // ================================================================================================ @@ -645,7 +686,7 @@ fn test_get_total_issuance_succeeds() { # => [total_issuance] # assert the correct balance is returned - push.{FUNGIBLE_FAUCET_INITIAL_BALANCE} assert_eq + push.{FUNGIBLE_FAUCET_INITIAL_BALANCE} assert_eq.err=9999 # => [] end ", diff --git a/crates/miden-tx/src/tests/kernel_tests/test_prologue.rs b/crates/miden-tx/src/tests/kernel_tests/test_prologue.rs index 2e490c888..0fb99ae72 100644 --- a/crates/miden-tx/src/tests/kernel_tests/test_prologue.rs +++ b/crates/miden-tx/src/tests/kernel_tests/test_prologue.rs @@ -657,7 +657,7 @@ fn test_get_blk_version() { exec.prologue::prepare_transaction exec.memory::get_blk_version - # truncate the stack + # truncate the stack swap drop end "; @@ -678,7 +678,7 @@ fn test_get_blk_timestamp() { exec.prologue::prepare_transaction exec.memory::get_blk_timestamp - # truncate the stack + # truncate the stack swap drop end "; diff --git a/crates/miden-tx/src/tests/kernel_tests/test_tx.rs b/crates/miden-tx/src/tests/kernel_tests/test_tx.rs index 6c0e24e8a..bb45b8580 100644 --- a/crates/miden-tx/src/tests/kernel_tests/test_tx.rs +++ b/crates/miden-tx/src/tests/kernel_tests/test_tx.rs @@ -726,7 +726,7 @@ fn test_fpi_memory() { let mut mock_chain = MockChain::with_accounts(&[native_account.clone(), foreign_account.clone()]); mock_chain.seal_block(None); - let advice_inputs = get_mock_fpi_adv_inputs(&foreign_account, &mock_chain); + let advice_inputs = get_mock_fpi_adv_inputs(vec![&foreign_account], &mock_chain); let tx_context = mock_chain .build_tx_context(native_account.id(), &[], &[]) @@ -749,7 +749,7 @@ fn test_fpi_memory() { begin exec.prologue::prepare_transaction - # pad the stack for the `execute_foreign_procedure`execution + # pad the stack for the `execute_foreign_procedure` execution padw padw padw push.0.0 # => [pad(14)] @@ -856,7 +856,7 @@ fn test_fpi_memory() { exec.prologue::prepare_transaction ### Get the storage item at index 0 ##################### - # pad the stack for the `execute_foreign_procedure`execution + # pad the stack for the `execute_foreign_procedure` execution padw padw padw push.0.0 # => [pad(14)] @@ -874,7 +874,7 @@ fn test_fpi_memory() { # => [] ### Get the storage item at index 0 again ############### - # pad the stack for the `execute_foreign_procedure`execution + # pad the stack for the 
`execute_foreign_procedure` execution padw padw padw push.0.0 # => [pad(14)] @@ -904,16 +904,230 @@ fn test_fpi_memory() { // Check that the second invocation of the foreign procedure from the same account does not load // the account data again: already loaded data should be reused. // - // Native account: [2048; 4095] <- initialized during prologue - // Foreign account: [4096; 6143] <- initialized during first FPI - // Next account slot: [6144; 8191] <- should not be initialized + // Native account: [8192; 16383] <- initialized during prologue + // Foreign account: [16384; 24575] <- initialized during first FPI + // Next account slot: [24576; 32767] <- should not be initialized assert_eq!( try_read_root_mem_word( &process.into(), NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32 * 2 ), None, - "Memory starting from 6144 should stay uninitialized" + "Memory starting from 24576 should stay uninitialized" + ); +} + +#[test] +fn test_fpi_memory_two_accounts() { + // Prepare the test data + let storage_slots_1 = vec![AccountStorage::mock_item_0().slot]; + let storage_slots_2 = vec![AccountStorage::mock_item_1().slot]; + + let foreign_account_code_source_1 = " + use.miden::account + + export.get_item_foreign_1 + # make this foreign procedure unique to make sure that we invoke the procedure of the + # foreign account, not the native one + push.1 drop + exec.account::get_item + + # truncate the stack + movup.6 movup.6 movup.6 drop drop drop + end + "; + let foreign_account_code_source_2 = " + use.miden::account + + export.get_item_foreign_2 + # make this foreign procedure unique to make sure that we invoke the procedure of the + # foreign account, not the native one + push.2 drop + exec.account::get_item + + # truncate the stack + movup.6 movup.6 movup.6 drop drop drop + end + "; + + let foreign_account_component_1 = AccountComponent::compile( + foreign_account_code_source_1, + TransactionKernel::testing_assembler(), + storage_slots_1.clone(), + ) + .unwrap() + .with_supports_all_types(); + + let foreign_account_component_2 = AccountComponent::compile( + foreign_account_code_source_2, + TransactionKernel::testing_assembler(), + storage_slots_2.clone(), + ) + .unwrap() + .with_supports_all_types(); + + let foreign_account_1 = AccountBuilder::new(ChaCha20Rng::from_entropy().gen()) + .with_component(foreign_account_component_1) + .build_existing() + .unwrap(); + + let foreign_account_2 = AccountBuilder::new(ChaCha20Rng::from_entropy().gen()) + .with_component(foreign_account_component_2) + .build_existing() + .unwrap(); + + let native_account = AccountBuilder::new(ChaCha20Rng::from_entropy().gen()) + .with_component( + AccountMockComponent::new_with_empty_slots(TransactionKernel::testing_assembler()) + .unwrap(), + ) + .build_existing() + .unwrap(); + + let mut mock_chain = MockChain::with_accounts(&[ + native_account.clone(), + foreign_account_1.clone(), + foreign_account_2.clone(), + ]); + mock_chain.seal_block(None); + let advice_inputs = + get_mock_fpi_adv_inputs(vec![&foreign_account_1, &foreign_account_2], &mock_chain); + + let tx_context = mock_chain + .build_tx_context(native_account.id(), &[], &[]) + .foreign_account_codes(vec![ + foreign_account_1.code().clone(), + foreign_account_2.code().clone(), + ]) + .advice_inputs(advice_inputs.clone()) + .build(); + + // GET ITEM TWICE WITH TWO ACCOUNTS + // -------------------------------------------------------------------------------------------- + // Check the correctness of the memory layout after two invocations of the `get_item` account + 
// procedures separated by the call of this procedure against another foreign account. Invoking + // two foreign procedures from the same account should result in reuse of the loaded account. + + let code = format!( + " + use.std::sys + + use.kernel::prologue + use.miden::tx + + begin + exec.prologue::prepare_transaction + + ### Get the storage item at index 0 from the first account + # pad the stack for the `execute_foreign_procedure` execution + padw padw padw push.0.0 + # => [pad(14)] + + # push the index of desired storage item + push.0 + + # get the hash of the `get_item_foreign_1` procedure of the foreign account 1 + push.{get_item_foreign_1_hash} + + # push the foreign account ID + push.{foreign_1_suffix}.{foreign_1_prefix} + # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + + exec.tx::execute_foreign_procedure dropw + # => [] + + ### Get the storage item at index 0 from the second account + # pad the stack for the `execute_foreign_procedure` execution + padw padw padw push.0.0 + # => [pad(14)] + + # push the index of desired storage item + push.0 + + # get the hash of the `get_item_foreign_2` procedure of the foreign account 2 + push.{get_item_foreign_2_hash} + + # push the foreign account ID + push.{foreign_2_suffix}.{foreign_2_prefix} + # => [foreign_account_2_id_prefix, foreign_account_2_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + + exec.tx::execute_foreign_procedure dropw + # => [] + + ### Get the storage item at index 0 from the first account again + # pad the stack for the `execute_foreign_procedure` execution + padw padw padw push.0.0 + # => [pad(14)] + + # push the index of desired storage item + push.0 + + # get the hash of the `get_item_foreign_1` procedure of the foreign account 1 + push.{get_item_foreign_1_hash} + + # push the foreign account ID + push.{foreign_1_suffix}.{foreign_1_prefix} + # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + + exec.tx::execute_foreign_procedure + + # truncate the stack + exec.sys::truncate_stack + end + ", + get_item_foreign_1_hash = foreign_account_1.code().procedures()[0].mast_root(), + get_item_foreign_2_hash = foreign_account_2.code().procedures()[0].mast_root(), + + foreign_1_prefix = foreign_account_1.id().prefix().as_felt(), + foreign_1_suffix = foreign_account_1.id().suffix(), + + foreign_2_prefix = foreign_account_2.id().prefix().as_felt(), + foreign_2_suffix = foreign_account_2.id().suffix(), + ); + + let process = &tx_context.execute_code(&code).unwrap(); + + // Check the correctness of the memory layout after multiple foreign procedure invocations from + // different foreign accounts + // + // Native account: [8192; 16383] <- initialized during prologue + // Foreign account 1: [16384; 24575] <- initialized during first FPI + // Foreign account 2: [24576; 32767] <- initialized during second FPI + // Next account slot: [32768; 40959] <- should not be initialized + + // check that the first word of the first foreign account slot is correct + assert_eq!( + read_root_mem_word(&process.into(), NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32), + [ + foreign_account_1.id().suffix(), + foreign_account_1.id().prefix().as_felt(), + ZERO, + foreign_account_1.nonce() + ] + ); + + // check that the first word of the second foreign account slot is correct + assert_eq!( + read_root_mem_word( + &process.into(), + NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32 * 2 + ), + [ + 
foreign_account_2.id().suffix(),
+            foreign_account_2.id().prefix().as_felt(),
+            ZERO,
+            foreign_account_2.nonce()
+        ]
+    );
+
+    // check that the first word of the third foreign account slot was not initialized
+    assert_eq!(
+        try_read_root_mem_word(
+            &process.into(),
+            NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32 * 3
+        ),
+        None,
+        "Memory starting from 32768 should stay uninitialized"
     );
 }

@@ -972,7 +1186,7 @@ fn test_fpi_execute_foreign_procedure() {
     let mut mock_chain =
         MockChain::with_accounts(&[native_account.clone(), foreign_account.clone()]);
     mock_chain.seal_block(None);
-    let advice_inputs = get_mock_fpi_adv_inputs(&foreign_account, &mock_chain);
+    let advice_inputs = get_mock_fpi_adv_inputs(vec![&foreign_account], &mock_chain);

     let code = format!(
         "
@@ -982,7 +1196,7 @@

        begin
            # get the storage item at index 0
-           # pad the stack for the `execute_foreign_procedure`execution
+           # pad the stack for the `execute_foreign_procedure` execution
            padw padw padw push.0.0
            # => [pad(14)]

@@ -1077,35 +1291,41 @@
 // HELPER FUNCTIONS
 // ================================================================================================

-fn get_mock_fpi_adv_inputs(foreign_account: &Account, mock_chain: &MockChain) -> AdviceInputs {
+fn get_mock_fpi_adv_inputs(
+    foreign_accounts: Vec<&Account>,
+    mock_chain: &MockChain,
+) -> AdviceInputs {
     let mut advice_inputs = AdviceInputs::default();
-    TransactionKernel::extend_advice_inputs_for_account(
-        &mut advice_inputs,
-        &foreign_account.clone().into(),
-        foreign_account.code(),
-        &foreign_account.storage().get_header(),
-        // Provide the merkle path of the foreign account to be able to verify that the account
-        // database has the hash of this foreign account. Verification is done during the
-        // execution of the `kernel::account::validate_current_foreign_account` procedure.
-        &MerklePath::new(
-            mock_chain
-                .accounts()
-                // TODO: Update.
-                .open(&LeafIndex::<ACCOUNT_TREE_DEPTH>::new(foreign_account.id().prefix().as_felt().as_int()).unwrap())
-                .path
-                .into(),
-        ),
-    )
-    .unwrap();
-    for slot in foreign_account.storage().slots() {
-        // if there are storage maps, we populate the merkle store and advice map
-        if let StorageSlot::Map(map) = slot {
-            // extend the merkle store and map with the storage maps
-            advice_inputs.extend_merkle_store(map.inner_nodes());
-            // populate advice map with Sparse Merkle Tree leaf nodes
-            advice_inputs
-                .extend_map(map.leaves().map(|(_, leaf)| (leaf.hash(), leaf.to_elements())));
+    for foreign_account in foreign_accounts {
+        TransactionKernel::extend_advice_inputs_for_account(
+            &mut advice_inputs,
+            &foreign_account.into(),
+            foreign_account.code(),
+            &foreign_account.storage().get_header(),
+            // Provide the merkle path of the foreign account to be able to verify that the account
+            // tree has the commitment of this foreign account. Verification is done during the
+            // execution of the `kernel::account::validate_current_foreign_account` procedure.
+            &MerklePath::new(
+                mock_chain
+                    .accounts()
+                    // TODO: Update.
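+                    // (the account tree leaf is addressed by the account ID prefix,
+                    // as the `LeafIndex` below is built from `id().prefix()`)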
+                    .open(&LeafIndex::<ACCOUNT_TREE_DEPTH>::new(foreign_account.id().prefix().as_felt().as_int()).unwrap())
+                    .path
+                    .into(),
+            ),
+        )
+        .unwrap();
+
+        for slot in foreign_account.storage().slots() {
+            // if there are storage maps, we populate the merkle store and advice map
+            if let StorageSlot::Map(map) = slot {
+                // extend the merkle store and map with the storage maps
+                advice_inputs.extend_merkle_store(map.inner_nodes());
+                // populate advice map with Sparse Merkle Tree leaf nodes
+                advice_inputs
+                    .extend_map(map.leaves().map(|(_, leaf)| (leaf.hash(), leaf.to_elements())));
+            }
+        }
     }
 }
diff --git a/crates/miden-tx/src/tests/mod.rs b/crates/miden-tx/src/tests/mod.rs
index 09ba08157..45e75cc45 100644
--- a/crates/miden-tx/src/tests/mod.rs
+++ b/crates/miden-tx/src/tests/mod.rs
@@ -829,7 +829,7 @@ fn prove_witness_and_verify() {
     let serialized_transaction = proven_transaction.to_bytes();
     let proven_transaction = ProvenTransaction::read_from_bytes(&serialized_transaction).unwrap();
     let verifier = TransactionVerifier::new(MIN_PROOF_SECURITY_LEVEL);
-    assert!(verifier.verify(proven_transaction).is_ok());
+    assert!(verifier.verify(&proven_transaction).is_ok());
 }

 // TEST TRANSACTION SCRIPT
diff --git a/crates/miden-tx/src/verifier/mod.rs b/crates/miden-tx/src/verifier/mod.rs
index 6e6b8e6ae..ce113c1db 100644
--- a/crates/miden-tx/src/verifier/mod.rs
+++ b/crates/miden-tx/src/verifier/mod.rs
@@ -30,13 +30,14 @@ impl TransactionVerifier {
     /// Returns an error if:
     /// - Transaction verification fails.
     /// - The security level of the verified proof is insufficient.
-    pub fn verify(&self, transaction: ProvenTransaction) -> Result<(), TransactionVerifierError> {
+    pub fn verify(&self, transaction: &ProvenTransaction) -> Result<(), TransactionVerifierError> {
         // build stack inputs and outputs
         let stack_inputs = TransactionKernel::build_input_stack(
             transaction.account_id(),
             transaction.account_update().init_state_hash(),
             transaction.input_notes().commitment(),
             transaction.block_ref(),
+            transaction.block_num(),
         );
         let stack_outputs = TransactionKernel::build_output_stack(
             transaction.account_update().final_state_hash(),
diff --git a/crates/miden-tx/tests/integration/main.rs b/crates/miden-tx/tests/integration/main.rs
index 9842315a6..a43fee853 100644
--- a/crates/miden-tx/tests/integration/main.rs
+++ b/crates/miden-tx/tests/integration/main.rs
@@ -61,7 +61,7 @@ pub fn prove_and_verify_transaction(

     // Verify that the generated proof is valid
     let verifier = TransactionVerifier::new(miden_objects::MIN_PROOF_SECURITY_LEVEL);
-    verifier.verify(proven_transaction)
+    verifier.verify(&proven_transaction)
 }

 #[cfg(test)]
diff --git a/docs/index.md b/docs/index.md
index 68f0488bc..f7da3f122 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -16,7 +16,7 @@ If you want to join the technical discussion, please check out the following:

 ## Status and features

-Polygon Miden is currently on release v0.7. This is an early version of the protocol and its components.
+Polygon Miden is currently on release v0.8. This is an early version of the protocol and its components.

 > **Important**
 > We expect breaking changes on all components.
diff --git a/proto/api.proto b/proto/tx_prover.proto
similarity index 94%
rename from proto/api.proto
rename to proto/tx_prover.proto
index 4555b326f..de39c685c 100644
--- a/proto/api.proto
+++ b/proto/tx_prover.proto
@@ -1,6 +1,6 @@
 // Specification of the user facing gRPC API.
syntax = "proto3"; -package api; +package tx_prover; service Api { rpc ProveTransaction(ProveTransactionRequest) returns (ProveTransactionResponse) {} diff --git a/rust-toolchain.toml b/rust-toolchain.toml index a1c01e041..252a5088e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.82" +channel = "1.84" components = ["rustfmt", "rust-src", "clippy"] targets = ["wasm32-unknown-unknown"] profile = "minimal" diff --git a/scripts/check-changelog.sh b/scripts/check-changelog.sh index dbf14cdbb..7d7529daa 100755 --- a/scripts/check-changelog.sh +++ b/scripts/check-changelog.sh @@ -13,7 +13,7 @@ else if git diff --exit-code "origin/${BASE_REF}" -- "${CHANGELOG_FILE}"; then >&2 echo "Changes should come with an entry in the \"CHANGELOG.md\" file. This behavior can be overridden by using the \"no changelog\" label, which is used for changes -that are trivial / explicitely stated not to require a changelog entry." +that are trivial / explicitly stated not to require a changelog entry." exit 1 fi