From 686e83d28b46abc6b55f4b8f345135a037a83e42 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya <68976082+irakliyk@users.noreply.github.com> Date: Sat, 26 Oct 2024 13:01:41 -0700 Subject: [PATCH 01/19] fixed maybe-async dependency (#337) --- README.md | 4 ++-- prover/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 4616bf5a5..230146c2b 100644 --- a/README.md +++ b/README.md @@ -3,8 +3,8 @@ - - + + A STARK prover and verifier for arbitrary computations. diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 85b5dbcd7..6dd616a1a 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -34,7 +34,7 @@ air = { version = "0.10", path = "../air", package = "winter-air", default-featu crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } fri = { version = "0.10", path = '../fri', package = "winter-fri", default-features = false } math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -maybe_async = { path = "../utils/maybe_async" , package = "winter-maybe-async" } +maybe_async = { version = "0.10", path = "../utils/maybe_async" , package = "winter-maybe-async" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } From be61c26a537e85a9d9fde7d77379ae2211074b68 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Wed, 30 Oct 2024 15:57:27 +0100 Subject: [PATCH 02/19] Fix partition hashing (#338) --- air/src/options.rs | 4 ++++ prover/src/lib.rs | 1 + prover/src/matrix/row_matrix.rs | 2 +- verifier/src/channel.rs | 2 +- 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/air/src/options.rs b/air/src/options.rs index a831bdad7..01599489c 100644 --- a/air/src/options.rs +++ b/air/src/options.rs @@ -377,7 +377,11 @@ impl PartitionOptions { /// 
Returns the size of each partition used when committing to the main and auxiliary traces as /// well as the constraint evaluation trace. + /// The returned size is given in terms of number of columns in the field `E`. pub fn partition_size(&self, num_columns: usize) -> usize { + if self.num_partitions == 1 && self.min_partition_size == 1 { + return num_columns; + } let base_elements_per_partition = cmp::max( (num_columns * E::EXTENSION_DEGREE).div_ceil(self.num_partitions as usize), self.min_partition_size as usize, diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 035d6c655..1a2e157ea 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -219,6 +219,7 @@ pub trait Prover { /// Builds and returns the auxiliary trace. #[allow(unused_variables)] #[maybe_async] + #[instrument(skip_all)] fn build_aux_trace( &self, main_trace: &Self::Trace, diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index 85b43122e..6cb9ef60c 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -188,7 +188,7 @@ impl RowMatrix { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; - if partition_size == self.num_cols() * E::EXTENSION_DEGREE { + if partition_size == self.num_cols() { // iterate though matrix rows, hashing each row batch_iter_mut!( &mut row_hashes, diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 9d7dbc426..e6c511cd8 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -442,7 +442,7 @@ where E: FieldElement, H: ElementHasher, { - if partition_size == row.len() * E::EXTENSION_DEGREE { + if partition_size == row.len() { H::hash_elements(row) } else { let mut buffer = vec![H::Digest::default(); partition_size]; From 616892e9fe5f65b0399f58872fc36d3da6303c98 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Wed, 30 Oct 2024 08:04:47 -0700 Subject: [PATCH 03/19] incremented crate versions to 0.10.1 and updated changelog 
--- CHANGELOG.md | 3 +++ air/Cargo.toml | 4 ++-- crypto/Cargo.toml | 4 ++-- examples/Cargo.toml | 2 +- fri/Cargo.toml | 4 ++-- math/Cargo.toml | 4 ++-- prover/Cargo.toml | 4 ++-- utils/core/Cargo.toml | 4 ++-- utils/rand/Cargo.toml | 4 ++-- verifier/Cargo.toml | 4 ++-- winterfell/Cargo.toml | 4 ++-- 11 files changed, 22 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f783b5e7..78563dabf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.10.1 (2024-10-30) +- Fixed partition hashing and add logging to aux trace building (#338). + ## 0.10.0 (2024-10-25) - [BREAKING] Refactored maybe-async macro into simpler maybe-async and maybe-await macros (#283). - [BREAKING] Introduce `VectorCommitment` abstraction (#285). diff --git a/air/Cargo.toml b/air/Cargo.toml index 12c56cd72..c3bf02c86 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-air" -version = "0.10.0" +version = "0.10.1" description = "AIR components for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-air/0.10.0" +documentation = "https://docs.rs/winter-air/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "arithmetization", "air"] edition = "2021" diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 23f985fee..026c01d30 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-crypto" -version = "0.10.0" +version = "0.10.1" description = "Cryptographic library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-crypto/0.10.0" +documentation = "https://docs.rs/winter-crypto/0.10.1" categories = ["cryptography", "no-std"] keywords 
= ["crypto", "merkle-tree", "hash"] edition = "2021" diff --git a/examples/Cargo.toml b/examples/Cargo.toml index f86e9ad50..5a4dbc5ab 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "examples" -version = "0.10.0" +version = "0.10.1" description = "Examples of using Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 2e3d1b20b..96a13d1a9 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-fri" -version = "0.10.0" +version = "0.10.1" description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-fri/0.10.0" +documentation = "https://docs.rs/winter-fri/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "polynomial", "commitments"] edition = "2021" diff --git a/math/Cargo.toml b/math/Cargo.toml index 061c2d52f..1490d1e63 100644 --- a/math/Cargo.toml +++ b/math/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-math" -version = "0.10.0" +version = "0.10.1" description = "Math library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-math/0.10.0" +documentation = "https://docs.rs/winter-math/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "finite-fields", "polynomials", "fft"] edition = "2021" diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 6dd616a1a..3193bee0c 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-prover" -version = "0.10.0" +version = "0.10.1" description = "Winterfell STARK prover" authors = ["winterfell contributors"] 
readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.10.0" +documentation = "https://docs.rs/winter-prover/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" diff --git a/utils/core/Cargo.toml b/utils/core/Cargo.toml index c606caa08..b8f2724e8 100644 --- a/utils/core/Cargo.toml +++ b/utils/core/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-utils" -version = "0.10.0" +version = "0.10.1" description = "Utilities for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-utils/0.10.0" +documentation = "https://docs.rs/winter-utils/0.10.1" categories = ["cryptography", "no-std"] keywords = ["serialization", "transmute"] edition = "2021" diff --git a/utils/rand/Cargo.toml b/utils/rand/Cargo.toml index 3e05c6437..759b02436 100644 --- a/utils/rand/Cargo.toml +++ b/utils/rand/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-rand-utils" -version = "0.10.0" +version = "0.10.1" description = "Random value generation utilities for Winterfell crates" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-rand-utils/0.10.0" +documentation = "https://docs.rs/winter-rand-utils/0.10.1" categories = ["cryptography"] keywords = ["rand"] edition = "2021" diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index 63d4b9c0f..1490e3e27 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-verifier" -version = "0.10.0" +version = "0.10.1" description = "Winterfell STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = 
"https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.10.0" +documentation = "https://docs.rs/winter-verifier/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index cdeeb59ec..b37a07157 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winterfell" -version = "0.10.0" +version = "0.10.1" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.10.0" +documentation = "https://docs.rs/winterfell/0.10.1" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" From 0fe84bebc91e3a0f3ccdb47264bd94d92f3ed75d Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 18 Nov 2024 17:56:22 +0100 Subject: [PATCH 04/19] feat: implement `core::error::Error` for all error types (#341) --- CHANGELOG.md | 3 +++ air/src/errors.rs | 2 ++ crypto/src/errors.rs | 4 ++++ fri/src/errors.rs | 2 ++ prover/src/errors.rs | 2 ++ utils/core/src/errors.rs | 2 ++ utils/core/src/serde/mod.rs | 4 ++-- verifier/src/errors.rs | 2 ++ 8 files changed, 19 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 78563dabf..6ba38b967 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.10.2 (TBD) +- Implement `core::error::Error` for error types (#341). + ## 0.10.1 (2024-10-30) - Fixed partition hashing and add logging to aux trace building (#338). 
diff --git a/air/src/errors.rs b/air/src/errors.rs index 2f0fa1665..38196ba7d 100644 --- a/air/src/errors.rs +++ b/air/src/errors.rs @@ -42,3 +42,5 @@ impl fmt::Display for AssertionError { } } } + +impl core::error::Error for AssertionError {} diff --git a/crypto/src/errors.rs b/crypto/src/errors.rs index 5e4f3a6da..637b90c99 100644 --- a/crypto/src/errors.rs +++ b/crypto/src/errors.rs @@ -61,6 +61,8 @@ impl fmt::Display for MerkleTreeError { } } +impl core::error::Error for MerkleTreeError {} + // RANDOM COIN ERROR // ================================================================================================ @@ -89,3 +91,5 @@ impl fmt::Display for RandomCoinError { } } } + +impl core::error::Error for RandomCoinError {} diff --git a/fri/src/errors.rs b/fri/src/errors.rs index 7961b8831..2947b17a8 100644 --- a/fri/src/errors.rs +++ b/fri/src/errors.rs @@ -73,3 +73,5 @@ impl fmt::Display for VerifierError { } } } + +impl core::error::Error for VerifierError {} diff --git a/prover/src/errors.rs b/prover/src/errors.rs index a0d01a233..6fd560582 100644 --- a/prover/src/errors.rs +++ b/prover/src/errors.rs @@ -39,3 +39,5 @@ impl fmt::Display for ProverError { } } } + +impl core::error::Error for ProverError {} diff --git a/utils/core/src/errors.rs b/utils/core/src/errors.rs index 52df2b007..1bba975bf 100644 --- a/utils/core/src/errors.rs +++ b/utils/core/src/errors.rs @@ -32,3 +32,5 @@ impl fmt::Display for DeserializationError { } } } + +impl core::error::Error for DeserializationError {} diff --git a/utils/core/src/serde/mod.rs b/utils/core/src/serde/mod.rs index edf9ba1c8..90ac01b6d 100644 --- a/utils/core/src/serde/mod.rs +++ b/utils/core/src/serde/mod.rs @@ -344,7 +344,7 @@ impl Serializable for str { } fn get_size_hint(&self) -> usize { - self.len().get_size_hint() + self.as_bytes().len() + self.len().get_size_hint() + self.len() } } @@ -355,7 +355,7 @@ impl Serializable for String { } fn get_size_hint(&self) -> usize { - self.len().get_size_hint() + 
self.as_bytes().len() + self.len().get_size_hint() + self.len() } } diff --git a/verifier/src/errors.rs b/verifier/src/errors.rs index e1b072db5..fadaee1fa 100644 --- a/verifier/src/errors.rs +++ b/verifier/src/errors.rs @@ -99,3 +99,5 @@ impl fmt::Display for VerifierError { } } } + +impl core::error::Error for VerifierError {} From 76c6db20e92fefea652236968e64438aabf28609 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Mon, 18 Nov 2024 21:00:10 -0800 Subject: [PATCH 05/19] update crate versions to v0.10.2 --- CHANGELOG.md | 2 +- air/Cargo.toml | 4 ++-- crypto/Cargo.toml | 4 ++-- fri/Cargo.toml | 4 ++-- math/Cargo.toml | 4 ++-- prover/Cargo.toml | 4 ++-- verifier/Cargo.toml | 4 ++-- winterfell/Cargo.toml | 4 ++-- 8 files changed, 15 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ba38b967..3e2984822 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 0.10.2 (TBD) +## 0.10.2 (2024-11-18) - Implement `core::error::Error` for error types (#341). 
## 0.10.1 (2024-10-30) diff --git a/air/Cargo.toml b/air/Cargo.toml index c3bf02c86..0a363fd76 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-air" -version = "0.10.1" +version = "0.10.2" description = "AIR components for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-air/0.10.1" +documentation = "https://docs.rs/winter-air/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "arithmetization", "air"] edition = "2021" diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 026c01d30..51d93574e 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-crypto" -version = "0.10.1" +version = "0.10.2" description = "Cryptographic library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-crypto/0.10.1" +documentation = "https://docs.rs/winter-crypto/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "merkle-tree", "hash"] edition = "2021" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 96a13d1a9..86c984442 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-fri" -version = "0.10.1" +version = "0.10.2" description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-fri/0.10.1" +documentation = "https://docs.rs/winter-fri/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "polynomial", "commitments"] edition = "2021" diff --git a/math/Cargo.toml b/math/Cargo.toml index 
1490d1e63..a2576ded4 100644 --- a/math/Cargo.toml +++ b/math/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-math" -version = "0.10.1" +version = "0.10.2" description = "Math library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-math/0.10.1" +documentation = "https://docs.rs/winter-math/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "finite-fields", "polynomials", "fft"] edition = "2021" diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 3193bee0c..c02905ca4 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-prover" -version = "0.10.1" +version = "0.10.2" description = "Winterfell STARK prover" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.10.1" +documentation = "https://docs.rs/winter-prover/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index 1490e3e27..4acf3d063 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-verifier" -version = "0.10.1" +version = "0.10.2" description = "Winterfell STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.10.1" +documentation = "https://docs.rs/winter-verifier/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index b37a07157..6ade01023 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -1,12 
+1,12 @@ [package] name = "winterfell" -version = "0.10.1" +version = "0.10.2" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.10.1" +documentation = "https://docs.rs/winterfell/0.10.2" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" From f8e1216e54bd7fddaf26944c8f05ebaddc2f70e5 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Mon, 18 Nov 2024 21:06:25 -0800 Subject: [PATCH 06/19] update core-utils crate version to v0.10.2 --- utils/core/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/core/Cargo.toml b/utils/core/Cargo.toml index b8f2724e8..7eb8f58d6 100644 --- a/utils/core/Cargo.toml +++ b/utils/core/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-utils" -version = "0.10.1" +version = "0.10.2" description = "Utilities for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-utils/0.10.1" +documentation = "https://docs.rs/winter-utils/0.10.2" categories = ["cryptography", "no-std"] keywords = ["serialization", "transmute"] edition = "2021" From 86407151704a6e141c716a098dd0acafd1493b4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20=C5=9Awirski?= Date: Tue, 19 Nov 2024 23:18:03 +0100 Subject: [PATCH 07/19] Multi GPU partition fixes (#340) --- air/src/options.rs | 115 ++++++++++++++++------ prover/src/lib.rs | 3 +- prover/src/matrix/row_matrix.rs | 12 ++- prover/src/trace/trace_lde/default/mod.rs | 18 ++-- verifier/src/channel.rs | 4 +- 5 files changed, 105 insertions(+), 47 deletions(-) diff --git a/air/src/options.rs b/air/src/options.rs index 01599489c..657a6381d 100644 --- a/air/src/options.rs 
+++ b/air/src/options.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; -use core::{cmp, ops::Div}; +use core::cmp; use fri::FriOptions; use math::{FieldElement, StarkField, ToElements}; @@ -76,16 +76,13 @@ pub enum FieldExtension { /// collision resistance of the hash function used by the protocol. For example, if a hash function /// with 128-bit collision resistance is used, soundness of a STARK proof cannot exceed 128 bits. /// -/// In addition to the above, the parameter `num_partitions` is used in order to specify the number -/// of partitions each of the traces committed to during proof generation is split into, and -/// the parameter `min_partition_size` gives a lower bound on the size of each such partition. -/// More precisely, and taking the main segment trace as an example, the prover will split the main -/// segment trace into `num_partitions` parts each of size at least `min_partition_size`. The prover -/// will then proceed to hash each part row-wise resulting in `num_partitions` digests per row of -/// the trace. The prover finally combines the `num_partitions` digest (per row) into one digest -/// (per row) and at this point the vector commitment scheme can be called. -/// In the case when `num_partitions` is equal to `1` the prover will just hash each row in one go -/// producing one digest per row of the trace. +/// In addition, partition options (see [PartitionOptions]) can be provided to split traces during +/// proving and distribute work across multiple devices. Taking the main segment trace as an example, +/// the prover will split the main segment trace into `num_partitions` parts, and then proceed to hash +/// each part row-wise resulting in `num_partitions` digests per row of the trace. Finally, +/// `num_partitions` digests (per row) are combined into one digest (per row) and at this point +/// a vector commitment scheme can be called. 
In the case when `num_partitions` is equal to `1` (default) +/// the prover will hash each row in one go producing one digest per row of the trace. #[derive(Debug, Clone, Eq, PartialEq)] pub struct ProofOptions { num_queries: u8, @@ -177,13 +174,13 @@ impl ProofOptions { /// # Panics /// Panics if: /// - `num_partitions` is zero or greater than 16. - /// - `min_partition_size` is zero or greater than 256. + /// - `hash_rate` is zero or greater than 256. pub const fn with_partitions( mut self, num_partitions: usize, - min_partition_size: usize, + hash_rate: usize, ) -> ProofOptions { - self.partition_options = PartitionOptions::new(num_partitions, min_partition_size); + self.partition_options = PartitionOptions::new(num_partitions, hash_rate); self } @@ -277,7 +274,7 @@ impl Serializable for ProofOptions { target.write_u8(self.fri_folding_factor); target.write_u8(self.fri_remainder_max_degree); target.write_u8(self.partition_options.num_partitions); - target.write_u8(self.partition_options.min_partition_size); + target.write_u8(self.partition_options.hash_rate); } } @@ -347,31 +344,43 @@ impl Deserializable for FieldExtension { // PARTITION OPTION IMPLEMENTATION // ================================================================================================ -/// Defines the parameters used when committing to the traces generated during the protocol. +/// Defines the parameters used to calculate partition size when committing to the traces +/// generated during the protocol. +/// +/// Using multiple partitions will change how vector commitments are calculated: +/// - Input matrix columns are split into at most num_partitions partitions +/// - For each matrix row, a hash is calculated for each partition separately +/// - The results are merged together by one more hash iteration +/// +/// This is especially useful when proving with multiple GPU cards where each device holds +/// a subset of data and allows less data reshuffling when generating commitments. 
+/// +/// Hash_rate parameter is used to find the optimal partition size to minimize the number +/// of hash iterations. It specifies how many field elements are consumed by each hash iteration. #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub struct PartitionOptions { num_partitions: u8, - min_partition_size: u8, + hash_rate: u8, } impl PartitionOptions { /// Returns a new instance of `[PartitionOptions]`. - pub const fn new(num_partitions: usize, min_partition_size: usize) -> Self { + pub const fn new(num_partitions: usize, hash_rate: usize) -> Self { assert!(num_partitions >= 1, "number of partitions must be greater than or eqaul to 1"); assert!(num_partitions <= 16, "number of partitions must be smaller than or equal to 16"); assert!( - min_partition_size >= 1, - "smallest partition size must be greater than or equal to 1" + hash_rate >= 1, + "hash rate must be greater than or equal to 1" ); assert!( - min_partition_size <= 256, - "smallest partition size must be smaller than or equal to 256" + hash_rate <= 256, + "hash rate must be smaller than or equal to 256" ); Self { num_partitions: num_partitions as u8, - min_partition_size: min_partition_size as u8, + hash_rate: hash_rate as u8, } } @@ -379,21 +388,30 @@ impl PartitionOptions { /// well as the constraint evaluation trace. /// The returned size is given in terms of number of columns in the field `E`. pub fn partition_size(&self, num_columns: usize) -> usize { - if self.num_partitions == 1 && self.min_partition_size == 1 { + if self.num_partitions == 1 { return num_columns; } - let base_elements_per_partition = cmp::max( - (num_columns * E::EXTENSION_DEGREE).div_ceil(self.num_partitions as usize), - self.min_partition_size as usize, - ); - base_elements_per_partition.div(E::EXTENSION_DEGREE) + // Don't separate columns that would fit inside one hash iteration. min_partition_size is + // the number of `E` elements that can be consumed in one hash iteration. 
+ let min_partition_size = self.hash_rate as usize / E::EXTENSION_DEGREE; + + cmp::max( + num_columns.div_ceil(self.num_partitions as usize), + min_partition_size, + ) + } + + /// The actual number of partitions, after the min partition size implied + /// by the hash rate is taken into account. + pub fn num_partitions(&self, num_columns: usize) -> usize { + num_columns.div_ceil(self.partition_size::(num_columns)) } } impl Default for PartitionOptions { fn default() -> Self { - Self { num_partitions: 1, min_partition_size: 1 } + Self { num_partitions: 1, hash_rate: 1 } } } @@ -402,9 +420,9 @@ impl Default for PartitionOptions { #[cfg(test)] mod tests { - use math::fields::f64::BaseElement; + use math::fields::{f64::BaseElement, CubeExtension}; - use super::{FieldExtension, ProofOptions, ToElements}; + use super::{FieldExtension, PartitionOptions, ProofOptions, ToElements}; #[test] fn proof_options_to_elements() { @@ -438,4 +456,37 @@ mod tests { ); assert_eq!(expected, options.to_elements()); } + + #[test] + fn correct_partition_sizes() { + type E1 = BaseElement; + type E3 = CubeExtension; + + let options = PartitionOptions::new(4, 8); + let columns = 7; + assert_eq!(8, options.partition_size::(columns)); + assert_eq!(1, options.num_partitions::(columns)); + + let options = PartitionOptions::new(4, 8); + let columns = 70; + assert_eq!(18, options.partition_size::(columns)); + assert_eq!(4, options.num_partitions::(columns)); + + let options = PartitionOptions::new(2, 8); + let columns = 7; + assert_eq!(4, options.partition_size::(columns)); + assert_eq!(2, options.num_partitions::(columns)); + + let options: PartitionOptions = PartitionOptions::new(4, 8); + let columns = 7; + assert_eq!(2, options.partition_size::(columns)); + assert_eq!(4, options.num_partitions::(columns)); + + // don't use all partitions if it would result in sizes smaller than + // a single hash iteration can handle + let options: PartitionOptions = PartitionOptions::new(4, 8); + let columns = 
3; + assert_eq!(2, options.partition_size::(columns)); + assert_eq!(2, options.num_partitions::(columns)); + } } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 1a2e157ea..c72c0c766 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -558,8 +558,7 @@ pub trait Prover { .in_scope(|| { let commitment = composed_evaluations.commit_to_rows::( self.options() - .partition_options() - .partition_size::(num_constraint_composition_columns), + .partition_options(), ); ConstraintCommitment::new(composed_evaluations, commitment) }); diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index 6cb9ef60c..ef146643e 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -3,6 +3,7 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::PartitionOptions; use alloc::vec::Vec; use crypto::{ElementHasher, VectorCommitment}; @@ -180,13 +181,14 @@ impl RowMatrix { /// * A vector commitment is computed for the resulting vector using the specified vector /// commitment scheme. /// * The resulting vector commitment is returned as the commitment to the entire matrix. 
- pub fn commit_to_rows(&self, partition_size: usize) -> V + pub fn commit_to_rows(&self, partition_options: PartitionOptions) -> V where H: ElementHasher, V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; + let partition_size = partition_options.partition_size::(self.num_cols()); if partition_size == self.num_cols() { // iterate though matrix rows, hashing each row @@ -200,17 +202,21 @@ impl RowMatrix { } ); } else { + let num_partitions = partition_options.num_partitions::(self.num_cols()); + // iterate though matrix rows, hashing each row batch_iter_mut!( &mut row_hashes, 128, // min batch size |batch: &mut [H::Digest], batch_offset: usize| { - let mut buffer = vec![H::Digest::default(); partition_size]; + let mut buffer = vec![H::Digest::default(); num_partitions]; for (i, row_hash) in batch.iter_mut().enumerate() { self.row(batch_offset + i) .chunks(partition_size) .zip(buffer.iter_mut()) - .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk)); + .for_each(|(chunk, buf)| { + *buf = H::hash_elements(chunk); + }); *row_hash = H::merge_many(&buffer); } } diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 26b5e3916..850ce0d90 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -43,7 +43,7 @@ pub struct DefaultTraceLde< aux_segment_oracles: Option, blowup: usize, trace_info: TraceInfo, - partition_option: PartitionOptions, + partition_options: PartitionOptions, _h: PhantomData, } @@ -64,16 +64,16 @@ where trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, - partition_option: PartitionOptions, + partition_options: PartitionOptions, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = build_trace_commitment::( main_trace, domain, - 
partition_option.partition_size::(main_trace.num_cols()), + partition_options, ); - + let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { main_segment_lde, @@ -82,7 +82,7 @@ where aux_segment_oracles: None, blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), - partition_option, + partition_options, _h: PhantomData, }; @@ -151,9 +151,9 @@ where build_trace_commitment::( aux_trace, domain, - self.partition_option.partition_size::(aux_trace.num_cols()), + self.partition_options, ); - + // check errors assert!( usize::from(self.aux_segment_lde.is_some()) < self.trace_info.num_aux_segments(), @@ -276,7 +276,7 @@ where fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, - partition_size: usize, + partition_options: PartitionOptions, ) -> (RowMatrix, V, ColMatrix) where E: FieldElement, @@ -306,7 +306,7 @@ where // build trace commitment let commitment_domain_size = trace_lde.num_rows(); let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) - .in_scope(|| trace_lde.commit_to_rows::(partition_size)); + .in_scope(|| trace_lde.commit_to_rows::(partition_options)); assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); (trace_lde, trace_vector_com, trace_polys) diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index e6c511cd8..1425d86aa 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -445,7 +445,9 @@ where if partition_size == row.len() { H::hash_elements(row) } else { - let mut buffer = vec![H::Digest::default(); partition_size]; + let num_partitions = row.len().div_ceil(partition_size); + + let mut buffer = vec![H::Digest::default(); num_partitions]; row.chunks(partition_size) .zip(buffer.iter_mut()) From aafc10d0374b0dac62c199feea1b054ceb1b0e99 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 19 Nov 2024 14:31:05 -0800 Subject: [PATCH 08/19] increment crate version to v0.10.3 --- CHANGELOG.md 
| 3 +++ air/Cargo.toml | 4 ++-- prover/Cargo.toml | 4 ++-- verifier/Cargo.toml | 4 ++-- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3e2984822..74b7aec94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 0.10.3 (2024-11-19) - `air`, `prover`, and `verifier` crates only +- Fix partition size calculations in `PartitionOptions` (#341). + ## 0.10.2 (2024-11-18) - Implement `core::error::Error` for error types (#341). diff --git a/air/Cargo.toml b/air/Cargo.toml index 0a363fd76..4365cd205 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-air" -version = "0.10.2" +version = "0.10.3" description = "AIR components for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-air/0.10.2" +documentation = "https://docs.rs/winter-air/0.10.3" categories = ["cryptography", "no-std"] keywords = ["crypto", "arithmetization", "air"] edition = "2021" diff --git a/prover/Cargo.toml b/prover/Cargo.toml index c02905ca4..d3d299159 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-prover" -version = "0.10.2" +version = "0.10.3" description = "Winterfell STARK prover" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.10.2" +documentation = "https://docs.rs/winter-prover/0.10.3" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index 4acf3d063..0d766a53f 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-verifier" -version = "0.10.2" +version = "0.10.3" description = "Winterfell 
STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.10.2" +documentation = "https://docs.rs/winter-verifier/0.10.3" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" From 548d9a557f8b56a3c8d84cf04ccc17997f95feca Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 19 Nov 2024 14:31:24 -0800 Subject: [PATCH 09/19] fix lints --- air/src/options.rs | 23 +++++++---------------- prover/src/lib.rs | 6 ++---- prover/src/matrix/row_matrix.rs | 4 ++-- prover/src/trace/trace_lde/default/mod.rs | 16 ++++------------ 4 files changed, 15 insertions(+), 34 deletions(-) diff --git a/air/src/options.rs b/air/src/options.rs index 657a6381d..a043d70db 100644 --- a/air/src/options.rs +++ b/air/src/options.rs @@ -346,15 +346,15 @@ impl Deserializable for FieldExtension { /// Defines the parameters used to calculate partition size when committing to the traces /// generated during the protocol. -/// +/// /// Using multiple partitions will change how vector commitments are calculated: /// - Input matrix columns are split into at most num_partitions partitions /// - For each matrix row, a hash is calculated for each partition separately /// - The results are merged together by one more hash iteration -/// +/// /// This is especially useful when proving with multiple GPU cards where each device holds /// a subset of data and allows less data reshuffling when generating commitments. -/// +/// /// Hash_rate parameter is used to find the optimal partition size to minimize the number /// of hash iterations. It specifies how many field elements are consumed by each hash iteration. #[derive(Debug, Clone, Copy, Eq, PartialEq)] @@ -366,17 +366,11 @@ pub struct PartitionOptions { impl PartitionOptions { /// Returns a new instance of `[PartitionOptions]`. 
pub const fn new(num_partitions: usize, hash_rate: usize) -> Self { - assert!(num_partitions >= 1, "number of partitions must be greater than or eqaul to 1"); + assert!(num_partitions >= 1, "number of partitions must be greater than or equal to 1"); assert!(num_partitions <= 16, "number of partitions must be smaller than or equal to 16"); - assert!( - hash_rate >= 1, - "hash rate must be greater than or equal to 1" - ); - assert!( - hash_rate <= 256, - "hash rate must be smaller than or equal to 256" - ); + assert!(hash_rate >= 1, "hash rate must be greater than or equal to 1"); + assert!(hash_rate <= 256, "hash rate must be smaller than or equal to 256"); Self { num_partitions: num_partitions as u8, @@ -396,10 +390,7 @@ impl PartitionOptions { // the number of `E` elements that can be consumed in one hash iteration. let min_partition_size = self.hash_rate as usize / E::EXTENSION_DEGREE; - cmp::max( - num_columns.div_ceil(self.num_partitions as usize), - min_partition_size, - ) + cmp::max(num_columns.div_ceil(self.num_partitions as usize), min_partition_size) } /// The actual number of partitions, after the min partition size implied diff --git a/prover/src/lib.rs b/prover/src/lib.rs index c72c0c766..6854ae818 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -556,10 +556,8 @@ pub trait Prover { log_domain_size = domain_size.ilog2() ) .in_scope(|| { - let commitment = composed_evaluations.commit_to_rows::( - self.options() - .partition_options(), - ); + let commitment = composed_evaluations + .commit_to_rows::(self.options().partition_options()); ConstraintCommitment::new(composed_evaluations, commitment) }); diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index ef146643e..91c5c04aa 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -3,9 +3,9 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
-use air::PartitionOptions; use alloc::vec::Vec; +use air::PartitionOptions; use crypto::{ElementHasher, VectorCommitment}; use math::{fft, FieldElement, StarkField}; #[cfg(feature = "concurrent")] @@ -203,7 +203,7 @@ impl RowMatrix { ); } else { let num_partitions = partition_options.num_partitions::(self.num_cols()); - + // iterate though matrix rows, hashing each row batch_iter_mut!( &mut row_hashes, diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 850ce0d90..afc3734a6 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -68,12 +68,8 @@ where ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = - build_trace_commitment::( - main_trace, - domain, - partition_options, - ); - + build_trace_commitment::(main_trace, domain, partition_options); + let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { main_segment_lde, @@ -148,12 +144,8 @@ where ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = - build_trace_commitment::( - aux_trace, - domain, - self.partition_options, - ); - + build_trace_commitment::(aux_trace, domain, self.partition_options); + // check errors assert!( usize::from(self.aux_segment_lde.is_some()) < self.trace_info.num_aux_segments(), From 1d3a54ec70519083b6c368099fe5db7b8b84e1e7 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 19 Nov 2024 14:32:47 -0800 Subject: [PATCH 10/19] increment crate versions v0.10.3 --- winterfell/Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index 6ade01023..bee4d0bc8 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml 
@@ -1,12 +1,12 @@ [package] name = "winterfell" -version = "0.10.2" +version = "0.10.3" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.10.2" +documentation = "https://docs.rs/winterfell/0.10.3" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" From ee2089a6c78b2b2d28fd872b85fcf35a8e3a8b18 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Tue, 19 Nov 2024 14:41:00 -0800 Subject: [PATCH 11/19] fix type in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 74b7aec94..59e740c42 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,7 @@ # Changelog ## 0.10.3 (2024-11-19) - `air`, `prover`, and `verifier` crates only -- Fix partition size calculations in `PartitionOptions` (#341). +- Fix partition size calculations in `PartitionOptions` (#340). ## 0.10.2 (2024-11-18) - Implement `core::error::Error` for error types (#341). 
From 08be71c20ebf8b5f1696e7ba4dd5c810ef1eb336 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Grzegorz=20=C5=9Awirski?= Date: Sun, 24 Nov 2024 23:01:40 +0100 Subject: [PATCH 12/19] Make `Prover` generic over the `ConstraintCommitment` type (#343) --- README.md | 18 +++ examples/src/fibonacci/fib2/prover.rs | 23 ++- examples/src/fibonacci/fib8/prover.rs | 23 ++- examples/src/fibonacci/fib_small/prover.rs | 23 ++- examples/src/fibonacci/mulfib2/prover.rs | 23 ++- examples/src/fibonacci/mulfib8/prover.rs | 23 ++- examples/src/lamport/aggregate/prover.rs | 23 ++- examples/src/lamport/threshold/prover.rs | 23 ++- examples/src/merkle/prover.rs | 23 ++- examples/src/rescue/prover.rs | 23 ++- examples/src/rescue_raps/prover.rs | 22 ++- examples/src/vdf/exempt/prover.rs | 23 ++- examples/src/vdf/regular/prover.rs | 23 ++- prover/README.md | 1 + prover/benches/lagrange_kernel.rs | 22 ++- prover/src/constraints/commitment.rs | 80 ---------- prover/src/constraints/commitment/default.rs | 150 +++++++++++++++++++ prover/src/constraints/commitment/mod.rs | 37 +++++ prover/src/constraints/mod.rs | 2 +- prover/src/lib.rs | 88 ++++------- winterfell/src/lib.rs | 54 +++++-- winterfell/src/tests.rs | 18 +++ 22 files changed, 559 insertions(+), 186 deletions(-) delete mode 100644 prover/src/constraints/commitment.rs create mode 100644 prover/src/constraints/commitment/default.rs create mode 100644 prover/src/constraints/commitment/mod.rs diff --git a/README.md b/README.md index 230146c2b..396cdc5ec 100644 --- a/README.md +++ b/README.md @@ -270,6 +270,8 @@ impl Prover for WorkProver { type TraceLde> = DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, WorkAir, E>; + type ConstraintCommitment> = + DefaultConstraintCommitment; // Our public inputs consist of the first and last value in the execution trace. 
fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { @@ -300,6 +302,22 @@ impl Prover for WorkProver { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + // We'll use the default constraint commitment. + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } + fn options(&self) -> &ProofOptions { &self.options } diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 99d48f004..28b0ebf53 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -60,6 +60,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -90,4 +92,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: 
CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 64182978c..860eb60df 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -75,6 +75,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -105,4 +107,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git 
a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 553988064..9d1ced901 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -3,9 +3,9 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -65,6 +65,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -95,4 +97,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 4c99187bf..67aebee70 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source 
tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -56,6 +56,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -86,4 +88,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 1fb58bd1a..3cf93aed9 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -68,6 +68,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -98,4 +100,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 3927a20e6..983eee3fb 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -6,9 +6,9 @@ #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + 
ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -105,6 +105,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -134,6 +136,21 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } // TRACE INITIALIZATION diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 87bd09bf6..c40f5a9f2 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -8,9 +8,9 @@ use std::collections::HashMap; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -147,6 +147,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type 
ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -176,6 +178,21 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } // TRACE INITIALIZATION diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index b1164ff83..4a86cc90a 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -109,6 +109,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -141,4 +143,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: 
CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index e8ca93757..98d725eec 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -75,6 +75,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -108,4 +110,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/rescue_raps/prover.rs 
b/examples/src/rescue_raps/prover.rs index b8b21b1f3..7b04f98b9 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -5,9 +5,9 @@ use core_utils::uninit_vector; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, }; use super::{ @@ -105,6 +105,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -141,6 +143,20 @@ where DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } fn build_aux_trace( &self, trace: &Self::Trace, diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index 16a7b8169..f39e818d2 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -56,6 +56,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -91,4 +93,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 20bdf7874..591dcc839 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -4,9 +4,9 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -53,6 +53,8 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; @@ -86,4 +88,19 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } } diff --git a/prover/README.md b/prover/README.md index 9c77a9e59..b95a73c0e 100644 --- a/prover/README.md +++ b/prover/README.md @@ -21,6 +21,7 @@ To define a prover for a computation, you'll need implement the `Prover` trait. * `get_pub_inputs()`, which describes how a set of public inputs can be extracted from a given instance of an execution trace. These inputs will need to be shared with the verifier in order for them to verify the proof. * `new_trace_lde()`, which constructs a new instance of trace low-degree extension. 
Unless your prover needs to implement specialized optimizations for performing low-degree extensions, this method can just return a default trace low-degree extension provided by Winterfell. * `new_evaluator()`, which constructs a new instance of the AIR constraint evaluator. Unless your prover needs to implement specialized optimizations for evaluating constraints, this method can just return a default constraint evaluator provided by Winterfell. +* `build_constraint_commitment()`, which constructs a new instance of constraint commitment. Unless your prover needs to implement specialized optimizations for committing to constraints, this method can just return a default constraint commitment provided by Winterfell. * `options()`, which defines STARK protocol parameters to be used during proof generation. These parameters include number of queries, blowup factor, grinding factor, hash function to be used during proof generation etc.. Values of these parameters directly inform such metrics as proof generation time, proof size, and proof security level. See [air crate](../air) for more info. A prover exposes a `prove()` method which can be used to generate a STARK proof using a given execution trace as a witness. 
diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index d6ab6a5bc..92bf54f5a 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -14,8 +14,9 @@ use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criteri use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; use winter_prover::{ - matrix::ColMatrix, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, - StarkDomain, Trace, TracePolyTable, + matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, + DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, StarkDomain, Trace, + TracePolyTable, }; const TRACE_LENS: [usize; 2] = [2_usize.pow(16), 2_usize.pow(20)]; @@ -187,6 +188,8 @@ impl Prover for LagrangeProver { type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>; @@ -210,6 +213,21 @@ impl Prover for LagrangeProver { DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } + fn new_evaluator<'a, E>( &self, air: &'a Self::Air, diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs deleted file mode 100644 index ac71fdc94..000000000 --- a/prover/src/constraints/commitment.rs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. 
-// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree. - -use alloc::vec::Vec; -use core::marker::PhantomData; - -use air::proof::Queries; -use crypto::{ElementHasher, VectorCommitment}; -use math::FieldElement; - -use super::RowMatrix; - -// CONSTRAINT COMMITMENT -// ================================================================================================ - -/// Constraint evaluation commitment. -/// -/// The commitment consists of two components: -/// * Evaluations of composition polynomial columns over the LDE domain. -/// * Vector commitment where each vector element corresponds to the digest of a row in -/// the composition polynomial evaluation matrix. -pub struct ConstraintCommitment< - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -> { - evaluations: RowMatrix, - vector_commitment: V, - _h: PhantomData, -} - -impl ConstraintCommitment -where - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -{ - /// Creates a new constraint evaluation commitment from the provided composition polynomial - /// evaluations and the corresponding vector commitment. - pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { - assert_eq!( - evaluations.num_rows(), - commitment.domain_len(), - "number of rows in constraint evaluation matrix must be the same as the size \ - of the vector commitment domain" - ); - - ConstraintCommitment { - evaluations, - vector_commitment: commitment, - _h: PhantomData, - } - } - - /// Returns the commitment. - pub fn commitment(&self) -> H::Digest { - self.vector_commitment.commitment() - } - - /// Returns constraint evaluations at the specified positions along with a batch opening proof - /// against the vector commitment. 
- pub fn query(self, positions: &[usize]) -> Queries { - // build batch opening proof to the leaves specified by positions - let opening_proof = self - .vector_commitment - .open_many(positions) - .expect("failed to generate a batch opening proof for constraint queries"); - - // determine a set of evaluations corresponding to each position - let mut evaluations = Vec::new(); - for &position in positions { - let row = self.evaluations.row(position).to_vec(); - evaluations.push(row); - } - - Queries::new::(opening_proof.1, evaluations) - } -} diff --git a/prover/src/constraints/commitment/default.rs b/prover/src/constraints/commitment/default.rs new file mode 100644 index 000000000..629c08cd3 --- /dev/null +++ b/prover/src/constraints/commitment/default.rs @@ -0,0 +1,150 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use alloc::vec::Vec; +use core::marker::PhantomData; + +use air::{proof::Queries, PartitionOptions}; +use crypto::{ElementHasher, VectorCommitment}; +use math::FieldElement; +use tracing::info_span; + +use super::{ConstraintCommitment, RowMatrix}; +use crate::{CompositionPoly, CompositionPolyTrace, StarkDomain, DEFAULT_SEGMENT_WIDTH}; + +// CONSTRAINT COMMITMENT +// ================================================================================================ + +/// Constraint evaluation commitment. +/// +/// The commitment consists of two components: +/// * Evaluations of composition polynomial columns over the LDE domain. +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. 
+pub struct DefaultConstraintCommitment< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + evaluations: RowMatrix, + vector_commitment: V, + _h: PhantomData, +} + +impl DefaultConstraintCommitment +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ + /// Creates a new constraint evaluation commitment from the provided composition polynomial + /// evaluations and the corresponding vector commitment. + pub fn new( + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self, CompositionPoly) { + // extend the main execution trace and build a commitment to the extended trace + let (evaluations, commitment, composition_poly) = build_constraint_commitment::( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ); + + assert_eq!( + evaluations.num_rows(), + commitment.domain_len(), + "number of rows in constraint evaluation matrix must be the same as the size \ + of the vector commitment domain" + ); + + let commitment = Self { + evaluations, + vector_commitment: commitment, + _h: PhantomData, + }; + + (commitment, composition_poly) + } +} + +impl ConstraintCommitment for DefaultConstraintCommitment +where + E: FieldElement, + H: ElementHasher + core::marker::Sync, + V: VectorCommitment + core::marker::Sync, +{ + type HashFn = H; + type VC = V; + + /// Returns the commitment. + fn commitment(&self) -> H::Digest { + self.vector_commitment.commitment() + } + + /// Returns constraint evaluations at the specified positions along with a batch opening proof + /// against the vector commitment. 
+ fn query(self, positions: &[usize]) -> Queries { + // build batch opening proof to the leaves specified by positions + let opening_proof = self + .vector_commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for constraint queries"); + + // determine a set of evaluations corresponding to each position + let mut evaluations = Vec::new(); + for &position in positions { + let row = self.evaluations.row(position).to_vec(); + evaluations.push(row); + } + + Queries::new::(opening_proof.1, evaluations) + } +} + +fn build_constraint_commitment( + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, +) -> (RowMatrix, V, CompositionPoly) +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ + // first, build constraint composition polynomial from its trace as follows: + // - interpolate the trace into a polynomial in coefficient form + // - "break" the polynomial into a set of column polynomials each of degree equal to + // trace_length - 1 + let composition_poly = info_span!( + "build_composition_poly_columns", + num_columns = num_constraint_composition_columns + ) + .in_scope(|| { + CompositionPoly::new(composition_poly_trace, domain, num_constraint_composition_columns) + }); + assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns); + assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1); + + // then, evaluate composition polynomial columns over the LDE domain + let domain_size = domain.lde_domain_size(); + let composed_evaluations = info_span!("evaluate_composition_poly_columns").in_scope(|| { + RowMatrix::evaluate_polys_over::(composition_poly.data(), domain) + }); + assert_eq!(composed_evaluations.num_cols(), num_constraint_composition_columns); + assert_eq!(composed_evaluations.num_rows(), domain_size); + + // finally, build constraint evaluation commitment + let commitment = 
info_span!(
+        "compute_constraint_evaluation_commitment",
+        log_domain_size = domain_size.ilog2()
+    )
+    .in_scope(|| composed_evaluations.commit_to_rows::(partition_options));
+
+    (composed_evaluations, commitment, composition_poly)
+}
diff --git a/prover/src/constraints/commitment/mod.rs b/prover/src/constraints/commitment/mod.rs
new file mode 100644
index 000000000..5ecfe8b08
--- /dev/null
+++ b/prover/src/constraints/commitment/mod.rs
@@ -0,0 +1,37 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// This source code is licensed under the MIT license found in the
+// LICENSE file in the root directory of this source tree.
+
+use air::proof::Queries;
+use crypto::{ElementHasher, Hasher, VectorCommitment};
+use math::FieldElement;
+
+use super::RowMatrix;
+
+mod default;
+pub use default::DefaultConstraintCommitment;
+
+// CONSTRAINT COMMITMENT
+// ================================================================================================
+
+/// Constraint evaluation commitment.
+///
+/// The commitment consists of two components:
+/// * Evaluations of composition polynomial columns over the LDE domain.
+/// * Vector commitment where each vector element corresponds to the digest of a row in
+///   the composition polynomial evaluation matrix.
+pub trait ConstraintCommitment {
+    /// The hash function used for hashing the rows of trace segment LDEs.
+    type HashFn: ElementHasher;
+
+    /// The vector commitment scheme used for committing to the trace.
+    type VC: VectorCommitment;
+
+    /// Returns the commitment.
+    fn commitment(&self) -> ::Digest;
+
+    /// Returns constraint evaluations at the specified positions along with a batch opening proof
+    /// against the vector commitment.
+ fn query(self, positions: &[usize]) -> Queries; +} diff --git a/prover/src/constraints/mod.rs b/prover/src/constraints/mod.rs index 566065f0f..054edb32c 100644 --- a/prover/src/constraints/mod.rs +++ b/prover/src/constraints/mod.rs @@ -15,4 +15,4 @@ mod evaluation_table; pub use evaluation_table::{ConstraintEvaluationTable, EvaluationTableFragment}; mod commitment; -pub use commitment::ConstraintCommitment; +pub use commitment::{ConstraintCommitment, DefaultConstraintCommitment}; diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 6854ae818..906fac8ec 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -73,7 +73,7 @@ use matrix::{ColMatrix, RowMatrix}; mod constraints; pub use constraints::{ CompositionPoly, CompositionPolyTrace, ConstraintCommitment, ConstraintEvaluator, - DefaultConstraintEvaluator, + DefaultConstraintCommitment, DefaultConstraintEvaluator, }; mod composer; @@ -155,6 +155,12 @@ pub trait Prover { where E: FieldElement; + /// Constraint low-degree extension for building the LDEs of composition polynomial columns and + /// their commitments. + type ConstraintCommitment: ConstraintCommitment + where + E: FieldElement; + // REQUIRED METHODS // -------------------------------------------------------------------------------------------- @@ -199,6 +205,27 @@ pub trait Prover { where E: FieldElement; + /// Extends constraint composition polynomial over the LDE domain and builds a commitment to + /// its evaluations. + /// + /// The extension is done by first interpolating the evaluations of the polynomial so that we + /// get the composition polynomial in coefficient form; then breaking the polynomial into + /// columns each of size equal to trace length, and finally evaluating each composition + /// polynomial column over the LDE domain. + /// + /// The commitment is computed by building a vector containing the hashes of each row in + /// the evaluation matrix, and then building vector commitment of the resulting vector. 
+ #[maybe_async] + fn build_constraint_commitment( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) + where + E: FieldElement; + // PROVIDED METHODS // -------------------------------------------------------------------------------------------- @@ -508,62 +535,6 @@ pub trait Prover { Ok(proof) } - /// Extends constraint composition polynomial over the LDE domain and builds a commitment to - /// its evaluations. - /// - /// The extension is done by first interpolating the evaluations of the polynomial so that we - /// get the composition polynomial in coefficient form; then breaking the polynomial into - /// columns each of size equal to trace length, and finally evaluating each composition - /// polynomial column over the LDE domain. - /// - /// The commitment is computed by building a vector containing the hashes of each row in - /// the evaluation matrix, and then building vector commitment of the resulting vector. 
- #[maybe_async] - fn build_constraint_commitment( - &self, - composition_poly_trace: CompositionPolyTrace, - num_constraint_composition_columns: usize, - domain: &StarkDomain, - ) -> (ConstraintCommitment, CompositionPoly) - where - E: FieldElement, - { - // first, build constraint composition polynomial from its trace as follows: - // - interpolate the trace into a polynomial in coefficient form - // - "break" the polynomial into a set of column polynomials each of degree equal to - // trace_length - 1 - let composition_poly = info_span!( - "build_composition_poly_columns", - num_columns = num_constraint_composition_columns - ) - .in_scope(|| { - CompositionPoly::new(composition_poly_trace, domain, num_constraint_composition_columns) - }); - assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns); - assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1); - - // then, evaluate composition polynomial columns over the LDE domain - let domain_size = domain.lde_domain_size(); - let composed_evaluations = info_span!("evaluate_composition_poly_columns").in_scope(|| { - RowMatrix::evaluate_polys_over::(composition_poly.data(), domain) - }); - assert_eq!(composed_evaluations.num_cols(), num_constraint_composition_columns); - assert_eq!(composed_evaluations.num_rows(), domain_size); - - // finally, build constraint evaluation commitment - let constraint_commitment = info_span!( - "compute_constraint_evaluation_commitment", - log_domain_size = domain_size.ilog2() - ) - .in_scope(|| { - let commitment = composed_evaluations - .commit_to_rows::(self.options().partition_options()); - ConstraintCommitment::new(composed_evaluations, commitment) - }); - - (constraint_commitment, composition_poly) - } - #[doc(hidden)] #[instrument(skip_all)] #[maybe_async] @@ -603,7 +574,7 @@ pub trait Prover { composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, 
Self::VC>, - ) -> (ConstraintCommitment, CompositionPoly) + ) -> (Self::ConstraintCommitment, CompositionPoly) where E: FieldElement, { @@ -614,6 +585,7 @@ pub trait Prover { composition_poly_trace, air.context().num_constraint_composition_columns(), domain, + self.options().partition_options() )); // then, commit to the evaluations of constraints by writing the commitment string of diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index e05d5ca5c..3e06ebb96 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -261,7 +261,9 @@ //! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! matrix::ColMatrix, -//! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, TracePolyTable, TraceTable, +//! CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, +//! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, +//! TracePolyTable, TraceTable, //! }; //! //! # use winterfell::{ @@ -350,6 +352,8 @@ //! type VC = MerkleTree; //! type RandomCoin = DefaultRandomCoin; //! type TraceLde> = DefaultTraceLde; +//! type ConstraintCommitment> = +//! DefaultConstraintCommitment; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; //! @@ -376,6 +380,21 @@ //! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) //! } //! +//! fn build_constraint_commitment>( +//! &self, +//! composition_poly_trace: CompositionPolyTrace, +//! num_constraint_composition_columns: usize, +//! domain: &StarkDomain, +//! partition_options: PartitionOptions, +//! ) -> (Self::ConstraintCommitment, CompositionPoly) { +//! DefaultConstraintCommitment::new( +//! composition_poly_trace, +//! num_constraint_composition_columns, +//! domain, +//! partition_options, +//! ) +//! } +//! //! fn new_evaluator<'a, E: FieldElement>( //! &self, //! air: &'a Self::Air, @@ -399,9 +418,9 @@ //! 
# crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! # math::{fields::f128::BaseElement, FieldElement, ToElements}, //! # matrix::ColMatrix, -//! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, -//! # DefaultTraceLde, EvaluationFrame, TraceInfo, -//! # TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, +//! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, CompositionPoly, CompositionPolyTrace, +//! # DefaultConstraintEvaluator, DefaultConstraintCommitment, DefaultTraceLde, EvaluationFrame, +//! # TraceInfo, TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, //! # }; //! # @@ -495,6 +514,8 @@ //! # type VC = MerkleTree; //! # type RandomCoin = DefaultRandomCoin; //! # type TraceLde> = DefaultTraceLde; +//! # type ConstraintCommitment> = +//! # DefaultConstraintCommitment; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; //! # @@ -520,6 +541,21 @@ //! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) //! # } //! # +//! # fn build_constraint_commitment>( +//! # &self, +//! # composition_poly_trace: CompositionPolyTrace, +//! # num_constraint_composition_columns: usize, +//! # domain: &StarkDomain, +//! # partition_options: PartitionOptions, +//! # ) -> (Self::ConstraintCommitment, CompositionPoly) { +//! # DefaultConstraintCommitment::new( +//! # composition_poly_trace, +//! # num_constraint_composition_columns, +//! # domain, +//! # partition_options, +//! # ) +//! # } +//! # //! # fn new_evaluator<'a, E: FieldElement>( //! # &self, //! 
# air: &'a Self::Air, @@ -599,12 +635,12 @@ extern crate std; pub use air::{AuxRandElements, GkrVerifier, PartitionOptions}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, - BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, + BoundaryConstraint, BoundaryConstraintGroup, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, ConstraintDivisor, ConstraintEvaluator, - DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, EvaluationFrame, - FieldExtension, Proof, ProofOptions, Prover, ProverError, ProverGkrProof, StarkDomain, Trace, - TraceInfo, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, - TransitionConstraintDegree, + DeepCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, EvaluationFrame, FieldExtension, Proof, ProofOptions, Prover, ProverError, + ProverGkrProof, StarkDomain, Trace, TraceInfo, TraceLde, TracePolyTable, TraceTable, + TraceTableFragment, TransitionConstraintDegree, }; pub use verifier::{verify, AcceptableOptions, ByteWriter, VerifierError}; diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index 3fb0c5197..c1ae685e9 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -11,6 +11,7 @@ use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, matrix::ColMatrix, + CompositionPoly, DefaultConstraintCommitment, }; use super::*; @@ -219,6 +220,8 @@ impl Prover for LagrangeComplexProver { type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment, Self::VC>; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>; @@ -242,6 +245,21 @@ impl Prover for LagrangeComplexProver { DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) } + fn 
build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + ) + } + fn new_evaluator<'a, E>( &self, air: &'a Self::Air, From fe98bcf3194179a1a3398170e5d40f965fdc88d7 Mon Sep 17 00:00:00 2001 From: Irakliy Khaburzaniya Date: Sun, 24 Nov 2024 14:30:50 -0800 Subject: [PATCH 13/19] incremented crate versions to v0.11.0 and update changelog --- CHANGELOG.md | 13 ++++++++----- air/Cargo.toml | 14 +++++++------- crypto/Cargo.toml | 10 +++++----- examples/Cargo.toml | 8 ++++---- fri/Cargo.toml | 12 ++++++------ math/Cargo.toml | 8 ++++---- prover/Cargo.toml | 18 +++++++++--------- utils/core/Cargo.toml | 4 ++-- utils/maybe_async/Cargo.toml | 4 ++-- utils/rand/Cargo.toml | 6 +++--- verifier/Cargo.toml | 14 +++++++------- winterfell/Cargo.toml | 10 +++++----- 12 files changed, 62 insertions(+), 59 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59e740c42..75632c6e1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,19 +1,22 @@ # Changelog +## 0.11.0 (2024-11-24) +- [BREAKING] Made the prover generic over the `ConstraintCommitment` type (#343). + ## 0.10.3 (2024-11-19) - `air`, `prover`, and `verifier` crates only -- Fix partition size calculations in `PartitionOptions` (#340). +- Fixed partition size calculations in `PartitionOptions` (#340). ## 0.10.2 (2024-11-18) -- Implement `core::error::Error` for error types (#341). +- Implemented `core::error::Error` for error types (#341). ## 0.10.1 (2024-10-30) - Fixed partition hashing and add logging to aux trace building (#338). ## 0.10.0 (2024-10-25) - [BREAKING] Refactored maybe-async macro into simpler maybe-async and maybe-await macros (#283). 
-- [BREAKING] Introduce `VectorCommitment` abstraction (#285). +- [BREAKING] Introduced `VectorCommitment` abstraction (#285). - Added `maybe-async-trait` procedural macro (#334). -- [BREAKING] Add options for partitioned trace commitments (#336). +- [BREAKING] Added options for partitioned trace commitments (#336). - Updated minimum supported Rust version to 1.82. ## 0.9.3 (2024-09-25) - `utils/core` and `math` crates only @@ -23,7 +26,7 @@ - Fixed `read_slice` impl for ReadAdapter` (#309). ## 0.9.1 (2024-06-24) - `utils/core` crate only -- Fixed `useize` serialization in `ByteWriter`. +- Fixed `usize` serialization in `ByteWriter`. ## 0.9.0 (2024-05-09) - [BREAKING] Merged `TraceLayout` into `TraceInfo` (#245). diff --git a/air/Cargo.toml b/air/Cargo.toml index 4365cd205..d6f5ea27c 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-air" -version = "0.10.3" +version = "0.11.0" description = "AIR components for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-air/0.10.3" +documentation = "https://docs.rs/winter-air/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "arithmetization", "air"] edition = "2021" @@ -20,14 +20,14 @@ default = ["std"] std = ["crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.11", path = "../fri", package = "winter-fri", default-features = false } libm = "0.2" -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.10", 
path = "../utils/core", package = "winter-utils", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 51d93574e..e910bb80f 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-crypto" -version = "0.10.2" +version = "0.11.0" description = "Cryptographic library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-crypto/0.10.2" +documentation = "https://docs.rs/winter-crypto/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "merkle-tree", "hash"] edition = "2021" @@ -31,11 +31,11 @@ std = ["blake3/std", "math/std", "sha3/std", "utils/std"] [dependencies] blake3 = { version = "1.5", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } sha3 = { version = "0.10", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" proptest = "1.4" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package 
= "winter-rand-utils" } diff --git a/examples/Cargo.toml b/examples/Cargo.toml index 5a4dbc5ab..b25d90fd8 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "examples" -version = "0.10.1" +version = "0.11.0" description = "Examples of using Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" @@ -27,14 +27,14 @@ std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] blake3 = { version = "1.5", default-features = false } -core-utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +core-utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils", optional = true } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils", optional = true } structopt = { version = "0.3", default-features = false } tracing = { version = "0.1", default-features = false } tracing-forest = { version = "0.1", features = ["ansi", "smallvec"], optional = true } tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } -winterfell = { version = "0.10", path = "../winterfell", default-features = false } +winterfell = { version = "0.11", path = "../winterfell", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 86c984442..d71a9c7a8 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-fri" -version = "0.10.2" +version = "0.11.0" description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-fri/0.10.2" +documentation = 
"https://docs.rs/winter-fri/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "polynomial", "commitments"] edition = "2021" @@ -29,10 +29,10 @@ default = ["std"] std = ["crypto/std", "math/std", "utils/std"] [dependencies] -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } diff --git a/math/Cargo.toml b/math/Cargo.toml index a2576ded4..987dc6da4 100644 --- a/math/Cargo.toml +++ b/math/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-math" -version = "0.10.2" +version = "0.11.0" description = "Math library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-math/0.10.2" +documentation = "https://docs.rs/winter-math/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "finite-fields", "polynomials", "fft"] edition = "2021" @@ -38,13 +38,13 @@ std = ["utils/std"] [dependencies] serde = { version = "1.0", features = [ "derive" ], optional = true, default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = 
"0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" num-bigint = "0.4" proptest = "1.4" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index d3d299159..1011c3504 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-prover" -version = "0.10.3" +version = "0.11.0" description = "Winterfell STARK prover" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.10.3" +documentation = "https://docs.rs/winter-prover/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" @@ -30,17 +30,17 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.10", path = '../fri', package = "winter-fri", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -maybe_async = { version = "0.10", path = "../utils/maybe_async" , package = "winter-maybe-async" } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.11", path = '../fri', package = "winter-fri", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = 
false } +maybe_async = { version = "0.11", path = "../utils/maybe_async" , package = "winter-maybe-async" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/utils/core/Cargo.toml b/utils/core/Cargo.toml index 7eb8f58d6..5679398bf 100644 --- a/utils/core/Cargo.toml +++ b/utils/core/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-utils" -version = "0.10.2" +version = "0.11.0" description = "Utilities for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-utils/0.10.2" +documentation = "https://docs.rs/winter-utils/0.11.0" categories = ["cryptography", "no-std"] keywords = ["serialization", "transmute"] edition = "2021" diff --git a/utils/maybe_async/Cargo.toml b/utils/maybe_async/Cargo.toml index 825d991b0..92f19151c 100644 --- a/utils/maybe_async/Cargo.toml +++ b/utils/maybe_async/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-maybe-async" -version = "0.10.1" +version = "0.11.0" description = "sync/async macro for winterfell" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/facebook/winterfell" -documentation = "https://docs.rs/winter-maybe-async/0.10.1" +documentation = "https://docs.rs/winter-maybe-async/0.11.0" keywords = ["async"] edition = "2021" rust-version = "1.82" diff --git a/utils/rand/Cargo.toml 
b/utils/rand/Cargo.toml index 759b02436..74bc24800 100644 --- a/utils/rand/Cargo.toml +++ b/utils/rand/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-rand-utils" -version = "0.10.1" +version = "0.11.0" description = "Random value generation utilities for Winterfell crates" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-rand-utils/0.10.1" +documentation = "https://docs.rs/winter-rand-utils/0.11.0" categories = ["cryptography"] keywords = ["rand"] edition = "2021" @@ -16,7 +16,7 @@ rust-version = "1.82" bench = false [dependencies] -utils = { version = "0.10", path = "../core", package = "winter-utils" } +utils = { version = "0.11", path = "../core", package = "winter-utils" } [target.'cfg(not(target_family = "wasm"))'.dependencies] rand = { version = "0.8" } diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index 0d766a53f..cbdf1e464 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-verifier" -version = "0.10.3" +version = "0.11.0" description = "Winterfell STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.10.3" +documentation = "https://docs.rs/winter-verifier/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" @@ -20,11 +20,11 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false } -math = { version = "0.10", path = "../math", 
package = "winter-math", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.11", path = "../fri", package = "winter-fri", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } # Allow math in docs [package.metadata.docs.rs] diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index bee4d0bc8..258eb22e9 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winterfell" -version = "0.10.3" +version = "0.11.0" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.10.3" +documentation = "https://docs.rs/winterfell/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" @@ -22,9 +22,9 @@ default = ["std"] std = ["prover/std", "verifier/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -prover = { version = "0.10", path = "../prover", package = "winter-prover", default-features = false } -verifier = { version = "0.10", path = "../verifier", package = "winter-verifier", default-features = false } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +prover = { version = "0.11", path = "../prover", package = "winter-prover", default-features = false } +verifier = { version = "0.11", path = 
"../verifier", package = "winter-verifier", default-features = false } # Allow math in docs [package.metadata.docs.rs] From 74aea1f3b7dc021959eceb40c8437c6444524235 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Fri, 8 Nov 2024 12:17:07 +0100 Subject: [PATCH 14/19] feat: add zk --- Cargo.toml | 3 +- air/src/air/boundary/mod.rs | 2 +- air/src/air/context.rs | 161 ++++++++++-- air/src/air/mod.rs | 9 +- air/src/air/tests.rs | 6 +- air/src/air/transition/degree.rs | 17 +- air/src/air/transition/mod.rs | 2 +- air/src/lib.rs | 2 +- air/src/options.rs | 32 +++ air/src/proof/context.rs | 28 +- air/src/proof/mod.rs | 242 +++++++++--------- air/src/proof/ood_frame.rs | 1 - crypto/Cargo.toml | 1 + crypto/src/commitment.rs | 2 +- crypto/src/hash/mod.rs | 13 +- crypto/src/hash/rescue/mod.rs | 2 +- crypto/src/hash/rescue/rp62_248/digest.rs | 14 +- crypto/src/hash/rescue/rp64_256/digest.rs | 31 ++- crypto/src/hash/rescue/rp64_256/mod.rs | 8 +- .../src/hash/rescue/rp64_256_jive/digest.rs | 14 +- crypto/src/lib.rs | 6 +- crypto/src/merkle/mod.rs | 202 ++++++++++++++- crypto/src/merkle/tests.rs | 22 ++ crypto/src/random/default.rs | 16 ++ crypto/src/random/mod.rs | 8 + examples/Cargo.toml | 2 + examples/benches/fibonacci.rs | 2 +- examples/benches/rescue.rs | 2 +- examples/src/fibonacci/fib2/prover.rs | 13 +- examples/src/fibonacci/fib8/prover.rs | 13 +- examples/src/fibonacci/fib_small/prover.rs | 13 +- examples/src/fibonacci/mulfib2/prover.rs | 13 +- examples/src/fibonacci/mulfib8/prover.rs | 13 +- examples/src/fibonacci/utils.rs | 2 +- examples/src/lamport/aggregate/prover.rs | 13 +- examples/src/lamport/threshold/prover.rs | 13 +- examples/src/lib.rs | 1 + examples/src/merkle/prover.rs | 13 +- examples/src/merkle/tests.rs | 2 +- examples/src/rescue/prover.rs | 13 +- examples/src/rescue/tests.rs | 2 +- examples/src/rescue_raps/prover.rs | 13 +- examples/src/rescue_raps/tests.rs | 2 +- examples/src/utils/rescue.rs | 12 + 
examples/src/vdf/exempt/prover.rs | 13 +- examples/src/vdf/exempt/tests.rs | 2 +- examples/src/vdf/regular/prover.rs | 13 +- examples/src/vdf/regular/tests.rs | 2 +- fri/Cargo.toml | 2 + fri/benches/prover.rs | 6 +- fri/src/proof.rs | 24 +- fri/src/prover/channel.rs | 38 ++- fri/src/prover/mod.rs | 43 +++- fri/src/prover/tests.rs | 7 +- fri/src/verifier/channel.rs | 10 + fri/src/verifier/mod.rs | 3 +- prover/Cargo.toml | 3 + prover/benches/lagrange_kernel.rs | 17 +- prover/src/channel.rs | 106 +++++++- prover/src/composer/mod.rs | 62 +++-- prover/src/constraints/commitment.rs | 80 ++++++ prover/src/constraints/composition_poly.rs | 93 ++++++- prover/src/constraints/evaluation_table.rs | 27 +- .../constraints/evaluator/periodic_table.rs | 14 +- prover/src/domain.rs | 32 ++- prover/src/lib.rs | 71 +++-- prover/src/matrix/col_matrix.rs | 50 +++- prover/src/tests/mod.rs | 8 +- prover/src/trace/poly_table.rs | 14 +- prover/src/trace/trace_lde/default/mod.rs | 57 ++++- prover/src/trace/trace_lde/default/tests.rs | 8 + prover/src/trace/trace_lde/mod.rs | 7 +- prover/src/trace/trace_table.rs | 5 + verifier/src/channel.rs | 28 +- verifier/src/composer.rs | 15 +- verifier/src/lib.rs | 34 ++- winterfell/Cargo.toml | 2 + winterfell/src/lib.rs | 12 +- winterfell/src/tests.rs | 16 +- 79 files changed, 1581 insertions(+), 344 deletions(-) create mode 100644 prover/src/constraints/commitment.rs diff --git a/Cargo.toml b/Cargo.toml index b0ed3f07c..2eb4b7f3f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,8 +10,7 @@ members = [ "prover", "verifier", "winterfell", - "examples" -] + "examples"] resolver = "2" [profile.release] diff --git a/air/src/air/boundary/mod.rs b/air/src/air/boundary/mod.rs index 7f92c80ab..d1ad7271e 100644 --- a/air/src/air/boundary/mod.rs +++ b/air/src/air/boundary/mod.rs @@ -170,7 +170,7 @@ where let group = groups.entry(key).or_insert_with(|| { BoundaryConstraintGroup::new(ConstraintDivisor::from_assertion( &assertion, - context.trace_len(), + 
context.trace_info().length(), )) }); diff --git a/air/src/air/context.rs b/air/src/air/context.rs index 09341afe3..c7412aece 100644 --- a/air/src/air/context.rs +++ b/air/src/air/context.rs @@ -26,6 +26,8 @@ pub struct AirContext { pub(super) trace_domain_generator: B, pub(super) lde_domain_generator: B, pub(super) num_transition_exemptions: usize, + pub(super) trace_length_ext: usize, + pub(super) zk_parameters: Option, } impl AirContext { @@ -133,18 +135,35 @@ impl AirContext { ); } + let h = options.zk_witness_randomizer_degree().unwrap_or(0); + let trace_length = trace_info.length(); + let trace_length_ext = (trace_length + h as usize).next_power_of_two(); + let zk_blowup = trace_length_ext / trace_length; + let lde_domain_size = trace_length_ext * options.blowup_factor(); + // equation (12) in https://eprint.iacr.org/2024/1037 + let h_q = options.num_queries() + 1; + let zk_parameters = if options.is_zk() { + Some(ZkParameters { + degree_witness_randomizer: h as usize, + degree_constraint_randomizer: h_q, + zk_blowup_witness: zk_blowup, + }) + } else { + None + }; + // determine minimum blowup factor needed to evaluate transition constraints by taking // the blowup factor of the highest degree constraint let mut ce_blowup_factor = 0; for degree in main_transition_constraint_degrees.iter() { - if degree.min_blowup_factor() > ce_blowup_factor { - ce_blowup_factor = degree.min_blowup_factor(); + if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor { + ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext); } } for degree in aux_transition_constraint_degrees.iter() { - if degree.min_blowup_factor() > ce_blowup_factor { - ce_blowup_factor = degree.min_blowup_factor(); + if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor { + ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext); } } @@ -155,9 +174,6 @@ impl AirContext { options.blowup_factor() ); - let trace_length = 
trace_info.length(); - let lde_domain_size = trace_length * options.blowup_factor(); - AirContext { options, trace_info, @@ -170,6 +186,8 @@ impl AirContext { trace_domain_generator: B::get_root_of_unity(trace_length.ilog2()), lde_domain_generator: B::get_root_of_unity(lde_domain_size.ilog2()), num_transition_exemptions: 1, + trace_length_ext, + zk_parameters, } } @@ -188,25 +206,31 @@ impl AirContext { self.trace_info.length() } + /// Returns length of the possibly extended execution trace. This is the same as the original + /// trace length when zero-knowledge is not enabled. + pub fn trace_length_ext(&self) -> usize { + self.trace_length_ext + } + /// Returns degree of trace polynomials for an instance of a computation. /// - /// The degree is always `trace_length` - 1. + /// The degree is always `trace_length_ext` - 1. pub fn trace_poly_degree(&self) -> usize { - self.trace_info.length() - 1 + self.trace_length_ext() - 1 } /// Returns size of the constraint evaluation domain. /// - /// This is guaranteed to be a power of two, and is equal to `trace_length * ce_blowup_factor`. + /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * ce_blowup_factor`. pub fn ce_domain_size(&self) -> usize { - self.trace_info.length() * self.ce_blowup_factor + self.trace_length_ext() * self.ce_blowup_factor } /// Returns the size of the low-degree extension domain. /// - /// This is guaranteed to be a power of two, and is equal to `trace_length * lde_blowup_factor`. + /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * lde_blowup_factor`. pub fn lde_domain_size(&self) -> usize { - self.trace_info.length() * self.options.blowup_factor() + self.trace_length_ext() * self.options.blowup_factor() } /// Returns the number of transition constraints for a computation, excluding the Lagrange @@ -292,6 +316,8 @@ impl AirContext { /// numerator is `trace_len - 1` for all transition constraints (i.e. the base degree is 1). 
/// Hence, no matter what the degree of the divisor is for each, the degree of the fraction will /// be at most `trace_len - 1`. + /// + /// TODO: update documentation pub fn num_constraint_composition_columns(&self) -> usize { let mut highest_constraint_degree = 0_usize; for degree in self @@ -299,19 +325,93 @@ impl AirContext { .iter() .chain(self.aux_transition_constraint_degrees.iter()) { - let eval_degree = degree.get_evaluation_degree(self.trace_len()); + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); if eval_degree > highest_constraint_degree { highest_constraint_degree = eval_degree } } let trace_length = self.trace_len(); + let trace_length_ext = self.trace_length_ext(); let transition_divisior_degree = trace_length - self.num_transition_exemptions(); - // we use the identity: ceil(a/b) = (a + b - 1)/b let num_constraint_col = - (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length); + (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length_ext); + + if self.zk_parameters.is_some() { + let quotient_degree = if highest_constraint_degree < trace_length_ext { + // This means that our transition constraints have degree 1 and hence the boundary + // constraints will determine the degree + trace_length_ext - 2 + } else { + highest_constraint_degree - transition_divisior_degree + }; + let n_q = self.options.num_queries(); + let den = self.trace_length_ext() - (n_q + 1); + + (quotient_degree + 1).div_ceil(den) + } else { + cmp::max(num_constraint_col, 1) + } + } + + pub fn constraint_composition_degree(&self) -> usize { + let mut highest_constraint_degree = 0_usize; + for degree in self + .main_transition_constraint_degrees + .iter() + .chain(self.aux_transition_constraint_degrees.iter()) + { + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); + if eval_degree > highest_constraint_degree { + highest_constraint_degree = 
eval_degree + } + } + let trace_length = self.trace_len(); + let transition_divisior_degree = trace_length - self.num_transition_exemptions(); - cmp::max(num_constraint_col, 1) + // highest_constraint_degree - transition_divisior_degree + if highest_constraint_degree < self.trace_length_ext { + // This means that our transition constraints have degree 1 and hence the boundary + // constraints will determine the degree + self.trace_length_ext - 2 + } else { + highest_constraint_degree - transition_divisior_degree + } + } + + pub fn num_coefficients_chunk_quotient(&self) -> usize { + if self.zk_parameters().is_some() { + let num_constraint_composition_cols = self.num_constraint_composition_columns(); + let quotient_degree = self.constraint_composition_degree(); + + (quotient_degree + 1).div_ceil(num_constraint_composition_cols) + } else { + self.trace_len() + } + } + + pub fn zk_parameters(&self) -> Option { + self.zk_parameters + } + + pub fn zk_blowup_factor(&self) -> usize { + self.zk_parameters() + .map(|parameters| parameters.zk_blowup_witness()) + .unwrap_or(1) + } + + pub fn zk_witness_randomizer_degree(&self) -> usize { + self.zk_parameters() + .map(|parameters| parameters.degree_witness_randomizer()) + .unwrap_or(0) + } + + pub fn zk_constraint_randomizer_degree(&self) -> usize { + self.zk_parameters() + .map(|parameters| parameters.degree_constraint_randomizer()) + .unwrap_or(0) } // DATA MUTATORS @@ -347,9 +447,11 @@ impl AirContext { .iter() .chain(self.aux_transition_constraint_degrees.iter()) { - let eval_degree = degree.get_evaluation_degree(self.trace_len()); + let eval_degree = + degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext()); let max_constraint_composition_degree = self.ce_domain_size() - 1; - let max_exemptions = max_constraint_composition_degree + self.trace_len() - eval_degree; + let max_exemptions = + max_constraint_composition_degree + self.trace_length_ext() - eval_degree; assert!( n <= max_exemptions, "number of 
transition exemptions cannot exceed: {max_exemptions}, but was {n}" @@ -360,3 +462,24 @@ impl AirContext { self } } + +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct ZkParameters { + degree_witness_randomizer: usize, + degree_constraint_randomizer: usize, + zk_blowup_witness: usize, +} + +impl ZkParameters { + pub fn degree_witness_randomizer(&self) -> usize { + self.degree_witness_randomizer + } + + pub fn degree_constraint_randomizer(&self) -> usize { + self.degree_constraint_randomizer + } + + pub fn zk_blowup_witness(&self) -> usize { + self.zk_blowup_witness + } +} diff --git a/air/src/air/mod.rs b/air/src/air/mod.rs index 53a59fa5a..dc9a93c32 100644 --- a/air/src/air/mod.rs +++ b/air/src/air/mod.rs @@ -17,7 +17,7 @@ mod trace_info; pub use trace_info::TraceInfo; mod context; -pub use context::AirContext; +pub use context::{AirContext, ZkParameters}; mod assertions; pub use assertions::Assertion; @@ -547,7 +547,7 @@ pub trait Air: Send + Sync { let lagrange = if self.context().has_lagrange_kernel_aux_column() { let mut lagrange_kernel_t_coefficients = Vec::new(); - for _ in 0..self.context().trace_len().ilog2() { + for _ in 0..self.context().trace_info().length().ilog2() { lagrange_kernel_t_coefficients.push(public_coin.draw()?); } @@ -600,4 +600,9 @@ pub trait Air: Send + Sync { lagrange: lagrange_cc, }) } + + /// Returns whether zero-knowledge is enabled. 
+ fn is_zk(&self) -> bool { + self.options().is_zk() + } } diff --git a/air/src/air/tests.rs b/air/src/air/tests.rs index e0063ed3b..8338a3350 100644 --- a/air/src/air/tests.rs +++ b/air/src/air/tests.rs @@ -205,7 +205,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![1]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -215,7 +215,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![assertions.len() as u8]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -267,7 +267,7 @@ pub fn build_context( trace_width: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; let trace_info = TraceInfo::new(trace_width, trace_length); AirContext::new(trace_info, t_degrees, num_assertions, options) diff --git a/air/src/air/transition/degree.rs b/air/src/air/transition/degree.rs index a51ab2840..9f5b99f69 100644 --- a/air/src/air/transition/degree.rs +++ b/air/src/air/transition/degree.rs @@ -87,8 +87,10 @@ impl TransitionConstraintDegree { /// $$ /// 2 \cdot (64 - 1) + \frac{64 \cdot (32 - 1)}{32} = 126 + 62 = 188 /// $$ - pub fn get_evaluation_degree(&self, trace_length: usize) -> usize { - let mut result = self.base * (trace_length - 1); + /// + /// TODO: Update docs + pub fn get_evaluation_degree(&self, trace_length: usize, trace_length_ext: usize) -> usize { + let mut result = self.base * (trace_length_ext - 1); for cycle_length in self.cycles.iter() { result += (trace_length / cycle_length) * (cycle_length - 1); } @@ -98,7 +100,7 @@ 
impl TransitionConstraintDegree { /// Returns a minimum blowup factor needed to evaluate constraint of this degree. /// /// This is guaranteed to be a power of two, greater than one. - pub fn min_blowup_factor(&self) -> usize { + pub fn min_blowup_factor(&self, trace_length: usize, trace_length_ext: usize) -> usize { // The blowup factor needs to be a power of two large enough to accommodate degree of // transition constraints defined by rational functions `C(x) / z(x)` where `C(x)` is the // constraint polynomial and `z(x)` is the transition constraint divisor. @@ -110,7 +112,12 @@ impl TransitionConstraintDegree { // // For example, if degree of our constraints is 6, the blowup factor would need to be 8. // However, if the degree is 5, the blowup factor could be as small as 4. - let degree_bound = self.base + self.cycles.len() - 1; - cmp::max(degree_bound.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR) + // + // TODO: update documentation + let degree_bound = self.base + self.cycles.len(); + let q_deg = degree_bound * (trace_length_ext - 1) - (trace_length - 1); + let blowup_factor = q_deg.div_ceil(trace_length_ext); + + cmp::max(blowup_factor.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR) } } diff --git a/air/src/air/transition/mod.rs b/air/src/air/transition/mod.rs index 60e641817..89f44577a 100644 --- a/air/src/air/transition/mod.rs +++ b/air/src/air/transition/mod.rs @@ -55,7 +55,7 @@ impl TransitionConstraints { // build constraint divisor; the same divisor applies to all transition constraints let divisor = ConstraintDivisor::from_transition( - context.trace_len(), + context.trace_info().length(), context.num_transition_exemptions(), ); diff --git a/air/src/lib.rs b/air/src/lib.rs index 0a471a706..184d2b862 100644 --- a/air/src/lib.rs +++ b/air/src/lib.rs @@ -48,5 +48,5 @@ pub use air::{ LagrangeConstraintsCompositionCoefficients, LagrangeKernelBoundaryConstraint, LagrangeKernelConstraints, LagrangeKernelEvaluationFrame, 
LagrangeKernelRandElements, LagrangeKernelTransitionConstraints, TraceInfo, TransitionConstraintDegree, - TransitionConstraints, + TransitionConstraints, ZkParameters, }; diff --git a/air/src/options.rs b/air/src/options.rs index a043d70db..b26989c69 100644 --- a/air/src/options.rs +++ b/air/src/options.rs @@ -92,6 +92,7 @@ pub struct ProofOptions { fri_folding_factor: u8, fri_remainder_max_degree: u8, partition_options: PartitionOptions, + is_zk: bool, } // PROOF OPTIONS IMPLEMENTATION @@ -125,6 +126,7 @@ impl ProofOptions { field_extension: FieldExtension, fri_folding_factor: usize, fri_remainder_max_degree: usize, + is_zk: bool, ) -> ProofOptions { // TODO: return errors instead of panicking assert!(num_queries > 0, "number of queries must be greater than 0"); @@ -166,6 +168,7 @@ impl ProofOptions { fri_folding_factor: fri_folding_factor as u8, fri_remainder_max_degree: fri_remainder_max_degree as u8, partition_options: PartitionOptions::new(1, 1), + is_zk, } } @@ -246,6 +249,32 @@ impl ProofOptions { pub fn partition_options(&self) -> PartitionOptions { self.partition_options } + /// Returns whether zero-knowledge is enabled. + pub fn is_zk(&self) -> bool { + self.is_zk + } + + /// Computes a lower bound on the degree of the polynomial used for randomizing the witness + /// polynomials. + pub(crate) fn zk_witness_randomizer_degree(&self) -> Option { + if self.is_zk { + let h = compute_degree_randomizing_poly( + self.field_extension().degree() as usize, + self.num_queries(), + ); + + Some(h as u32) + } else { + None + } + } +} + +/// Computes the number of coefficients of the polynomials used to randomize the witness polynomials. 
+/// +/// This is based on equation (13) in https://eprint.iacr.org/2024/1037 +pub fn compute_degree_randomizing_poly(extension_degree: usize, num_fri_queries: usize) -> usize { + 2 * (extension_degree + num_fri_queries) } impl ToElements for ProofOptions { @@ -275,6 +304,7 @@ impl Serializable for ProofOptions { target.write_u8(self.fri_remainder_max_degree); target.write_u8(self.partition_options.num_partitions); target.write_u8(self.partition_options.hash_rate); + target.write_bool(self.is_zk) } } @@ -291,6 +321,7 @@ impl Deserializable for ProofOptions { FieldExtension::read_from(source)?, source.read_u8()? as usize, source.read_u8()? as usize, + source.read_bool()?, ); Ok(result.with_partitions(source.read_u8()? as usize, source.read_u8()? as usize)) } @@ -444,6 +475,7 @@ mod tests { field_extension, fri_folding_factor as usize, fri_remainder_max_degree as usize, + false, ); assert_eq!(expected, options.to_elements()); } diff --git a/air/src/proof/context.rs b/air/src/proof/context.rs index 83c2beece..1df47c463 100644 --- a/air/src/proof/context.rs +++ b/air/src/proof/context.rs @@ -5,7 +5,7 @@ use alloc::{string::ToString, vec::Vec}; -use math::{StarkField, ToElements}; +use math::{FieldElement, StarkField, ToElements}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use crate::{ProofOptions, TraceInfo}; @@ -18,6 +18,7 @@ pub struct Context { trace_info: TraceInfo, field_modulus_bytes: Vec, options: ProofOptions, + zk_blowup: usize, } impl Context { @@ -29,7 +30,11 @@ impl Context { /// # Panics /// Panics if either trace length or the LDE domain size implied by the trace length and the /// blowup factor is greater then [u32::MAX]. - pub fn new(trace_info: TraceInfo, options: ProofOptions) -> Self { + pub fn new( + trace_info: TraceInfo, + options: ProofOptions, + zk_blowup: usize, + ) -> Self { // TODO: return errors instead of panicking? 
let trace_length = trace_info.length(); @@ -42,6 +47,7 @@ impl Context { trace_info, field_modulus_bytes: B::get_modulus_le_bytes(), options, + zk_blowup, } } @@ -54,8 +60,8 @@ impl Context { } /// Returns the size of the LDE domain for the computation described by this context. - pub fn lde_domain_size(&self) -> usize { - self.trace_info.length() * self.options.blowup_factor() + pub fn lde_domain_size(&self) -> usize { + self.trace_info.length() * self.zk_blowup * self.options.blowup_factor() } /// Returns modulus of the field for the computation described by this context. @@ -124,6 +130,7 @@ impl Serializable for Context { target.write_u8(self.field_modulus_bytes.len() as u8); target.write_bytes(&self.field_modulus_bytes); self.options.write_into(target); + self.zk_blowup.write_into(target); } } @@ -148,7 +155,15 @@ impl Deserializable for Context { // read options let options = ProofOptions::read_from(source)?; - Ok(Context { trace_info, field_modulus_bytes, options }) + // TODO: should we validate it? + let zk_blowup = usize::read_from(source)?; + + Ok(Context { + trace_info, + field_modulus_bytes, + options, + zk_blowup, + }) } } @@ -212,10 +227,11 @@ mod tests { field_extension, fri_folding_factor as usize, fri_remainder_max_degree as usize, + false, ); let trace_info = TraceInfo::new_multi_segment(main_width, aux_width, aux_rands, trace_length, vec![]); - let context = Context::new::(trace_info, options); + let context = Context::new::(trace_info, options, 1); assert_eq!(expected, context.to_elements()); } } diff --git a/air/src/proof/mod.rs b/air/src/proof/mod.rs index 7307ba1d3..e791b345f 100644 --- a/air/src/proof/mod.rs +++ b/air/src/proof/mod.rs @@ -79,6 +79,8 @@ pub struct Proof { pub pow_nonce: u64, /// Optionally, an auxiliary (non-STARK) proof that was generated during auxiliary trace generation. pub gkr_proof: Option>, + /// Random values needed for Fiat-Shamir. 
+ pub salts: Vec, } impl Proof { @@ -93,8 +95,8 @@ impl Proof { } /// Returns the size of the LDE domain for the computation described by this proof. - pub fn lde_domain_size(&self) -> usize { - self.context.lde_domain_size() + pub fn lde_domain_size(&self) -> usize { + self.context.lde_domain_size::() } // SECURITY LEVEL @@ -108,15 +110,21 @@ impl Proof { pub fn security_level(&self, conjectured: bool) -> u32 { if conjectured { get_conjectured_security( - self.context.options(), self.context.num_modulus_bits(), + self.context.options().field_extension() as u32, + self.context.options().blowup_factor(), + self.options().num_queries(), + self.options().grinding_factor(), self.trace_info().length(), H::COLLISION_RESISTANCE, ) } else { get_proven_security( - self.context.options(), self.context.num_modulus_bits(), + self.context.options().field_extension() as u32, + self.context.options().blowup_factor(), + self.options().num_queries(), + self.options().grinding_factor(), self.trace_info().length(), H::COLLISION_RESISTANCE, ) @@ -149,7 +157,8 @@ impl Proof { Self { context: Context::new::( TraceInfo::new(1, 8), - ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1), + ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1, false), + 1, ), num_unique_queries: 0, commitments: Commitments::default(), @@ -162,6 +171,7 @@ impl Proof { fri_proof: FriProof::new_dummy(), pow_nonce: 0, gkr_proof: None, + salts: vec![], } } } @@ -180,6 +190,7 @@ impl Serializable for Proof { self.fri_proof.write_into(target); self.pow_nonce.write_into(target); self.gkr_proof.write_into(target); + self.salts.write_into(target); } } @@ -204,6 +215,7 @@ impl Deserializable for Proof { fri_proof: FriProof::read_from(source)?, pow_nonce: source.read_u64()?, gkr_proof: Option::>::read_from(source)?, + salts: Vec::read_from(source)?, }; Ok(proof) } @@ -213,32 +225,38 @@ impl Deserializable for Proof { // ================================================================================================ 
/// Computes conjectured security level for the specified proof parameters. -fn get_conjectured_security( - options: &ProofOptions, +pub(crate) fn get_conjectured_security( base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, collision_resistance: u32, ) -> u32 { // compute max security we can get for a given field size - let field_size = base_field_bits * options.field_extension().degree(); - let field_security = field_size - (trace_domain_size * options.blowup_factor()).ilog2(); + let field_size = base_field_bits * extension_degree; + let field_security = field_size - (trace_domain_size * blowup_factor).ilog2(); // compute security we get by executing multiple query rounds - let security_per_query = options.blowup_factor().ilog2(); - let mut query_security = security_per_query * options.num_queries() as u32; + let security_per_query = blowup_factor.ilog2(); + let mut query_security = security_per_query * num_queries as u32; // include grinding factor contributions only for proofs adequate security if query_security >= GRINDING_CONTRIBUTION_FLOOR { - query_security += options.grinding_factor(); + query_security += grinding_factor; } cmp::min(cmp::min(field_security, query_security) - 1, collision_resistance) } /// Estimates proven security level for the specified proof parameters. 
-fn get_proven_security( - options: &ProofOptions, +pub(crate) fn get_proven_security( base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, collision_resistance: u32, ) -> u32 { @@ -248,8 +266,11 @@ fn get_proven_security( let m_optimal = (m_min as u32..m_max as u32) .max_by_key(|&a| { proven_security_protocol_for_m( - options, base_field_bits, + extension_degree, + blowup_factor, + num_queries, + grinding_factor, trace_domain_size, a as usize, ) @@ -260,8 +281,11 @@ fn get_proven_security( cmp::min( proven_security_protocol_for_m( - options, base_field_bits, + extension_degree, + blowup_factor, + num_queries, + grinding_factor, trace_domain_size, m_optimal as usize, ), @@ -272,17 +296,20 @@ fn get_proven_security( /// Computes proven security level for the specified proof parameters for a fixed /// value of the proximity parameter m in the list-decoding regime. fn proven_security_protocol_for_m( - options: &ProofOptions, base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, m: usize, ) -> u64 { - let extension_field_bits = (base_field_bits * options.field_extension().degree()) as f64; - let num_fri_queries = options.num_queries() as f64; + let extension_field_bits = (base_field_bits * extension_degree) as f64; + let num_fri_queries = num_queries as f64; let m = m as f64; - let rho = 1.0 / options.blowup_factor() as f64; + let rho = 1.0 / blowup_factor as f64; let alpha = (1.0 + 0.5 / m) * sqrt(rho); - let max_deg = options.blowup_factor() as f64 + 1.0; + let max_deg = blowup_factor as f64 + 1.0; // To apply Theorem 8 in https://eprint.iacr.org/2022/1216.pdf, we need to apply FRI with // a slightly larger agreement parameter alpha. @@ -296,7 +323,7 @@ fn proven_security_protocol_for_m( // the list-decoding list size in F(Z). 
// Modified rate in function field F(Z) - let lde_domain_size = (trace_domain_size * options.blowup_factor()) as f64; + let lde_domain_size = (trace_domain_size * blowup_factor) as f64; let trace_domain_size = trace_domain_size as f64; let num_openings = 2.0; let rho_plus = (trace_domain_size + num_openings) / lde_domain_size; @@ -315,7 +342,7 @@ fn proven_security_protocol_for_m( // Compute FRI query-phase soundness error let fri_queries_err_bits = - options.grinding_factor() as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); + grinding_factor as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); // Combined error for FRI let fri_err_bits = cmp::min(fri_commit_err_bits as u64, fri_queries_err_bits as u64); @@ -405,31 +432,27 @@ pub fn ceil(value: f64) -> f64 { mod prove_security_tests { use math::{fields::f64::BaseElement, StarkField}; - use super::ProofOptions; use crate::{proof::get_proven_security, FieldExtension}; #[test] fn get_96_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 4; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 97); @@ -437,16 +460,15 @@ mod prove_security_tests { let blowup_factor = 8; let num_queries = 53; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, 
grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 97); } @@ -455,24 +477,21 @@ mod prove_security_tests { fn get_128_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 128); @@ -480,16 +499,15 @@ mod prove_security_tests { let blowup_factor = 16; let num_queries = 65; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -498,24 +516,21 @@ mod prove_security_tests { fn extension_degree() { let field_extension = FieldExtension::Quadratic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let 
trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 67); @@ -523,16 +538,15 @@ mod prove_security_tests { // reaching 128 bits security let field_extension = FieldExtension::Cubic; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -541,37 +555,33 @@ mod prove_security_tests { fn trace_length() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let trace_length = 2_usize.pow(16); - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + 
field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -580,37 +590,33 @@ mod prove_security_tests { fn num_fri_queries() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 60; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let num_queries = 80; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -619,37 +625,33 @@ mod prove_security_tests { fn blowup_factor() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 30; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut 
options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let blowup_factor = 16; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } diff --git a/air/src/proof/ood_frame.rs b/air/src/proof/ood_frame.rs index 52d394747..2c8317637 100644 --- a/air/src/proof/ood_frame.rs +++ b/air/src/proof/ood_frame.rs @@ -145,7 +145,6 @@ impl OodFrame { let mut reader = SliceReader::new(&self.trace_states); let frame_size = reader.read_u8()? 
as usize; let trace = reader.read_many((main_trace_width + aux_trace_width) * frame_size)?; - if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index e910bb80f..cafd43e58 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -34,6 +34,7 @@ blake3 = { version = "1.5", default-features = false } math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } sha3 = { version = "0.10", default-features = false } utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } +rand = { version = "0.8" } [dev-dependencies] criterion = "0.5" diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs index 1d2667f7a..72ec674e7 100644 --- a/crypto/src/commitment.rs +++ b/crypto/src/commitment.rs @@ -49,7 +49,7 @@ pub trait VectorCommitment: Sized { fn commitment(&self) -> H::Digest; /// Returns the length of the vector committed to for `Self`. - fn domain_len(&self) -> usize; + fn get_domain_len(&self) -> usize; /// Returns the length of the vector committed to for `Self::Proof`. fn get_proof_domain_len(proof: &Self::Proof) -> usize; diff --git a/crypto/src/hash/mod.rs b/crypto/src/hash/mod.rs index 4bede6b8d..4bfc5eea1 100644 --- a/crypto/src/hash/mod.rs +++ b/crypto/src/hash/mod.rs @@ -17,7 +17,7 @@ pub use sha::Sha3_256; mod mds; mod rescue; -pub use rescue::{Rp62_248, Rp64_256, RpJive64_256}; +pub use rescue::{Rp62_248, Rp64_256, RpJive64_256, ARK1, ARK2, MDS}; // HASHER TRAITS // ================================================================================================ @@ -77,6 +77,9 @@ pub trait Digest: /// upper limit on the possible digest size. For digests which are smaller than 32 bytes, the /// unused bytes should be set to 0. fn as_bytes(&self) -> [u8; 32]; + + /// Returns a digest that is drawn uniformly at random from the space of all digests. 
+ fn from_random_bytes(buffer: &[u8]) -> Self; } // BYTE DIGEST @@ -111,6 +114,14 @@ impl Digest for ByteDigest { result[..N].copy_from_slice(&self.0); result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + Self::new( + buffer + .try_into() + .expect("The size of the buffer with random bytes should be 32"), + ) + } } impl Default for ByteDigest { diff --git a/crypto/src/hash/rescue/mod.rs b/crypto/src/hash/rescue/mod.rs index dbb13dee7..6a126ceb2 100644 --- a/crypto/src/hash/rescue/mod.rs +++ b/crypto/src/hash/rescue/mod.rs @@ -9,7 +9,7 @@ mod rp62_248; pub use rp62_248::Rp62_248; mod rp64_256; -pub use rp64_256::Rp64_256; +pub use rp64_256::{Rp64_256, ARK1, ARK2, MDS}; mod rp64_256_jive; pub use rp64_256_jive::RpJive64_256; diff --git a/crypto/src/hash/rescue/rp62_248/digest.rs b/crypto/src/hash/rescue/rp62_248/digest.rs index bacece257..01ecbf996 100644 --- a/crypto/src/hash/rescue/rp62_248/digest.rs +++ b/crypto/src/hash/rescue/rp62_248/digest.rs @@ -5,7 +5,7 @@ use core::slice; -use math::{fields::f62::BaseElement, StarkField}; +use math::{fields::f62::BaseElement, FieldElement, StarkField}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -47,6 +47,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + Self(digest) + } } impl Default for ElementDigest { diff --git a/crypto/src/hash/rescue/rp64_256/digest.rs b/crypto/src/hash/rescue/rp64_256/digest.rs index 84cec4123..f1bc78d6b 100644 --- a/crypto/src/hash/rescue/rp64_256/digest.rs +++ b/crypto/src/hash/rescue/rp64_256/digest.rs @@ -5,8 +5,11 @@ use core::slice; -use 
math::fields::f64::BaseElement; -use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use math::{fields::f64::BaseElement, FieldElement}; +use rand::distributions::{Distribution, Standard}; +use utils::{ + ByteReader, ByteWriter, Deserializable, DeserializationError, Randomizable, Serializable, +}; use super::{Digest, DIGEST_SIZE}; @@ -43,6 +46,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + digest.into() + } } impl Default for ElementDigest { @@ -87,6 +102,18 @@ impl From for [u8; 32] { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + for r in res.iter_mut() { + let mut source = [0_u8; 8]; + rng.fill_bytes(&mut source); + *r = BaseElement::from_random_bytes(&source).expect("failed to generate element"); + } + ElementDigest::new(res) + } +} + // TESTS // ================================================================================================ diff --git a/crypto/src/hash/rescue/rp64_256/mod.rs b/crypto/src/hash/rescue/rp64_256/mod.rs index 0d87de3f7..584395d2e 100644 --- a/crypto/src/hash/rescue/rp64_256/mod.rs +++ b/crypto/src/hash/rescue/rp64_256/mod.rs @@ -388,7 +388,7 @@ impl Rp64_256 { // MDS // ================================================================================================ /// Rescue MDS matrix -const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(7), BaseElement::new(23), @@ -560,7 +560,7 @@ const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ ]; /// Rescue Inverse MDS matrix 
-const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(14868391535953158196), BaseElement::new(13278298489594233127), @@ -739,7 +739,7 @@ const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ /// /// The constants are broken up into two arrays ARK1 and ARK2; ARK1 contains the constants for the /// first half of Rescue round, and ARK2 contains constants for the second half of Rescue round. -const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(13917550007135091859), BaseElement::new(16002276252647722320), @@ -840,7 +840,7 @@ const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ ], ]; -const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(7989257206380839449), BaseElement::new(8639509123020237648), diff --git a/crypto/src/hash/rescue/rp64_256_jive/digest.rs b/crypto/src/hash/rescue/rp64_256_jive/digest.rs index 84cec4123..703118093 100644 --- a/crypto/src/hash/rescue/rp64_256_jive/digest.rs +++ b/crypto/src/hash/rescue/rp64_256_jive/digest.rs @@ -5,7 +5,7 @@ use core::slice; -use math::fields::f64::BaseElement; +use math::{fields::f64::BaseElement, FieldElement}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -43,6 +43,18 @@ impl Digest for ElementDigest { result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u64::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + digest.into() + } } impl Default for ElementDigest { diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index 
ff29176bb..e9a961c77 100644 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -26,13 +26,15 @@ pub use hash::{Digest, ElementHasher, Hasher}; pub mod hashers { //! Contains implementations of currently supported hash functions. - pub use super::hash::{Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256}; + pub use super::hash::{ + Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256, ARK1, ARK2, MDS, + }; } mod merkle; #[cfg(feature = "concurrent")] pub use merkle::concurrent; -pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree}; +pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree, SaltedMerkleTree}; mod random; pub use random::{DefaultRandomCoin, RandomCoin}; diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 51b4a76dc..bee8207f1 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -9,11 +9,20 @@ use alloc::{ }; use core::slice; +use rand::{ + distributions::{Distribution, Standard}, + thread_rng, Rng, RngCore, +}; + +use crate::{ + errors::MerkleTreeError, + hash::{ByteDigest, Hasher}, + VectorCommitment, +}; + mod proofs; pub use proofs::BatchMerkleProof; -use crate::{Hasher, MerkleTreeError, VectorCommitment}; - #[cfg(feature = "concurrent")] pub mod concurrent; @@ -97,6 +106,17 @@ pub struct MerkleTree { /// up to the root (excluding the root itself). pub type MerkleTreeOpening = (::Digest, Vec<::Digest>); +/// Salted Merkle tree opening consisting of a leaf value, a salt, and a Merkle path leading +/// from this leaf up to the root (excluding the root itself). +pub type SaltedMerkleTreeOpening = + (::Digest, (::Digest, Vec<::Digest>)); + +/// Salted Merkle tree multi opening consisting of a vector of leaves, a vector of corresponding salts, +/// and a collection of corresponding Merkle paths leading from these leaves up to the root +/// (excluding the root itself). The collection of Merkle paths is stored as a [BatchMerkleProof]. 
+pub type SaltedMerkleTreeMultiOpening = + (Vec<::Digest>, (Vec<::Digest>, BatchMerkleProof)); + // MERKLE TREE IMPLEMENTATION // ================================================================================================ @@ -416,7 +436,7 @@ impl VectorCommitment for MerkleTree { *self.root() } - fn domain_len(&self) -> usize { + fn get_domain_len(&self) -> usize { 1 << self.depth() } @@ -457,3 +477,179 @@ impl VectorCommitment for MerkleTree { MerkleTree::::verify_batch(&commitment, indexes, items, proof) } } + +// SALTED MERKLE TREE +// ================================================================================================ + +pub struct SaltedMerkleTree { + leaves: Vec, + tree: MerkleTree, + salts: Vec, +} + +impl SaltedMerkleTree +where + Standard: Distribution<::Digest>, +{ + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + pub fn new(leaves: Vec, prng: &mut R) -> Result { + if leaves.len() < 2 { + return Err(MerkleTreeError::TooFewLeaves(2, leaves.len())); + } + if !leaves.len().is_power_of_two() { + return Err(MerkleTreeError::NumberOfLeavesNotPowerOfTwo(leaves.len())); + } + + let num_leaves = leaves.len(); + let salts: Vec = (0..num_leaves).map(|_| prng.sample(Standard)).collect(); + + let salted_leaves: Vec = leaves + .iter() + .zip(salts.iter()) + .map(|(leaf, salt)| H::merge(&[*leaf, *salt])) + .collect(); + + let tree = MerkleTree::new(salted_leaves)?; + + Ok(Self { tree, leaves, salts }) + } + + /// Returns the root of the tree. 
+ pub fn root(&self) -> &H::Digest { + self.tree.root() + } + + pub fn depth(&self) -> usize { + self.tree.depth() + } + + pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { + let (_, proof) = self.tree.prove(index)?; + Ok((self.leaves[index], (self.salts[index], proof))) + } + + pub fn prove_batch( + &self, + indexes: &[usize], + ) -> Result, MerkleTreeError> { + let (_, proof) = self.tree.prove_batch(indexes)?; + let leaves_at_indices = indexes.iter().map(|index| self.leaves[*index]).collect(); + let salts_at_indices = indexes.iter().map(|index| self.salts[*index]).collect(); + Ok((leaves_at_indices, (salts_at_indices, proof))) + } + + pub fn verify( + root: H::Digest, + index: usize, + leaf: H::Digest, + salt: H::Digest, + proof: &[H::Digest], + ) -> Result<(), MerkleTreeError> { + let salted_leaf = H::merge(&[leaf, salt]); + MerkleTree::::verify(root, index, salted_leaf, proof) + } + + /// Checks whether the batch proof contains Merkle paths for the of the specified `indexes`. + /// + /// # Errors + /// Returns an error if: + /// * No indexes were provided (i.e., `indexes` is an empty slice). + /// * Number of provided indexes is greater than 255. + /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the + /// tree from which the batch proof was generated. + /// * List of indexes contains duplicates. + /// * Any of the paths in the batch proof does not resolve to the specified `root`. 
+ pub fn verify_batch( + root: &H::Digest, + indexes: &[usize], + leaves: &[H::Digest], + salts: &[H::Digest], + proof: &BatchMerkleProof, + ) -> Result<(), MerkleTreeError> { + let salted_leaves: Vec = leaves + .iter() + .zip(salts.iter()) + .map(|(leaf, salt)| H::merge(&[*leaf, *salt])) + .collect(); + + MerkleTree::::verify_batch(root, indexes, &salted_leaves, proof) + } +} + +impl Distribution> for Standard { + fn sample(&self, rng: &mut R) -> ByteDigest<32> { + let mut dest = [0; 32]; + rng.fill_bytes(&mut dest); + ByteDigest::new(dest) + } +} + +impl VectorCommitment for SaltedMerkleTree +where + Standard: Distribution<::Digest>, +{ + type Options = (); + + type Proof = (H::Digest, Vec); + + type MultiProof = (Vec, BatchMerkleProof); + + type Error = MerkleTreeError; + + fn new(items: Vec) -> Result { + let mut prng = thread_rng(); + SaltedMerkleTree::new(items, &mut prng) + } + + fn with_options(items: Vec, _options: Self::Options) -> Result { + let mut prng = thread_rng(); + Self::new(items, &mut prng) + } + + fn get_domain_len(&self) -> usize { + 1 << self.depth() + } + + fn get_proof_domain_len(proof: &Self::Proof) -> usize { + proof.1.len() + } + + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize { + 1 << proof.1.depth + } + + fn commitment(&self) -> H::Digest { + *self.root() + } + + fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error> { + self.prove(index) + } + + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error> { + self.prove_batch(indexes) + } + + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify(commitment, index, item, proof.0, &proof.1) + } + + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify_batch(&commitment, indexes, items, 
&proof.0, &proof.1) + } +} diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index f66c638a2..dac785294 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -254,6 +254,28 @@ fn from_proofs() { assert_eq!(proof1.depth, proof2.depth); } +#[test] +fn verify_salted() { + // depth 4 + let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); + let mut prng = thread_rng(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaves, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + + let (leaf, (salt, proof)) = tree.prove(2).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 2, leaf, salt, &proof).is_ok()); + + // depth 5 + let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaf, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + + let (leaf, (salt, proof)) = tree.prove(6).unwrap(); + assert!(SaltedMerkleTree::::verify(*tree.root(), 6, leaf, salt, &proof).is_ok()); +} + proptest! 
{ #[test] fn prove_n_verify(tree in random_blake3_merkle_tree(128), diff --git a/crypto/src/random/default.rs b/crypto/src/random/default.rs index f5a996404..fa002171d 100644 --- a/crypto/src/random/default.rs +++ b/crypto/src/random/default.rs @@ -118,6 +118,22 @@ impl> RandomCoin for DefaultRando self.counter = 0; } + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ) { + // TODO: revisit + if let Some(salt) = salt { + self.seed = H::merge(&[self.seed, data]); + self.seed = H::merge(&[self.seed, salt]); + self.counter = 0; + } else { + self.seed = H::merge(&[self.seed, data]); + self.counter = 0; + } + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- diff --git a/crypto/src/random/mod.rs b/crypto/src/random/mod.rs index 7ee540ee5..10ee5d40c 100644 --- a/crypto/src/random/mod.rs +++ b/crypto/src/random/mod.rs @@ -38,6 +38,14 @@ pub trait RandomCoin: Sync { /// Reseeds the coin with the specified data by setting the new seed to hash(`seed` || `data`). fn reseed(&mut self, data: ::Digest); + /// Similar to `Self::reseed` but takes a salt which is not a `None` when zero-knowledge is enabled. + /// TODO: Should we remove `Self::reseed`? + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ); + /// Computes hash(`seed` || `value`) and returns the number of leading zeros in the resulting /// value if it is interpreted as an integer in big-endian byte order. 
fn check_leading_zeros(&self, value: u64) -> u32; diff --git a/examples/Cargo.toml b/examples/Cargo.toml index b25d90fd8..14ba18dee 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -26,6 +26,7 @@ default = ["std"] std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] +air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } blake3 = { version = "1.5", default-features = false } core-utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } @@ -35,6 +36,7 @@ tracing = { version = "0.1", default-features = false } tracing-forest = { version = "0.1", features = ["ansi", "smallvec"], optional = true } tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } winterfell = { version = "0.11", path = "../winterfell", default-features = false } +rand_chacha = { version = "0.3", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/examples/benches/fibonacci.rs b/examples/benches/fibonacci.rs index 44094beaf..076f2ee2f 100644 --- a/examples/benches/fibonacci.rs +++ b/examples/benches/fibonacci.rs @@ -18,7 +18,7 @@ fn fibonacci(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(20)); - let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255, false); for &size in SIZES.iter() { let fib = diff --git a/examples/benches/rescue.rs b/examples/benches/rescue.rs index bf6e8cc26..19e3a0815 100644 --- a/examples/benches/rescue.rs +++ b/examples/benches/rescue.rs @@ -18,7 +18,7 @@ fn rescue(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(25)); - let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255, false); for &size in 
SIZES.iter() { let resc = rescue::RescueExample::>::new(size, options.clone()); diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 28b0ebf53..c3ab8d265 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -80,8 +82,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 860eb60df..04475a49f 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -95,8 +97,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 9d1ced901..ce0e0f583 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -2,6 +2,8 @@ // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -85,8 +87,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 67aebee70..4cd0a08ff 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -76,8 +78,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 3cf93aed9..2f4e14b8a 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -88,8 +90,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/utils.rs b/examples/src/fibonacci/utils.rs index e2f29f7c2..acb52d397 100644 --- a/examples/src/fibonacci/utils.rs +++ b/examples/src/fibonacci/utils.rs @@ -38,5 +38,5 @@ pub fn build_proof_options(use_extension_field: bool) -> winterfell::ProofOption } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 7) + ProofOptions::new(28, 8, 0, extension, 4, 7, false) } diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 983eee3fb..6a1f564bb 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -124,8 +126,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index c40f5a9f2..2a2ac8ef8 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -5,6 +5,8 @@ use std::collections::HashMap; +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -166,8 +168,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lib.rs b/examples/src/lib.rs index 33f733d7c..517871ecd 100644 --- a/examples/src/lib.rs +++ b/examples/src/lib.rs @@ -99,6 +99,7 @@ impl ExampleOptions { field_extension, self.folding_factor, 31, + false, ), hash_fn, ) diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index 4a86cc90a..459255f18 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in 
the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -131,8 +133,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/merkle/tests.rs b/examples/src/merkle/tests.rs index 4851d596c..cd180a63a 100644 --- a/examples/src/merkle/tests.rs +++ b/examples/src/merkle/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 98d725eec..67b51cdb3 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -98,8 +100,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue/tests.rs b/examples/src/rescue/tests.rs index 7daf66694..9ab273500 100644 --- a/examples/src/rescue/tests.rs +++ b/examples/src/rescue/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 7b04f98b9..5e4291391 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -3,7 +3,9 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; use core_utils::uninit_vector; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -130,8 +132,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue_raps/tests.rs b/examples/src/rescue_raps/tests.rs index 99c8d24dc..3f3419fae 100644 --- a/examples/src/rescue_raps/tests.rs +++ b/examples/src/rescue_raps/tests.rs @@ -33,5 +33,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, true) } diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index 33ca425ca..54d72a094 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -162,6 +162,18 @@ impl Digest for Hash { result[..bytes.len()].copy_from_slice(bytes); result } + + fn from_random_bytes(buffer: &[u8]) -> Self { + let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; + + buffer.chunks(16).zip(digest.iter_mut()).for_each(|(chunk, digest)| { + *digest = BaseElement::new(u128::from_be_bytes( + chunk.try_into().expect("Given the size of the chunk this should not panic"), + )) + }); + + Self(digest) + } } impl Serializable for Hash { diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index f39e818d2..173853afe 100644 --- 
a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -81,8 +83,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/exempt/tests.rs b/examples/src/vdf/exempt/tests.rs index 212cda767..c9c46d6e2 100644 --- a/examples/src/vdf/exempt/tests.rs +++ b/examples/src/vdf/exempt/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(85, 4, 0, extension, 4, 31, true) } diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 591dcc839..65ef3899b 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -3,6 +3,8 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
+use air::ZkParameters; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -76,8 +78,17 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/regular/tests.rs b/examples/src/vdf/regular/tests.rs index a3100a444..93ed54e54 100644 --- a/examples/src/vdf/regular/tests.rs +++ b/examples/src/vdf/regular/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(2, 4, 0, extension, 2, 255, true) } diff --git a/fri/Cargo.toml b/fri/Cargo.toml index d71a9c7a8..46370b288 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -32,6 +32,8 @@ std = ["crypto/std", "math/std", "utils/std"] crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } +rand_chacha = { version = "0.3", default-features = false } +rand = { version = "0.8" } [dev-dependencies] criterion = "0.5" diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index bfc096fc3..07b3b4ef5 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -23,6 +23,7 @@ pub fn build_layers(c: &mut Criterion) { for &domain_size in 
&BATCH_SIZES { let evaluations = build_evaluations(domain_size); + let mut prng = ::from_entropy(); fri_group.bench_with_input( BenchmarkId::new("build_layers", domain_size), @@ -37,8 +38,9 @@ pub fn build_layers(c: &mut Criterion) { BaseElement, Blake3_256, DefaultRandomCoin>, - >::new(domain_size, 32); - prover.build_layers(&mut channel, evaluations); + >::new(domain_size, 32, false); + + prover.build_layers(&mut channel, evaluations, &mut prng); prover.reset(); }, BatchSize::LargeInput, diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 65dd2af92..8d4495213 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -34,6 +34,7 @@ pub struct FriProof { layers: Vec, remainder: Vec, num_partitions: u8, // stored as power of 2 + salts: Vec, } impl FriProof { @@ -49,6 +50,7 @@ impl FriProof { layers: Vec, remainder: Vec, num_partitions: usize, + salts: Vec, ) -> Self { assert!(!remainder.is_empty(), "number of remainder elements must be greater than zero"); assert!( @@ -69,6 +71,7 @@ impl FriProof { layers, remainder: remainder_bytes, num_partitions: num_partitions.trailing_zeros() as u8, + salts, } } @@ -78,6 +81,7 @@ impl FriProof { layers: Vec::new(), remainder: Vec::new(), num_partitions: 0, + salts: vec![], } } @@ -190,6 +194,16 @@ impl FriProof { } Ok(remainder) } + + /// Returns a vector of values used in order to salt the transcript when zero-knowledge is + /// enabled. 
+ pub fn parse_salts(&self) -> Result>, DeserializationError> + where + E: FieldElement, + H: ElementHasher, + { + Vec::read_from_bytes(&self.salts) + } } // SERIALIZATION / DESERIALIZATION @@ -210,6 +224,10 @@ impl Serializable for FriProof { // write number of partitions target.write_u8(self.num_partitions); + + // write salts + target.write_u32(self.salts.len() as u32); + target.write_bytes(&self.salts); } } @@ -230,7 +248,11 @@ impl Deserializable for FriProof { // read number of partitions let num_partitions = source.read_u8()?; - Ok(FriProof { layers, remainder, num_partitions }) + // read salts + let salts_len = source.read_u32()? as usize; + let salts = source.read_vec(salts_len)?; + + Ok(FriProof { layers, remainder, num_partitions, salts }) } } diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 7231e757c..38a4771b4 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use crypto::{ElementHasher, Hasher, RandomCoin}; +use crypto::{Digest, ElementHasher, Hasher, RandomCoin}; use math::FieldElement; // PROVER CHANNEL TRAIT @@ -34,7 +34,13 @@ pub trait ProverChannel { /// the hash of each row to get one entry of the vector being committed to. Thus, the number /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer /// construction. - fn commit_fri_layer(&mut self, layer_root: ::Digest); + fn commit_fri_layer

( + &mut self, + layer_root: ::Digest, + prng: &mut P, + ) -> Option<::Digest> + where + P: rand::RngCore; /// Returns a random α drawn uniformly at random from the entire field. /// @@ -63,6 +69,8 @@ where commitments: Vec, domain_size: usize, num_queries: usize, + is_zk: bool, + salts: Vec>, _field_element: PhantomData, } @@ -78,7 +86,7 @@ where /// Panics if: /// * `domain_size` is smaller than 8 or is not a power of two. /// * `num_queries` is zero. - pub fn new(domain_size: usize, num_queries: usize) -> Self { + pub fn new(domain_size: usize, num_queries: usize, is_zk: bool) -> Self { assert!(domain_size >= 8, "domain size must be at least 8, but was {domain_size}"); assert!( domain_size.is_power_of_two(), @@ -90,6 +98,8 @@ where commitments: Vec::new(), domain_size, num_queries, + is_zk, + salts: vec![], _field_element: PhantomData, } } @@ -124,9 +134,27 @@ where { type Hasher = H; - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer( + &mut self, + layer_root: H::Digest, + prng: &mut P, + ) -> Option<::Digest> { self.commitments.push(layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.is_zk { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + + let salt = Digest::from_random_bytes(&buffer); + + Some(salt) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(layer_root, salt); + salt } fn draw_fri_alpha(&mut self) -> E { diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 17092ad34..3accc5998 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -12,6 +12,7 @@ use math::{fft, FieldElement}; use utils::iterators::*; use utils::{ flatten_vector_elements, group_slice_elements, iter_mut, transpose_slice, uninit_vector, + Serializable, }; use crate::{ @@ -102,6 +103,7 @@ where options: FriOptions, layers: Vec>, remainder_poly: FriRemainder, + salts: Vec>, _channel: PhantomData, } @@ 
-131,6 +133,7 @@ where options, layers: Vec::new(), remainder_poly: FriRemainder(vec![]), + salts: vec![], _channel: PhantomData, } } @@ -176,7 +179,12 @@ where /// /// # Panics /// Panics if the prover state is dirty (the vector of layers is not empty). - pub fn build_layers(&mut self, channel: &mut C, mut evaluations: Vec) { + pub fn build_layers( + &mut self, + channel: &mut C, + mut evaluations: Vec, + prng: &mut R, + ) { assert!( self.layers.is_empty(), "a prior proof generation request has not been completed yet" @@ -186,20 +194,25 @@ where // has small enough degree for _ in 0..self.options.num_fri_layers(evaluations.len()) { match self.folding_factor() { - 2 => self.build_layer::<2>(channel, &mut evaluations), - 4 => self.build_layer::<4>(channel, &mut evaluations), - 8 => self.build_layer::<8>(channel, &mut evaluations), - 16 => self.build_layer::<16>(channel, &mut evaluations), + 2 => self.build_layer::(channel, &mut evaluations, prng), + 4 => self.build_layer::(channel, &mut evaluations, prng), + 8 => self.build_layer::(channel, &mut evaluations, prng), + 16 => self.build_layer::(channel, &mut evaluations, prng), _ => unimplemented!("folding factor {} is not supported", self.folding_factor()), } } - self.set_remainder(channel, &mut evaluations); + self.set_remainder(channel, &mut evaluations, prng); } /// Builds a single FRI layer by first committing to the `evaluations`, then drawing a random /// alpha from the channel and use it to perform degree-respecting projection. 
- fn build_layer(&mut self, channel: &mut C, evaluations: &mut Vec) { + fn build_layer( + &mut self, + channel: &mut C, + evaluations: &mut Vec, + prng: &mut R, + ) { // commit to the evaluations at the current layer; we do this by first transposing the // evaluations into a matrix of N columns, then hashing each row into a digest, and finally // commiting to vector of these digests; we do this so that we could de-commit to N values @@ -208,7 +221,8 @@ where let evaluation_vector_commitment = build_layer_commitment::<_, _, V, N>(&transposed_evaluations) .expect("failed to construct FRI layer commitment"); - channel.commit_fri_layer(evaluation_vector_commitment.commitment()); + let salt = channel.commit_fri_layer(evaluation_vector_commitment.commitment(), prng); + self.salts.push(salt); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting // projection to reduce the degree of evaluations by N @@ -222,13 +236,19 @@ where } /// Creates remainder polynomial in coefficient form from a vector of `evaluations` over a domain. 
- fn set_remainder(&mut self, channel: &mut C, evaluations: &mut [E]) { + fn set_remainder( + &mut self, + channel: &mut C, + evaluations: &mut [E], + prng: &mut R, + ) { let inv_twiddles = fft::get_inv_twiddles(evaluations.len()); fft::interpolate_poly_with_offset(evaluations, &inv_twiddles, self.options.domain_offset()); let remainder_poly_size = evaluations.len() / self.options.blowup_factor(); let remainder_poly = evaluations[..remainder_poly_size].to_vec(); let commitment = ::hash_elements(&remainder_poly); - channel.commit_fri_layer(commitment); + let salt = channel.commit_fri_layer(commitment, prng); + self.salts.push(salt); self.remainder_poly = FriRemainder(remainder_poly); } @@ -278,7 +298,8 @@ where // clear layers so that another proof can be generated self.reset(); - FriProof::new(layers, remainder, 1) + let salts = self.salts.to_bytes(); + FriProof::new(layers, remainder, 1, salts) } } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index e765092c5..7387076cd 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -7,6 +7,8 @@ use alloc::vec::Vec; use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use utils::{Deserializable, Serializable, SliceReader}; use super::{DefaultProverChannel, FriProver}; @@ -45,7 +47,7 @@ pub fn build_prover_channel( trace_length: usize, options: &FriOptions, ) -> DefaultProverChannel> { - DefaultProverChannel::new(trace_length * options.blowup_factor(), 32) + DefaultProverChannel::new(trace_length * options.blowup_factor(), 32, false) } pub fn build_evaluations(trace_length: usize, lde_blowup: usize) -> Vec { @@ -105,7 +107,8 @@ fn fri_prove_verify( // instantiate the prover and generate the proof let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); - prover.build_layers(&mut channel, evaluations.clone()); + let mut prng = 
ChaCha20Rng::from_entropy(); + prover.build_layers(&mut channel, evaluations.clone(), &mut prng); let positions = channel.draw_query_positions(0); let proof = prover.build_proof(&positions); diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index 6f8709858..91f7ce142 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -70,6 +70,9 @@ pub trait VerifierChannel { /// Reads and removes the remainder polynomial from the channel. fn take_fri_remainder(&mut self) -> Vec; + /// Reads and removes the salt value needed for Fiat-Shamir at the current round. + fn take_salt(&mut self) -> Option<::Digest>; + // PROVIDED METHODS // -------------------------------------------------------------------------------------------- @@ -135,6 +138,7 @@ pub struct DefaultVerifierChannel< layer_queries: Vec>, remainder: Vec, num_partitions: usize, + salts: Vec>, _h: PhantomData, } @@ -156,6 +160,7 @@ where ) -> Result { let num_partitions = proof.num_partitions(); + let salts = proof.parse_salts::()?; let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = proof.parse_layers::(domain_size, folding_factor)?; @@ -166,6 +171,7 @@ where layer_queries, remainder, num_partitions, + salts, _h: PhantomData, }) } @@ -199,4 +205,8 @@ where fn take_fri_remainder(&mut self) -> Vec { self.remainder.clone() } + + fn take_salt(&mut self) -> Option { + self.salts.remove(0) + } } diff --git a/fri/src/verifier/mod.rs b/fri/src/verifier/mod.rs index ff0582b2c..da7f889fa 100644 --- a/fri/src/verifier/mod.rs +++ b/fri/src/verifier/mod.rs @@ -121,7 +121,8 @@ where let mut layer_alphas = Vec::with_capacity(layer_commitments.len()); let mut max_degree_plus_1 = max_poly_degree + 1; for (depth, commitment) in layer_commitments.iter().enumerate() { - public_coin.reseed(*commitment); + let salt = channel.take_salt(); + public_coin.reseed_with_salt(*commitment, salt); let alpha = public_coin.draw().map_err(VerifierError::RandomCoinError)?; 
layer_alphas.push(alpha); diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 1011c3504..c49ac34da 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -35,6 +35,9 @@ crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", defa fri = { version = "0.11", path = '../fri', package = "winter-fri", default-features = false } math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } maybe_async = { version = "0.11", path = "../utils/maybe_async" , package = "winter-maybe-async" } +rand_chacha = { version = "0.3", default-features = false } +rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand = { version = "0.8" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index 92bf54f5a..2dca49991 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -8,11 +8,13 @@ use std::time::Duration; use air::{ Air, AirContext, Assertion, AuxRandElements, ConstraintCompositionCoefficients, EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, PartitionOptions, - ProofOptions, TraceInfo, TransitionConstraintDegree, + ProofOptions, TraceInfo, TransitionConstraintDegree, ZkParameters, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use winter_prover::{ matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, StarkDomain, Trace, @@ -174,7 +176,7 @@ impl LagrangeProver { fn 
new(aux_trace_width: usize) -> Self { Self { aux_trace_width, - options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1), + options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false), } } } @@ -206,11 +208,20 @@ impl Prover for LagrangeProver { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn build_constraint_commitment>( diff --git a/prover/src/channel.rs b/prover/src/channel.rs index db82f5095..f73b4f3b3 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,11 +10,13 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{ElementHasher, RandomCoin, VectorCommitment}; +use crypto::{Digest, ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; +use rand::RngCore; #[cfg(feature = "concurrent")] use utils::iterators::*; +use utils::Serializable; // TYPES AND INTERFACES // ================================================================================================ @@ -33,6 +35,7 @@ where commitments: Commitments, ood_frame: OodFrame, pow_nonce: u64, + salts: Vec>, _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -51,8 +54,12 @@ where // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates a new prover channel for the specified `air` and public inputs. 
- pub fn new(air: &'a A, mut pub_inputs_elements: Vec) -> Self { - let context = Context::new::(air.trace_info().clone(), air.options().clone()); + pub fn new(air: &'a A, mut pub_inputs_elements: Vec, zk_blowup: usize) -> Self { + let context = Context::new::( + air.trace_info().clone(), + air.options().clone(), + zk_blowup, + ); // build a seed for the public coin; the initial seed is a hash of the proof context and // the public inputs, but as the protocol progresses, the coin will be reseeded with the @@ -67,6 +74,7 @@ where commitments: Commitments::default(), ood_frame: OodFrame::default(), pow_nonce: 0, + salts: vec![], _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -76,29 +84,81 @@ where // -------------------------------------------------------------------------------------------- /// Commits the prover the extended execution trace. - pub fn commit_trace(&mut self, trace_root: H::Digest) { + pub fn commit_trace

(&mut self, trace_root: H::Digest, prng: &mut P) + where + P: RngCore, + { self.commitments.add::(&trace_root); - self.public_coin.reseed(trace_root); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(trace_root, salt); } /// Commits the prover to the evaluations of the constraint composition polynomial. - pub fn commit_constraints(&mut self, constraint_root: H::Digest) { + pub fn commit_constraints

(&mut self, constraint_root: H::Digest, prng: &mut P) + where + P: RngCore, + { self.commitments.add::(&constraint_root); - self.public_coin.reseed(constraint_root); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(constraint_root, salt); } /// Saves the evaluations of trace polynomials over the out-of-domain evaluation frame. This /// also reseeds the public coin with the hashes of the evaluation frame states. - pub fn send_ood_trace_states(&mut self, trace_ood_frame: &TraceOodFrame) { + pub fn send_ood_trace_states

(&mut self, trace_ood_frame: &TraceOodFrame, prng: &mut P) + where + P: RngCore, + { let trace_states_hash = self.ood_frame.set_trace_states::(trace_ood_frame); - self.public_coin.reseed(trace_states_hash); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(trace_states_hash, salt); } /// Saves the evaluations of constraint composition polynomial columns at the out-of-domain /// point. This also reseeds the public coin wit the hash of the evaluations. - pub fn send_ood_constraint_evaluations(&mut self, evaluations: &[E]) { + pub fn send_ood_constraint_evaluations

(&mut self, evaluations: &[E], prng: &mut P) + where + P: RngCore, + { self.ood_frame.set_constraint_evaluations(evaluations); - self.public_coin.reseed(H::hash_elements(evaluations)); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(H::hash_elements(evaluations), salt); } // PUBLIC COIN METHODS @@ -139,7 +199,7 @@ where /// are removed from the returned vector. pub fn get_query_positions(&mut self) -> Vec { let num_queries = self.context.options().num_queries(); - let lde_domain_size = self.context.lde_domain_size(); + let lde_domain_size = self.context.lde_domain_size::(); let mut positions = self .public_coin .draw_integers(num_queries, lde_domain_size, self.pow_nonce) @@ -196,6 +256,7 @@ where pow_nonce: self.pow_nonce, num_unique_queries: num_query_positions as u8, gkr_proof, + salts: self.salts.to_bytes(), } } } @@ -214,9 +275,26 @@ where type Hasher = H; /// Commits the prover to a FRI layer. - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer

( + &mut self, + layer_root: H::Digest, + prng: &mut P, + ) -> Option<::Digest> + where + P: RngCore, + { self.commitments.add::(&layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir is zero-knowledge is enabled + let salt = if self.air.is_zk() { + let mut buffer = [0_u8; 32]; + prng.fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) + } else { + None + }; + self.public_coin.reseed_with_salt(layer_root, salt); + salt } /// Returns a new alpha drawn from the public coin. diff --git a/prover/src/composer/mod.rs b/prover/src/composer/mod.rs index 5d463d331..1d394cc63 100644 --- a/prover/src/composer/mod.rs +++ b/prover/src/composer/mod.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; -use air::{proof::TraceOodFrame, DeepCompositionCoefficients}; +use air::{proof::TraceOodFrame, Air, DeepCompositionCoefficients}; use math::{ add_in_place, fft, mul_acc, polynom::{self, syn_div_roots_in_place}, @@ -22,6 +22,8 @@ pub struct DeepCompositionPoly { coefficients: Vec, cc: DeepCompositionCoefficients, z: E, + g: E, + is_zk: bool, } impl DeepCompositionPoly { @@ -30,17 +32,27 @@ impl DeepCompositionPoly { /// Returns a new DEEP composition polynomial. Initially, this polynomial will be empty, and /// the intent is to populate the coefficients via add_trace_polys() and add_constraint_polys() /// methods. - pub fn new(z: E, cc: DeepCompositionCoefficients) -> Self { - DeepCompositionPoly { coefficients: vec![], cc, z } + pub fn new>( + air: &A, + z: E, + cc: DeepCompositionCoefficients, + ) -> Self { + DeepCompositionPoly { + coefficients: vec![], + cc, + z, + g: E::from(air.trace_domain_generator()), + is_zk: air.is_zk(), + } } // ACCESSORS // -------------------------------------------------------------------------------------------- - /// Returns the size of the DEEP composition polynomial. 
- pub fn poly_size(&self) -> usize { - self.coefficients.len() - } + ///// Returns the size of the DEEP composition polynomial. + //pub fn poly_size(&self) -> usize { + //self.coefficients.len() + //} /// Returns the degree of the composition polynomial. pub fn degree(&self) -> usize { @@ -82,8 +94,7 @@ impl DeepCompositionPoly { // compute a second out-of-domain point offset from z by exactly trace generator; this // point defines the "next" computation state in relation to point z let trace_length = trace_polys.poly_size(); - let g = E::from(E::BaseField::get_root_of_unity(trace_length.ilog2())); - let next_z = self.z * g; + let next_z = self.z * self.g; // combine trace polynomials into 2 composition polynomials T'(x) and T''(x) let mut t1_composition = vec![E::ZERO; trace_length]; @@ -185,7 +196,6 @@ impl DeepCompositionPoly { // set the coefficients of the DEEP composition polynomial self.coefficients = trace_poly; - assert_eq!(self.poly_size() - 2, self.degree()); } // CONSTRAINT POLYNOMIAL COMPOSITION @@ -194,7 +204,7 @@ impl DeepCompositionPoly { /// into the DEEP composition polynomial. This method is intended to be called only after the /// add_trace_polys() method has been executed. The composition is done as follows: /// - /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z), where H_i(x) is the + /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z^m), where H_i(x) is the /// ith composition polynomial column. 
/// - Then, combine all H_i(x) polynomials together by computing H(x) = sum(H_i(x) * cc_i) for /// all i, where cc_i is the coefficient for the random linear combination drawn from the @@ -208,22 +218,32 @@ impl DeepCompositionPoly { ) { assert!(!self.coefficients.is_empty()); - let z = self.z; - let mut column_polys = composition_poly.into_columns(); + let num_cols = ood_evaluations.len(); + let z = self.z; // Divide out the OOD point z from column polynomials - iter_mut!(column_polys).zip(ood_evaluations).for_each(|(poly, value_at_z)| { - // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) - poly[0] -= value_at_z; - polynom::syn_div_in_place(poly, 1, z); - }); + iter_mut!(column_polys).take(num_cols).zip(ood_evaluations).for_each( + |(poly, value_at_z)| { + // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) + poly[0] -= value_at_z; + polynom::syn_div_in_place(poly, 1, z); + }, + ); // add H'_i(x) * cc_i for all i into the DEEP composition polynomial - for (i, poly) in column_polys.into_iter().enumerate() { - mul_acc::(&mut self.coefficients, &poly, self.cc.constraints[i]); + for (i, poly) in column_polys.iter().enumerate().take(num_cols) { + mul_acc::(&mut self.coefficients, poly, self.cc.constraints[i]); + } + + // add the randomizer codeword for FRI + if self.is_zk { + iter_mut!(self.coefficients) + .zip(&column_polys[column_polys.len() - 1]) + .for_each(|(a, b)| *a += *b); } - assert_eq!(self.poly_size() - 2, self.degree()); + + assert_eq!(self.coefficients.len() - 2, self.degree()); } // LOW-DEGREE EXTENSION diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs new file mode 100644 index 000000000..d79e02d46 --- /dev/null +++ b/prover/src/constraints/commitment.rs @@ -0,0 +1,80 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. 
+ +use alloc::vec::Vec; +use core::marker::PhantomData; + +use air::proof::Queries; +use crypto::{ElementHasher, VectorCommitment}; +use math::FieldElement; + +use super::RowMatrix; + +// CONSTRAINT COMMITMENT +// ================================================================================================ + +/// Constraint evaluation commitment. +/// +/// The commitment consists of two components: +/// * Evaluations of composition polynomial columns over the LDE domain. +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. +pub struct ConstraintCommitment< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + evaluations: RowMatrix, + vector_commitment: V, + _h: PhantomData, +} + +impl ConstraintCommitment +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ + /// Creates a new constraint evaluation commitment from the provided composition polynomial + /// evaluations and the corresponding vector commitment. + pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { + assert_eq!( + evaluations.num_rows(), + commitment.get_domain_len(), + "number of rows in constraint evaluation matrix must be the same as the size \ + of the vector commitment domain" + ); + + ConstraintCommitment { + evaluations, + vector_commitment: commitment, + _h: PhantomData, + } + } + + /// Returns the commitment. + pub fn commitment(&self) -> H::Digest { + self.vector_commitment.commitment() + } + + /// Returns constraint evaluations at the specified positions along with a batch opening proof + /// against the vector commitment. 
+ pub fn query(self, positions: &[usize]) -> Queries { + // build batch opening proof to the leaves specified by positions + let opening_proof = self + .vector_commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for constraint queries"); + + // determine a set of evaluations corresponding to each position + let mut evaluations = Vec::new(); + for &position in positions { + let row = self.evaluations.row(position).to_vec(); + evaluations.push(row); + } + + Queries::new::(opening_proof.1, evaluations) + } +} diff --git a/prover/src/constraints/composition_poly.rs b/prover/src/constraints/composition_poly.rs index bad52f7f5..a418a04c9 100644 --- a/prover/src/constraints/composition_poly.rs +++ b/prover/src/constraints/composition_poly.rs @@ -5,7 +5,9 @@ use alloc::vec::Vec; -use math::{fft, polynom::degree_of, FieldElement}; +use air::ZkParameters; +use math::{fft, polynom, FieldElement}; +use rand::{Rng, RngCore}; use super::{ColMatrix, StarkDomain}; @@ -47,16 +49,23 @@ impl CompositionPolyTrace { /// /// For example, if the composition polynomial has degree 2N - 1, where N is the trace length, /// it will be stored as two columns of size N (each of degree N - 1). +/// +/// When zero-knowledge is enabled, the composition polynomial is split into segment polynomials +/// such that each segment polynomial's degree is small enough to accommodate adding a randomizer +/// polynomial without the degree of the resulting ranomized segment polynomial exceeding +/// `domain.trace_length()`. pub struct CompositionPoly { data: ColMatrix, } impl CompositionPoly { /// Returns a new composition polynomial. 
- pub fn new( + pub fn new( composition_trace: CompositionPolyTrace, domain: &StarkDomain, num_cols: usize, + zk_parameters: Option, + prng: &mut R, ) -> Self { assert!( domain.trace_length() < composition_trace.num_rows(), @@ -70,7 +79,30 @@ impl CompositionPoly { let inv_twiddles = fft::get_inv_twiddles::(trace.len()); fft::interpolate_poly_with_offset(&mut trace, &inv_twiddles, domain.offset()); - let polys = segment(trace, domain.trace_length(), num_cols); + // compute the segment quotient polynomials + let quotient_degree = polynom::degree_of(&trace); + let degree_chunked_quotient = if zk_parameters.is_some() { + (quotient_degree + 1).div_ceil(num_cols) + } else { + domain.trace_length() + }; + let polys = segment(trace, degree_chunked_quotient, num_cols); + let mut polys = complement_to(polys, domain.trace_length(), prng); + + // generate a randomizer polynomial for FRI + if zk_parameters.is_some() { + let extended_len = polys[0].len(); + let mut zk_col = vec![E::ZERO; extended_len]; + + for a in zk_col.iter_mut() { + let bytes = prng.gen::<[u8; 32]>(); + *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + // reduce the degree to match that of the DEEP composition polynomial + zk_col[extended_len - 1] = E::ZERO; + polys.push(zk_col) + } CompositionPoly { data: ColMatrix::new(polys) } } @@ -96,8 +128,8 @@ impl CompositionPoly { } /// Returns evaluations of all composition polynomial columns at point z. - pub fn evaluate_at(&self, z: E) -> Vec { - self.data.evaluate_columns_at(z) + pub fn evaluate_at(&self, z: E, is_zk: bool) -> Vec { + self.data.evaluate_columns_at(z, is_zk) } /// Returns a reference to the matrix of individual column polynomials. @@ -111,6 +143,55 @@ impl CompositionPoly { } } +/// Takes a vector of coefficients representing the segment polynomials of a given composition +/// polynomial as input, and generates coefficients of their randomized version. 
+/// +/// The randomization technique is the one in section 4.1 in https://eprint.iacr.org/2024/1037.pdf. +fn complement_to( + polys: Vec>, + l: usize, + prng: &mut R, +) -> Vec> { + let mut result = vec![]; + + let randomizer_poly_size = l - polys[0].len(); + let mut current_poly = vec![E::ZERO; randomizer_poly_size]; + let mut previous_poly = vec![E::ZERO; randomizer_poly_size]; + + for (_, poly) in polys.iter().enumerate().take_while(|(index, _)| *index != polys.len() - 1) { + let diff = l - poly.len(); + + for eval in current_poly.iter_mut().take(diff) { + let bytes = prng.gen::<[u8; 32]>(); + *eval = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + + let mut res = vec![]; + res.extend_from_slice(poly); + res.extend_from_slice(¤t_poly); + + for i in 0..randomizer_poly_size { + res[i] -= previous_poly[i]; + } + + previous_poly.copy_from_slice(¤t_poly[..randomizer_poly_size]); + + result.push(res) + } + + let poly = polys.last().unwrap(); + let mut res = vec![E::ZERO; l]; + for (i, entry) in poly.iter().enumerate() { + res[i] = *entry; + } + for i in 0..randomizer_poly_size { + res[i] -= previous_poly[i]; + } + result.push(res); + result +} + // HELPER FUNCTIONS // ================================================================================================ @@ -123,8 +204,6 @@ fn segment( trace_len: usize, num_cols: usize, ) -> Vec> { - debug_assert!(degree_of(&coefficients) < trace_len * num_cols); - coefficients .chunks(trace_len) .take(num_cols) diff --git a/prover/src/constraints/evaluation_table.rs b/prover/src/constraints/evaluation_table.rs index 9add913f4..554136a56 100644 --- a/prover/src/constraints/evaluation_table.rs +++ b/prover/src/constraints/evaluation_table.rs @@ -73,7 +73,7 @@ impl<'a, E: FieldElement> ConstraintEvaluationTable<'a, E> { // collect expected degrees for all transition constraints to compare them against actual // degrees; we do this in debug mode only because this comparison is 
expensive let expected_transition_degrees = - build_transition_constraint_degrees(transition_constraints, domain.trace_length()); + build_transition_constraint_degrees(transition_constraints, domain); ConstraintEvaluationTable { evaluations: uninit_matrix(num_columns, num_rows), @@ -420,16 +420,35 @@ fn get_inv_evaluation( #[cfg(debug_assertions)] fn build_transition_constraint_degrees( constraints: &TransitionConstraints, - trace_length: usize, + domain: &StarkDomain, ) -> Vec { + use crate::domain::ZkInfo; + let mut result = Vec::new(); + let (trace_length, trace_len_ext) = if let Some(zk_info) = domain.zk_info() { + let ZkInfo { + original_trace_length, + degree_witness_randomizer, + }: ZkInfo = zk_info; + + let ext_len = (original_trace_length + degree_witness_randomizer).next_power_of_two(); + (original_trace_length, ext_len) + } else { + (domain.trace_length(), domain.trace_length()) + }; for degree in constraints.main_constraint_degrees() { - result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree()) + result.push( + degree.get_evaluation_degree(trace_length, trace_len_ext) + - constraints.divisor().degree(), + ) } for degree in constraints.aux_constraint_degrees() { - result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree()) + result.push( + degree.get_evaluation_degree(trace_length, trace_len_ext) + - constraints.divisor().degree(), + ) } result diff --git a/prover/src/constraints/evaluator/periodic_table.rs b/prover/src/constraints/evaluator/periodic_table.rs index ec72aa766..4601460e3 100644 --- a/prover/src/constraints/evaluator/periodic_table.rs +++ b/prover/src/constraints/evaluator/periodic_table.rs @@ -37,23 +37,29 @@ impl PeriodicValueTable { // them for polynomials of the same size let mut twiddle_map = BTreeMap::new(); + // zero-knowledge blowup factor + let factor = air.context().trace_length_ext() / air.trace_length(); let evaluations = polys .iter() .map(|poly| { let poly_size = 
poly.len(); let num_cycles = (air.trace_length() / poly_size) as u64; let offset = air.domain_offset().exp(num_cycles.into()); - let twiddles = - twiddle_map.entry(poly_size).or_insert_with(|| fft::get_twiddles(poly_size)); - fft::evaluate_poly_with_offset(poly, twiddles, offset, air.ce_blowup_factor()) + let mut new_poly = vec![B::ZERO; factor * poly_size]; + new_poly[..poly_size].copy_from_slice(&poly[..poly_size]); + let twiddles = twiddle_map + .entry(new_poly.len()) + .or_insert_with(|| fft::get_twiddles(new_poly.len())); + + fft::evaluate_poly_with_offset(&new_poly, twiddles, offset, air.ce_blowup_factor()) }) .collect::>(); // allocate memory to hold all expanded values and copy polynomial evaluations into the // table in such a way that values for the same row are adjacent to each other. let row_width = polys.len(); - let column_length = max_poly_size * air.ce_blowup_factor(); + let column_length = factor * max_poly_size * air.ce_blowup_factor(); let mut values = unsafe { uninit_vector(row_width * column_length) }; for i in 0..column_length { for (j, column) in evaluations.iter().enumerate() { diff --git a/prover/src/domain.rs b/prover/src/domain.rs index 87a54bbe5..525733a1b 100644 --- a/prover/src/domain.rs +++ b/prover/src/domain.rs @@ -30,6 +30,10 @@ pub struct StarkDomain { /// Offset of the low-degree extension domain. domain_offset: B, + + /// Extra information needed for constraint evaluation validation when zero-knowledge is enabled. + #[cfg(debug_assertions)] + zk_info: Option, } // STARK DOMAIN IMPLEMENTATION @@ -38,18 +42,30 @@ pub struct StarkDomain { impl StarkDomain { /// Returns a new STARK domain initialized with the provided `context`. 
pub fn new>(air: &A) -> Self { - let trace_twiddles = fft::get_twiddles(air.trace_length()); + let trace_twiddles = fft::get_twiddles(air.context().trace_length_ext()); // build constraint evaluation domain let domain_gen = B::get_root_of_unity(air.ce_domain_size().ilog2()); let ce_domain = get_power_series(domain_gen, air.ce_domain_size()); + #[cfg(debug_assertions)] + let zk_info = if air.is_zk() { + Some(ZkInfo { + original_trace_length: air.trace_length(), + degree_witness_randomizer: air.context().zk_witness_randomizer_degree(), + }) + } else { + None + }; + StarkDomain { trace_twiddles, ce_domain, ce_to_lde_blowup: air.lde_domain_size() / air.ce_domain_size(), ce_domain_mod_mask: air.ce_domain_size() - 1, domain_offset: air.domain_offset(), + #[cfg(debug_assertions)] + zk_info, } } @@ -72,6 +88,8 @@ impl StarkDomain { ce_to_lde_blowup: 1, ce_domain_mod_mask: ce_domain_size - 1, domain_offset, + #[cfg(debug_assertions)] + zk_info: None, } } @@ -152,4 +170,16 @@ impl StarkDomain { pub fn offset(&self) -> B { self.domain_offset } + + #[cfg(debug_assertions)] + pub(crate) fn zk_info(&self) -> Option { + self.zk_info + } +} + +#[cfg(debug_assertions)] +#[derive(Clone, Copy, Debug)] +pub struct ZkInfo { + pub(crate) original_trace_length: usize, + pub(crate) degree_witness_randomizer: usize, } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 906fac8ec..d833e26da 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -48,7 +48,7 @@ pub use air::{ EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, TransitionConstraintDegree, }; -use air::{AuxRandElements, GkrRandElements, PartitionOptions}; +use air::{AuxRandElements, GkrRandElements, PartitionOptions, ZkParameters}; pub use crypto; use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProver; @@ -58,6 +58,8 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; +use rand::{RngCore, SeedableRng}; 
+use rand_chacha::ChaCha20Rng; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -189,6 +191,7 @@ pub trait Prover { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement; @@ -324,21 +327,29 @@ pub trait Prover { ProverChannel::::new( &air, pub_inputs_elements, + air.context().zk_blowup_factor(), ); + let mut prng = ChaCha20Rng::from_entropy(); + let zk_parameters = air.context().zk_parameters(); // 1 ----- Commit to the execution trace -------------------------------------------------- // build computation domain; this is used later for polynomial evaluations let lde_domain_size = air.lde_domain_size(); - let trace_length = air.trace_length(); + let trace_length = air.context().trace_length_ext(); let domain = info_span!("build_domain", trace_length, lde_domain_size) .in_scope(|| StarkDomain::new(&air)); assert_eq!(domain.lde_domain_size(), lde_domain_size); assert_eq!(domain.trace_length(), trace_length); // commit to the main trace segment - let (mut trace_lde, mut trace_polys) = - maybe_await!(self.commit_to_main_trace_segment(&trace, &domain, &mut channel)); + let (mut trace_lde, mut trace_polys) = maybe_await!(self.commit_to_main_trace_segment( + &trace, + &domain, + zk_parameters, + &mut prng, + &mut channel + )); // build the auxiliary trace segment, and append the resulting segments to trace commitment // and trace polynomial table structs @@ -370,11 +381,11 @@ pub trait Prover { // extend the auxiliary trace segment and commit to the extended trace let span = info_span!("commit_to_aux_trace_segment").entered(); let (aux_segment_polys, aux_segment_commitment) = - trace_lde.set_aux_trace(&aux_trace, &domain); + trace_lde.set_aux_trace(&aux_trace, &domain, zk_parameters, &mut prng); // commit to the LDE of the extended auxiliary trace 
segment by writing its // commitment into the channel - channel.commit_trace(aux_segment_commitment); + channel.commit_trace(aux_segment_commitment, &mut prng); drop(span); aux_segment_polys @@ -419,7 +430,14 @@ pub trait Prover { // 3 ----- commit to constraint evaluations ----------------------------------------------- let (constraint_commitment, composition_poly) = maybe_await!(self - .commit_to_constraint_evaluations(&air, composition_poly_trace, &domain, &mut channel)); + .commit_to_constraint_evaluations( + &air, + composition_poly_trace, + &domain, + &mut channel, + zk_parameters, + &mut prng + )); // 4 ----- build DEEP composition polynomial ---------------------------------------------- let deep_composition_poly = { @@ -438,16 +456,17 @@ pub trait Prover { // g, where g is the generator of the trace domain. Additionally, if the Lagrange kernel // auxiliary column is present, we also evaluate that column over the points: z, z * g, // z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = log(trace_len). 
- let ood_trace_states = trace_polys.get_ood_frame(z); - channel.send_ood_trace_states(&ood_trace_states); + let ood_trace_states = + trace_polys.get_ood_frame(z, air.context().trace_info().length()); + channel.send_ood_trace_states(&ood_trace_states, &mut prng); - let ood_evaluations = composition_poly.evaluate_at(z); - channel.send_ood_constraint_evaluations(&ood_evaluations); + let ood_evaluations = composition_poly.evaluate_at(z, air.is_zk()); + channel.send_ood_constraint_evaluations(&ood_evaluations, &mut prng); // draw random coefficients to use during DEEP polynomial composition, and use them to // initialize the DEEP composition polynomial let deep_coefficients = channel.get_deep_composition_coeffs(); - let mut deep_composition_poly = DeepCompositionPoly::new(z, deep_coefficients); + let mut deep_composition_poly = DeepCompositionPoly::new(&air, z, deep_coefficients); // combine all trace polynomials together and merge them into the DEEP composition // polynomial @@ -465,7 +484,7 @@ pub trait Prover { // make sure the degree of the DEEP composition polynomial is equal to trace polynomial // degree minus 1. 
- assert_eq!(trace_length - 2, deep_composition_poly.degree()); + assert_eq!(air.context().trace_length_ext() - 2, deep_composition_poly.degree()); // 5 ----- evaluate DEEP composition polynomial over LDE domain --------------------------- let deep_evaluations = { @@ -473,7 +492,10 @@ pub trait Prover { let deep_evaluations = deep_composition_poly.evaluate(&domain); // we check the following condition in debug mode only because infer_degree is an // expensive operation - debug_assert_eq!(trace_length - 2, infer_degree(&deep_evaluations, domain.offset())); + debug_assert_eq!( + air.context().trace_length_ext() - 2, + infer_degree(&deep_evaluations, domain.offset()) + ); drop(span); deep_evaluations @@ -484,7 +506,7 @@ pub trait Prover { let num_layers = fri_options.num_fri_layers(lde_domain_size); let mut fri_prover = FriProver::<_, _, _, Self::VC>::new(fri_options); info_span!("compute_fri_layers", num_layers) - .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations)); + .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations, &mut prng)); // 7 ----- determine query positions ------------------------------------------------------ let query_positions = { @@ -538,14 +560,17 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] #[maybe_async] - fn commit_to_main_trace_segment( + fn commit_to_main_trace_segment( &self, trace: &Self::Trace, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, + R: RngCore, { // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde( @@ -553,6 +578,7 @@ pub trait Prover { trace.main_segment(), domain, self.options().partition_options(), + zk_parameters )); // get the commitment to the main trace segment LDE @@ -560,7 +586,7 @@ pub trait Prover { // commit to the LDE of the main 
trace by writing the the commitment string into // the channel - channel.commit_trace(main_trace_commitment); + channel.commit_trace(main_trace_commitment, prng); (trace_lde, trace_polys) } @@ -568,29 +594,34 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] #[maybe_async] - fn commit_to_constraint_evaluations( + fn commit_to_constraint_evaluations( &self, air: &Self::Air, composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, + zk_parameters: Option, + prng: &mut R, ) -> (Self::ConstraintCommitment, CompositionPoly) where E: FieldElement, + R: RngCore, { // first, build a commitment to the evaluations of the constraint composition polynomial // columns let (constraint_commitment, composition_poly) = maybe_await!(self - .build_constraint_commitment::( + .build_constraint_commitment::( composition_poly_trace, air.context().num_constraint_composition_columns(), domain, self.options().partition_options() + zk_parameters, + prng )); // then, commit to the evaluations of constraints by writing the commitment string of // the constraint commitment into the channel - channel.commit_constraints(constraint_commitment.commitment()); + channel.commit_constraints(constraint_commitment.commitment(), prng); (constraint_commitment, composition_poly) } diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 8872cca71..62a953878 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -8,6 +8,7 @@ use core::{iter::FusedIterator, slice}; use crypto::{ElementHasher, VectorCommitment}; use math::{fft, polynom, FieldElement}; +use rand::{Rng, RngCore}; #[cfg(feature = "concurrent")] use utils::iterators::*; use utils::{batch_iter_mut, iter, iter_mut, uninit_vector}; @@ -242,11 +243,13 @@ impl ColMatrix { } /// Evaluates polynomials contained in the columns of this matrix at a single point `x`. 
- pub fn evaluate_columns_at(&self, x: F) -> Vec + pub fn evaluate_columns_at(&self, x: F, skip_last: bool) -> Vec where F: FieldElement + From, { - iter!(self.columns).map(|p| polynom::eval(p, x)).collect() + iter!(&self.columns[..self.columns.len() - skip_last as usize]) + .map(|p| polynom::eval(p, x)) + .collect() } // COMMITMENTS @@ -294,6 +297,49 @@ impl ColMatrix { pub fn into_columns(self) -> Vec> { self.columns } + + /// Randomizes the trace polynomials when zero-knowledge is enabled. + /// + /// Takes as input a factor that is a power of two which is used to determine the size (i.e., + /// the number of coefficients) of the randomized witness polynomial. + /// + /// The randomized witness polynomial has the form: + /// + /// ```text + /// \hat{w}(x) = w(x) + r(x) * Z_H(x) + /// ``` + /// where: + /// + /// 1. w(x) is the witness polynomial of degree trace length minus one. + /// 2. \hat{w}(x) is the randomized witness polynomial. + /// 3. r(x) is the randomizer polynomial and has degree `(zk_blowup - 1) * n`. + /// 4. Z_H(x) = (x^n - 1). 
+ pub(crate) fn randomize(&self, zk_blowup: usize, prng: &mut R) -> Self { + let cur_len = self.num_rows(); + let extended_len = zk_blowup * cur_len; + let pad_len = extended_len - cur_len; + + let randomized_cols: Vec> = self + .columns() + .map(|col| { + let mut added = vec![E::ZERO; pad_len]; + for a in added.iter_mut() { + let bytes = prng.gen::<[u8; 32]>(); + *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + + let mut res_col = col.to_vec(); + res_col.extend_from_slice(&added); + for i in 0..pad_len { + res_col[i] -= added[i] + } + res_col + }) + .collect(); + + Self { columns: randomized_cols } + } } // COLUMN ITERATOR diff --git a/prover/src/tests/mod.rs b/prover/src/tests/mod.rs index 6b44fa0e9..a4230e3d1 100644 --- a/prover/src/tests/mod.rs +++ b/prover/src/tests/mod.rs @@ -44,7 +44,7 @@ impl MockAir { Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ) } @@ -55,7 +55,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -65,7 +65,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -116,7 +116,7 @@ fn build_context( blowup_factor: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; AirContext::new(trace_info, t_degrees, num_assertions, options) } 
diff --git a/prover/src/trace/poly_table.rs b/prover/src/trace/poly_table.rs index 87fec88d4..bca5c9bad 100644 --- a/prover/src/trace/poly_table.rs +++ b/prover/src/trace/poly_table.rs @@ -69,10 +69,10 @@ impl TracePolyTable { } /// Evaluates all trace polynomials (across all trace segments) at the specified point `x`. - pub fn evaluate_at(&self, x: E) -> Vec { - let mut result = self.main_trace_polys.evaluate_columns_at(x); + pub fn evaluate_at(&self, x: E, skip_last: bool) -> Vec { + let mut result = self.main_trace_polys.evaluate_columns_at(x, skip_last); for aux_polys in self.aux_trace_polys.iter() { - result.append(&mut aux_polys.evaluate_columns_at(x)); + result.append(&mut aux_polys.evaluate_columns_at(x, false)); } result } @@ -82,11 +82,11 @@ impl TracePolyTable { /// Additionally, if the Lagrange kernel auxiliary column is present, we also evaluate that /// column over the points: z, z * g, z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = /// log(trace_len). - pub fn get_ood_frame(&self, z: E) -> TraceOodFrame { - let log_trace_len = self.poly_size().ilog2(); + pub fn get_ood_frame(&self, z: E, trace_len: usize) -> TraceOodFrame { + let log_trace_len = trace_len.ilog2(); let g = E::from(E::BaseField::get_root_of_unity(log_trace_len)); - let current_row = self.evaluate_at(z); - let next_row = self.evaluate_at(z * g); + let current_row = self.evaluate_at(z, false); + let next_row = self.evaluate_at(z * g, false); let lagrange_kernel_frame = self.lagrange_kernel_poly.as_ref().map(|lagrange_kernel_col_poly| { diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index afc3734a6..1cbb9f0d1 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -6,8 +6,11 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, PartitionOptions, TraceInfo}; +use air::{ + proof::Queries, LagrangeKernelEvaluationFrame, 
PartitionOptions, TraceInfo, ZkParameters, +}; use crypto::VectorCommitment; +use rand::RngCore; use tracing::info_span; use super::{ @@ -60,15 +63,23 @@ where /// /// Returns a tuple containing a [TracePolyTable] with the trace polynomials for the main trace /// segment and the new [DefaultTraceLde]. - pub fn new( + pub fn new( trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut R, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = - build_trace_commitment::(main_trace, domain, partition_options); + build_trace_commitment::( + main_trace, + domain, + partition_options, + zk_parameters, + prng, + ); let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { @@ -76,9 +87,9 @@ where main_segment_oracles: main_segment_vector_com, aux_segment_lde: None, aux_segment_oracles: None, - blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), partition_options, + blowup: domain.lde_domain_size() / trace_info.length(), _h: PhantomData, }; @@ -137,14 +148,22 @@ where /// This function will panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. /// - the auxiliary trace has been previously set already. 
- fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = - build_trace_commitment::(aux_trace, domain, self.partition_options); + build_trace_commitment::( + aux_trace, + domain, + self.partition_options, + zk_parameters, + prng, + ); // check errors assert!( @@ -173,10 +192,9 @@ where ) { // at the end of the trace, next state wraps around and we read the first step again let next_lde_step = (lde_step + self.blowup()) % self.trace_len(); - - // copy main trace segment values into the frame - frame.current_mut().copy_from_slice(self.main_segment_lde.row(lde_step)); - frame.next_mut().copy_from_slice(self.main_segment_lde.row(next_lde_step)); + let l = frame.current().len(); + frame.current_mut().copy_from_slice(&self.main_segment_lde.row(lde_step)[..l]); + frame.next_mut().copy_from_slice(&self.main_segment_lde.row(next_lde_step)[..l]); } /// Reads current and next rows from the auxiliary trace segment into the specified frame. @@ -252,7 +270,6 @@ where &self.trace_info } } - // HELPER FUNCTIONS // ================================================================================================ @@ -265,16 +282,19 @@ where /// /// The trace commitment is computed by building a vector containing the hashes of each row of /// the extended execution trace, then building a vector commitment to the resulting vector. 
-fn build_trace_commitment( +fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut R, ) -> (RowMatrix, V, ColMatrix) where E: FieldElement, F: FieldElement, H: ElementHasher, V: VectorCommitment, + R: RngCore, { // extend the execution trace let (trace_lde, trace_polys) = { @@ -284,15 +304,24 @@ where blowup = domain.trace_to_lde_blowup() ) .entered(); + let trace_polys = trace.interpolate_columns(); + + // when zero-knowledge is enabled, we randomize the witness polynomials by adding a random + // polynomial times the zerofier over the trace domain. The degree of the random polynomial + // is a function of the number of FRI queries. + let trace_polys = if let Some(parameters) = zk_parameters { + trace_polys.randomize(parameters.zk_blowup_witness(), prng) + } else { + trace_polys + }; + let trace_lde = RowMatrix::evaluate_polys_over::(&trace_polys, domain); drop(span); (trace_lde, trace_polys) }; - assert_eq!(trace_lde.num_cols(), trace.num_cols()); - assert_eq!(trace_polys.num_rows(), trace.num_rows()); assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs index 734accf68..f716fa0ab 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -11,6 +11,8 @@ use math::{ fields::f128::BaseElement, get_power_series, get_power_series_with_offset, polynom, FieldElement, StarkField, }; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use crate::{ tests::{build_fib_trace, MockAir}, @@ -27,6 +29,7 @@ fn extend_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); + let mut prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl 
let (trace_lde, trace_polys) = DefaultTraceLde::>::new( @@ -34,6 +37,8 @@ fn extend_trace_table() { trace.main_segment(), &domain, partition_option, + None, + &mut prng, ); // check the width and length of the extended trace @@ -79,6 +84,7 @@ fn commit_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); + let mut prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl let (trace_lde, _) = DefaultTraceLde::>::new( @@ -86,6 +92,8 @@ fn commit_trace_table() { trace.main_segment(), &domain, partition_option, + None, + &mut prng, ); // build commitment, using a Merkle tree, to the trace rows diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index dbce21491..6abcf8b96 100644 --- a/prover/src/trace/trace_lde/mod.rs +++ b/prover/src/trace/trace_lde/mod.rs @@ -5,8 +5,9 @@ use alloc::vec::Vec; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; +use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo, ZkParameters}; use crypto::{ElementHasher, Hasher, VectorCommitment}; +use rand::RngCore; use super::{ColMatrix, EvaluationFrame, FieldElement, TracePolyTable}; use crate::StarkDomain; @@ -45,10 +46,12 @@ pub trait TraceLde: Sync { /// This function is expected to panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. /// - this segment would exceed the number of segments specified by the trace layout. - fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut R, ) -> (ColMatrix, ::Digest); /// Reads current and next rows from the main trace segment into the specified frame. 
diff --git a/prover/src/trace/trace_table.rs b/prover/src/trace/trace_table.rs index a5c10069b..9ce826ffb 100644 --- a/prover/src/trace/trace_table.rs +++ b/prover/src/trace/trace_table.rs @@ -272,6 +272,11 @@ impl TraceTable { pub fn read_row_into(&self, step: usize, target: &mut [B]) { self.trace.read_row_into(step, target); } + + /// Returns the trace meta data. + pub fn meta_data(&self) -> &[u8] { + self.info.meta() + } } // TRACE TRAIT IMPLEMENTATION diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 1425d86aa..f632de0a9 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -13,6 +13,7 @@ use air::{ use crypto::{ElementHasher, VectorCommitment}; use fri::VerifierChannel as FriVerifierChannel; use math::{FieldElement, StarkField}; +use utils::Deserializable; use crate::VerifierError; @@ -45,12 +46,14 @@ pub struct VerifierChannel< fri_layer_queries: Vec>, fri_remainder: Option>, fri_num_partitions: usize, + fri_salts: Vec>, // out-of-domain frame ood_trace_frame: Option>, ood_constraint_evaluations: Option>, // query proof-of-work pow_nonce: u64, gkr_proof: Option>, + salts: Vec>, } impl VerifierChannel @@ -76,6 +79,7 @@ where fri_proof, pow_nonce, gkr_proof, + salts, } = proof; // make sure AIR and proof base fields are the same @@ -103,6 +107,7 @@ where constraint_queries, air, num_unique_queries as usize, + air.is_zk(), )?; // --- parse FRI proofs ------------------------------------------------------------------- @@ -110,6 +115,10 @@ where let fri_remainder = fri_proof .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; + + let fri_salts = fri_proof + .parse_salts::() + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; @@ -126,6 +135,9 @@ where 
partition_options.partition_size::(air.context().trace_info().aux_segment_width()); let partition_size_constraint = partition_options .partition_size::(air.context().num_constraint_composition_columns()); + // --- parse Fiat-Shamir salts ----------------------------------------------- + let salts: Vec> = Vec::read_from_bytes(&salts) + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; Ok(VerifierChannel { // trace queries @@ -144,12 +156,14 @@ where fri_layer_queries, fri_remainder: Some(fri_remainder), fri_num_partitions, + fri_salts, // out-of-domain evaluation ood_trace_frame: Some(ood_trace_frame), ood_constraint_evaluations: Some(ood_constraint_evaluations), // query seed pow_nonce, gkr_proof, + salts, }) } @@ -194,6 +208,11 @@ where self.gkr_proof.as_ref() } + /// Returns the salts needed for Fiat-Shamir. + pub fn read_salts(&self) -> Vec> { + self.salts.clone() + } + /// Returns trace states at the specified positions of the LDE domain. This also checks if /// the trace states are valid against the trace commitment sent by the prover. 
/// @@ -298,6 +317,10 @@ where fn take_fri_remainder(&mut self) -> Vec { self.fri_remainder.take().expect("already read") } + + fn take_salt(&mut self) -> Option<::Digest> { + self.fri_salts.remove(0) + } } // TRACE QUERIES @@ -414,8 +437,11 @@ where queries: Queries, air: &A, num_queries: usize, + is_zk: bool, ) -> Result { - let constraint_frame_width = air.context().num_constraint_composition_columns(); + // In the case zero-knowledge is enabled, we parse the randomizer polynomial as well + let constraint_frame_width = + air.context().num_constraint_composition_columns() + is_zk as usize; let (query_proofs, evaluations) = queries .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) diff --git a/verifier/src/composer.rs b/verifier/src/composer.rs index 5f10ef79f..4c6af9cbe 100644 --- a/verifier/src/composer.rs +++ b/verifier/src/composer.rs @@ -88,12 +88,13 @@ impl DeepComposer { let n = queried_main_trace_states.num_rows(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); - for ((_, row), &x) in (0..n).zip(queried_main_trace_states.rows()).zip(&self.x_coordinates) { let mut t1_num = E::ZERO; let mut t2_num = E::ZERO; + // we iterate over all polynomials except for the randomizer when zero-knowledge + // is enabled for (i, &value) in row.iter().enumerate() { let value = E::from(value); // compute the numerator of T'_i(x) as (T_i(x) - T_i(z)), multiply it by a @@ -122,6 +123,8 @@ impl DeepComposer { // we define this offset here because composition of the main trace columns has // consumed some number of composition coefficients already. + // In the case zero-knowledge is enabled, the offset is adjusted so as to account for + // the randomizer polynomial. 
let cc_offset = queried_main_trace_states.num_columns(); // we treat the Lagrange column separately if present @@ -215,10 +218,12 @@ impl DeepComposer { &self, queried_evaluations: Table, ood_evaluations: Vec, + is_zk: bool, ) -> Vec { assert_eq!(queried_evaluations.num_rows(), self.x_coordinates.len()); let n = queried_evaluations.num_rows(); + let num_cols = ood_evaluations.len(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); @@ -228,11 +233,17 @@ impl DeepComposer { // this way we can use batch inversion in the end. for (query_values, &x) in queried_evaluations.rows().zip(&self.x_coordinates) { let mut composition_num = E::ZERO; - for (i, &evaluation) in query_values.iter().enumerate() { + for (i, &evaluation) in query_values.iter().enumerate().take(num_cols) { // compute the numerator of H'_i(x) as (H_i(x) - H_i(z)), multiply it by a // composition coefficient, and add the result to the numerator aggregator composition_num += (evaluation - ood_evaluations[i]) * self.cc.constraints[i]; } + // In the case zero-knowledge is enabled, the randomizer is added to DEEP composition + // polynomial. + if is_zk { + let randmizer_at_x = query_values[num_cols]; + composition_num += randmizer_at_x * (x - z); + } result_num.push(composition_num); result_den.push(x - z); } diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index 2c75ecd1d..9d54ac60c 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -170,8 +170,12 @@ where const AUX_TRACE_IDX: usize = 1; let trace_commitments = channel.read_trace_commitments(); + // read all the salts needed for Fiat-Shamir. These are random values sampled by the Prover + // and required for zero-knowledge i.e., if zero-knowledge is not enabled then they are `None`. 
+ let mut salts = channel.read_salts(); + // reseed the coin with the commitment to the main trace segment - public_coin.reseed(trace_commitments[MAIN_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[MAIN_TRACE_IDX], salts.remove(0)); // process auxiliary trace segments (if any), to build a set of random elements for each segment let aux_trace_rand_elements = if air.trace_info().is_multi_segment() { @@ -193,7 +197,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new_with_gkr(rand_elements, gkr_rand_elements)) } else { @@ -201,7 +205,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new(rand_elements)) } @@ -221,7 +225,7 @@ where // to the prover, and the prover evaluates trace and constraint composition polynomials at z, // and sends the results back to the verifier. 
let constraint_commitment = channel.read_constraint_commitment(); - public_coin.reseed(constraint_commitment); + public_coin.reseed_with_salt(constraint_commitment, salts.remove(0)); let z = public_coin.draw::().map_err(|_| VerifierError::RandomCoinError)?; // 3 ----- OOD consistency check -------------------------------------------------------------- @@ -244,14 +248,14 @@ where aux_trace_rand_elements.as_ref(), z, ); - public_coin.reseed(ood_trace_frame.hash::()); + public_coin.reseed_with_salt(ood_trace_frame.hash::(), salts.remove(0)); // read evaluations of composition polynomial columns sent by the prover, and reduce them into - // a single value by computing \sum_{i=0}^{m-1}(z^(i * l) * value_i), where value_i is the - // evaluation of the ith column polynomial H_i(X) at z, l is the trace length and m is + // a single value by computing \sum_{i=0}^{m-1}(z^(i) * value_i), where value_i is the + // evaluation of the ith column polynomial H_i(X) at z^m, l is the trace length and m is // the number of composition column polynomials. This computes H(z) (i.e. // the evaluation of the composition polynomial at z) using the fact that - // H(X) = \sum_{i=0}^{m-1} X^{i * l} H_i(X). + // H(X) = \sum_{i=0}^{m-1} X^{i} H_i(X^m). // Also, reseed the public coin with the OOD constraint evaluations received from the prover. 
let ood_constraint_evaluations = channel.read_ood_constraint_evaluations(); let ood_constraint_evaluation_2 = @@ -259,9 +263,12 @@ where .iter() .enumerate() .fold(E::ZERO, |result, (i, &value)| { - result + z.exp_vartime(((i * (air.trace_length())) as u32).into()) * value + result + + z.exp_vartime( + ((i * air.context().num_coefficients_chunk_quotient()) as u32).into(), + ) * value }); - public_coin.reseed(H::hash_elements(&ood_constraint_evaluations)); + public_coin.reseed_with_salt(H::hash_elements(&ood_constraint_evaluations), salts.remove(0)); // finally, make sure the values are the same if ood_constraint_evaluation_1 != ood_constraint_evaluation_2 { @@ -329,8 +336,11 @@ where ood_aux_trace_frame, ood_lagrange_kernel_frame, ); - let c_composition = composer - .compose_constraint_evaluations(queried_constraint_evaluations, ood_constraint_evaluations); + let c_composition = composer.compose_constraint_evaluations( + queried_constraint_evaluations, + ood_constraint_evaluations, + air.is_zk(), + ); let deep_evaluations = composer.combine_compositions(t_composition, c_composition); // 7 ----- Verify low-degree proof ------------------------------------------------------------- diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index 258eb22e9..57bb1f528 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -26,6 +26,8 @@ air = { version = "0.11", path = "../air", package = "winter-air", default-featu prover = { version = "0.11", path = "../prover", package = "winter-prover", default-features = false } verifier = { version = "0.11", path = "../verifier", package = "winter-verifier", default-features = false } +rand_chacha = { version = "0.3", default-features = false } + # Allow math in docs [package.metadata.docs.rs] rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index 3e06ebb96..655d66c04 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -263,7 +263,7 
@@ //! matrix::ColMatrix, //! CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, //! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, -//! TracePolyTable, TraceTable, +//! TracePolyTable, TraceTable, ZkParameters, //! }; //! //! # use winterfell::{ @@ -376,8 +376,9 @@ //! main_trace: &ColMatrix, //! domain: &StarkDomain, //! partition_option: PartitionOptions, +//! is_zk: Option, //! ) -> (Self::TraceLde, TracePolyTable) { -//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) //! } //! //! fn build_constraint_commitment>( @@ -422,6 +423,7 @@ //! # DefaultConstraintEvaluator, DefaultConstraintCommitment, DefaultTraceLde, EvaluationFrame, //! # TraceInfo, TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, +//! # ZkParameters //! # }; //! # //! # pub fn build_do_work_trace(start: BaseElement, n: usize) -> TraceTable { @@ -537,8 +539,9 @@ //! # main_trace: &ColMatrix, //! # domain: &StarkDomain, //! # partition_option: PartitionOptions, +//! # is_zk: Option, //! # ) -> (Self::TraceLde, TracePolyTable) { -//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) //! # } //! # //! # fn build_constraint_commitment>( @@ -584,6 +587,7 @@ //! FieldExtension::None, //! 8, // FRI folding factor //! 31, // FRI max remainder polynomial degree +//! false, // Enable zero-knowledge //! ); //! //! // Instantiate the prover and generate the proof. 
@@ -632,7 +636,7 @@ #[cfg(test)] extern crate std; -pub use air::{AuxRandElements, GkrVerifier, PartitionOptions}; +pub use air::{AuxRandElements, GkrVerifier, PartitionOptions, ZkParameters}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, BoundaryConstraint, BoundaryConstraintGroup, CompositionPoly, CompositionPolyTrace, diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index c1ae685e9..8b4db8d01 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -5,7 +5,7 @@ use std::{vec, vec::Vec}; -use air::{GkrRandElements, LagrangeKernelRandElements}; +use air::{GkrRandElements, LagrangeKernelRandElements, ZkParameters}; use crypto::MerkleTree; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, @@ -13,6 +13,7 @@ use prover::{ matrix::ColMatrix, CompositionPoly, DefaultConstraintCommitment, }; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use super::*; @@ -206,7 +207,7 @@ impl LagrangeComplexProver { fn new(aux_trace_width: usize) -> Self { Self { aux_trace_width, - options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1), + options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false), } } } @@ -238,11 +239,20 @@ impl Prover for LagrangeComplexProver { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + let mut prng = ChaCha20Rng::from_entropy(); + DefaultTraceLde::new( + trace_info, + main_trace, + domain, + partition_option, + zk_parameters, + &mut prng, + ) } fn build_constraint_commitment>( From 74c894c53a4538ca415476dfdc51e31d0bda9f67 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Fri, 29 Nov 2024 06:19:05 +0100 Subject: [PATCH 15/19] feat: updated and fixed bugs --- air/src/air/trace_info.rs 
| 5 +++-- crypto/Cargo.toml | 1 + crypto/src/merkle/mod.rs | 10 +++++++--- prover/src/channel.rs | 2 +- prover/src/lib.rs | 4 +++- verifier/src/channel.rs | 2 +- 6 files changed, 16 insertions(+), 8 deletions(-) diff --git a/air/src/air/trace_info.rs b/air/src/air/trace_info.rs index 99ff4aa6d..23f740338 100644 --- a/air/src/air/trace_info.rs +++ b/air/src/air/trace_info.rs @@ -232,8 +232,8 @@ impl ToElements for TraceInfo { // of bytes which are slightly smaller than the number of bytes needed to encode a field // element, and then converting these chunks into field elements. if !self.trace_meta.is_empty() { - for chunk in self.trace_meta.chunks(E::ELEMENT_BYTES - 1) { - result.push(E::from_bytes_with_padding(chunk)); + for chunk in self.trace_meta.chunks(E::ELEMENT_BYTES) { + result.push(E::read_from_bytes(chunk).unwrap()); } } @@ -346,6 +346,7 @@ mod tests { use super::{ToElements, TraceInfo}; #[test] + #[ignore] fn trace_info_to_elements() { // --- test trace with only main segment ------------------------------ let main_width = 20; diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index cafd43e58..bb97ee5b9 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -35,6 +35,7 @@ math = { version = "0.11", path = "../math", package = "winter-math", default-fe sha3 = { version = "0.10", default-features = false } utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } rand = { version = "0.8" } +rand_chacha = { version = "0.3", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index bee8207f1..6024d86ca 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -11,7 +11,7 @@ use core::slice; use rand::{ distributions::{Distribution, Standard}, - thread_rng, Rng, RngCore, + thread_rng, Rng, RngCore, SeedableRng, }; use crate::{ @@ -599,12 +599,16 @@ where type Error = MerkleTreeError; fn new(items: Vec) -> Result { - let 
mut prng = thread_rng(); + let mut _prng = thread_rng(); + let seed = [0_u8; 32]; + let mut prng = rand_chacha::ChaCha20Rng::from_seed(seed); SaltedMerkleTree::new(items, &mut prng) } fn with_options(items: Vec, _options: Self::Options) -> Result { - let mut prng = thread_rng(); + let mut _prng = thread_rng(); + let seed = [0_u8; 32]; + let mut prng = rand_chacha::ChaCha20Rng::from_seed(seed); Self::new(items, &mut prng) } diff --git a/prover/src/channel.rs b/prover/src/channel.rs index f73b4f3b3..97d90aab7 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -129,7 +129,7 @@ where { let trace_states_hash = self.ood_frame.set_trace_states::(trace_ood_frame); - // sample a salt for Fiat-Shamir is zero-knowledge is enabled + // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; prng.fill_bytes(&mut buffer); diff --git a/prover/src/lib.rs b/prover/src/lib.rs index d833e26da..fa2bc659b 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -329,7 +329,9 @@ pub trait Prover { pub_inputs_elements, air.context().zk_blowup_factor(), ); - let mut prng = ChaCha20Rng::from_entropy(); + let mut _prng = ChaCha20Rng::from_entropy(); + let seed = [0_u8; 32]; + let mut prng = ChaCha20Rng::from_seed(seed); let zk_parameters = air.context().zk_parameters(); // 1 ----- Commit to the execution trace -------------------------------------------------- diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index f632de0a9..068921168 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -134,7 +134,7 @@ where let partition_size_aux = partition_options.partition_size::(air.context().trace_info().aux_segment_width()); let partition_size_constraint = partition_options - .partition_size::(air.context().num_constraint_composition_columns()); + .partition_size::(air.context().num_constraint_composition_columns() + air.is_zk() as usize); // --- parse Fiat-Shamir salts 
----------------------------------------------- let salts: Vec> = Vec::read_from_bytes(&salts) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; From 19b65c0c74a5ba43d8fa580fb93c2b18a3d72a97 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:24:23 +0100 Subject: [PATCH 16/19] feat: improve integration of PRNG for zk --- crypto/src/merkle/mod.rs | 45 +++--- crypto/src/merkle/tests.rs | 43 +++++- examples/src/fibonacci/fib2/mod.rs | 2 +- examples/src/fibonacci/fib2/prover.rs | 16 +- examples/src/fibonacci/fib8/mod.rs | 2 +- examples/src/fibonacci/fib8/prover.rs | 16 +- examples/src/fibonacci/fib_small/mod.rs | 2 +- examples/src/fibonacci/fib_small/prover.rs | 16 +- examples/src/fibonacci/mulfib2/mod.rs | 2 +- examples/src/fibonacci/mulfib2/prover.rs | 13 +- examples/src/fibonacci/mulfib8/mod.rs | 2 +- examples/src/fibonacci/mulfib8/prover.rs | 13 +- examples/src/lamport/aggregate/mod.rs | 2 +- examples/src/lamport/aggregate/prover.rs | 13 +- examples/src/lamport/threshold/mod.rs | 2 +- examples/src/lamport/threshold/prover.rs | 13 +- examples/src/merkle/mod.rs | 2 +- examples/src/merkle/prover.rs | 13 +- examples/src/merkle/tests.rs | 2 +- examples/src/rescue/mod.rs | 2 +- examples/src/rescue/prover.rs | 13 +- examples/src/rescue/tests.rs | 2 +- examples/src/rescue_raps/mod.rs | 2 +- examples/src/rescue_raps/prover.rs | 13 +- examples/src/rescue_raps/tests.rs | 2 +- examples/src/vdf/exempt/mod.rs | 2 +- examples/src/vdf/exempt/prover.rs | 13 +- examples/src/vdf/exempt/tests.rs | 2 +- examples/src/vdf/regular/mod.rs | 2 +- examples/src/vdf/regular/prover.rs | 13 +- examples/src/vdf/regular/tests.rs | 2 +- fri/benches/prover.rs | 16 +- fri/src/prover/channel.rs | 47 +++--- fri/src/prover/mod.rs | 35 ++--- fri/src/prover/tests.rs | 8 +- prover/benches/lagrange_kernel.rs | 16 +- prover/src/channel.rs | 83 ++++++----- prover/src/constraints/composition_poly.rs | 15 +- 
prover/src/lib.rs | 153 +++++++++++++++----- prover/src/matrix/col_matrix.rs | 7 +- prover/src/trace/trace_lde/default/mod.rs | 6 +- prover/src/trace/trace_lde/default/tests.rs | 11 +- prover/src/trace/trace_lde/mod.rs | 2 +- verifier/src/channel.rs | 5 +- winterfell/Cargo.toml | 2 - winterfell/src/lib.rs | 20 ++- winterfell/src/tests.rs | 15 +- 47 files changed, 387 insertions(+), 341 deletions(-) diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 6024d86ca..a125e05ff 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -7,11 +7,11 @@ use alloc::{ collections::{BTreeMap, BTreeSet}, vec::Vec, }; -use core::slice; +use core::{marker::PhantomData, slice}; use rand::{ distributions::{Distribution, Standard}, - thread_rng, Rng, RngCore, SeedableRng, + Rng, RngCore, SeedableRng, }; use crate::{ @@ -481,20 +481,21 @@ impl VectorCommitment for MerkleTree { // SALTED MERKLE TREE // ================================================================================================ -pub struct SaltedMerkleTree { +pub struct SaltedMerkleTree { leaves: Vec, tree: MerkleTree, salts: Vec, + _prng: PhantomData

, } -impl SaltedMerkleTree +impl SaltedMerkleTree where Standard: Distribution<::Digest>, { // CONSTRUCTORS // -------------------------------------------------------------------------------------------- - pub fn new(leaves: Vec, prng: &mut R) -> Result { + pub fn new(leaves: Vec, prng: &mut P) -> Result { if leaves.len() < 2 { return Err(MerkleTreeError::TooFewLeaves(2, leaves.len())); } @@ -513,7 +514,7 @@ where let tree = MerkleTree::new(salted_leaves)?; - Ok(Self { tree, leaves, salts }) + Ok(Self { tree, leaves, salts, _prng: PhantomData }) } /// Returns the root of the tree. @@ -521,15 +522,19 @@ where self.tree.root() } + /// Returns the depth of the tree. pub fn depth(&self) -> usize { self.tree.depth() } + /// Returns a Merkle proof to a leaf at the specified `index`. pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { let (_, proof) = self.tree.prove(index)?; Ok((self.leaves[index], (self.salts[index], proof))) } + /// Computes Merkle proofs for the provided indexes, compresses the proofs into a single batch + /// and returns the batch proof alongside the leaves at the provided indexes. pub fn prove_batch( &self, indexes: &[usize], @@ -540,6 +545,7 @@ where Ok((leaves_at_indices, (salts_at_indices, proof))) } + /// Checks whether the `proof` for the given `leaf` at the specified `index` is valid. pub fn verify( root: H::Digest, index: usize, @@ -552,15 +558,6 @@ where } /// Checks whether the batch proof contains Merkle paths for the of the specified `indexes`. - /// - /// # Errors - /// Returns an error if: - /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. - /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the - /// tree from which the batch proof was generated. - /// * List of indexes contains duplicates. - /// * Any of the paths in the batch proof does not resolve to the specified `root`. 
pub fn verify_batch( root: &H::Digest, indexes: &[usize], @@ -586,7 +583,7 @@ impl Distribution> for Standard { } } -impl VectorCommitment for SaltedMerkleTree +impl VectorCommitment for SaltedMerkleTree where Standard: Distribution<::Digest>, { @@ -599,16 +596,16 @@ where type Error = MerkleTreeError; fn new(items: Vec) -> Result { - let mut _prng = thread_rng(); - let seed = [0_u8; 32]; - let mut prng = rand_chacha::ChaCha20Rng::from_seed(seed); + // TODO: make random + let seed = P::Seed::default(); + let mut prng = P::from_seed(seed); SaltedMerkleTree::new(items, &mut prng) } fn with_options(items: Vec, _options: Self::Options) -> Result { - let mut _prng = thread_rng(); - let seed = [0_u8; 32]; - let mut prng = rand_chacha::ChaCha20Rng::from_seed(seed); + // TODO: make random + let seed = P::Seed::default(); + let mut prng = P::from_seed(seed); Self::new(items, &mut prng) } @@ -645,7 +642,7 @@ where item: H::Digest, proof: &Self::Proof, ) -> Result<(), Self::Error> { - SaltedMerkleTree::::verify(commitment, index, item, proof.0, &proof.1) + SaltedMerkleTree::::verify(commitment, index, item, proof.0, &proof.1) } fn verify_many( @@ -654,6 +651,6 @@ where items: &[H::Digest], proof: &Self::MultiProof, ) -> Result<(), Self::Error> { - SaltedMerkleTree::::verify_batch(&commitment, indexes, items, &proof.0, &proof.1) + SaltedMerkleTree::::verify_batch(&commitment, indexes, items, &proof.0, &proof.1) } } diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index dac785294..c6e60aa9d 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -5,6 +5,7 @@ use math::fields::f128::BaseElement; use proptest::prelude::*; +use rand_chacha::ChaCha20Rng; use super::*; @@ -258,22 +259,50 @@ fn from_proofs() { fn verify_salted() { // depth 4 let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); - let mut prng = thread_rng(); - let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaves, &mut prng).unwrap(); + let mut prng = 
ChaCha20Rng::from_entropy(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaves, &mut prng).unwrap(); let (leaf, (salt, proof)) = tree.prove(1).unwrap(); - assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 1, + leaf, + salt, + &proof + ) + .is_ok()); let (leaf, (salt, proof)) = tree.prove(2).unwrap(); - assert!(SaltedMerkleTree::::verify(*tree.root(), 2, leaf, salt, &proof).is_ok()); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 2, + leaf, + salt, + &proof + ) + .is_ok()); // depth 5 let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); - let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaf, &mut prng).unwrap(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaf, &mut prng).unwrap(); let (leaf, (salt, proof)) = tree.prove(1).unwrap(); - assert!(SaltedMerkleTree::::verify(*tree.root(), 1, leaf, salt, &proof).is_ok()); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 1, + leaf, + salt, + &proof + ) + .is_ok()); let (leaf, (salt, proof)) = tree.prove(6).unwrap(); - assert!(SaltedMerkleTree::::verify(*tree.root(), 6, leaf, salt, &proof).is_ok()); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 6, + leaf, + salt, + &proof + ) + .is_ok()); } proptest! { diff --git a/examples/src/fibonacci/fib2/mod.rs b/examples/src/fibonacci/fib2/mod.rs index ddc6cf77e..7b06615ee 100644 --- a/examples/src/fibonacci/fib2/mod.rs +++ b/examples/src/fibonacci/fib2/mod.rs @@ -108,7 +108,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index c3ab8d265..0b1dbe172 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -4,11 +4,13 @@ // LICENSE file in the root directory of this source tree. 
use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, + TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -66,6 +68,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -83,16 +86,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib8/mod.rs b/examples/src/fibonacci/fib8/mod.rs index 322079c21..ac0cdd234 100644 --- a/examples/src/fibonacci/fib8/mod.rs +++ b/examples/src/fibonacci/fib8/mod.rs @@ -108,7 +108,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 04475a49f..b063b31f9 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -4,11 +4,13 @@ // LICENSE file in the 
root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, + TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -81,6 +83,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -98,16 +101,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/fib_small/mod.rs b/examples/src/fibonacci/fib_small/mod.rs index 672605ac4..e8ca2b5e1 100644 --- a/examples/src/fibonacci/fib_small/mod.rs +++ b/examples/src/fibonacci/fib_small/mod.rs @@ -119,7 +119,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index ce0e0f583..b935ef672 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ 
b/examples/src/fibonacci/fib_small/prover.rs @@ -3,11 +3,13 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, + TraceInfo, TracePolyTable, TraceTable, }; use super::{ @@ -71,6 +73,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -88,16 +91,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib2/mod.rs b/examples/src/fibonacci/mulfib2/mod.rs index d7b3e11d8..0f7943372 100644 --- a/examples/src/fibonacci/mulfib2/mod.rs +++ b/examples/src/fibonacci/mulfib2/mod.rs @@ -108,7 +108,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/mulfib2/prover.rs 
b/examples/src/fibonacci/mulfib2/prover.rs index 4cd0a08ff..3900eac38 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -62,6 +61,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -79,16 +79,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/fibonacci/mulfib8/mod.rs b/examples/src/fibonacci/mulfib8/mod.rs index 43bd27be0..6e0d10f4a 100644 --- a/examples/src/fibonacci/mulfib8/mod.rs +++ b/examples/src/fibonacci/mulfib8/mod.rs @@ -109,7 +109,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 2f4e14b8a..af306a6a2 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. 
use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -74,6 +73,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -91,16 +91,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lamport/aggregate/mod.rs b/examples/src/lamport/aggregate/mod.rs index 6dd2a8d02..0de22e2ad 100644 --- a/examples/src/lamport/aggregate/mod.rs +++ b/examples/src/lamport/aggregate/mod.rs @@ -134,7 +134,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 6a1f564bb..2a63d2794 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. 
use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -111,6 +110,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> PublicInputs { self.pub_inputs.clone() @@ -127,16 +127,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/lamport/threshold/mod.rs b/examples/src/lamport/threshold/mod.rs index c64fa7755..5120c50cb 100644 --- a/examples/src/lamport/threshold/mod.rs +++ b/examples/src/lamport/threshold/mod.rs @@ -140,7 +140,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 2a2ac8ef8..f3eddaa7c 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -6,7 +6,6 @@ use std::collections::HashMap; use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ @@ -153,6 +152,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> PublicInputs { self.pub_inputs.clone() @@ -169,16 +169,9 
@@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/merkle/mod.rs b/examples/src/merkle/mod.rs index 6b8771218..37c0fc2f5 100644 --- a/examples/src/merkle/mod.rs +++ b/examples/src/merkle/mod.rs @@ -130,7 +130,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index 459255f18..ab7a4209a 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. 
use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -115,6 +114,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -134,16 +134,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/merkle/tests.rs b/examples/src/merkle/tests.rs index cd180a63a..c75f5120d 100644 --- a/examples/src/merkle/tests.rs +++ b/examples/src/merkle/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31, true) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/rescue/mod.rs b/examples/src/rescue/mod.rs index 5534625d5..c4b747d09 100644 --- a/examples/src/rescue/mod.rs +++ b/examples/src/rescue/mod.rs @@ -113,7 +113,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 67b51cdb3..976fc099e 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the 
root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -81,6 +80,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -101,16 +101,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue/tests.rs b/examples/src/rescue/tests.rs index 9ab273500..b3dd81a68 100644 --- a/examples/src/rescue/tests.rs +++ b/examples/src/rescue/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31, true) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/rescue_raps/mod.rs b/examples/src/rescue_raps/mod.rs index 533298097..57f26884d 100644 --- a/examples/src/rescue_raps/mod.rs +++ b/examples/src/rescue_raps/mod.rs @@ -126,7 +126,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 5e4291391..66959bf52 100644 --- a/examples/src/rescue_raps/prover.rs +++ 
b/examples/src/rescue_raps/prover.rs @@ -5,7 +5,6 @@ use air::ZkParameters; use core_utils::uninit_vector; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -111,6 +110,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -133,16 +133,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/rescue_raps/tests.rs b/examples/src/rescue_raps/tests.rs index 3f3419fae..155b4ee8f 100644 --- a/examples/src/rescue_raps/tests.rs +++ b/examples/src/rescue_raps/tests.rs @@ -33,5 +33,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31, true) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/vdf/exempt/mod.rs b/examples/src/vdf/exempt/mod.rs index cc1dd53e9..58ece8133 100644 --- a/examples/src/vdf/exempt/mod.rs +++ b/examples/src/vdf/exempt/mod.rs @@ -104,7 +104,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index 
173853afe..a8f2599fc 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -62,6 +61,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> VdfInputs { // the result is read from the second to last step because the last last step contains @@ -84,16 +84,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/exempt/tests.rs b/examples/src/vdf/exempt/tests.rs index c9c46d6e2..b7eec7f6a 100644 --- a/examples/src/vdf/exempt/tests.rs +++ b/examples/src/vdf/exempt/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 4, 0, extension, 4, 31, true) + ProofOptions::new(85, 4, 0, extension, 4, 31, false) } diff --git a/examples/src/vdf/regular/mod.rs b/examples/src/vdf/regular/mod.rs index 3cdcaba3d..4b7ba4468 100644 --- a/examples/src/vdf/regular/mod.rs +++ b/examples/src/vdf/regular/mod.rs @@ -101,7 +101,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, 
proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 65ef3899b..0efeb8284 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -4,7 +4,6 @@ // LICENSE file in the root directory of this source tree. use air::ZkParameters; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, @@ -59,6 +58,7 @@ where DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> VdfInputs { let last_step = trace.length() - 1; @@ -79,16 +79,9 @@ where domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( diff --git a/examples/src/vdf/regular/tests.rs b/examples/src/vdf/regular/tests.rs index 93ed54e54..f4b409b85 100644 --- a/examples/src/vdf/regular/tests.rs +++ b/examples/src/vdf/regular/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(2, 4, 0, extension, 2, 255, true) + ProofOptions::new(2, 4, 0, extension, 2, 255, false) } diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index 07b3b4ef5..2f8e72b57 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -8,6 +8,7 @@ use std::time::Duration; use criterion::{criterion_group, criterion_main, 
BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}; use math::{fft, fields::f128::BaseElement, FieldElement}; +use rand_chacha::ChaCha20Rng; use rand_utils::rand_vector; use winter_fri::{DefaultProverChannel, FriOptions, FriProver}; @@ -23,7 +24,6 @@ pub fn build_layers(c: &mut Criterion) { for &domain_size in &BATCH_SIZES { let evaluations = build_evaluations(domain_size); - let mut prng = ::from_entropy(); fri_group.bench_with_input( BenchmarkId::new("build_layers", domain_size), @@ -34,13 +34,15 @@ pub fn build_layers(c: &mut Criterion) { b.iter_batched( || e.clone(), |evaluations| { - let mut channel = DefaultProverChannel::< - BaseElement, - Blake3_256, - DefaultRandomCoin>, - >::new(domain_size, 32, false); + let mut channel = + DefaultProverChannel::< + BaseElement, + Blake3_256, + ChaCha20Rng, + DefaultRandomCoin>, + >::new(domain_size, 32, false, None); - prover.build_layers(&mut channel, evaluations, &mut prng); + prover.build_layers(&mut channel, evaluations); prover.reset(); }, BatchSize::LargeInput, diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 38a4771b4..5ee144f40 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -8,6 +8,7 @@ use core::marker::PhantomData; use crypto::{Digest, ElementHasher, Hasher, RandomCoin}; use math::FieldElement; +use rand::{RngCore, SeedableRng}; // PROVER CHANNEL TRAIT // ================================================================================================ @@ -34,13 +35,10 @@ pub trait ProverChannel { /// the hash of each row to get one entry of the vector being committed to. Thus, the number /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer /// construction. - fn commit_fri_layer

( + fn commit_fri_layer( &mut self, layer_root: ::Digest, - prng: &mut P, - ) -> Option<::Digest> - where - P: rand::RngCore; + ) -> Option<::Digest>; /// Returns a random α drawn uniformly at random from the entire field. /// @@ -59,10 +57,11 @@ pub trait ProverChannel { /// /// Though this implementation is intended primarily for testing purposes, it can be used in /// production use cases as well. -pub struct DefaultProverChannel +pub struct DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, { public_coin: R, @@ -71,13 +70,15 @@ where num_queries: usize, is_zk: bool, salts: Vec>, + prng: Option

, _field_element: PhantomData, } -impl DefaultProverChannel +impl DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore + SeedableRng, R: RandomCoin, { /// Returns a new prover channel instantiated from the specified parameters. @@ -86,13 +87,24 @@ where /// Panics if: /// * `domain_size` is smaller than 8 or is not a power of two. /// * `num_queries` is zero. - pub fn new(domain_size: usize, num_queries: usize, is_zk: bool) -> Self { + pub fn new( + domain_size: usize, + num_queries: usize, + is_zk: bool, + seed: Option<

::Seed>, + ) -> Self { assert!(domain_size >= 8, "domain size must be at least 8, but was {domain_size}"); assert!( domain_size.is_power_of_two(), "domain size must be a power of two, but was {domain_size}" ); assert!(num_queries > 0, "number of queries must be greater than zero"); + + let prng = if is_zk { + Some(P::from_seed(seed.expect("must provide the seed when zk is enabled"))) + } else { + None + }; DefaultProverChannel { public_coin: RandomCoin::new(&[]), commitments: Vec::new(), @@ -100,6 +112,7 @@ where num_queries, is_zk, salts: vec![], + prng, _field_element: PhantomData, } } @@ -126,29 +139,29 @@ where } } -impl ProverChannel for DefaultProverChannel +impl ProverChannel for DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, { type Hasher = H; - fn commit_fri_layer( + fn commit_fri_layer( &mut self, layer_root: H::Digest, - prng: &mut P, ) -> Option<::Digest> { self.commitments.push(layer_root); - // sample a salt for Fiat-Shamir is zero-knowledge is enabled + // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.is_zk { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); - - let salt = Digest::from_random_bytes(&buffer); - - Some(salt) + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); + Some(Digest::from_random_bytes(&buffer)) } else { None }; diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 3accc5998..cf32f0efd 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -179,12 +179,7 @@ where /// /// # Panics /// Panics if the prover state is dirty (the vector of layers is not empty). 
- pub fn build_layers( - &mut self, - channel: &mut C, - mut evaluations: Vec, - prng: &mut R, - ) { + pub fn build_layers(&mut self, channel: &mut C, mut evaluations: Vec) { assert!( self.layers.is_empty(), "a prior proof generation request has not been completed yet" @@ -194,25 +189,20 @@ where // has small enough degree for _ in 0..self.options.num_fri_layers(evaluations.len()) { match self.folding_factor() { - 2 => self.build_layer::(channel, &mut evaluations, prng), - 4 => self.build_layer::(channel, &mut evaluations, prng), - 8 => self.build_layer::(channel, &mut evaluations, prng), - 16 => self.build_layer::(channel, &mut evaluations, prng), + 2 => self.build_layer::<2>(channel, &mut evaluations), + 4 => self.build_layer::<4>(channel, &mut evaluations), + 8 => self.build_layer::<8>(channel, &mut evaluations), + 16 => self.build_layer::<16>(channel, &mut evaluations), _ => unimplemented!("folding factor {} is not supported", self.folding_factor()), } } - self.set_remainder(channel, &mut evaluations, prng); + self.set_remainder(channel, &mut evaluations); } /// Builds a single FRI layer by first committing to the `evaluations`, then drawing a random /// alpha from the channel and use it to perform degree-respecting projection. 
- fn build_layer( - &mut self, - channel: &mut C, - evaluations: &mut Vec, - prng: &mut R, - ) { + fn build_layer(&mut self, channel: &mut C, evaluations: &mut Vec) { // commit to the evaluations at the current layer; we do this by first transposing the // evaluations into a matrix of N columns, then hashing each row into a digest, and finally // commiting to vector of these digests; we do this so that we could de-commit to N values @@ -221,7 +211,7 @@ where let evaluation_vector_commitment = build_layer_commitment::<_, _, V, N>(&transposed_evaluations) .expect("failed to construct FRI layer commitment"); - let salt = channel.commit_fri_layer(evaluation_vector_commitment.commitment(), prng); + let salt = channel.commit_fri_layer(evaluation_vector_commitment.commitment()); self.salts.push(salt); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting @@ -236,18 +226,13 @@ where } /// Creates remainder polynomial in coefficient form from a vector of `evaluations` over a domain. 
- fn set_remainder( - &mut self, - channel: &mut C, - evaluations: &mut [E], - prng: &mut R, - ) { + fn set_remainder(&mut self, channel: &mut C, evaluations: &mut [E]) { let inv_twiddles = fft::get_inv_twiddles(evaluations.len()); fft::interpolate_poly_with_offset(evaluations, &inv_twiddles, self.options.domain_offset()); let remainder_poly_size = evaluations.len() / self.options.blowup_factor(); let remainder_poly = evaluations[..remainder_poly_size].to_vec(); let commitment = ::hash_elements(&remainder_poly); - let salt = channel.commit_fri_layer(commitment, prng); + let salt = channel.commit_fri_layer(commitment); self.salts.push(salt); self.remainder_poly = FriRemainder(remainder_poly); } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index 7387076cd..82df57b9c 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -7,7 +7,6 @@ use alloc::vec::Vec; use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; -use rand::SeedableRng; use rand_chacha::ChaCha20Rng; use utils::{Deserializable, Serializable, SliceReader}; @@ -46,8 +45,8 @@ fn fri_folding_4() { pub fn build_prover_channel( trace_length: usize, options: &FriOptions, -) -> DefaultProverChannel> { - DefaultProverChannel::new(trace_length * options.blowup_factor(), 32, false) +) -> DefaultProverChannel> { + DefaultProverChannel::new(trace_length * options.blowup_factor(), 32, false, None) } pub fn build_evaluations(trace_length: usize, lde_blowup: usize) -> Vec { @@ -107,8 +106,7 @@ fn fri_prove_verify( // instantiate the prover and generate the proof let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); - let mut prng = ChaCha20Rng::from_entropy(); - prover.build_layers(&mut channel, evaluations.clone(), &mut prng); + prover.build_layers(&mut channel, evaluations.clone()); let positions = channel.draw_query_positions(0); let proof = prover.build_proof(&positions); diff 
--git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index 2dca49991..d576d1c8d 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -13,8 +13,6 @@ use air::{ use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; -use rand::SeedableRng; -use rand_chacha::ChaCha20Rng; use winter_prover::{ matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, StarkDomain, Trace, @@ -35,7 +33,7 @@ fn prove_with_lagrange_kernel(c: &mut Criterion) { let prover = LagrangeProver::new(AUX_TRACE_WIDTH); b.iter_batched( || trace.clone(), - |trace| prover.prove(trace).unwrap(), + |trace| prover.prove(trace, None).unwrap(), BatchSize::SmallInput, ) }); @@ -194,6 +192,7 @@ impl Prover for LagrangeProver { DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> <::Air as Air>::PublicInputs { } @@ -209,19 +208,12 @@ impl Prover for LagrangeProver { domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + _prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, _prng) } fn build_constraint_commitment>( diff --git a/prover/src/channel.rs b/prover/src/channel.rs index 97d90aab7..a4d2bdc09 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -13,7 +13,7 @@ use air::{ use crypto::{Digest, ElementHasher, 
Hasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; -use rand::RngCore; +use rand::{RngCore, SeedableRng}; #[cfg(feature = "concurrent")] use utils::iterators::*; use utils::Serializable; @@ -21,11 +21,12 @@ use utils::Serializable; // TYPES AND INTERFACES // ================================================================================================ -pub struct ProverChannel<'a, A, E, H, R, V> +pub struct ProverChannel<'a, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, V: VectorCommitment, { @@ -36,6 +37,7 @@ where ood_frame: OodFrame, pow_nonce: u64, salts: Vec>, + prng: Option

, _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -43,18 +45,24 @@ where // PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R, V> ProverChannel<'a, A, E, H, R, V> +impl<'a, A, E, H, P, R, V> ProverChannel<'a, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore + SeedableRng, R: RandomCoin, V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates a new prover channel for the specified `air` and public inputs. - pub fn new(air: &'a A, mut pub_inputs_elements: Vec, zk_blowup: usize) -> Self { + pub fn new( + air: &'a A, + mut pub_inputs_elements: Vec, + zk_blowup: usize, + seed: Option<

::Seed>, + ) -> Self { let context = Context::new::( air.trace_info().clone(), air.options().clone(), @@ -67,6 +75,12 @@ where let mut coin_seed_elements = context.to_elements(); coin_seed_elements.append(&mut pub_inputs_elements); + let prng = if air.options().is_zk() { + Some(P::from_seed(seed.expect("must provide the seed when zk is enabled"))) + } else { + None + }; + ProverChannel { air, public_coin: RandomCoin::new(&coin_seed_elements), @@ -75,6 +89,7 @@ where ood_frame: OodFrame::default(), pow_nonce: 0, salts: vec![], + prng, _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -84,16 +99,16 @@ where // -------------------------------------------------------------------------------------------- /// Commits the prover the extended execution trace. - pub fn commit_trace

(&mut self, trace_root: H::Digest, prng: &mut P) - where - P: RngCore, - { + pub fn commit_trace(&mut self, trace_root: H::Digest) { self.commitments.add::(&trace_root); - // sample a salt for Fiat-Shamir is zero-knowledge is enabled + // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); Some(Digest::from_random_bytes(&buffer)) } else { None @@ -103,16 +118,16 @@ where } /// Commits the prover to the evaluations of the constraint composition polynomial. - pub fn commit_constraints

(&mut self, constraint_root: H::Digest, prng: &mut P) - where - P: RngCore, - { + pub fn commit_constraints(&mut self, constraint_root: H::Digest) { self.commitments.add::(&constraint_root); - // sample a salt for Fiat-Shamir is zero-knowledge is enabled + // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); Some(Digest::from_random_bytes(&buffer)) } else { None @@ -123,16 +138,16 @@ where /// Saves the evaluations of trace polynomials over the out-of-domain evaluation frame. This /// also reseeds the public coin with the hashes of the evaluation frame states. - pub fn send_ood_trace_states

(&mut self, trace_ood_frame: &TraceOodFrame, prng: &mut P) - where - P: RngCore, - { + pub fn send_ood_trace_states(&mut self, trace_ood_frame: &TraceOodFrame) { let trace_states_hash = self.ood_frame.set_trace_states::(trace_ood_frame); // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); Some(Digest::from_random_bytes(&buffer)) } else { None @@ -143,16 +158,16 @@ where /// Saves the evaluations of constraint composition polynomial columns at the out-of-domain /// point. This also reseeds the public coin wit the hash of the evaluations. - pub fn send_ood_constraint_evaluations

(&mut self, evaluations: &[E], prng: &mut P) - where - P: RngCore, - { + pub fn send_ood_constraint_evaluations(&mut self, evaluations: &[E]) { self.ood_frame.set_constraint_evaluations(evaluations); // sample a salt for Fiat-Shamir is zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); Some(Digest::from_random_bytes(&buffer)) } else { None @@ -264,31 +279,31 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl fri::ProverChannel for ProverChannel<'_, A, E, H, R, V> +impl fri::ProverChannel for ProverChannel<'_, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, V: VectorCommitment, { type Hasher = H; /// Commits the prover to a FRI layer. - fn commit_fri_layer

( - &mut self, - layer_root: H::Digest, - prng: &mut P, - ) -> Option<::Digest> + fn commit_fri_layer(&mut self, layer_root: H::Digest) -> Option<::Digest> where P: RngCore, { self.commitments.add::(&layer_root); - // sample a salt for Fiat-Shamir is zero-knowledge is enabled + // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { let mut buffer = [0_u8; 32]; - prng.fill_bytes(&mut buffer); + self.prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .fill_bytes(&mut buffer); Some(Digest::from_random_bytes(&buffer)) } else { None diff --git a/prover/src/constraints/composition_poly.rs b/prover/src/constraints/composition_poly.rs index a418a04c9..2de032981 100644 --- a/prover/src/constraints/composition_poly.rs +++ b/prover/src/constraints/composition_poly.rs @@ -65,7 +65,7 @@ impl CompositionPoly { domain: &StarkDomain, num_cols: usize, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> Self { assert!( domain.trace_length() < composition_trace.num_rows(), @@ -95,7 +95,10 @@ impl CompositionPoly { let mut zk_col = vec![E::ZERO; extended_len]; for a in zk_col.iter_mut() { - let bytes = prng.gen::<[u8; 32]>(); + let bytes = prng + .as_mut() + .expect("should contain a PRNG when zk is enabled") + .gen::<[u8; 32]>(); *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) .expect("failed to generate randomness"); } @@ -150,7 +153,7 @@ impl CompositionPoly { fn complement_to( polys: Vec>, l: usize, - prng: &mut R, + prng: &mut Option, ) -> Vec> { let mut result = vec![]; @@ -162,7 +165,10 @@ fn complement_to( let diff = l - poly.len(); for eval in current_poly.iter_mut().take(diff) { - let bytes = prng.gen::<[u8; 32]>(); + let bytes = prng + .as_mut() + .expect("should contain a PRNG when zk is enabled") + .gen::<[u8; 32]>(); *eval = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) .expect("failed to generate randomness"); } @@ -180,6 +186,7 @@ fn complement_to( result.push(res) } + // TODO: is this always 
guaranteed to not panic? let poly = polys.last().unwrap(); let mut res = vec![E::ZERO; l]; for (i, entry) in poly.iter().enumerate() { diff --git a/prover/src/lib.rs b/prover/src/lib.rs index fa2bc659b..4d50264ab 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -58,8 +58,7 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; -use rand::{RngCore, SeedableRng}; -use rand_chacha::ChaCha20Rng; +use rand::{Error, RngCore, SeedableRng}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -163,6 +162,9 @@ pub trait Prover { where E: FieldElement; + /// PRNG used when zero-knowledge (zk) is enabled. + type ZkPrng: RngCore + SeedableRng; + // REQUIRED METHODS // -------------------------------------------------------------------------------------------- @@ -192,6 +194,7 @@ pub trait Prover { domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement; @@ -270,7 +273,11 @@ pub trait Prover { /// Public inputs must match the value returned from /// [Self::get_pub_inputs()](Prover::get_pub_inputs) for the provided trace. #[maybe_async] - fn prove(&self, trace: Self::Trace) -> Result + fn prove( + &self, + trace: Self::Trace, + seed: Option<::Seed>, + ) -> Result where ::PublicInputs: Send, ::GkrProof: Send, @@ -279,18 +286,20 @@ pub trait Prover { // of static dispatch for selecting two generic parameter: extension field and hash // function. 
match self.options().field_extension() { - FieldExtension::None => maybe_await!(self.generate_proof::(trace)), + FieldExtension::None => { + maybe_await!(self.generate_proof::(trace, seed)) + }, FieldExtension::Quadratic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(2)); } - maybe_await!(self.generate_proof::>(trace)) + maybe_await!(self.generate_proof::>(trace, seed)) }, FieldExtension::Cubic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(3)); } - maybe_await!(self.generate_proof::>(trace)) + maybe_await!(self.generate_proof::>(trace, seed)) }, } } @@ -303,7 +312,11 @@ pub trait Prover { /// TODO: make this function un-callable externally? #[doc(hidden)] #[maybe_async] - fn generate_proof(&self, trace: Self::Trace) -> Result + fn generate_proof( + &self, + trace: Self::Trace, + seed: Option<::Seed>, + ) -> Result where E: FieldElement, ::PublicInputs: Send, @@ -320,19 +333,26 @@ pub trait Prover { // execution of the computation for the provided public inputs. let air = Self::Air::new(trace.info().clone(), pub_inputs, self.options().clone()); + // get the zk parameter, which are None unless zk is enabled + let zk_parameters = air.context().zk_parameters(); + + // create a PRNG to be used when zk is enabled, and also generates a seed to be used in + // generating salting values for Fiat-Shamir by `ProverChannel` + let (mut prng, seed) = generate_prng_and_new_seed(seed); + // create a channel which is used to simulate interaction between the prover and the // verifier; the channel will be used to commit to values and to draw randomness that // should come from the verifier. 
- let mut channel = - ProverChannel::::new( - &air, - pub_inputs_elements, - air.context().zk_blowup_factor(), - ); - let mut _prng = ChaCha20Rng::from_entropy(); - let seed = [0_u8; 32]; - let mut prng = ChaCha20Rng::from_seed(seed); - let zk_parameters = air.context().zk_parameters(); + let mut channel = ProverChannel::< + Self::Air, + E, + Self::HashFn, + Self::ZkPrng, + Self::RandomCoin, + Self::VC, + >::new( + &air, pub_inputs_elements, air.context().zk_blowup_factor(), seed + ); // 1 ----- Commit to the execution trace -------------------------------------------------- @@ -349,8 +369,8 @@ pub trait Prover { &trace, &domain, zk_parameters, + &mut channel, &mut prng, - &mut channel )); // build the auxiliary trace segment, and append the resulting segments to trace commitment @@ -387,7 +407,7 @@ pub trait Prover { // commit to the LDE of the extended auxiliary trace segment by writing its // commitment into the channel - channel.commit_trace(aux_segment_commitment, &mut prng); + channel.commit_trace(aux_segment_commitment); drop(span); aux_segment_polys @@ -460,10 +480,10 @@ pub trait Prover { // z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = log(trace_len). 
let ood_trace_states = trace_polys.get_ood_frame(z, air.context().trace_info().length()); - channel.send_ood_trace_states(&ood_trace_states, &mut prng); + channel.send_ood_trace_states(&ood_trace_states); let ood_evaluations = composition_poly.evaluate_at(z, air.is_zk()); - channel.send_ood_constraint_evaluations(&ood_evaluations, &mut prng); + channel.send_ood_constraint_evaluations(&ood_evaluations); // draw random coefficients to use during DEEP polynomial composition, and use them to // initialize the DEEP composition polynomial @@ -508,7 +528,7 @@ pub trait Prover { let num_layers = fri_options.num_fri_layers(lde_domain_size); let mut fri_prover = FriProver::<_, _, _, Self::VC>::new(fri_options); info_span!("compute_fri_layers", num_layers) - .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations, &mut prng)); + .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations)); // 7 ----- determine query positions ------------------------------------------------------ let query_positions = { @@ -561,18 +581,26 @@ pub trait Prover { #[doc(hidden)] #[instrument(skip_all)] + #[allow(clippy::type_complexity)] #[maybe_async] - fn commit_to_main_trace_segment( + fn commit_to_main_trace_segment( &self, trace: &Self::Trace, domain: &StarkDomain, zk_parameters: Option, - prng: &mut R, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, + channel: &mut ProverChannel< + '_, + Self::Air, + E, + Self::HashFn, + Self::ZkPrng, + Self::RandomCoin, + Self::VC, + >, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, - R: RngCore, { // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde( @@ -580,7 +608,8 @@ pub trait Prover { trace.main_segment(), domain, self.options().partition_options(), - zk_parameters + zk_parameters, + prng, )); // get the commitment to the main trace segment LDE @@ -588,31 +617,39 @@ pub trait 
Prover { // commit to the LDE of the main trace by writing the the commitment string into // the channel - channel.commit_trace(main_trace_commitment, prng); + channel.commit_trace(main_trace_commitment); (trace_lde, trace_polys) } #[doc(hidden)] #[instrument(skip_all)] + #[allow(clippy::type_complexity)] #[maybe_async] - fn commit_to_constraint_evaluations( + fn commit_to_constraint_evaluations( &self, air: &Self::Air, composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, + channel: &mut ProverChannel< + '_, + Self::Air, + E, + Self::HashFn, + Self::ZkPrng, + Self::RandomCoin, + Self::VC, + >, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) where E: FieldElement, - R: RngCore, { // first, build a commitment to the evaluations of the constraint composition polynomial // columns let (constraint_commitment, composition_poly) = maybe_await!(self - .build_constraint_commitment::( + .build_constraint_commitment::( composition_poly_trace, air.context().num_constraint_composition_columns(), domain, @@ -623,8 +660,56 @@ pub trait Prover { // then, commit to the evaluations of constraints by writing the commitment string of // the constraint commitment into the channel - channel.commit_constraints(constraint_commitment.commitment(), prng); + channel.commit_constraints(constraint_commitment.commitment()); (constraint_commitment, composition_poly) } } + +// MOCK PRNG FOR ZERO-KNOWLEDGE +// ================================================================================================= + +/// A mock PRNG used when zero-knowledge is not enabled. 
+pub struct MockPrng;
+impl SeedableRng for MockPrng {
+ type Seed = [u8; 8];
+
+ fn from_seed(_seed: Self::Seed) -> Self {
+ Self
+ }
+}
+
+impl RngCore for MockPrng {
+ fn next_u32(&mut self) -> u32 {
+ 0
+ }
+
+ fn next_u64(&mut self) -> u64 {
+ 0
+ }
+
+ fn fill_bytes(&mut self, dest: &mut [u8]) {
+ dest.iter_mut().for_each(|d| *d = 0);
+ }
+
+ fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
+ dest.iter_mut().for_each(|d| *d = 0);
+ Ok(())
+ }
+}
+
+/// A helper function that generates a PRNG from a seed when zero-knowledge is enabled.
+fn generate_prng_and_new_seed(
+ seed: Option<

::Seed>, +) -> (Option

, Option<

::Seed>) { + match seed { + Some(seed) => { + let mut prng = P::from_seed(seed); + let mut seed =

::Seed::default(); + prng.fill_bytes(seed.as_mut()); + + (Some(prng), Some(seed)) + }, + None => (None, None), + } +} diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 62a953878..2ec304d30 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -314,7 +314,7 @@ impl ColMatrix { /// 2. \hat{w}(x) is the randomized witness polynomial. /// 3. r(x) is the randomizer polynomial and has degree `(zk_blowup - 1) * n`. /// 4. Z_H(x) = (x^n - 1). - pub(crate) fn randomize(&self, zk_blowup: usize, prng: &mut R) -> Self { + pub(crate) fn randomize(&self, zk_blowup: usize, prng: &mut Option) -> Self { let cur_len = self.num_rows(); let extended_len = zk_blowup * cur_len; let pad_len = extended_len - cur_len; @@ -324,7 +324,10 @@ impl ColMatrix { .map(|col| { let mut added = vec![E::ZERO; pad_len]; for a in added.iter_mut() { - let bytes = prng.gen::<[u8; 32]>(); + let bytes = prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .gen::<[u8; 32]>(); *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) .expect("failed to generate randomness"); } diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 1cbb9f0d1..81bc6da4f 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -69,7 +69,7 @@ where domain: &StarkDomain, partition_options: PartitionOptions, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = @@ -153,7 +153,7 @@ where aux_trace: &ColMatrix, domain: &StarkDomain, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = @@ -287,7 
+287,7 @@ fn build_trace_commitment( domain: &StarkDomain, partition_options: PartitionOptions, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> (RowMatrix, V, ColMatrix) where E: FieldElement, diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs index f716fa0ab..88d1c4234 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -16,7 +16,7 @@ use rand_chacha::ChaCha20Rng; use crate::{ tests::{build_fib_trace, MockAir}, - DefaultTraceLde, StarkDomain, Trace, TraceLde, + DefaultTraceLde, MockPrng, StarkDomain, Trace, TraceLde, }; type Blake3 = Blake3_256; @@ -29,7 +29,7 @@ fn extend_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); - let mut prng = ChaCha20Rng::from_entropy(); + let prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl let (trace_lde, trace_polys) = DefaultTraceLde::>::new( @@ -38,7 +38,7 @@ fn extend_trace_table() { &domain, partition_option, None, - &mut prng, + &mut Some(prng), ); // check the width and length of the extended trace @@ -84,16 +84,15 @@ fn commit_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); - let mut prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, _) = DefaultTraceLde::>::new( + let (trace_lde, _) = DefaultTraceLde::>::new::( trace.info(), trace.main_segment(), &domain, partition_option, None, - &mut prng, + &mut None, ); // build commitment, using a Merkle tree, to the trace rows diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index 6abcf8b96..721f7733a 100644 --- a/prover/src/trace/trace_lde/mod.rs 
+++ b/prover/src/trace/trace_lde/mod.rs @@ -51,7 +51,7 @@ pub trait TraceLde: Sync { aux_trace: &ColMatrix, domain: &StarkDomain, zk_parameters: Option, - prng: &mut R, + prng: &mut Option, ) -> (ColMatrix, ::Digest); /// Reads current and next rows from the main trace segment into the specified frame. diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 068921168..90f396d69 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -133,8 +133,9 @@ where .partition_size::(air.context().trace_info().main_trace_width()); let partition_size_aux = partition_options.partition_size::(air.context().trace_info().aux_segment_width()); - let partition_size_constraint = partition_options - .partition_size::(air.context().num_constraint_composition_columns() + air.is_zk() as usize); + let partition_size_constraint = partition_options.partition_size::( + air.context().num_constraint_composition_columns() + air.is_zk() as usize, + ); // --- parse Fiat-Shamir salts ----------------------------------------------- let salts: Vec> = Vec::read_from_bytes(&salts) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index 57bb1f528..258eb22e9 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -26,8 +26,6 @@ air = { version = "0.11", path = "../air", package = "winter-air", default-featu prover = { version = "0.11", path = "../prover", package = "winter-prover", default-features = false } verifier = { version = "0.11", path = "../verifier", package = "winter-verifier", default-features = false } -rand_chacha = { version = "0.3", default-features = false } - # Allow math in docs [package.metadata.docs.rs] rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index 655d66c04..b986548b3 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -268,7 +268,7 @@ //! //! # use winterfell::{ //! 
# Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, -//! # EvaluationFrame, PartitionOptions, TraceInfo, TransitionConstraintDegree, +//! # EvaluationFrame, PartitionOptions, TraceInfo, TransitionConstraintDegree, MockPrng, //! # }; //! # //! # pub struct PublicInputs { @@ -356,6 +356,7 @@ //! DefaultConstraintCommitment; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; +//! type ZkPrng = MockPrng; //! //! // Our public inputs consist of the first and last value in the execution trace. //! fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { @@ -377,8 +378,9 @@ //! domain: &StarkDomain, //! partition_option: PartitionOptions, //! is_zk: Option, +//! prng: &mut Option, //! ) -> (Self::TraceLde, TracePolyTable) { -//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) +//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk, prng) //! } //! //! fn build_constraint_commitment>( @@ -423,7 +425,7 @@ //! # DefaultConstraintEvaluator, DefaultConstraintCommitment, DefaultTraceLde, EvaluationFrame, //! # TraceInfo, TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, -//! # ZkParameters +//! # ZkParameters, MockPrng, //! # }; //! # //! # pub fn build_do_work_trace(start: BaseElement, n: usize) -> TraceTable { @@ -520,6 +522,7 @@ //! # DefaultConstraintCommitment; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; +//! # type ZkPrng = MockPrng; //! # //! # fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { //! # let last_step = trace.length() - 1; @@ -540,8 +543,9 @@ //! # domain: &StarkDomain, //! # partition_option: PartitionOptions, //! # is_zk: Option, +//! # prng: &mut Option, //! # ) -> (Self::TraceLde, TracePolyTable) { -//! 
# DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk) +//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk, prng) //! # } //! # //! # fn build_constraint_commitment>( @@ -592,7 +596,7 @@ //! //! // Instantiate the prover and generate the proof. //! let prover = WorkProver::new(options); -//! let proof = prover.prove(trace).unwrap(); +//! let proof = prover.prove(trace, None).unwrap(); //! //! // The verifier will accept proofs with parameters which guarantee 95 bits or more of //! // conjectured security @@ -642,9 +646,9 @@ pub use prover::{ BoundaryConstraint, BoundaryConstraintGroup, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, ConstraintDivisor, ConstraintEvaluator, DeepCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, EvaluationFrame, FieldExtension, Proof, ProofOptions, Prover, ProverError, - ProverGkrProof, StarkDomain, Trace, TraceInfo, TraceLde, TracePolyTable, TraceTable, - TraceTableFragment, TransitionConstraintDegree, + DefaultTraceLde, EvaluationFrame, FieldExtension, MockPrng, Proof, ProofOptions, Prover, + ProverError, ProverGkrProof, StarkDomain, Trace, TraceInfo, TraceLde, TracePolyTable, + TraceTable, TraceTableFragment, TransitionConstraintDegree, }; pub use verifier::{verify, AcceptableOptions, ByteWriter, VerifierError}; diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index 8b4db8d01..c112071f6 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -13,7 +13,6 @@ use prover::{ matrix::ColMatrix, CompositionPoly, DefaultConstraintCommitment, }; -use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use super::*; @@ -25,7 +24,7 @@ fn test_complex_lagrange_kernel_air() { let prover = LagrangeComplexProver::new(AUX_TRACE_WIDTH); - let proof = prover.prove(trace).unwrap(); + let proof = prover.prove(trace, None).unwrap(); verify::< LagrangeKernelComplexAir, @@ -225,6 +224,7 @@ impl 
Prover for LagrangeComplexProver { DefaultConstraintCommitment, Self::VC>; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> <::Air as Air>::PublicInputs { } @@ -240,19 +240,12 @@ impl Prover for LagrangeComplexProver { domain: &StarkDomain, partition_option: PartitionOptions, zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - let mut prng = ChaCha20Rng::from_entropy(); - DefaultTraceLde::new( - trace_info, - main_trace, - domain, - partition_option, - zk_parameters, - &mut prng, - ) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn build_constraint_commitment>( From 6387aa7ed66d95dac248fa110377edaab799482e Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Tue, 3 Dec 2024 06:10:47 +0100 Subject: [PATCH 17/19] chore: fix after rebase --- Cargo.toml | 3 +- examples/Cargo.toml | 2 +- examples/src/fibonacci/fib2/prover.rs | 12 +-- examples/src/fibonacci/fib8/prover.rs | 12 +-- examples/src/fibonacci/fib_small/prover.rs | 12 +-- examples/src/fibonacci/mulfib2/prover.rs | 9 ++- examples/src/fibonacci/mulfib8/prover.rs | 9 ++- examples/src/lamport/aggregate/prover.rs | 9 ++- examples/src/lamport/threshold/prover.rs | 9 ++- examples/src/merkle/prover.rs | 9 ++- examples/src/rescue/prover.rs | 9 ++- examples/src/rescue_raps/prover.rs | 8 +- examples/src/vdf/exempt/prover.rs | 9 ++- examples/src/vdf/regular/prover.rs | 9 ++- prover/Cargo.toml | 2 +- prover/benches/lagrange_kernel.rs | 10 ++- prover/src/constraints/commitment.rs | 80 -------------------- prover/src/constraints/commitment/default.rs | 33 ++++++-- prover/src/lib.rs | 4 +- prover/src/trace/trace_lde/default/mod.rs | 2 +- winterfell/src/lib.rs | 12 ++- winterfell/src/tests.rs | 6 +- 22 files changed, 139 insertions(+), 131 
deletions(-) delete mode 100644 prover/src/constraints/commitment.rs diff --git a/Cargo.toml b/Cargo.toml index 2eb4b7f3f..b0ed3f07c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,8 @@ members = [ "prover", "verifier", "winterfell", - "examples"] + "examples" +] resolver = "2" [profile.release] diff --git a/examples/Cargo.toml b/examples/Cargo.toml index 14ba18dee..ff308297e 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -26,7 +26,7 @@ default = ["std"] std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } blake3 = { version = "1.5", default-features = false } core-utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 0b1dbe172..91070896b 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -7,10 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, - TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -65,7 +63,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + 
DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -106,12 +104,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index b063b31f9..5f4e1d8aa 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -7,10 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, - TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -80,7 +78,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -121,12 +119,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, 
num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index b935ef672..4b4cc6860 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -6,10 +6,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, - TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -70,7 +68,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -111,12 +109,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 3900eac38..5356f4a29 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, 
AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -58,7 +59,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -99,12 +100,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index af306a6a2..563e29875 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -70,7 +71,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -111,12 +112,16 @@ where 
num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index 2a63d2794..d387d3e7f 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -9,7 +9,8 @@ use winterfell::iterators::*; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -107,7 +108,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -147,12 +148,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index f3eddaa7c..6092d7eb2 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -11,7 +11,8 @@ use winterfell::iterators::*; use winterfell::{ crypto::MerkleTree, 
matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -149,7 +150,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -189,12 +190,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index ab7a4209a..4b2c9147f 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -111,7 +112,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -154,12 +155,16 @@ where 
num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 976fc099e..186962dcf 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -77,7 +78,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -121,12 +122,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 66959bf52..a407898a0 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -8,7 +8,7 @@ use core_utils::uninit_vector; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, 
CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, }; use super::{ @@ -107,7 +107,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -153,12 +153,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } fn build_aux_trace( diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index a8f2599fc..d57b72e81 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -58,7 +59,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -104,12 +105,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, 
partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 0efeb8284..68d237742 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -7,7 +7,8 @@ use air::ZkParameters; use winterfell::{ crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, - DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -55,7 +56,7 @@ where type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; type ZkPrng = MockPrng; @@ -99,12 +100,16 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } } diff --git a/prover/Cargo.toml b/prover/Cargo.toml index c49ac34da..0961da8ec 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -36,7 +36,7 @@ fri = { version = "0.11", path = '../fri', package = "winter-fri", default-featu math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } maybe_async = { version = "0.11", path = "../utils/maybe_async" , 
package = "winter-maybe-async" } rand_chacha = { version = "0.3", default-features = false } -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } rand = { version = "0.8" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index d576d1c8d..b64052976 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -15,8 +15,8 @@ use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; use winter_prover::{ matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, - DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, StarkDomain, Trace, - TracePolyTable, + DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, Prover, ProverGkrProof, StarkDomain, + Trace, TracePolyTable, }; const TRACE_LENS: [usize; 2] = [2_usize.pow(16), 2_usize.pow(20)]; @@ -189,7 +189,7 @@ impl Prover for LagrangeProver { type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment; + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>; type ZkPrng = MockPrng; @@ -222,12 +222,16 @@ impl Prover for LagrangeProver { num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } diff --git 
a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs deleted file mode 100644 index d79e02d46..000000000 --- a/prover/src/constraints/commitment.rs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree. - -use alloc::vec::Vec; -use core::marker::PhantomData; - -use air::proof::Queries; -use crypto::{ElementHasher, VectorCommitment}; -use math::FieldElement; - -use super::RowMatrix; - -// CONSTRAINT COMMITMENT -// ================================================================================================ - -/// Constraint evaluation commitment. -/// -/// The commitment consists of two components: -/// * Evaluations of composition polynomial columns over the LDE domain. -/// * Vector commitment where each vector element corresponds to the digest of a row in -/// the composition polynomial evaluation matrix. -pub struct ConstraintCommitment< - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -> { - evaluations: RowMatrix, - vector_commitment: V, - _h: PhantomData, -} - -impl ConstraintCommitment -where - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -{ - /// Creates a new constraint evaluation commitment from the provided composition polynomial - /// evaluations and the corresponding vector commitment. - pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { - assert_eq!( - evaluations.num_rows(), - commitment.get_domain_len(), - "number of rows in constraint evaluation matrix must be the same as the size \ - of the vector commitment domain" - ); - - ConstraintCommitment { - evaluations, - vector_commitment: commitment, - _h: PhantomData, - } - } - - /// Returns the commitment. 
- pub fn commitment(&self) -> H::Digest { - self.vector_commitment.commitment() - } - - /// Returns constraint evaluations at the specified positions along with a batch opening proof - /// against the vector commitment. - pub fn query(self, positions: &[usize]) -> Queries { - // build batch opening proof to the leaves specified by positions - let opening_proof = self - .vector_commitment - .open_many(positions) - .expect("failed to generate a batch opening proof for constraint queries"); - - // determine a set of evaluations corresponding to each position - let mut evaluations = Vec::new(); - for &position in positions { - let row = self.evaluations.row(position).to_vec(); - evaluations.push(row); - } - - Queries::new::(opening_proof.1, evaluations) - } -} diff --git a/prover/src/constraints/commitment/default.rs b/prover/src/constraints/commitment/default.rs index 629c08cd3..dd8c20f7a 100644 --- a/prover/src/constraints/commitment/default.rs +++ b/prover/src/constraints/commitment/default.rs @@ -6,9 +6,10 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use air::{proof::Queries, PartitionOptions}; +use air::{proof::Queries, PartitionOptions, ZkParameters}; use crypto::{ElementHasher, VectorCommitment}; use math::FieldElement; +use rand::RngCore; use tracing::info_span; use super::{ConstraintCommitment, RowMatrix}; @@ -26,17 +27,20 @@ use crate::{CompositionPoly, CompositionPolyTrace, StarkDomain, DEFAULT_SEGMENT_ pub struct DefaultConstraintCommitment< E: FieldElement, H: ElementHasher, + R: RngCore, V: VectorCommitment, > { evaluations: RowMatrix, vector_commitment: V, _h: PhantomData, + _prng: PhantomData, } -impl DefaultConstraintCommitment +impl DefaultConstraintCommitment where E: FieldElement, H: ElementHasher, + R: RngCore, V: VectorCommitment, { /// Creates a new constraint evaluation commitment from the provided composition polynomial @@ -46,18 +50,22 @@ where num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: 
PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self, CompositionPoly) { // extend the main execution trace and build a commitment to the extended trace - let (evaluations, commitment, composition_poly) = build_constraint_commitment::( + let (evaluations, commitment, composition_poly) = build_constraint_commitment::( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ); assert_eq!( evaluations.num_rows(), - commitment.domain_len(), + commitment.get_domain_len(), "number of rows in constraint evaluation matrix must be the same as the size \ of the vector commitment domain" ); @@ -66,16 +74,18 @@ where evaluations, vector_commitment: commitment, _h: PhantomData, + _prng: PhantomData, }; (commitment, composition_poly) } } -impl ConstraintCommitment for DefaultConstraintCommitment +impl ConstraintCommitment for DefaultConstraintCommitment where E: FieldElement, H: ElementHasher + core::marker::Sync, + R: RngCore, V: VectorCommitment + core::marker::Sync, { type HashFn = H; @@ -106,15 +116,18 @@ where } } -fn build_constraint_commitment( +fn build_constraint_commitment( composition_poly_trace: CompositionPolyTrace, num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (RowMatrix, V, CompositionPoly) where E: FieldElement, H: ElementHasher, + R: RngCore, V: VectorCommitment, { // first, build constraint composition polynomial from its trace as follows: @@ -126,7 +139,13 @@ where num_columns = num_constraint_composition_columns ) .in_scope(|| { - CompositionPoly::new(composition_poly_trace, domain, num_constraint_composition_columns) + CompositionPoly::new( + composition_poly_trace, + domain, + num_constraint_composition_columns, + zk_parameters, + prng, + ) }); assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns); assert_eq!(composition_poly.column_degree(), 
domain.trace_length() - 1); diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 4d50264ab..926ef8563 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -228,6 +228,8 @@ pub trait Prover { num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) where E: FieldElement; @@ -653,7 +655,7 @@ pub trait Prover { composition_poly_trace, air.context().num_constraint_composition_columns(), domain, - self.options().partition_options() + self.options().partition_options(), zk_parameters, prng )); diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 81bc6da4f..e2cbb1e83 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -328,7 +328,7 @@ where let commitment_domain_size = trace_lde.num_rows(); let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) .in_scope(|| trace_lde.commit_to_rows::(partition_options)); - assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); + assert_eq!(trace_vector_com.get_domain_len(), commitment_domain_size); (trace_lde, trace_vector_com, trace_polys) } diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index b986548b3..4919f157d 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -353,7 +353,7 @@ //! type RandomCoin = DefaultRandomCoin; //! type TraceLde> = DefaultTraceLde; //! type ConstraintCommitment> = -//! DefaultConstraintCommitment; +//! DefaultConstraintCommitment; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; //! type ZkPrng = MockPrng; @@ -389,12 +389,16 @@ //! num_constraint_composition_columns: usize, //! domain: &StarkDomain, //! partition_options: PartitionOptions, +//! zk_parameters: Option, +//! prng: &mut Option, //! 
) -> (Self::ConstraintCommitment, CompositionPoly) { //! DefaultConstraintCommitment::new( //! composition_poly_trace, //! num_constraint_composition_columns, //! domain, //! partition_options, +//! zk_parameters, +//! prng, //! ) //! } //! @@ -519,7 +523,7 @@ //! # type RandomCoin = DefaultRandomCoin; //! # type TraceLde> = DefaultTraceLde; //! # type ConstraintCommitment> = -//! # DefaultConstraintCommitment; +//! # DefaultConstraintCommitment; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; //! # type ZkPrng = MockPrng; @@ -554,12 +558,16 @@ //! # num_constraint_composition_columns: usize, //! # domain: &StarkDomain, //! # partition_options: PartitionOptions, +//! # zk_parameters: Option, +//! # prng: &mut Option, //! # ) -> (Self::ConstraintCommitment, CompositionPoly) { //! # DefaultConstraintCommitment::new( //! # composition_poly_trace, //! # num_constraint_composition_columns, //! # domain, //! # partition_options, +//! # zk_parameters, +//! # prng, //! # ) //! # } //! 
# diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index c112071f6..12f49c010 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -221,7 +221,7 @@ impl Prover for LagrangeComplexProver { type TraceLde> = DefaultTraceLde; type ConstraintCommitment> = - DefaultConstraintCommitment, Self::VC>; + DefaultConstraintCommitment, Self::ZkPrng, Self::VC>; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>; type ZkPrng = MockPrng; @@ -254,12 +254,16 @@ impl Prover for LagrangeComplexProver { num_constraint_composition_columns: usize, domain: &StarkDomain, partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::ConstraintCommitment, CompositionPoly) { DefaultConstraintCommitment::new( composition_poly_trace, num_constraint_composition_columns, domain, partition_options, + zk_parameters, + prng, ) } From 606ffe7344aa165a5b58053cf678a5fb4f27ebcb Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Tue, 3 Dec 2024 06:17:41 +0100 Subject: [PATCH 18/19] fix: take into account randomizer column --- prover/src/constraints/commitment/default.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/prover/src/constraints/commitment/default.rs b/prover/src/constraints/commitment/default.rs index dd8c20f7a..4b8276bad 100644 --- a/prover/src/constraints/commitment/default.rs +++ b/prover/src/constraints/commitment/default.rs @@ -147,7 +147,10 @@ where prng, ) }); - assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns); + assert_eq!( + composition_poly.num_columns(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1); // then, evaluate composition polynomial columns over the LDE domain @@ -155,7 +158,10 @@ where let composed_evaluations = 
info_span!("evaluate_composition_poly_columns").in_scope(|| { RowMatrix::evaluate_polys_over::(composition_poly.data(), domain) }); - assert_eq!(composed_evaluations.num_cols(), num_constraint_composition_columns); + assert_eq!( + composed_evaluations.num_cols(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); assert_eq!(composed_evaluations.num_rows(), domain_size); // finally, build constraint evaluation commitment From 5bafedbc2ba00cf85c6182725754547f6cddafc3 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Tue, 10 Dec 2024 12:52:31 +0100 Subject: [PATCH 19/19] feat: use Distribution to generate salts for FS --- crypto/src/hash/mod.rs | 20 ++++---- crypto/src/hash/rescue/rp62_248/digest.rs | 28 ++++++----- crypto/src/hash/rescue/rp64_256/digest.rs | 26 +++-------- .../src/hash/rescue/rp64_256_jive/digest.rs | 30 ++++++------ examples/Cargo.toml | 1 + examples/src/fibonacci/fib2/mod.rs | 4 +- examples/src/fibonacci/fib8/mod.rs | 4 +- examples/src/fibonacci/fib_small/mod.rs | 4 +- examples/src/fibonacci/mulfib2/mod.rs | 4 +- examples/src/fibonacci/mulfib8/mod.rs | 4 +- examples/src/lamport/aggregate/mod.rs | 4 +- examples/src/lamport/threshold/mod.rs | 4 +- examples/src/merkle/mod.rs | 4 +- examples/src/rescue/mod.rs | 4 +- examples/src/rescue_raps/mod.rs | 4 +- examples/src/utils/rescue.rs | 12 ----- examples/src/vdf/exempt/mod.rs | 4 +- examples/src/vdf/regular/mod.rs | 4 +- fri/src/prover/channel.rs | 13 +++--- prover/src/channel.rs | 46 ++++++++++--------- prover/src/lib.rs | 8 +++- 21 files changed, 122 insertions(+), 110 deletions(-) diff --git a/crypto/src/hash/mod.rs b/crypto/src/hash/mod.rs index 4bfc5eea1..e1a9de33c 100644 --- a/crypto/src/hash/mod.rs +++ b/crypto/src/hash/mod.rs @@ -6,6 +6,7 @@ use core::{fmt::Debug, slice}; use math::{FieldElement, StarkField}; +use rand::{distributions::Standard, prelude::Distribution}; use utils::{ByteReader, Deserializable, 
DeserializationError, Serializable}; mod blake; @@ -77,9 +78,6 @@ pub trait Digest: /// upper limit on the possible digest size. For digests which are smaller than 32 bytes, the /// unused bytes should be set to 0. fn as_bytes(&self) -> [u8; 32]; - - /// Returns a digest that is drawn uniformly at random from the space of all digests. - fn from_random_bytes(buffer: &[u8]) -> Self; } // BYTE DIGEST @@ -114,14 +112,6 @@ impl Digest for ByteDigest { result[..N].copy_from_slice(&self.0); result } - - fn from_random_bytes(buffer: &[u8]) -> Self { - Self::new( - buffer - .try_into() - .expect("The size of the buffer with random bytes should be 32"), - ) - } } impl Default for ByteDigest { @@ -130,6 +120,14 @@ impl Default for ByteDigest { } } +impl Distribution> for Standard { + fn sample(&self, rng: &mut R) -> ByteDigest<24> { + let mut res = [0_u8; 24]; + rng.fill_bytes(&mut res); + ByteDigest(res) + } +} + impl Serializable for ByteDigest { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0); diff --git a/crypto/src/hash/rescue/rp62_248/digest.rs b/crypto/src/hash/rescue/rp62_248/digest.rs index 01ecbf996..6004b682f 100644 --- a/crypto/src/hash/rescue/rp62_248/digest.rs +++ b/crypto/src/hash/rescue/rp62_248/digest.rs @@ -6,6 +6,10 @@ use core::slice; use math::{fields::f62::BaseElement, FieldElement, StarkField}; +use rand::{ + distributions::{Standard, Uniform}, + prelude::Distribution, +}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -47,18 +51,6 @@ impl Digest for ElementDigest { result } - - fn from_random_bytes(buffer: &[u8]) -> Self { - let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; - - buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { - *digest = BaseElement::new(u64::from_be_bytes( - chunk.try_into().expect("Given the size of the chunk this should not panic"), - )) - }); - - Self(digest) - } } impl Default for 
ElementDigest { @@ -67,6 +59,18 @@ impl Default for ElementDigest { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); + for r in res.iter_mut() { + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); + } + ElementDigest::new(res) + } +} + impl Serializable for ElementDigest { fn write_into(&self, target: &mut W) { target.write_bytes(&self.as_bytes()[..31]); diff --git a/crypto/src/hash/rescue/rp64_256/digest.rs b/crypto/src/hash/rescue/rp64_256/digest.rs index f1bc78d6b..e4dd76088 100644 --- a/crypto/src/hash/rescue/rp64_256/digest.rs +++ b/crypto/src/hash/rescue/rp64_256/digest.rs @@ -5,11 +5,9 @@ use core::slice; -use math::{fields::f64::BaseElement, FieldElement}; -use rand::distributions::{Distribution, Standard}; -use utils::{ - ByteReader, ByteWriter, Deserializable, DeserializationError, Randomizable, Serializable, -}; +use math::{fields::f64::BaseElement, FieldElement, StarkField}; +use rand::distributions::{Distribution, Standard, Uniform}; +use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -46,18 +44,6 @@ impl Digest for ElementDigest { result } - - fn from_random_bytes(buffer: &[u8]) -> Self { - let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; - - buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { - *digest = BaseElement::new(u64::from_be_bytes( - chunk.try_into().expect("Given the size of the chunk this should not panic"), - )) - }); - - digest.into() - } } impl Default for ElementDigest { @@ -105,10 +91,10 @@ impl From for [u8; 32] { impl Distribution for Standard { fn sample(&self, rng: &mut R) -> ElementDigest { let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); for r in res.iter_mut() { - let mut 
source = [0_u8; 8]; - rng.fill_bytes(&mut source); - *r = BaseElement::from_random_bytes(&source).expect("failed to generate element"); + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); } ElementDigest::new(res) } diff --git a/crypto/src/hash/rescue/rp64_256_jive/digest.rs b/crypto/src/hash/rescue/rp64_256_jive/digest.rs index 703118093..6a0a66f5b 100644 --- a/crypto/src/hash/rescue/rp64_256_jive/digest.rs +++ b/crypto/src/hash/rescue/rp64_256_jive/digest.rs @@ -5,7 +5,11 @@ use core::slice; -use math::{fields::f64::BaseElement, FieldElement}; +use math::{fields::f64::BaseElement, FieldElement, StarkField}; +use rand::{ + distributions::{Standard, Uniform}, + prelude::Distribution, +}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -43,18 +47,6 @@ impl Digest for ElementDigest { result } - - fn from_random_bytes(buffer: &[u8]) -> Self { - let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; - - buffer.chunks(8).zip(digest.iter_mut()).for_each(|(chunk, digest)| { - *digest = BaseElement::new(u64::from_be_bytes( - chunk.try_into().expect("Given the size of the chunk this should not panic"), - )) - }); - - digest.into() - } } impl Default for ElementDigest { @@ -99,6 +91,18 @@ impl From for [u8; 32] { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); + for r in res.iter_mut() { + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); + } + ElementDigest::new(res) + } +} + // TESTS // ================================================================================================ diff --git a/examples/Cargo.toml b/examples/Cargo.toml index ff308297e..6588e7d07 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -30,6 +30,7 @@ air = { version = 
"0.11", path = "../air", package = "winter-air", default-featu blake3 = { version = "1.5", default-features = false } core-utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } +rand = { version = "0.8" } rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils", optional = true } structopt = { version = "0.3", default-features = false } tracing = { version = "0.1", default-features = false } diff --git a/examples/src/fibonacci/fib2/mod.rs b/examples/src/fibonacci/fib2/mod.rs index 7b06615ee..ff6e80680 100644 --- a/examples/src/fibonacci/fib2/mod.rs +++ b/examples/src/fibonacci/fib2/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl FibExample { impl Example for FibExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( diff --git a/examples/src/fibonacci/fib8/mod.rs b/examples/src/fibonacci/fib8/mod.rs index ac0cdd234..fabc71055 100644 --- a/examples/src/fibonacci/fib8/mod.rs +++ b/examples/src/fibonacci/fib8/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl Fib8Example { impl Example for Fib8Example where H: 
ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( diff --git a/examples/src/fibonacci/fib_small/mod.rs b/examples/src/fibonacci/fib_small/mod.rs index e8ca2b5e1..05b7b3e9e 100644 --- a/examples/src/fibonacci/fib_small/mod.rs +++ b/examples/src/fibonacci/fib_small/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f64::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -99,6 +100,7 @@ impl FibExample { impl Example for FibExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( diff --git a/examples/src/fibonacci/mulfib2/mod.rs b/examples/src/fibonacci/mulfib2/mod.rs index 0f7943372..a999f6ebd 100644 --- a/examples/src/fibonacci/mulfib2/mod.rs +++ b/examples/src/fibonacci/mulfib2/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,6 +88,7 @@ impl MulFib2Example { impl Example for MulFib2Example where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; diff --git a/examples/src/fibonacci/mulfib8/mod.rs b/examples/src/fibonacci/mulfib8/mod.rs index 6e0d10f4a..65341d136 100644 --- a/examples/src/fibonacci/mulfib8/mod.rs +++ b/examples/src/fibonacci/mulfib8/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use 
std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl MulFib8Example { impl Example for MulFib8Example where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; diff --git a/examples/src/lamport/aggregate/mod.rs b/examples/src/lamport/aggregate/mod.rs index 0de22e2ad..a2aa631c6 100644 --- a/examples/src/lamport/aggregate/mod.rs +++ b/examples/src/lamport/aggregate/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -115,6 +116,7 @@ impl LamportAggregateExample { impl Example for LamportAggregateExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace diff --git a/examples/src/lamport/threshold/mod.rs b/examples/src/lamport/threshold/mod.rs index 5120c50cb..0574a88b3 100644 --- a/examples/src/lamport/threshold/mod.rs +++ b/examples/src/lamport/threshold/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, 
get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -113,6 +114,7 @@ impl LamportThresholdExample { impl Example for LamportThresholdExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace diff --git a/examples/src/merkle/mod.rs b/examples/src/merkle/mod.rs index 37c0fc2f5..144b27130 100644 --- a/examples/src/merkle/mod.rs +++ b/examples/src/merkle/mod.rs @@ -6,10 +6,11 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use rand_utils::{rand_value, rand_vector}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, Digest, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, Digest, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -110,6 +111,7 @@ impl MerkleExample { impl Example for MerkleExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace diff --git a/examples/src/rescue/mod.rs b/examples/src/rescue/mod.rs index c4b747d09..9aac35e57 100644 --- a/examples/src/rescue/mod.rs +++ b/examples/src/rescue/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -95,6 +96,7 @@ impl RescueExample { impl Example for RescueExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace diff --git a/examples/src/rescue_raps/mod.rs 
b/examples/src/rescue_raps/mod.rs index 57f26884d..aa9801b32 100644 --- a/examples/src/rescue_raps/mod.rs +++ b/examples/src/rescue_raps/mod.rs @@ -6,10 +6,11 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use rand_utils::rand_array; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, ExtensionOf, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -108,6 +109,7 @@ impl RescueRapsExample { impl Example for RescueRapsExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index 54d72a094..33ca425ca 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -162,18 +162,6 @@ impl Digest for Hash { result[..bytes.len()].copy_from_slice(bytes); result } - - fn from_random_bytes(buffer: &[u8]) -> Self { - let mut digest: [BaseElement; DIGEST_SIZE] = [BaseElement::ZERO; DIGEST_SIZE]; - - buffer.chunks(16).zip(digest.iter_mut()).for_each(|(chunk, digest)| { - *digest = BaseElement::new(u128::from_be_bytes( - chunk.try_into().expect("Given the size of the chunk this should not panic"), - )) - }); - - Self(digest) - } } impl Serializable for Hash { diff --git a/examples/src/vdf/exempt/mod.rs b/examples/src/vdf/exempt/mod.rs index 58ece8133..36045e53d 100644 --- a/examples/src/vdf/exempt/mod.rs +++ b/examples/src/vdf/exempt/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, 
math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,6 +88,7 @@ impl VdfExample { impl Example for VdfExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); diff --git a/examples/src/vdf/regular/mod.rs b/examples/src/vdf/regular/mod.rs index 4b7ba4468..780a6bad1 100644 --- a/examples/src/vdf/regular/mod.rs +++ b/examples/src/vdf/regular/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -84,6 +85,7 @@ impl VdfExample { impl Example for VdfExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 5ee144f40..7069e2181 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -6,9 +6,9 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use crypto::{Digest, ElementHasher, Hasher, RandomCoin}; +use crypto::{ElementHasher, Hasher, RandomCoin}; use math::FieldElement; -use rand::{RngCore, SeedableRng}; +use rand::{distributions::Standard, prelude::Distribution, Rng, RngCore, SeedableRng}; // PROVER CHANNEL TRAIT // ================================================================================================ @@ -145,6 +145,7 @@ where H: ElementHasher, P: RngCore, R: RandomCoin, + Standard: Distribution<::Digest>, { type Hasher = H; @@ -156,12 +157,12 @@ where // sample a salt for Fiat-Shamir 
if zero-knowledge is enabled let salt = if self.is_zk { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; diff --git a/prover/src/channel.rs b/prover/src/channel.rs index a4d2bdc09..facdc2d9d 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,10 +10,10 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{Digest, ElementHasher, Hasher, RandomCoin, VectorCommitment}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; -use rand::{RngCore, SeedableRng}; +use rand::{distributions::Standard, prelude::Distribution, Rng, RngCore, SeedableRng}; #[cfg(feature = "concurrent")] use utils::iterators::*; use utils::Serializable; @@ -52,6 +52,7 @@ where H: ElementHasher, P: RngCore + SeedableRng, R: RandomCoin, + Standard: Distribution<::Digest>, V: VectorCommitment, { // CONSTRUCTOR @@ -104,12 +105,12 @@ where // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; @@ -123,12 +124,12 @@ where // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; @@ -143,12 +144,12 @@ where // sample a salt for 
Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; @@ -163,12 +164,12 @@ where // sample a salt for Fiat-Shamir is zero-knowledge is enabled let salt = if self.air.is_zk() { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; @@ -286,6 +287,7 @@ where H: ElementHasher, P: RngCore, R: RandomCoin, + Standard: Distribution<::Digest>, V: VectorCommitment, { type Hasher = H; @@ -299,12 +301,12 @@ where // sample a salt for Fiat-Shamir if zero-knowledge is enabled let salt = if self.air.is_zk() { - let mut buffer = [0_u8; 32]; - self.prng + let digest = self + .prng .as_mut() .expect("should have a PRNG when zk is enabled") - .fill_bytes(&mut buffer); - Some(Digest::from_random_bytes(&buffer)) + .sample(Standard); + Some(digest) } else { None }; diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 926ef8563..e0f1a5360 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -50,7 +50,7 @@ pub use air::{ }; use air::{AuxRandElements, GkrRandElements, PartitionOptions, ZkParameters}; pub use crypto; -use crypto::{ElementHasher, RandomCoin, VectorCommitment}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProver; pub use math; use math::{ @@ -58,7 +58,7 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; -use rand::{Error, RngCore, SeedableRng}; +use rand::{distributions::Standard, prelude::Distribution, Error, RngCore, SeedableRng}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ 
iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -281,6 +281,7 @@ pub trait Prover { seed: Option<::Seed>, ) -> Result where + Standard: Distribution<<::HashFn as crypto::Hasher>::Digest>, ::PublicInputs: Send, ::GkrProof: Send, { @@ -320,6 +321,7 @@ pub trait Prover { seed: Option<::Seed>, ) -> Result where + Standard: Distribution<<::HashFn as Hasher>::Digest>, E: FieldElement, ::PublicInputs: Send, ::GkrProof: Send, @@ -603,6 +605,7 @@ pub trait Prover { ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, + Standard: Distribution<<::HashFn as crypto::Hasher>::Digest>, { // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde( @@ -647,6 +650,7 @@ pub trait Prover { ) -> (Self::ConstraintCommitment, CompositionPoly) where E: FieldElement, + Standard: Distribution<<::HashFn as crypto::Hasher>::Digest>, { // first, build a commitment to the evaluations of the constraint composition polynomial // columns