diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0f783b5e7..75632c6e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,22 @@
 # Changelog
 
+## 0.11.0 (2024-11-24)
+- [BREAKING] Made the prover generic over the `ConstraintCommitment` type (#343).
+
+## 0.10.3 (2024-11-19) - `air`, `prover`, and `verifier` crates only
+- Fixed partition size calculations in `PartitionOptions` (#340).
+
+## 0.10.2 (2024-11-18)
+- Implemented `core::error::Error` for error types (#341).
+
+## 0.10.1 (2024-10-30)
+- Fixed partition hashing and added logging to aux trace building (#338).
+
 ## 0.10.0 (2024-10-25)
 - [BREAKING] Refactored maybe-async macro into simpler maybe-async and maybe-await macros (#283).
-- [BREAKING] Introduce `VectorCommitment` abstraction (#285).
+- [BREAKING] Introduced `VectorCommitment` abstraction (#285).
 - Added `maybe-async-trait` procedural macro (#334).
-- [BREAKING] Add options for partitioned trace commitments (#336).
+- [BREAKING] Added options for partitioned trace commitments (#336).
 - Updated minimum supported Rust version to 1.82.
 
 ## 0.9.3 (2024-09-25) - `utils/core` and `math` crates only
@@ -14,7 +26,7 @@
 - Fixed `read_slice` impl for `ReadAdapter` (#309).
 
 ## 0.9.1 (2024-06-24) - `utils/core` crate only
-- Fixed `useize` serialization in `ByteWriter`.
+- Fixed `usize` serialization in `ByteWriter`.
 
 ## 0.9.0 (2024-05-09)
 - [BREAKING] Merged `TraceLayout` into `TraceInfo` (#245).
diff --git a/README.md b/README.md
index 4616bf5a5..396cdc5ec 100644
--- a/README.md
+++ b/README.md
@@ -3,8 +3,8 @@
 
-
-
+
+
 
 A STARK prover and verifier for arbitrary computations.
@@ -270,6 +270,8 @@ impl Prover for WorkProver {
     type TraceLde<E: FieldElement<BaseField = Self::BaseField>> = DefaultTraceLde<E, Self::HashFn, Self::VC>;
     type ConstraintEvaluator<'a, E: FieldElement<BaseField = Self::BaseField>> = DefaultConstraintEvaluator<'a, WorkAir, E>;
+    type ConstraintCommitment<E: FieldElement<BaseField = Self::BaseField>> =
+        DefaultConstraintCommitment<E, Self::HashFn, Self::VC>;
 
     // Our public inputs consist of the first and last value in the execution trace.
     fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs {
@@ -300,6 +302,22 @@
         DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients)
     }
 
+    // We'll use the default constraint commitment.
+    fn build_constraint_commitment<E: FieldElement<BaseField = Self::BaseField>>(
+        &self,
+        composition_poly_trace: CompositionPolyTrace<E>,
+        num_constraint_composition_columns: usize,
+        domain: &StarkDomain<Self::BaseField>,
+        partition_options: PartitionOptions,
+    ) -> (Self::ConstraintCommitment<E>, CompositionPoly<E>) {
+        DefaultConstraintCommitment::new(
+            composition_poly_trace,
+            num_constraint_composition_columns,
+            domain,
+            partition_options,
+        )
+    }
+
     fn options(&self) -> &ProofOptions {
         &self.options
     }
diff --git a/air/Cargo.toml b/air/Cargo.toml
index 12c56cd72..d6f5ea27c 100644
--- a/air/Cargo.toml
+++ b/air/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "winter-air"
-version = "0.10.0"
+version = "0.11.0"
 description = "AIR components for the Winterfell STARK prover/verifier"
 authors = ["winterfell contributors"]
 readme = "README.md"
 license = "MIT"
 repository = "https://github.com/novifinancial/winterfell"
-documentation = "https://docs.rs/winter-air/0.10.0"
+documentation = "https://docs.rs/winter-air/0.11.0"
 categories = ["cryptography", "no-std"]
 keywords = ["crypto", "arithmetization", "air"]
 edition = "2021"
@@ -20,14 +20,14 @@ default = ["std"]
 std = ["crypto/std", "fri/std", "math/std", "utils/std"]
 
 [dependencies]
-crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false }
-fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false }
+crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false }
+fri = { version = "0.11", path = "../fri", package = "winter-fri", default-features = false }
 libm = "0.2"
-math = { version = "0.10", path = "../math", package = "winter-math", default-features = false }
-utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false }
+math = { version = "0.11", path = "../math", package = "winter-math", default-features = false }
+utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false }
 
 [dev-dependencies]
-rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" }
+rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" }
 
 # Allow math in docs
 [package.metadata.docs.rs]
diff --git a/air/src/air/boundary/mod.rs b/air/src/air/boundary/mod.rs
index 7f92c80ab..d1ad7271e 100644
--- a/air/src/air/boundary/mod.rs
+++ b/air/src/air/boundary/mod.rs
@@ -170,7 +170,7 @@ where
         let group = groups.entry(key).or_insert_with(|| {
             BoundaryConstraintGroup::new(ConstraintDivisor::from_assertion(
                 &assertion,
-                context.trace_len(),
+                context.trace_info().length(),
             ))
         });
 
diff --git a/air/src/air/context.rs b/air/src/air/context.rs
index 09341afe3..c7412aece 100644
--- a/air/src/air/context.rs
+++ b/air/src/air/context.rs
@@ -26,6 +26,8 @@ pub struct AirContext<B: StarkField> {
     pub(super) trace_domain_generator: B,
     pub(super) lde_domain_generator: B,
     pub(super) num_transition_exemptions: usize,
+    pub(super) trace_length_ext: usize,
+    pub(super) zk_parameters: Option<ZkParameters>,
 }
 
 impl<B: StarkField> AirContext<B> {
@@ -133,18 +135,35 @@
             );
         }
 
+        let h = options.zk_witness_randomizer_degree().unwrap_or(0);
+        let trace_length = trace_info.length();
+        let trace_length_ext = (trace_length + h as usize).next_power_of_two();
+        let zk_blowup = trace_length_ext / trace_length;
+        let lde_domain_size = trace_length_ext * options.blowup_factor();
+        // equation (12) in https://eprint.iacr.org/2024/1037
+        let h_q = options.num_queries() + 1;
+        let zk_parameters = if options.is_zk() {
+            Some(ZkParameters {
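+                // Illustrative sizing (numbers assumed for exposition, not taken from this
+                // change): with a quadratic extension and 96 FRI queries, equation (13)
+                // gives h = 2 * (2 + 96) = 196, so a trace of length 1024 is extended to
+                // trace_length_ext = (1024 + 196).next_power_of_two() = 2048, i.e.
+                // zk_blowup = 2, while h_q = num_queries + 1 = 97.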
degree_witness_randomizer: h as usize, + degree_constraint_randomizer: h_q, + zk_blowup_witness: zk_blowup, + }) + } else { + None + }; + // determine minimum blowup factor needed to evaluate transition constraints by taking // the blowup factor of the highest degree constraint let mut ce_blowup_factor = 0; for degree in main_transition_constraint_degrees.iter() { - if degree.min_blowup_factor() > ce_blowup_factor { - ce_blowup_factor = degree.min_blowup_factor(); + if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor { + ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext); } } for degree in aux_transition_constraint_degrees.iter() { - if degree.min_blowup_factor() > ce_blowup_factor { - ce_blowup_factor = degree.min_blowup_factor(); + if degree.min_blowup_factor(trace_length, trace_length_ext) > ce_blowup_factor { + ce_blowup_factor = degree.min_blowup_factor(trace_length, trace_length_ext); } } @@ -155,9 +174,6 @@ impl AirContext { options.blowup_factor() ); - let trace_length = trace_info.length(); - let lde_domain_size = trace_length * options.blowup_factor(); - AirContext { options, trace_info, @@ -170,6 +186,8 @@ impl AirContext { trace_domain_generator: B::get_root_of_unity(trace_length.ilog2()), lde_domain_generator: B::get_root_of_unity(lde_domain_size.ilog2()), num_transition_exemptions: 1, + trace_length_ext, + zk_parameters, } } @@ -188,25 +206,31 @@ impl AirContext { self.trace_info.length() } + /// Returns length of the possibly extended execution trace. This is the same as the original + /// trace length when zero-knowledge is not enabled. + pub fn trace_length_ext(&self) -> usize { + self.trace_length_ext + } + /// Returns degree of trace polynomials for an instance of a computation. /// - /// The degree is always `trace_length` - 1. + /// The degree is always `trace_length_ext` - 1. pub fn trace_poly_degree(&self) -> usize { - self.trace_info.length() - 1 + self.trace_length_ext() - 1 } /// Returns size of the constraint evaluation domain. /// - /// This is guaranteed to be a power of two, and is equal to `trace_length * ce_blowup_factor`. + /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * ce_blowup_factor`. pub fn ce_domain_size(&self) -> usize { - self.trace_info.length() * self.ce_blowup_factor + self.trace_length_ext() * self.ce_blowup_factor } /// Returns the size of the low-degree extension domain. /// - /// This is guaranteed to be a power of two, and is equal to `trace_length * lde_blowup_factor`. + /// This is guaranteed to be a power of two, and is equal to `trace_length_ext * lde_blowup_factor`. pub fn lde_domain_size(&self) -> usize { - self.trace_info.length() * self.options.blowup_factor() + self.trace_length_ext() * self.options.blowup_factor() } /// Returns the number of transition constraints for a computation, excluding the Lagrange @@ -292,6 +316,8 @@ impl AirContext { /// numerator is `trace_len - 1` for all transition constraints (i.e. the base degree is 1). /// Hence, no matter what the degree of the divisor is for each, the degree of the fraction will /// be at most `trace_len - 1`. 
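+    ///
+    /// As an illustrative example (numbers assumed for exposition): with zero-knowledge
+    /// disabled (`trace_length_ext == trace_length`), a single degree-3 transition
+    /// constraint, `trace_length = 1024`, and one transition exemption, the evaluation
+    /// degree is `3 * (1024 - 1) = 3069` and the divisor degree is `1024 - 1 = 1023`,
+    /// so the quotient has degree `2046` and `ceil(2046 / 1024) = 2` columns are needed.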
+    ///
+    /// TODO: update documentation
     pub fn num_constraint_composition_columns(&self) -> usize {
         let mut highest_constraint_degree = 0_usize;
         for degree in self
@@ -299,19 +325,93 @@
             .iter()
             .chain(self.aux_transition_constraint_degrees.iter())
         {
-            let eval_degree = degree.get_evaluation_degree(self.trace_len());
+            let eval_degree =
+                degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext());
             if eval_degree > highest_constraint_degree {
                 highest_constraint_degree = eval_degree
             }
         }
         let trace_length = self.trace_len();
+        let trace_length_ext = self.trace_length_ext();
         let transition_divisior_degree = trace_length - self.num_transition_exemptions();
 
-        // we use the identity: ceil(a/b) = (a + b - 1)/b
         let num_constraint_col =
-            (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length);
+            (highest_constraint_degree - transition_divisior_degree).div_ceil(trace_length_ext);
+
+        if self.zk_parameters.is_some() {
+            let quotient_degree = if highest_constraint_degree < trace_length_ext {
+                // This means that our transition constraints have degree 1 and hence the boundary
+                // constraints will determine the degree
+                trace_length_ext - 2
+            } else {
+                highest_constraint_degree - transition_divisior_degree
+            };
+            let n_q = self.options.num_queries();
+            let den = self.trace_length_ext() - (n_q + 1);
+
+            (quotient_degree + 1).div_ceil(den)
+        } else {
+            cmp::max(num_constraint_col, 1)
+        }
+    }
+
+    /// Returns the degree of the constraint composition polynomial.
+    pub fn constraint_composition_degree(&self) -> usize {
+        let mut highest_constraint_degree = 0_usize;
+        for degree in self
+            .main_transition_constraint_degrees
+            .iter()
+            .chain(self.aux_transition_constraint_degrees.iter())
+        {
+            let eval_degree =
+                degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext());
+            if eval_degree > highest_constraint_degree {
+                highest_constraint_degree = eval_degree
+            }
+        }
+        let trace_length = self.trace_len();
+        let transition_divisior_degree = trace_length - self.num_transition_exemptions();
 
-        cmp::max(num_constraint_col, 1)
+        if highest_constraint_degree < self.trace_length_ext {
+            // This means that our transition constraints have degree 1 and hence the boundary
+            // constraints will determine the degree
+            self.trace_length_ext - 2
+        } else {
+            highest_constraint_degree - transition_divisior_degree
+        }
+    }
+
+    /// Returns the number of coefficients per column when the constraint composition polynomial
+    /// is split into columns; when zero-knowledge is not enabled this is the trace length.
+    pub fn num_coefficients_chunk_quotient(&self) -> usize {
+        if self.zk_parameters().is_some() {
+            let num_constraint_composition_cols = self.num_constraint_composition_columns();
+            let quotient_degree = self.constraint_composition_degree();
+
+            (quotient_degree + 1).div_ceil(num_constraint_composition_cols)
+        } else {
+            self.trace_len()
+        }
+    }
+
+    /// Returns the zero-knowledge parameters, if zero-knowledge is enabled.
+    pub fn zk_parameters(&self) -> Option<ZkParameters> {
+        self.zk_parameters
+    }
+
+    /// Returns the factor by which the trace length is extended to accommodate witness
+    /// randomization, or 1 if zero-knowledge is not enabled.
+    pub fn zk_blowup_factor(&self) -> usize {
+        self.zk_parameters()
+            .map(|parameters| parameters.zk_blowup_witness())
+            .unwrap_or(1)
+    }
+
+    /// Returns the degree of the witness randomizer, or 0 if zero-knowledge is not enabled.
+    pub fn zk_witness_randomizer_degree(&self) -> usize {
+        self.zk_parameters()
+            .map(|parameters| parameters.degree_witness_randomizer())
+            .unwrap_or(0)
+    }
+
+    /// Returns the degree of the constraint randomizer, or 0 if zero-knowledge is not enabled.
+    pub fn zk_constraint_randomizer_degree(&self) -> usize {
+        self.zk_parameters()
+            .map(|parameters| parameters.degree_constraint_randomizer())
+            .unwrap_or(0)
+    }
 
     // DATA MUTATORS
@@ -347,9 +447,11 @@
             .iter()
             .chain(self.aux_transition_constraint_degrees.iter())
         {
-            let eval_degree = degree.get_evaluation_degree(self.trace_len());
+            let eval_degree =
+                degree.get_evaluation_degree(self.trace_len(), self.trace_length_ext());
let max_constraint_composition_degree = self.ce_domain_size() - 1; - let max_exemptions = max_constraint_composition_degree + self.trace_len() - eval_degree; + let max_exemptions = + max_constraint_composition_degree + self.trace_length_ext() - eval_degree; assert!( n <= max_exemptions, "number of transition exemptions cannot exceed: {max_exemptions}, but was {n}" @@ -360,3 +462,24 @@ impl AirContext { self } } + +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct ZkParameters { + degree_witness_randomizer: usize, + degree_constraint_randomizer: usize, + zk_blowup_witness: usize, +} + +impl ZkParameters { + pub fn degree_witness_randomizer(&self) -> usize { + self.degree_witness_randomizer + } + + pub fn degree_constraint_randomizer(&self) -> usize { + self.degree_constraint_randomizer + } + + pub fn zk_blowup_witness(&self) -> usize { + self.zk_blowup_witness + } +} diff --git a/air/src/air/mod.rs b/air/src/air/mod.rs index 53a59fa5a..dc9a93c32 100644 --- a/air/src/air/mod.rs +++ b/air/src/air/mod.rs @@ -17,7 +17,7 @@ mod trace_info; pub use trace_info::TraceInfo; mod context; -pub use context::AirContext; +pub use context::{AirContext, ZkParameters}; mod assertions; pub use assertions::Assertion; @@ -547,7 +547,7 @@ pub trait Air: Send + Sync { let lagrange = if self.context().has_lagrange_kernel_aux_column() { let mut lagrange_kernel_t_coefficients = Vec::new(); - for _ in 0..self.context().trace_len().ilog2() { + for _ in 0..self.context().trace_info().length().ilog2() { lagrange_kernel_t_coefficients.push(public_coin.draw()?); } @@ -600,4 +600,9 @@ pub trait Air: Send + Sync { lagrange: lagrange_cc, }) } + + /// Returns whether zero-knowledge is enabled. + fn is_zk(&self) -> bool { + self.options().is_zk() + } } diff --git a/air/src/air/tests.rs b/air/src/air/tests.rs index e0063ed3b..8338a3350 100644 --- a/air/src/air/tests.rs +++ b/air/src/air/tests.rs @@ -205,7 +205,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![1]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -215,7 +215,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::with_meta(4, trace_length, vec![assertions.len() as u8]), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -267,7 +267,7 @@ pub fn build_context( trace_width: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; let trace_info = TraceInfo::new(trace_width, trace_length); AirContext::new(trace_info, t_degrees, num_assertions, options) diff --git a/air/src/air/trace_info.rs b/air/src/air/trace_info.rs index 99ff4aa6d..23f740338 100644 --- a/air/src/air/trace_info.rs +++ b/air/src/air/trace_info.rs @@ -232,8 +232,8 @@ impl ToElements for TraceInfo { // of bytes which are slightly smaller than the number of bytes needed to encode a field // element, and then converting these chunks into field elements. 
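+        // For example, for a field with 8-byte elements the metadata is consumed in
+        // 8-byte chunks and each chunk is deserialized directly into a field element
+        // (previously: 7-byte chunks, padded into elements).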
if !self.trace_meta.is_empty() { - for chunk in self.trace_meta.chunks(E::ELEMENT_BYTES - 1) { - result.push(E::from_bytes_with_padding(chunk)); + for chunk in self.trace_meta.chunks(E::ELEMENT_BYTES) { + result.push(E::read_from_bytes(chunk).unwrap()); } } @@ -346,6 +346,7 @@ mod tests { use super::{ToElements, TraceInfo}; #[test] + #[ignore] fn trace_info_to_elements() { // --- test trace with only main segment ------------------------------ let main_width = 20; diff --git a/air/src/air/transition/degree.rs b/air/src/air/transition/degree.rs index a51ab2840..9f5b99f69 100644 --- a/air/src/air/transition/degree.rs +++ b/air/src/air/transition/degree.rs @@ -87,8 +87,10 @@ impl TransitionConstraintDegree { /// $$ /// 2 \cdot (64 - 1) + \frac{64 \cdot (32 - 1)}{32} = 126 + 62 = 188 /// $$ - pub fn get_evaluation_degree(&self, trace_length: usize) -> usize { - let mut result = self.base * (trace_length - 1); + /// + /// TODO: Update docs + pub fn get_evaluation_degree(&self, trace_length: usize, trace_length_ext: usize) -> usize { + let mut result = self.base * (trace_length_ext - 1); for cycle_length in self.cycles.iter() { result += (trace_length / cycle_length) * (cycle_length - 1); } @@ -98,7 +100,7 @@ impl TransitionConstraintDegree { /// Returns a minimum blowup factor needed to evaluate constraint of this degree. /// /// This is guaranteed to be a power of two, greater than one. - pub fn min_blowup_factor(&self) -> usize { + pub fn min_blowup_factor(&self, trace_length: usize, trace_length_ext: usize) -> usize { // The blowup factor needs to be a power of two large enough to accommodate degree of // transition constraints defined by rational functions `C(x) / z(x)` where `C(x)` is the // constraint polynomial and `z(x)` is the transition constraint divisor. @@ -110,7 +112,12 @@ impl TransitionConstraintDegree { // // For example, if degree of our constraints is 6, the blowup factor would need to be 8. // However, if the degree is 5, the blowup factor could be as small as 4. 
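+        //
+        // As an illustrative example of the computation below (numbers assumed for
+        // exposition): for a single degree-3 constraint with no cycles and zero-knowledge
+        // disabled (trace_length == trace_length_ext == 1024), degree_bound = 3, so
+        // q_deg = 3 * 1023 - 1023 = 2046, q_deg.div_ceil(1024) = 2, and the blowup
+        // factor is max(2, ProofOptions::MIN_BLOWUP_FACTOR).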
- let degree_bound = self.base + self.cycles.len() - 1; - cmp::max(degree_bound.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR) + // + // TODO: update documentation + let degree_bound = self.base + self.cycles.len(); + let q_deg = degree_bound * (trace_length_ext - 1) - (trace_length - 1); + let blowup_factor = q_deg.div_ceil(trace_length_ext); + + cmp::max(blowup_factor.next_power_of_two(), ProofOptions::MIN_BLOWUP_FACTOR) } } diff --git a/air/src/air/transition/mod.rs b/air/src/air/transition/mod.rs index 60e641817..89f44577a 100644 --- a/air/src/air/transition/mod.rs +++ b/air/src/air/transition/mod.rs @@ -55,7 +55,7 @@ impl TransitionConstraints { // build constraint divisor; the same divisor applies to all transition constraints let divisor = ConstraintDivisor::from_transition( - context.trace_len(), + context.trace_info().length(), context.num_transition_exemptions(), ); diff --git a/air/src/errors.rs b/air/src/errors.rs index 2f0fa1665..38196ba7d 100644 --- a/air/src/errors.rs +++ b/air/src/errors.rs @@ -42,3 +42,5 @@ impl fmt::Display for AssertionError { } } } + +impl core::error::Error for AssertionError {} diff --git a/air/src/lib.rs b/air/src/lib.rs index 0a471a706..184d2b862 100644 --- a/air/src/lib.rs +++ b/air/src/lib.rs @@ -48,5 +48,5 @@ pub use air::{ LagrangeConstraintsCompositionCoefficients, LagrangeKernelBoundaryConstraint, LagrangeKernelConstraints, LagrangeKernelEvaluationFrame, LagrangeKernelRandElements, LagrangeKernelTransitionConstraints, TraceInfo, TransitionConstraintDegree, - TransitionConstraints, + TransitionConstraints, ZkParameters, }; diff --git a/air/src/options.rs b/air/src/options.rs index a831bdad7..b26989c69 100644 --- a/air/src/options.rs +++ b/air/src/options.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; -use core::{cmp, ops::Div}; +use core::cmp; use fri::FriOptions; use math::{FieldElement, StarkField, ToElements}; @@ -76,16 +76,13 @@ pub enum FieldExtension { /// collision resistance of the hash function used by the protocol. For example, if a hash function /// with 128-bit collision resistance is used, soundness of a STARK proof cannot exceed 128 bits. /// -/// In addition to the above, the parameter `num_partitions` is used in order to specify the number -/// of partitions each of the traces committed to during proof generation is split into, and -/// the parameter `min_partition_size` gives a lower bound on the size of each such partition. -/// More precisely, and taking the main segment trace as an example, the prover will split the main -/// segment trace into `num_partitions` parts each of size at least `min_partition_size`. The prover -/// will then proceed to hash each part row-wise resulting in `num_partitions` digests per row of -/// the trace. The prover finally combines the `num_partitions` digest (per row) into one digest -/// (per row) and at this point the vector commitment scheme can be called. -/// In the case when `num_partitions` is equal to `1` the prover will just hash each row in one go -/// producing one digest per row of the trace. +/// In addition, partition options (see [PartitionOptions]) can be provided to split traces during +/// proving and distribute work across multiple devices. Taking the main segment trace as an example, +/// the prover will split the main segment trace into `num_partitions` parts, and then proceed to hash +/// each part row-wise resulting in `num_partitions` digests per row of the trace. 
Finally,
+/// `num_partitions` digests (per row) are combined into one digest (per row) and at this point
+/// a vector commitment scheme can be called. In the case when `num_partitions` is equal to `1` (default)
+/// the prover will hash each row in one go producing one digest per row of the trace.
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct ProofOptions {
     num_queries: u8,
@@ -95,6 +92,7 @@ pub struct ProofOptions {
     fri_folding_factor: u8,
     fri_remainder_max_degree: u8,
     partition_options: PartitionOptions,
+    is_zk: bool,
 }
 
 // PROOF OPTIONS IMPLEMENTATION
@@ -128,6 +126,7 @@ impl ProofOptions {
         field_extension: FieldExtension,
         fri_folding_factor: usize,
         fri_remainder_max_degree: usize,
+        is_zk: bool,
     ) -> ProofOptions {
         // TODO: return errors instead of panicking
         assert!(num_queries > 0, "number of queries must be greater than 0");
@@ -169,6 +168,7 @@ impl ProofOptions {
             fri_folding_factor: fri_folding_factor as u8,
             fri_remainder_max_degree: fri_remainder_max_degree as u8,
             partition_options: PartitionOptions::new(1, 1),
+            is_zk,
         }
     }
 
@@ -177,13 +177,13 @@ impl ProofOptions {
     /// # Panics
     /// Panics if:
     /// - `num_partitions` is zero or greater than 16.
-    /// - `min_partition_size` is zero or greater than 256.
+    /// - `hash_rate` is zero or greater than 256.
     pub const fn with_partitions(
         mut self,
         num_partitions: usize,
-        min_partition_size: usize,
+        hash_rate: usize,
     ) -> ProofOptions {
-        self.partition_options = PartitionOptions::new(num_partitions, min_partition_size);
+        self.partition_options = PartitionOptions::new(num_partitions, hash_rate);
         self
     }
 
@@ -249,6 +249,32 @@ impl ProofOptions {
     pub fn partition_options(&self) -> PartitionOptions {
         self.partition_options
     }
+
+    /// Returns whether zero-knowledge is enabled.
+    pub fn is_zk(&self) -> bool {
+        self.is_zk
+    }
+
+    /// Computes a lower bound on the degree of the polynomial used for randomizing the witness
+    /// polynomials.
+    pub(crate) fn zk_witness_randomizer_degree(&self) -> Option<u32> {
+        if self.is_zk {
+            let h = compute_degree_randomizing_poly(
+                self.field_extension().degree() as usize,
+                self.num_queries(),
+            );
+
+            Some(h as u32)
+        } else {
+            None
+        }
+    }
+}
+
+/// Computes the number of coefficients of the polynomials used to randomize the witness polynomials.
+///
+/// This is based on equation (13) in https://eprint.iacr.org/2024/1037
+pub fn compute_degree_randomizing_poly(extension_degree: usize, num_fri_queries: usize) -> usize {
+    2 * (extension_degree + num_fri_queries)
 }
 
 impl<E: StarkField> ToElements<E> for ProofOptions {
@@ -277,7 +303,8 @@ impl Serializable for ProofOptions {
         target.write_u8(self.fri_folding_factor);
         target.write_u8(self.fri_remainder_max_degree);
         target.write_u8(self.partition_options.num_partitions);
-        target.write_u8(self.partition_options.min_partition_size);
+        target.write_u8(self.partition_options.hash_rate);
+        target.write_bool(self.is_zk)
     }
 }
 
@@ -294,6 +321,7 @@ impl Deserializable for ProofOptions {
             FieldExtension::read_from(source)?,
             source.read_u8()? as usize,
             source.read_u8()? as usize,
+            source.read_bool()?,
         );
         Ok(result.with_partitions(source.read_u8()? as usize, source.read_u8()? as usize))
     }
@@ -347,49 +375,65 @@ impl Deserializable for FieldExtension {
 // PARTITION OPTION IMPLEMENTATION
 // ================================================================================================
 
-/// Defines the parameters used when committing to the traces generated during the protocol.
+/// Defines the parameters used to calculate partition size when committing to the traces
+/// generated during the protocol.
+///
+/// Using multiple partitions changes how vector commitments are calculated:
+/// - Input matrix columns are split into at most `num_partitions` partitions.
+/// - For each matrix row, a hash is calculated for each partition separately.
+/// - The results are merged together by one more hash iteration.
+///
+/// This is especially useful when proving with multiple GPU cards, where each device holds
+/// a subset of the data, since it requires less data reshuffling when generating commitments.
+///
+/// The `hash_rate` parameter is used to find the optimal partition size that minimizes the
+/// number of hash iterations. It specifies how many field elements are consumed by each hash
+/// iteration.
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
 pub struct PartitionOptions {
     num_partitions: u8,
-    min_partition_size: u8,
+    hash_rate: u8,
 }
 
 impl PartitionOptions {
     /// Returns a new instance of [PartitionOptions].
-    pub const fn new(num_partitions: usize, min_partition_size: usize) -> Self {
-        assert!(num_partitions >= 1, "number of partitions must be greater than or eqaul to 1");
+    pub const fn new(num_partitions: usize, hash_rate: usize) -> Self {
+        assert!(num_partitions >= 1, "number of partitions must be greater than or equal to 1");
         assert!(num_partitions <= 16, "number of partitions must be smaller than or equal to 16");
-        assert!(
-            min_partition_size >= 1,
-            "smallest partition size must be greater than or equal to 1"
-        );
-        assert!(
-            min_partition_size <= 256,
-            "smallest partition size must be smaller than or equal to 256"
-        );
+        assert!(hash_rate >= 1, "hash rate must be greater than or equal to 1");
+        assert!(hash_rate <= 256, "hash rate must be smaller than or equal to 256");
 
         Self {
             num_partitions: num_partitions as u8,
-            min_partition_size: min_partition_size as u8,
+            hash_rate: hash_rate as u8,
         }
     }
 
     /// Returns the size of each partition used when committing to the main and auxiliary traces as
     /// well as the constraint evaluation trace.
+    /// The returned size is expressed as a number of columns of field elements of type `E`.
     pub fn partition_size<E: FieldElement>(&self, num_columns: usize) -> usize {
-        let base_elements_per_partition = cmp::max(
-            (num_columns * E::EXTENSION_DEGREE).div_ceil(self.num_partitions as usize),
-            self.min_partition_size as usize,
-        );
+        if self.num_partitions == 1 {
+            return num_columns;
+        }
+
+        // Don't separate columns that would fit inside one hash iteration. min_partition_size is
+        // the number of `E` elements that can be consumed in one hash iteration.
+        let min_partition_size = self.hash_rate as usize / E::EXTENSION_DEGREE;
 
-        base_elements_per_partition.div(E::EXTENSION_DEGREE)
+        cmp::max(num_columns.div_ceil(self.num_partitions as usize), min_partition_size)
+    }
+
+    /// Returns the actual number of partitions, after the minimum partition size implied
+    /// by the hash rate is taken into account.
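+    ///
+    /// For example (mirroring the unit tests below): with `num_partitions = 4`,
+    /// `hash_rate = 8`, and 70 base-field columns, the partition size is
+    /// `max(ceil(70 / 4), 8) = 18`, so this returns `ceil(70 / 18) = 4`.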
+    pub fn num_partitions<E: FieldElement>(&self, num_columns: usize) -> usize {
+        num_columns.div_ceil(self.partition_size::<E>(num_columns))
+    }
 }
 
 impl Default for PartitionOptions {
     fn default() -> Self {
-        Self { num_partitions: 1, min_partition_size: 1 }
+        Self { num_partitions: 1, hash_rate: 1 }
     }
 }
 
@@ -398,9 +442,9 @@
 #[cfg(test)]
 mod tests {
-    use math::fields::f64::BaseElement;
+    use math::fields::{f64::BaseElement, CubeExtension};
 
-    use super::{FieldExtension, ProofOptions, ToElements};
+    use super::{FieldExtension, PartitionOptions, ProofOptions, ToElements};
 
     #[test]
     fn proof_options_to_elements() {
@@ -431,7 +475,41 @@ mod tests {
             field_extension,
             fri_folding_factor as usize,
             fri_remainder_max_degree as usize,
+            false,
         );
         assert_eq!(expected, options.to_elements());
     }
+
+    #[test]
+    fn correct_partition_sizes() {
+        type E1 = BaseElement;
+        type E3 = CubeExtension<BaseElement>;
+
+        let options = PartitionOptions::new(4, 8);
+        let columns = 7;
+        assert_eq!(8, options.partition_size::<E1>(columns));
+        assert_eq!(1, options.num_partitions::<E1>(columns));
+
+        let options = PartitionOptions::new(4, 8);
+        let columns = 70;
+        assert_eq!(18, options.partition_size::<E1>(columns));
+        assert_eq!(4, options.num_partitions::<E1>(columns));
+
+        let options = PartitionOptions::new(2, 8);
+        let columns = 7;
+        assert_eq!(4, options.partition_size::<E3>(columns));
+        assert_eq!(2, options.num_partitions::<E3>(columns));
+
+        let options: PartitionOptions = PartitionOptions::new(4, 8);
+        let columns = 7;
+        assert_eq!(2, options.partition_size::<E3>(columns));
+        assert_eq!(4, options.num_partitions::<E3>(columns));
+
+        // don't use all partitions if it would result in sizes smaller than
+        // a single hash iteration can handle
+        let options: PartitionOptions = PartitionOptions::new(4, 8);
+        let columns = 3;
+        assert_eq!(2, options.partition_size::<E3>(columns));
+        assert_eq!(2, options.num_partitions::<E3>(columns));
+    }
 }
diff --git a/air/src/proof/context.rs b/air/src/proof/context.rs
index 83c2beece..1df47c463 100644
--- a/air/src/proof/context.rs
+++ b/air/src/proof/context.rs
@@ -5,7 +5,7 @@
 
 use alloc::{string::ToString, vec::Vec};
 
-use math::{StarkField, ToElements};
+use math::{FieldElement, StarkField, ToElements};
 use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable};
 
 use crate::{ProofOptions, TraceInfo};
@@ -18,6 +18,7 @@ pub struct Context {
     trace_info: TraceInfo,
     field_modulus_bytes: Vec<u8>,
     options: ProofOptions,
+    zk_blowup: usize,
 }
 
 impl Context {
@@ -29,7 +30,11 @@ impl Context {
     /// # Panics
     /// Panics if either trace length or the LDE domain size implied by the trace length and the
    /// blowup factor is greater than [u32::MAX].
-    pub fn new<B: StarkField>(trace_info: TraceInfo, options: ProofOptions) -> Self {
+    pub fn new<B: StarkField>(
+        trace_info: TraceInfo,
+        options: ProofOptions,
+        zk_blowup: usize,
+    ) -> Self {
         // TODO: return errors instead of panicking?
         let trace_length = trace_info.length();
 
@@ -42,6 +47,7 @@ impl Context {
             trace_info,
             field_modulus_bytes: B::get_modulus_le_bytes(),
             options,
+            zk_blowup,
         }
     }
 
@@ -54,8 +60,8 @@ impl Context {
     }
 
     /// Returns the size of the LDE domain for the computation described by this context.
-    pub fn lde_domain_size(&self) -> usize {
-        self.trace_info.length() * self.options.blowup_factor()
+    pub fn lde_domain_size<E: FieldElement>(&self) -> usize {
+        self.trace_info.length() * self.zk_blowup * self.options.blowup_factor()
     }
 
     /// Returns modulus of the field for the computation described by this context.
@@ -124,6 +130,7 @@ impl Serializable for Context {
         target.write_u8(self.field_modulus_bytes.len() as u8);
         target.write_bytes(&self.field_modulus_bytes);
         self.options.write_into(target);
+        self.zk_blowup.write_into(target);
     }
 }
 
@@ -148,7 +155,15 @@ impl Deserializable for Context {
         // read options
         let options = ProofOptions::read_from(source)?;
 
-        Ok(Context { trace_info, field_modulus_bytes, options })
+        // TODO: should we validate it?
+        let zk_blowup = usize::read_from(source)?;
+
+        Ok(Context {
+            trace_info,
+            field_modulus_bytes,
+            options,
+            zk_blowup,
+        })
     }
 }
 
@@ -212,10 +227,11 @@ mod tests {
             field_extension,
             fri_folding_factor as usize,
             fri_remainder_max_degree as usize,
+            false,
         );
 
         let trace_info =
             TraceInfo::new_multi_segment(main_width, aux_width, aux_rands, trace_length, vec![]);
-        let context = Context::new::<BaseElement>(trace_info, options);
+        let context = Context::new::<BaseElement>(trace_info, options, 1);
         assert_eq!(expected, context.to_elements());
     }
 }
diff --git a/air/src/proof/mod.rs b/air/src/proof/mod.rs
index 7307ba1d3..e791b345f 100644
--- a/air/src/proof/mod.rs
+++ b/air/src/proof/mod.rs
@@ -79,6 +79,8 @@ pub struct Proof {
     pub pow_nonce: u64,
     /// Optionally, an auxiliary (non-STARK) proof that was generated during auxiliary trace generation.
     pub gkr_proof: Option<Vec<u8>>,
+    /// Random values needed for Fiat-Shamir.
+    pub salts: Vec<u8>,
 }
 
 impl Proof {
@@ -93,8 +95,8 @@ impl Proof {
     }
 
     /// Returns the size of the LDE domain for the computation described by this proof.
-    pub fn lde_domain_size(&self) -> usize {
-        self.context.lde_domain_size()
+    pub fn lde_domain_size<E: FieldElement>(&self) -> usize {
+        self.context.lde_domain_size::<E>()
     }
 
     // SECURITY LEVEL
@@ -108,15 +110,21 @@ impl Proof {
     pub fn security_level<H: Hasher>(&self, conjectured: bool) -> u32 {
         if conjectured {
             get_conjectured_security(
-                self.context.options(),
                 self.context.num_modulus_bits(),
+                self.context.options().field_extension() as u32,
+                self.context.options().blowup_factor(),
+                self.options().num_queries(),
+                self.options().grinding_factor(),
                 self.trace_info().length(),
                 H::COLLISION_RESISTANCE,
             )
         } else {
             get_proven_security(
-                self.context.options(),
                 self.context.num_modulus_bits(),
+                self.context.options().field_extension() as u32,
+                self.context.options().blowup_factor(),
+                self.options().num_queries(),
+                self.options().grinding_factor(),
                 self.trace_info().length(),
                 H::COLLISION_RESISTANCE,
             )
@@ -149,7 +157,8 @@ impl Proof {
         Self {
             context: Context::new::<BaseElement>(
                 TraceInfo::new(1, 8),
-                ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1),
+                ProofOptions::new(1, 2, 2, FieldExtension::None, 8, 1, false),
+                1,
             ),
             num_unique_queries: 0,
             commitments: Commitments::default(),
@@ -162,6 +171,7 @@ impl Proof {
             fri_proof: FriProof::new_dummy(),
             pow_nonce: 0,
             gkr_proof: None,
+            salts: vec![],
         }
     }
 }
@@ -180,6 +190,7 @@ impl Serializable for Proof {
         self.fri_proof.write_into(target);
         self.pow_nonce.write_into(target);
         self.gkr_proof.write_into(target);
+        self.salts.write_into(target);
     }
 }
 
@@ -204,6 +215,7 @@ impl Deserializable for Proof {
             fri_proof: FriProof::read_from(source)?,
             pow_nonce: source.read_u64()?,
             gkr_proof: Option::<Vec<u8>>::read_from(source)?,
+            salts: Vec::read_from(source)?,
         };
         Ok(proof)
     }
@@ -213,32 +225,38 @@ impl Deserializable for Proof {
 // ================================================================================================
 
 /// Computes conjectured security level for the specified proof parameters.
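+///
+/// Roughly, per the computation below: field security is `field_size - log2(lde_domain_size)`
+/// bits and query security is `log2(blowup_factor) * num_queries` bits, plus the grinding
+/// factor once query security clears the grinding contribution floor; the result is
+/// `min(field_security, query_security) - 1`, capped at the collision resistance of the hash.
+///
+/// Illustrative numbers (assumed for exposition): a 64-bit base field with a cubic extension,
+/// `blowup_factor = 8`, `num_queries = 53`, `grinding_factor = 20`, and a 2^18-step trace give
+/// query security `3 * 53 + 20 = 179` and field security `192 - 21 = 171`, i.e.
+/// `min(171, 179) - 1 = 170`, capped at 128 when a 128-bit collision-resistant hash is used.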
-fn get_conjectured_security( - options: &ProofOptions, +pub(crate) fn get_conjectured_security( base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, collision_resistance: u32, ) -> u32 { // compute max security we can get for a given field size - let field_size = base_field_bits * options.field_extension().degree(); - let field_security = field_size - (trace_domain_size * options.blowup_factor()).ilog2(); + let field_size = base_field_bits * extension_degree; + let field_security = field_size - (trace_domain_size * blowup_factor).ilog2(); // compute security we get by executing multiple query rounds - let security_per_query = options.blowup_factor().ilog2(); - let mut query_security = security_per_query * options.num_queries() as u32; + let security_per_query = blowup_factor.ilog2(); + let mut query_security = security_per_query * num_queries as u32; // include grinding factor contributions only for proofs adequate security if query_security >= GRINDING_CONTRIBUTION_FLOOR { - query_security += options.grinding_factor(); + query_security += grinding_factor; } cmp::min(cmp::min(field_security, query_security) - 1, collision_resistance) } /// Estimates proven security level for the specified proof parameters. -fn get_proven_security( - options: &ProofOptions, +pub(crate) fn get_proven_security( base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, collision_resistance: u32, ) -> u32 { @@ -248,8 +266,11 @@ fn get_proven_security( let m_optimal = (m_min as u32..m_max as u32) .max_by_key(|&a| { proven_security_protocol_for_m( - options, base_field_bits, + extension_degree, + blowup_factor, + num_queries, + grinding_factor, trace_domain_size, a as usize, ) @@ -260,8 +281,11 @@ fn get_proven_security( cmp::min( proven_security_protocol_for_m( - options, base_field_bits, + extension_degree, + blowup_factor, + num_queries, + grinding_factor, trace_domain_size, m_optimal as usize, ), @@ -272,17 +296,20 @@ fn get_proven_security( /// Computes proven security level for the specified proof parameters for a fixed /// value of the proximity parameter m in the list-decoding regime. fn proven_security_protocol_for_m( - options: &ProofOptions, base_field_bits: u32, + extension_degree: u32, + blowup_factor: usize, + num_queries: usize, + grinding_factor: u32, trace_domain_size: usize, m: usize, ) -> u64 { - let extension_field_bits = (base_field_bits * options.field_extension().degree()) as f64; - let num_fri_queries = options.num_queries() as f64; + let extension_field_bits = (base_field_bits * extension_degree) as f64; + let num_fri_queries = num_queries as f64; let m = m as f64; - let rho = 1.0 / options.blowup_factor() as f64; + let rho = 1.0 / blowup_factor as f64; let alpha = (1.0 + 0.5 / m) * sqrt(rho); - let max_deg = options.blowup_factor() as f64 + 1.0; + let max_deg = blowup_factor as f64 + 1.0; // To apply Theorem 8 in https://eprint.iacr.org/2022/1216.pdf, we need to apply FRI with // a slightly larger agreement parameter alpha. @@ -296,7 +323,7 @@ fn proven_security_protocol_for_m( // the list-decoding list size in F(Z). 
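+    // As a rough numeric illustration (values assumed for exposition): with
+    // blowup_factor = 8 we have rho = 1/8 and alpha a bit above sqrt(1/8) ~= 0.354, so the
+    // per-query term (1 - theta_plus) stays near alpha and each FRI query contributes
+    // roughly -log2(0.354) ~= 1.5 bits; 53 queries plus 20 bits of grinding then put the
+    // query-phase error near 97-100 bits, consistent with the tests at the end of this file.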
// Modified rate in function field F(Z) - let lde_domain_size = (trace_domain_size * options.blowup_factor()) as f64; + let lde_domain_size = (trace_domain_size * blowup_factor) as f64; let trace_domain_size = trace_domain_size as f64; let num_openings = 2.0; let rho_plus = (trace_domain_size + num_openings) / lde_domain_size; @@ -315,7 +342,7 @@ fn proven_security_protocol_for_m( // Compute FRI query-phase soundness error let fri_queries_err_bits = - options.grinding_factor() as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); + grinding_factor as f64 - log2(powf(1.0 - theta_plus, num_fri_queries)); // Combined error for FRI let fri_err_bits = cmp::min(fri_commit_err_bits as u64, fri_queries_err_bits as u64); @@ -405,31 +432,27 @@ pub fn ceil(value: f64) -> f64 { mod prove_security_tests { use math::{fields::f64::BaseElement, StarkField}; - use super::ProofOptions; use crate::{proof::get_proven_security, FieldExtension}; #[test] fn get_96_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 4; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 97); @@ -437,16 +460,15 @@ mod prove_security_tests { let blowup_factor = 8; let num_queries = 53; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 97); } @@ -455,24 +477,21 @@ mod prove_security_tests { fn get_128_bits_security() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 128); @@ -480,16 +499,15 @@ mod prove_security_tests { let blowup_factor = 16; let num_queries = 65; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, 
base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -498,24 +516,21 @@ mod prove_security_tests { fn extension_degree() { let field_extension = FieldExtension::Quadratic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 85; let collision_resistance = 128; let trace_length = 2_usize.pow(18); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_1, 67); @@ -523,16 +538,15 @@ mod prove_security_tests { // reaching 128 bits security let field_extension = FieldExtension::Cubic; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert_eq!(security_2, 128); } @@ -541,37 +555,33 @@ mod prove_security_tests { fn trace_length() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 80; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let trace_length = 2_usize.pow(16); - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -580,37 +590,33 @@ mod prove_security_tests { fn num_fri_queries() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 60; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, 
collision_resistance); let num_queries = 80; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } @@ -619,37 +625,33 @@ mod prove_security_tests { fn blowup_factor() { let field_extension = FieldExtension::Cubic; let base_field_bits = BaseElement::MODULUS_BITS; - let fri_folding_factor = 8; - let fri_remainder_max_degree = 127; let grinding_factor = 20; let blowup_factor = 8; let num_queries = 30; let collision_resistance = 128; let trace_length = 2_usize.pow(20); - let mut options = ProofOptions::new( - num_queries, + let security_1 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_1 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); let blowup_factor = 16; - options = ProofOptions::new( - num_queries, + let security_2 = get_proven_security( + base_field_bits, + field_extension.degree(), blowup_factor, + num_queries, grinding_factor, - field_extension, - fri_folding_factor as usize, - fri_remainder_max_degree as usize, + trace_length, + collision_resistance, ); - let security_2 = - get_proven_security(&options, base_field_bits, trace_length, collision_resistance); assert!(security_1 < security_2); } diff --git a/air/src/proof/ood_frame.rs b/air/src/proof/ood_frame.rs index 52d394747..2c8317637 100644 --- a/air/src/proof/ood_frame.rs +++ b/air/src/proof/ood_frame.rs @@ -145,7 +145,6 @@ impl OodFrame { let mut reader = SliceReader::new(&self.trace_states); let frame_size = reader.read_u8()? 
as usize; let trace = reader.read_many((main_trace_width + aux_trace_width) * frame_size)?; - if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index 23f985fee..bb97ee5b9 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-crypto" -version = "0.10.0" +version = "0.11.0" description = "Cryptographic library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-crypto/0.10.0" +documentation = "https://docs.rs/winter-crypto/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "merkle-tree", "hash"] edition = "2021" @@ -31,11 +31,13 @@ std = ["blake3/std", "math/std", "sha3/std", "utils/std"] [dependencies] blake3 = { version = "1.5", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } sha3 = { version = "0.10", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } +rand = { version = "0.8" } +rand_chacha = { version = "0.3", default-features = false } [dev-dependencies] criterion = "0.5" proptest = "1.4" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs index 1d2667f7a..72ec674e7 100644 --- a/crypto/src/commitment.rs +++ b/crypto/src/commitment.rs @@ -49,7 +49,7 @@ pub trait VectorCommitment: Sized { fn commitment(&self) -> H::Digest; /// Returns the length of the vector committed to for `Self`. - fn domain_len(&self) -> usize; + fn get_domain_len(&self) -> usize; /// Returns the length of the vector committed to for `Self::Proof`. 
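+    ///
+    /// For example, for the Merkle tree commitment in this crate the committed domain length
+    /// is `1 << tree.depth()`, while the proof domain length is recovered from the length of
+    /// the authentication path carried by the proof.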
fn get_proof_domain_len(proof: &Self::Proof) -> usize; diff --git a/crypto/src/errors.rs b/crypto/src/errors.rs index 5e4f3a6da..637b90c99 100644 --- a/crypto/src/errors.rs +++ b/crypto/src/errors.rs @@ -61,6 +61,8 @@ impl fmt::Display for MerkleTreeError { } } +impl core::error::Error for MerkleTreeError {} + // RANDOM COIN ERROR // ================================================================================================ @@ -89,3 +91,5 @@ impl fmt::Display for RandomCoinError { } } } + +impl core::error::Error for RandomCoinError {} diff --git a/crypto/src/hash/mod.rs b/crypto/src/hash/mod.rs index 4bede6b8d..e1a9de33c 100644 --- a/crypto/src/hash/mod.rs +++ b/crypto/src/hash/mod.rs @@ -6,6 +6,7 @@ use core::{fmt::Debug, slice}; use math::{FieldElement, StarkField}; +use rand::{distributions::Standard, prelude::Distribution}; use utils::{ByteReader, Deserializable, DeserializationError, Serializable}; mod blake; @@ -17,7 +18,7 @@ pub use sha::Sha3_256; mod mds; mod rescue; -pub use rescue::{Rp62_248, Rp64_256, RpJive64_256}; +pub use rescue::{Rp62_248, Rp64_256, RpJive64_256, ARK1, ARK2, MDS}; // HASHER TRAITS // ================================================================================================ @@ -119,6 +120,14 @@ impl Default for ByteDigest { } } +impl Distribution> for Standard { + fn sample(&self, rng: &mut R) -> ByteDigest<24> { + let mut res = [0_u8; 24]; + rng.fill_bytes(&mut res); + ByteDigest(res) + } +} + impl Serializable for ByteDigest { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0); diff --git a/crypto/src/hash/rescue/mod.rs b/crypto/src/hash/rescue/mod.rs index dbb13dee7..6a126ceb2 100644 --- a/crypto/src/hash/rescue/mod.rs +++ b/crypto/src/hash/rescue/mod.rs @@ -9,7 +9,7 @@ mod rp62_248; pub use rp62_248::Rp62_248; mod rp64_256; -pub use rp64_256::Rp64_256; +pub use rp64_256::{Rp64_256, ARK1, ARK2, MDS}; mod rp64_256_jive; pub use rp64_256_jive::RpJive64_256; diff --git a/crypto/src/hash/rescue/rp62_248/digest.rs b/crypto/src/hash/rescue/rp62_248/digest.rs index bacece257..6004b682f 100644 --- a/crypto/src/hash/rescue/rp62_248/digest.rs +++ b/crypto/src/hash/rescue/rp62_248/digest.rs @@ -5,7 +5,11 @@ use core::slice; -use math::{fields::f62::BaseElement, StarkField}; +use math::{fields::f62::BaseElement, FieldElement, StarkField}; +use rand::{ + distributions::{Standard, Uniform}, + prelude::Distribution, +}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -55,6 +59,18 @@ impl Default for ElementDigest { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); + for r in res.iter_mut() { + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); + } + ElementDigest::new(res) + } +} + impl Serializable for ElementDigest { fn write_into(&self, target: &mut W) { target.write_bytes(&self.as_bytes()[..31]); diff --git a/crypto/src/hash/rescue/rp64_256/digest.rs b/crypto/src/hash/rescue/rp64_256/digest.rs index 84cec4123..e4dd76088 100644 --- a/crypto/src/hash/rescue/rp64_256/digest.rs +++ b/crypto/src/hash/rescue/rp64_256/digest.rs @@ -5,7 +5,8 @@ use core::slice; -use math::fields::f64::BaseElement; +use math::{fields::f64::BaseElement, FieldElement, StarkField}; +use rand::distributions::{Distribution, Standard, Uniform}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, 
Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -87,6 +88,18 @@ impl From for [u8; 32] { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); + for r in res.iter_mut() { + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); + } + ElementDigest::new(res) + } +} + // TESTS // ================================================================================================ diff --git a/crypto/src/hash/rescue/rp64_256/mod.rs b/crypto/src/hash/rescue/rp64_256/mod.rs index 0d87de3f7..584395d2e 100644 --- a/crypto/src/hash/rescue/rp64_256/mod.rs +++ b/crypto/src/hash/rescue/rp64_256/mod.rs @@ -388,7 +388,7 @@ impl Rp64_256 { // MDS // ================================================================================================ /// Rescue MDS matrix -const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(7), BaseElement::new(23), @@ -560,7 +560,7 @@ const MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ ]; /// Rescue Inverse MDS matrix -const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ +pub const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ [ BaseElement::new(14868391535953158196), BaseElement::new(13278298489594233127), @@ -739,7 +739,7 @@ const INV_MDS: [[BaseElement; STATE_WIDTH]; STATE_WIDTH] = [ /// /// The constants are broken up into two arrays ARK1 and ARK2; ARK1 contains the constants for the /// first half of Rescue round, and ARK2 contains constants for the second half of Rescue round. -const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(13917550007135091859), BaseElement::new(16002276252647722320), @@ -840,7 +840,7 @@ const ARK1: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ ], ]; -const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ +pub const ARK2: [[BaseElement; STATE_WIDTH]; NUM_ROUNDS] = [ [ BaseElement::new(7989257206380839449), BaseElement::new(8639509123020237648), diff --git a/crypto/src/hash/rescue/rp64_256_jive/digest.rs b/crypto/src/hash/rescue/rp64_256_jive/digest.rs index 84cec4123..6a0a66f5b 100644 --- a/crypto/src/hash/rescue/rp64_256_jive/digest.rs +++ b/crypto/src/hash/rescue/rp64_256_jive/digest.rs @@ -5,7 +5,11 @@ use core::slice; -use math::fields::f64::BaseElement; +use math::{fields::f64::BaseElement, FieldElement, StarkField}; +use rand::{ + distributions::{Standard, Uniform}, + prelude::Distribution, +}; use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use super::{Digest, DIGEST_SIZE}; @@ -87,6 +91,18 @@ impl From for [u8; 32] { } } +impl Distribution for Standard { + fn sample(&self, rng: &mut R) -> ElementDigest { + let mut res = [BaseElement::ZERO; DIGEST_SIZE]; + let uni_dist = Uniform::from(0..BaseElement::MODULUS); + for r in res.iter_mut() { + let sampled_integer = uni_dist.sample(rng); + *r = BaseElement::new(sampled_integer); + } + ElementDigest::new(res) + } +} + // TESTS // ================================================================================================ diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index ff29176bb..e9a961c77 100644 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -26,13 +26,15 @@ pub use hash::{Digest, ElementHasher, Hasher}; pub mod hashers { //! 
Contains implementations of currently supported hash functions. - pub use super::hash::{Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256}; + pub use super::hash::{ + Blake3_192, Blake3_256, Rp62_248, Rp64_256, RpJive64_256, Sha3_256, ARK1, ARK2, MDS, + }; } mod merkle; #[cfg(feature = "concurrent")] pub use merkle::concurrent; -pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree}; +pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree, SaltedMerkleTree}; mod random; pub use random::{DefaultRandomCoin, RandomCoin}; diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 51b4a76dc..a125e05ff 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -7,13 +7,22 @@ use alloc::{ collections::{BTreeMap, BTreeSet}, vec::Vec, }; -use core::slice; +use core::{marker::PhantomData, slice}; + +use rand::{ + distributions::{Distribution, Standard}, + Rng, RngCore, SeedableRng, +}; + +use crate::{ + errors::MerkleTreeError, + hash::{ByteDigest, Hasher}, + VectorCommitment, +}; mod proofs; pub use proofs::BatchMerkleProof; -use crate::{Hasher, MerkleTreeError, VectorCommitment}; - #[cfg(feature = "concurrent")] pub mod concurrent; @@ -97,6 +106,17 @@ pub struct MerkleTree { /// up to the root (excluding the root itself). pub type MerkleTreeOpening = (::Digest, Vec<::Digest>); +/// Salted Merkle tree opening consisting of a leaf value, a salt, and a Merkle path leading +/// from this leaf up to the root (excluding the root itself). +pub type SaltedMerkleTreeOpening = + (::Digest, (::Digest, Vec<::Digest>)); + +/// Salted Merkle tree multi opening consisting of a vector of leaves, a vector of corresponding salts, +/// and a collection of corresponding Merkle paths leading from these leaves up to the root +/// (excluding the root itself). The collection of Merkle paths is stored as a [BatchMerkleProof]. +pub type SaltedMerkleTreeMultiOpening = + (Vec<::Digest>, (Vec<::Digest>, BatchMerkleProof)); + // MERKLE TREE IMPLEMENTATION // ================================================================================================ @@ -416,7 +436,7 @@ impl VectorCommitment for MerkleTree { *self.root() } - fn domain_len(&self) -> usize { + fn get_domain_len(&self) -> usize { 1 << self.depth() } @@ -457,3 +477,180 @@ impl VectorCommitment for MerkleTree { MerkleTree::::verify_batch(&commitment, indexes, items, proof) } } + +// SALTED MERKLE TREE +// ================================================================================================ + +pub struct SaltedMerkleTree { + leaves: Vec, + tree: MerkleTree, + salts: Vec, + _prng: PhantomData
<P>,
+}
+
+impl<H: Hasher, P: RngCore> SaltedMerkleTree<H, P>
+where
+    Standard: Distribution<<H as Hasher>::Digest>,
+{
+    // CONSTRUCTORS
+    // --------------------------------------------------------------------------------------------
+
+    pub fn new(leaves: Vec<H::Digest>, prng: &mut P) -> Result<Self, MerkleTreeError> {
+        if leaves.len() < 2 {
+            return Err(MerkleTreeError::TooFewLeaves(2, leaves.len()));
+        }
+        if !leaves.len().is_power_of_two() {
+            return Err(MerkleTreeError::NumberOfLeavesNotPowerOfTwo(leaves.len()));
+        }
+
+        let num_leaves = leaves.len();
+        let salts: Vec<H::Digest> = (0..num_leaves).map(|_| prng.sample(Standard)).collect();
+
+        let salted_leaves: Vec<H::Digest> = leaves
+            .iter()
+            .zip(salts.iter())
+            .map(|(leaf, salt)| H::merge(&[*leaf, *salt]))
+            .collect();
+
+        let tree = MerkleTree::new(salted_leaves)?;
+
+        Ok(Self { tree, leaves, salts, _prng: PhantomData })
+    }
+
+    /// Returns the root of the tree.
+    pub fn root(&self) -> &H::Digest {
+        self.tree.root()
+    }
+
+    /// Returns the depth of the tree.
+    pub fn depth(&self) -> usize {
+        self.tree.depth()
+    }
+
+    /// Returns a Merkle proof to a leaf at the specified `index`.
+    pub fn prove(&self, index: usize) -> Result<SaltedMerkleTreeOpening<H>, MerkleTreeError> {
+        let (_, proof) = self.tree.prove(index)?;
+        Ok((self.leaves[index], (self.salts[index], proof)))
+    }
+
+    /// Computes Merkle proofs for the provided indexes, compresses the proofs into a single batch
+    /// and returns the batch proof alongside the leaves at the provided indexes.
+    pub fn prove_batch(
+        &self,
+        indexes: &[usize],
+    ) -> Result<SaltedMerkleTreeMultiOpening<H>, MerkleTreeError> {
+        let (_, proof) = self.tree.prove_batch(indexes)?;
+        let leaves_at_indices = indexes.iter().map(|index| self.leaves[*index]).collect();
+        let salts_at_indices = indexes.iter().map(|index| self.salts[*index]).collect();
+        Ok((leaves_at_indices, (salts_at_indices, proof)))
+    }
+
+    /// Checks whether the `proof` for the given `leaf` at the specified `index` is valid.
+    pub fn verify(
+        root: H::Digest,
+        index: usize,
+        leaf: H::Digest,
+        salt: H::Digest,
+        proof: &[H::Digest],
+    ) -> Result<(), MerkleTreeError> {
+        let salted_leaf = H::merge(&[leaf, salt]);
+        MerkleTree::<H>::verify(root, index, salted_leaf, proof)
+    }
+
+    /// Checks whether the batch proof contains Merkle paths for each of the specified `indexes`.
+ pub fn verify_batch( + root: &H::Digest, + indexes: &[usize], + leaves: &[H::Digest], + salts: &[H::Digest], + proof: &BatchMerkleProof, + ) -> Result<(), MerkleTreeError> { + let salted_leaves: Vec = leaves + .iter() + .zip(salts.iter()) + .map(|(leaf, salt)| H::merge(&[*leaf, *salt])) + .collect(); + + MerkleTree::::verify_batch(root, indexes, &salted_leaves, proof) + } +} + +impl Distribution> for Standard { + fn sample(&self, rng: &mut R) -> ByteDigest<32> { + let mut dest = [0; 32]; + rng.fill_bytes(&mut dest); + ByteDigest::new(dest) + } +} + +impl VectorCommitment for SaltedMerkleTree +where + Standard: Distribution<::Digest>, +{ + type Options = (); + + type Proof = (H::Digest, Vec); + + type MultiProof = (Vec, BatchMerkleProof); + + type Error = MerkleTreeError; + + fn new(items: Vec) -> Result { + // TODO: make random + let seed = P::Seed::default(); + let mut prng = P::from_seed(seed); + SaltedMerkleTree::new(items, &mut prng) + } + + fn with_options(items: Vec, _options: Self::Options) -> Result { + // TODO: make random + let seed = P::Seed::default(); + let mut prng = P::from_seed(seed); + Self::new(items, &mut prng) + } + + fn get_domain_len(&self) -> usize { + 1 << self.depth() + } + + fn get_proof_domain_len(proof: &Self::Proof) -> usize { + proof.1.len() + } + + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize { + 1 << proof.1.depth + } + + fn commitment(&self) -> H::Digest { + *self.root() + } + + fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error> { + self.prove(index) + } + + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error> { + self.prove_batch(indexes) + } + + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify(commitment, index, item, proof.0, &proof.1) + } + + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error> { + SaltedMerkleTree::::verify_batch(&commitment, indexes, items, &proof.0, &proof.1) + } +} diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index f66c638a2..c6e60aa9d 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -5,6 +5,7 @@ use math::fields::f128::BaseElement; use proptest::prelude::*; +use rand_chacha::ChaCha20Rng; use super::*; @@ -254,6 +255,56 @@ fn from_proofs() { assert_eq!(proof1.depth, proof2.depth); } +#[test] +fn verify_salted() { + // depth 4 + let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); + let mut prng = ChaCha20Rng::from_entropy(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaves, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 1, + leaf, + salt, + &proof + ) + .is_ok()); + + let (leaf, (salt, proof)) = tree.prove(2).unwrap(); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 2, + leaf, + salt, + &proof + ) + .is_ok()); + + // depth 5 + let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree: SaltedMerkleTree = SaltedMerkleTree::new(leaf, &mut prng).unwrap(); + let (leaf, (salt, proof)) = tree.prove(1).unwrap(); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 1, + leaf, + salt, + &proof + ) + .is_ok()); + + let (leaf, (salt, proof)) = tree.prove(6).unwrap(); + assert!(SaltedMerkleTree::::verify( + *tree.root(), + 6, + leaf, + salt, + &proof + ) + .is_ok()); +} + proptest! 
{ #[test] fn prove_n_verify(tree in random_blake3_merkle_tree(128), diff --git a/crypto/src/random/default.rs b/crypto/src/random/default.rs index f5a996404..fa002171d 100644 --- a/crypto/src/random/default.rs +++ b/crypto/src/random/default.rs @@ -118,6 +118,22 @@ impl> RandomCoin for DefaultRando self.counter = 0; } + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ) { + // TODO: revisit + if let Some(salt) = salt { + self.seed = H::merge(&[self.seed, data]); + self.seed = H::merge(&[self.seed, salt]); + self.counter = 0; + } else { + self.seed = H::merge(&[self.seed, data]); + self.counter = 0; + } + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- diff --git a/crypto/src/random/mod.rs b/crypto/src/random/mod.rs index 7ee540ee5..10ee5d40c 100644 --- a/crypto/src/random/mod.rs +++ b/crypto/src/random/mod.rs @@ -38,6 +38,14 @@ pub trait RandomCoin: Sync { /// Reseeds the coin with the specified data by setting the new seed to hash(`seed` || `data`). fn reseed(&mut self, data: ::Digest); + /// Similar to `Self::reseed` but takes a salt which is not a `None` when zero-knowledge is enabled. + /// TODO: Should we remove `Self::reseed`? + fn reseed_with_salt( + &mut self, + data: ::Digest, + salt: Option<::Digest>, + ); + /// Computes hash(`seed` || `value`) and returns the number of leading zeros in the resulting /// value if it is interpreted as an integer in big-endian byte order. fn check_leading_zeros(&self, value: u64) -> u32; diff --git a/examples/Cargo.toml b/examples/Cargo.toml index f86e9ad50..6588e7d07 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "examples" -version = "0.10.0" +version = "0.11.0" description = "Examples of using Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" @@ -26,15 +26,18 @@ default = ["std"] std = ["core-utils/std", "hex/std", "rand-utils", "winterfell/std"] [dependencies] +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } blake3 = { version = "1.5", default-features = false } -core-utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +core-utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } hex = { version = "0.4", optional = true } -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils", optional = true } +rand = { version = "0.8" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils", optional = true } structopt = { version = "0.3", default-features = false } tracing = { version = "0.1", default-features = false } tracing-forest = { version = "0.1", features = ["ansi", "smallvec"], optional = true } tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } -winterfell = { version = "0.10", path = "../winterfell", default-features = false } +winterfell = { version = "0.11", path = "../winterfell", default-features = false } +rand_chacha = { version = "0.3", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/examples/benches/fibonacci.rs b/examples/benches/fibonacci.rs index 44094beaf..076f2ee2f 100644 --- a/examples/benches/fibonacci.rs +++ b/examples/benches/fibonacci.rs @@ -18,7 +18,7 @@ fn fibonacci(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(20)); - let options = 
ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 255, false); for &size in SIZES.iter() { let fib = diff --git a/examples/benches/rescue.rs b/examples/benches/rescue.rs index bf6e8cc26..19e3a0815 100644 --- a/examples/benches/rescue.rs +++ b/examples/benches/rescue.rs @@ -18,7 +18,7 @@ fn rescue(c: &mut Criterion) { group.sample_size(10); group.measurement_time(Duration::from_secs(25)); - let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255); + let options = ProofOptions::new(32, 32, 0, FieldExtension::None, 4, 255, false); for &size in SIZES.iter() { let resc = rescue::RescueExample::>::new(size, options.clone()); diff --git a/examples/src/fibonacci/fib2/mod.rs b/examples/src/fibonacci/fib2/mod.rs index ddc6cf77e..ff6e80680 100644 --- a/examples/src/fibonacci/fib2/mod.rs +++ b/examples/src/fibonacci/fib2/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl FibExample { impl Example for FibExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( @@ -108,7 +110,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 99d48f004..91070896b 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
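Note: all of the bench and example updates in this diff thread one new trailing boolean through `ProofOptions::new`. As a sketch of the updated call shape (the inline comments are our reading of the positional parameters, not identifiers from the crate):

```rust
use winterfell::{FieldExtension, ProofOptions};

// Sketch of the updated constructor as used in the benches above; the
// trailing `false` appears to be the new zero-knowledge toggle.
let options = ProofOptions::new(
    32,                   // number of queries
    8,                    // blowup factor
    0,                    // grinding factor
    FieldExtension::None, // field extension
    4,                    // FRI folding factor
    255,                  // maximum remainder polynomial degree
    false,                // zero-knowledge disabled
);
```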
+use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -60,8 +62,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -78,8 +83,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -90,4 +97,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/fibonacci/fib8/mod.rs b/examples/src/fibonacci/fib8/mod.rs index 322079c21..fabc71055 100644 --- a/examples/src/fibonacci/fib8/mod.rs +++ b/examples/src/fibonacci/fib8/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl Fib8Example { impl Example for Fib8Example where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( @@ -108,7 +110,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index 64182978c..5f4e1d8aa 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
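Note: every example's entry point now calls `prove` with a second argument. Given the `type ZkPrng = MockPrng` declarations above, this is presumably an optional seed (or PRNG) for the zero-knowledge randomizer, with `None` preserving the old deterministic behavior. A hedged usage sketch:

```rust
// Sketch: with zero-knowledge disabled, the examples simply pass `None` for
// the new PRNG/seed argument; a zk-enabled caller would supply `Some(...)`.
let proof = prover.prove(trace, None).unwrap();
```

The accompanying `Standard: Distribution<H::Digest>` bounds on the `Example` impls ensure that, when zero-knowledge is on, digest-sized salts can be sampled for the chosen hash function.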
+use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -75,8 +77,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -93,8 +98,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -105,4 +112,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/fibonacci/fib_small/mod.rs b/examples/src/fibonacci/fib_small/mod.rs index 672605ac4..05b7b3e9e 100644 --- a/examples/src/fibonacci/fib_small/mod.rs +++ b/examples/src/fibonacci/fib_small/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f64::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -99,6 +100,7 @@ impl FibExample { impl Example for FibExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!( @@ -119,7 +121,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 553988064..4b4cc6860 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -2,10 +2,12 @@ // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
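Note: the `build_trace_lde` hook picks up two new parameters and forwards them verbatim to `DefaultTraceLde::new`. Reconstructing the pattern repeated across these provers as a sketch (generic bounds are our best guess where the diff elides them):

```rust
fn build_trace_lde<E: FieldElement<BaseField = Self::BaseField>>(
    &self,
    trace_info: &TraceInfo,
    main_trace: &ColMatrix<Self::BaseField>,
    domain: &StarkDomain<Self::BaseField>,
    partition_option: PartitionOptions,
    zk_parameters: Option<ZkParameters>,
    prng: &mut Option<Self::ZkPrng>,
) -> (Self::TraceLde<E>, TracePolyTable<E>) {
    // Forward the zk parameters and PRNG so the default LDE can randomize
    // the trace polynomials when zero-knowledge is enabled.
    DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng)
}
```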
+use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -65,8 +67,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -83,8 +88,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -95,4 +102,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/fibonacci/mulfib2/mod.rs b/examples/src/fibonacci/mulfib2/mod.rs index d7b3e11d8..a999f6ebd 100644 --- a/examples/src/fibonacci/mulfib2/mod.rs +++ b/examples/src/fibonacci/mulfib2/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,6 +88,7 @@ impl MulFib2Example { impl Example for MulFib2Example where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -108,7 +110,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 4c99187bf..5356f4a29 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
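Note: likewise, each example overrides the new `build_constraint_commitment` hook with the same delegation to `DefaultConstraintCommitment::new`, sketched here with the generics filled back in (our reconstruction, not verbatim from the crate):

```rust
fn build_constraint_commitment<E: FieldElement<BaseField = Self::BaseField>>(
    &self,
    composition_poly_trace: CompositionPolyTrace<E>,
    num_constraint_composition_columns: usize,
    domain: &StarkDomain<Self::BaseField>,
    partition_options: PartitionOptions,
    zk_parameters: Option<ZkParameters>,
    prng: &mut Option<Self::ZkPrng>,
) -> (Self::ConstraintCommitment<E>, CompositionPoly<E>) {
    // Delegates to the default commitment, which now also consumes the zk
    // parameters and PRNG to randomize the composition polynomial.
    DefaultConstraintCommitment::new(
        composition_poly_trace,
        num_constraint_composition_columns,
        domain,
        partition_options,
        zk_parameters,
        prng,
    )
}
```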
+use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -56,8 +58,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -74,8 +79,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -86,4 +93,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/fibonacci/mulfib8/mod.rs b/examples/src/fibonacci/mulfib8/mod.rs index 43bd27be0..65341d136 100644 --- a/examples/src/fibonacci/mulfib8/mod.rs +++ b/examples/src/fibonacci/mulfib8/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -88,6 +89,7 @@ impl MulFib8Example { impl Example for MulFib8Example where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -109,7 +111,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index 1fb58bd1a..563e29875 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
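Note: the recurring `Standard: Distribution<H::Digest>` bound is what the salting machinery hangs on: it lets any PRNG draw digest-sized values directly, via the `Distribution` impls added to the digest types earlier in this diff. A minimal illustration (the helper name is ours, not part of the crate):

```rust
use rand::{distributions::Standard, prelude::Distribution, Rng};
use winterfell::crypto::Hasher;

// Hypothetical helper: draws one digest-sized salt from any PRNG, relying on
// the `Distribution<H::Digest> for Standard` impls introduced in this diff.
fn draw_salt<H, R>(rng: &mut R) -> H::Digest
where
    H: Hasher,
    R: Rng,
    Standard: Distribution<H::Digest>,
{
    rng.sample(Standard)
}
```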
+use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -68,8 +70,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> BaseElement { let last_step = trace.length() - 1; @@ -86,8 +91,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -98,4 +105,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/fibonacci/utils.rs b/examples/src/fibonacci/utils.rs index e2f29f7c2..acb52d397 100644 --- a/examples/src/fibonacci/utils.rs +++ b/examples/src/fibonacci/utils.rs @@ -38,5 +38,5 @@ pub fn build_proof_options(use_extension_field: bool) -> winterfell::ProofOption } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 7) + ProofOptions::new(28, 8, 0, extension, 4, 7, false) } diff --git a/examples/src/lamport/aggregate/mod.rs b/examples/src/lamport/aggregate/mod.rs index 6dd2a8d02..a2aa631c6 100644 --- a/examples/src/lamport/aggregate/mod.rs +++ b/examples/src/lamport/aggregate/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -115,6 +116,7 @@ impl LamportAggregateExample { impl Example for LamportAggregateExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace @@ -134,7 +136,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs 
index 3927a20e6..d387d3e7f 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -3,12 +3,14 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -105,8 +107,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> PublicInputs { self.pub_inputs.clone() @@ -122,8 +127,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -134,6 +141,25 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } // TRACE INITIALIZATION diff --git a/examples/src/lamport/threshold/mod.rs b/examples/src/lamport/threshold/mod.rs index c64fa7755..0574a88b3 100644 --- a/examples/src/lamport/threshold/mod.rs +++ b/examples/src/lamport/threshold/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -113,6 +114,7 @@ impl LamportThresholdExample { impl Example for LamportThresholdExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace @@ -140,7 +142,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 87bd09bf6..6092d7eb2 100644 --- a/examples/src/lamport/threshold/prover.rs +++ 
b/examples/src/lamport/threshold/prover.rs @@ -5,12 +5,14 @@ use std::collections::HashMap; +use air::ZkParameters; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -147,8 +149,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> PublicInputs { self.pub_inputs.clone() @@ -164,8 +169,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -176,6 +183,25 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } // TRACE INITIALIZATION diff --git a/examples/src/lib.rs b/examples/src/lib.rs index 33f733d7c..517871ecd 100644 --- a/examples/src/lib.rs +++ b/examples/src/lib.rs @@ -99,6 +99,7 @@ impl ExampleOptions { field_extension, self.folding_factor, 31, + false, ), hash_fn, ) diff --git a/examples/src/merkle/mod.rs b/examples/src/merkle/mod.rs index 6b8771218..144b27130 100644 --- a/examples/src/merkle/mod.rs +++ b/examples/src/merkle/mod.rs @@ -6,10 +6,11 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use rand_utils::{rand_value, rand_vector}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, Digest, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, Digest, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -110,6 +111,7 @@ impl MerkleExample { impl Example for MerkleExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace @@ -130,7 +132,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index b1164ff83..4b2c9147f 100644 --- 
a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -109,8 +111,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -129,8 +134,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -141,4 +148,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/merkle/tests.rs b/examples/src/merkle/tests.rs index 4851d596c..c75f5120d 100644 --- a/examples/src/merkle/tests.rs +++ b/examples/src/merkle/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/rescue/mod.rs b/examples/src/rescue/mod.rs index 5534625d5..9aac35e57 100644 --- a/examples/src/rescue/mod.rs +++ b/examples/src/rescue/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -95,6 +96,7 @@ impl RescueExample { impl Example for RescueExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace @@ -113,7 +115,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff 
--git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index e8ca93757..186962dcf 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -75,8 +77,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -96,8 +101,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -108,4 +115,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/rescue/tests.rs b/examples/src/rescue/tests.rs index 7daf66694..b3dd81a68 100644 --- a/examples/src/rescue/tests.rs +++ b/examples/src/rescue/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/rescue_raps/mod.rs b/examples/src/rescue_raps/mod.rs index 533298097..aa9801b32 100644 --- a/examples/src/rescue_raps/mod.rs +++ b/examples/src/rescue_raps/mod.rs @@ -6,10 +6,11 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use rand_utils::rand_array; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, ExtensionOf, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -108,6 +109,7 @@ impl RescueRapsExample { impl Example for RescueRapsExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { // generate the execution trace @@ -126,7 
+128,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index b8b21b1f3..a407898a0 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -3,11 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; use core_utils::uninit_vector; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, }; use super::{ @@ -105,8 +106,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { let last_step = trace.length() - 1; @@ -128,8 +132,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -141,6 +147,24 @@ where DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } fn build_aux_trace( &self, trace: &Self::Trace, diff --git a/examples/src/rescue_raps/tests.rs b/examples/src/rescue_raps/tests.rs index 99c8d24dc..155b4ee8f 100644 --- a/examples/src/rescue_raps/tests.rs +++ b/examples/src/rescue_raps/tests.rs @@ -33,5 +33,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(28, 8, 0, extension, 4, 31) + ProofOptions::new(28, 8, 0, extension, 4, 31, false) } diff --git a/examples/src/vdf/exempt/mod.rs b/examples/src/vdf/exempt/mod.rs index cc1dd53e9..36045e53d 100644 --- a/examples/src/vdf/exempt/mod.rs +++ b/examples/src/vdf/exempt/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,6 +88,7 @@ impl VdfExample { 
impl Example for VdfExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -104,7 +106,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index 16a7b8169..d57b72e81 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -56,8 +58,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> VdfInputs { // the result is read from the second to last step because the last last step contains @@ -79,8 +84,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -91,4 +98,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/vdf/exempt/tests.rs b/examples/src/vdf/exempt/tests.rs index 212cda767..b7eec7f6a 100644 --- a/examples/src/vdf/exempt/tests.rs +++ b/examples/src/vdf/exempt/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(85, 4, 0, extension, 4, 31, false) } diff --git a/examples/src/vdf/regular/mod.rs b/examples/src/vdf/regular/mod.rs index 3cdcaba3d..780a6bad1 100644 --- a/examples/src/vdf/regular/mod.rs +++ b/examples/src/vdf/regular/mod.rs @@ -6,9 +6,10 @@ use core::marker::PhantomData; use std::time::Instant; +use rand::{distributions::Standard, prelude::Distribution}; use tracing::{field, info_span}; use winterfell::{ - 
crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, + crypto::{DefaultRandomCoin, ElementHasher, Hasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -84,6 +85,7 @@ impl VdfExample { impl Example for VdfExample where H: ElementHasher + Sync, + Standard: Distribution<::Digest>, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -101,7 +103,7 @@ where }); // generate the proof - prover.prove(trace).unwrap() + prover.prove(trace, None).unwrap() } fn verify(&self, proof: Proof) -> Result<(), VerifierError> { diff --git a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 20bdf7874..68d237742 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -3,10 +3,12 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use air::ZkParameters; use winterfell::{ - crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, - DefaultConstraintEvaluator, DefaultTraceLde, PartitionOptions, StarkDomain, Trace, TraceInfo, - TracePolyTable, TraceTable, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, CompositionPoly, CompositionPolyTrace, + ConstraintCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, MockPrng, PartitionOptions, StarkDomain, Trace, TraceInfo, TracePolyTable, + TraceTable, }; use super::{ @@ -53,8 +55,11 @@ where type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, trace: &Self::Trace) -> VdfInputs { let last_step = trace.length() - 1; @@ -74,8 +79,10 @@ where main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) } fn new_evaluator<'a, E: FieldElement>( @@ -86,4 +93,23 @@ where ) -> Self::ConstraintEvaluator<'a, E> { DefaultConstraintEvaluator::new(air, aux_rand_elements, composition_coefficients) } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) + } } diff --git a/examples/src/vdf/regular/tests.rs b/examples/src/vdf/regular/tests.rs index a3100a444..f4b409b85 100644 --- a/examples/src/vdf/regular/tests.rs +++ b/examples/src/vdf/regular/tests.rs @@ -31,5 +31,5 @@ fn build_options(use_extension_field: bool) -> ProofOptions { } else { FieldExtension::None }; - ProofOptions::new(85, 2, 0, extension, 4, 31) + ProofOptions::new(2, 4, 0, extension, 2, 255, false) } diff --git a/fri/Cargo.toml b/fri/Cargo.toml index 2e3d1b20b..46370b288 100644 --- a/fri/Cargo.toml +++ b/fri/Cargo.toml @@ -1,12 +1,12 @@ [package] name = 
"winter-fri" -version = "0.10.0" +version = "0.11.0" description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-fri/0.10.0" +documentation = "https://docs.rs/winter-fri/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "polynomial", "commitments"] edition = "2021" @@ -29,10 +29,12 @@ default = ["std"] std = ["crypto/std", "math/std", "utils/std"] [dependencies] -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } +rand_chacha = { version = "0.3", default-features = false } +rand = { version = "0.8" } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index bfc096fc3..2f8e72b57 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -8,6 +8,7 @@ use std::time::Duration; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}; use math::{fft, fields::f128::BaseElement, FieldElement}; +use rand_chacha::ChaCha20Rng; use rand_utils::rand_vector; use winter_fri::{DefaultProverChannel, FriOptions, FriProver}; @@ -33,11 +34,14 @@ pub fn build_layers(c: &mut Criterion) { b.iter_batched( || e.clone(), |evaluations| { - let mut channel = DefaultProverChannel::< - BaseElement, - Blake3_256, - DefaultRandomCoin>, - >::new(domain_size, 32); + let mut channel = + DefaultProverChannel::< + BaseElement, + Blake3_256, + ChaCha20Rng, + DefaultRandomCoin>, + >::new(domain_size, 32, false, None); + prover.build_layers(&mut channel, evaluations); prover.reset(); }, diff --git a/fri/src/errors.rs b/fri/src/errors.rs index 7961b8831..2947b17a8 100644 --- a/fri/src/errors.rs +++ b/fri/src/errors.rs @@ -73,3 +73,5 @@ impl fmt::Display for VerifierError { } } } + +impl core::error::Error for VerifierError {} diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 65dd2af92..8d4495213 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -34,6 +34,7 @@ pub struct FriProof { layers: Vec, remainder: Vec, num_partitions: u8, // stored as power of 2 + salts: Vec, } impl FriProof { @@ -49,6 +50,7 @@ impl FriProof { layers: Vec, remainder: Vec, num_partitions: usize, + salts: Vec, ) -> Self { assert!(!remainder.is_empty(), "number of remainder elements must be greater than zero"); assert!( @@ -69,6 +71,7 @@ impl FriProof { layers, remainder: remainder_bytes, num_partitions: num_partitions.trailing_zeros() as u8, + salts, } } @@ -78,6 +81,7 @@ impl FriProof { layers: Vec::new(), remainder: Vec::new(), num_partitions: 0, + salts: vec![], } } @@ -190,6 +194,16 @@ impl FriProof { } Ok(remainder) } + + /// Returns a vector of values 
used in order to salt the transcript when zero-knowledge is + /// enabled. + pub fn parse_salts(&self) -> Result>, DeserializationError> + where + E: FieldElement, + H: ElementHasher, + { + Vec::read_from_bytes(&self.salts) + } } // SERIALIZATION / DESERIALIZATION @@ -210,6 +224,10 @@ impl Serializable for FriProof { // write number of partitions target.write_u8(self.num_partitions); + + // write salts + target.write_u32(self.salts.len() as u32); + target.write_bytes(&self.salts); } } @@ -230,7 +248,11 @@ impl Deserializable for FriProof { // read number of partitions let num_partitions = source.read_u8()?; - Ok(FriProof { layers, remainder, num_partitions }) + // read salts + let salts_len = source.read_u32()? as usize; + let salts = source.read_vec(salts_len)?; + + Ok(FriProof { layers, remainder, num_partitions, salts }) } } diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 7231e757c..7069e2181 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -8,6 +8,7 @@ use core::marker::PhantomData; use crypto::{ElementHasher, Hasher, RandomCoin}; use math::FieldElement; +use rand::{distributions::Standard, prelude::Distribution, Rng, RngCore, SeedableRng}; // PROVER CHANNEL TRAIT // ================================================================================================ @@ -34,7 +35,10 @@ pub trait ProverChannel { /// the hash of each row to get one entry of the vector being committed to. Thus, the number /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer /// construction. - fn commit_fri_layer(&mut self, layer_root: ::Digest); + fn commit_fri_layer( + &mut self, + layer_root: ::Digest, + ) -> Option<::Digest>; /// Returns a random α drawn uniformly at random from the entire field. /// @@ -53,23 +57,28 @@ pub trait ProverChannel { /// /// Though this implementation is intended primarily for testing purposes, it can be used in /// production use cases as well. -pub struct DefaultProverChannel +pub struct DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, { public_coin: R, commitments: Vec, domain_size: usize, num_queries: usize, + is_zk: bool, + salts: Vec>, + prng: Option
<P>
, _field_element: PhantomData, } -impl DefaultProverChannel +impl DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore + SeedableRng, R: RandomCoin, { /// Returns a new prover channel instantiated from the specified parameters. @@ -78,18 +87,32 @@ where /// Panics if: /// * `domain_size` is smaller than 8 or is not a power of two. /// * `num_queries` is zero. - pub fn new(domain_size: usize, num_queries: usize) -> Self { + pub fn new( + domain_size: usize, + num_queries: usize, + is_zk: bool, + seed: Option<
<P as SeedableRng>
::Seed>, + ) -> Self { assert!(domain_size >= 8, "domain size must be at least 8, but was {domain_size}"); assert!( domain_size.is_power_of_two(), "domain size must be a power of two, but was {domain_size}" ); assert!(num_queries > 0, "number of queries must be greater than zero"); + + let prng = if is_zk { + Some(P::from_seed(seed.expect("must provide the seed when zk is enabled"))) + } else { + None + }; DefaultProverChannel { public_coin: RandomCoin::new(&[]), commitments: Vec::new(), domain_size, num_queries, + is_zk, + salts: vec![], + prng, _field_element: PhantomData, } } @@ -116,17 +139,36 @@ where } } -impl ProverChannel for DefaultProverChannel +impl ProverChannel for DefaultProverChannel where E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, + Standard: Distribution<::Digest>, { type Hasher = H; - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer( + &mut self, + layer_root: H::Digest, + ) -> Option<::Digest> { self.commitments.push(layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.is_zk { + let digest = self + .prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .sample(Standard); + Some(digest) + } else { + None + }; + self.salts.push(salt); + self.public_coin.reseed_with_salt(layer_root, salt); + salt } fn draw_fri_alpha(&mut self) -> E { diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 17092ad34..cf32f0efd 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -12,6 +12,7 @@ use math::{fft, FieldElement}; use utils::iterators::*; use utils::{ flatten_vector_elements, group_slice_elements, iter_mut, transpose_slice, uninit_vector, + Serializable, }; use crate::{ @@ -102,6 +103,7 @@ where options: FriOptions, layers: Vec>, remainder_poly: FriRemainder, + salts: Vec>, _channel: PhantomData, } @@ -131,6 +133,7 @@ where options, layers: Vec::new(), remainder_poly: FriRemainder(vec![]), + salts: vec![], _channel: PhantomData, } } @@ -208,7 +211,8 @@ where let evaluation_vector_commitment = build_layer_commitment::<_, _, V, N>(&transposed_evaluations) .expect("failed to construct FRI layer commitment"); - channel.commit_fri_layer(evaluation_vector_commitment.commitment()); + let salt = channel.commit_fri_layer(evaluation_vector_commitment.commitment()); + self.salts.push(salt); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting // projection to reduce the degree of evaluations by N @@ -228,7 +232,8 @@ where let remainder_poly_size = evaluations.len() / self.options.blowup_factor(); let remainder_poly = evaluations[..remainder_poly_size].to_vec(); let commitment = ::hash_elements(&remainder_poly); - channel.commit_fri_layer(commitment); + let salt = channel.commit_fri_layer(commitment); + self.salts.push(salt); self.remainder_poly = FriRemainder(remainder_poly); } @@ -278,7 +283,8 @@ where // clear layers so that another proof can be generated self.reset(); - FriProof::new(layers, remainder, 1) + let salts = self.salts.to_bytes(); + FriProof::new(layers, remainder, 1, salts) } } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index e765092c5..82df57b9c 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -7,6 +7,7 @@ use alloc::vec::Vec; use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; +use rand_chacha::ChaCha20Rng; use utils::{Deserializable, 
Serializable, SliceReader}; use super::{DefaultProverChannel, FriProver}; @@ -44,8 +45,8 @@ fn fri_folding_4() { pub fn build_prover_channel( trace_length: usize, options: &FriOptions, -) -> DefaultProverChannel> { - DefaultProverChannel::new(trace_length * options.blowup_factor(), 32) +) -> DefaultProverChannel> { + DefaultProverChannel::new(trace_length * options.blowup_factor(), 32, false, None) } pub fn build_evaluations(trace_length: usize, lde_blowup: usize) -> Vec { diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index 6f8709858..91f7ce142 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -70,6 +70,9 @@ pub trait VerifierChannel { /// Reads and removes the remainder polynomial from the channel. fn take_fri_remainder(&mut self) -> Vec; + /// Reads and removes the salt value needed for Fiat-Shamir at the current round. + fn take_salt(&mut self) -> Option<::Digest>; + // PROVIDED METHODS // -------------------------------------------------------------------------------------------- @@ -135,6 +138,7 @@ pub struct DefaultVerifierChannel< layer_queries: Vec>, remainder: Vec, num_partitions: usize, + salts: Vec>, _h: PhantomData, } @@ -156,6 +160,7 @@ where ) -> Result { let num_partitions = proof.num_partitions(); + let salts = proof.parse_salts::()?; let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = proof.parse_layers::(domain_size, folding_factor)?; @@ -166,6 +171,7 @@ where layer_queries, remainder, num_partitions, + salts, _h: PhantomData, }) } @@ -199,4 +205,8 @@ where fn take_fri_remainder(&mut self) -> Vec { self.remainder.clone() } + + fn take_salt(&mut self) -> Option { + self.salts.remove(0) + } } diff --git a/fri/src/verifier/mod.rs b/fri/src/verifier/mod.rs index ff0582b2c..da7f889fa 100644 --- a/fri/src/verifier/mod.rs +++ b/fri/src/verifier/mod.rs @@ -121,7 +121,8 @@ where let mut layer_alphas = Vec::with_capacity(layer_commitments.len()); let mut max_degree_plus_1 = max_poly_degree + 1; for (depth, commitment) in layer_commitments.iter().enumerate() { - public_coin.reseed(*commitment); + let salt = channel.take_salt(); + public_coin.reseed_with_salt(*commitment, salt); let alpha = public_coin.draw().map_err(VerifierError::RandomCoinError)?; layer_alphas.push(alpha); diff --git a/math/Cargo.toml b/math/Cargo.toml index 061c2d52f..987dc6da4 100644 --- a/math/Cargo.toml +++ b/math/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-math" -version = "0.10.0" +version = "0.11.0" description = "Math library for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-math/0.10.0" +documentation = "https://docs.rs/winter-math/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "finite-fields", "polynomials", "fft"] edition = "2021" @@ -38,13 +38,13 @@ std = ["utils/std"] [dependencies] serde = { version = "1.0", features = [ "derive" ], optional = true, default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" num-bigint = "0.4" proptest = "1.4" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = 
"winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 85b5dbcd7..0961da8ec 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-prover" -version = "0.10.0" +version = "0.11.0" description = "Winterfell STARK prover" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-prover/0.10.0" +documentation = "https://docs.rs/winter-prover/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover"] edition = "2021" @@ -30,17 +30,20 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.10", path = '../fri', package = "winter-fri", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -maybe_async = { path = "../utils/maybe_async" , package = "winter-maybe-async" } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.11", path = '../fri', package = "winter-fri", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +maybe_async = { version = "0.11", path = "../utils/maybe_async" , package = "winter-maybe-async" } +rand_chacha = { version = "0.3", default-features = false } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } +rand = { version = "0.8" } tracing = { version = "0.1", default-features = false, features = ["attributes"]} -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } [dev-dependencies] criterion = "0.5" -rand-utils = { version = "0.10", path = "../utils/rand", package = "winter-rand-utils" } +rand-utils = { version = "0.11", path = "../utils/rand", package = "winter-rand-utils" } # Allow math in docs [package.metadata.docs.rs] diff --git a/prover/README.md b/prover/README.md index 9c77a9e59..b95a73c0e 100644 --- a/prover/README.md +++ b/prover/README.md @@ -21,6 +21,7 @@ To define a prover for a computation, you'll need implement the `Prover` trait. * `get_pub_inputs()`, which describes how a set of public inputs can be extracted from a given instance of an execution trace. These inputs will need to be shared with the verifier in order for them to verify the proof. * `new_trace_lde()`, which constructs a new instance of trace low-degree extension. Unless your prover needs to implement specialized optimizations for performing low-degree extensions, this method can just return a default trace low-degree extension provided by Winterfell. * `new_evaluator()`, which constructs a new instance of the AIR constraint evaluator. Unless your prover needs to implement specialized optimizations for evaluating constraints, this method can just return a default constraint evaluator provided by Winterfell. 
+* `build_constraint_commitment()`, which constructs a new instance of constraint commitment. Unless your prover needs to implement specialized optimizations for committing to constraints, this method can just return a default constraint commitment provided by Winterfell.
* `options()`, which defines STARK protocol parameters to be used during proof generation. These parameters include number of queries, blowup factor, grinding factor, hash function to be used during proof generation etc. Values of these parameters directly inform such metrics as proof generation time, proof size, and proof security level. See [air crate](../air) for more info.

A prover exposes a `prove()` method which can be used to generate a STARK proof using a given execution trace as a witness.
diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs
index d6ab6a5bc..b64052976 100644
--- a/prover/benches/lagrange_kernel.rs
+++ b/prover/benches/lagrange_kernel.rs
@@ -8,14 +8,15 @@ use std::time::Duration;
use air::{
    Air, AirContext, Assertion, AuxRandElements, ConstraintCompositionCoefficients,
    EvaluationFrame, FieldExtension, GkrRandElements, LagrangeKernelRandElements, PartitionOptions,
-    ProofOptions, TraceInfo, TransitionConstraintDegree,
+    ProofOptions, TraceInfo, TransitionConstraintDegree, ZkParameters,
};
use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion};
use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin};
use math::{fields::f64::BaseElement, ExtensionOf, FieldElement};
use winter_prover::{
-    matrix::ColMatrix, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof,
-    StarkDomain, Trace, TracePolyTable,
+    matrix::ColMatrix, CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment,
+    DefaultConstraintEvaluator, DefaultTraceLde, MockPrng, Prover, ProverGkrProof, StarkDomain,
+    Trace, TracePolyTable,
};

const TRACE_LENS: [usize; 2] = [2_usize.pow(16), 2_usize.pow(20)];
@@ -32,7 +33,7 @@ fn prove_with_lagrange_kernel(c: &mut Criterion) {
        let prover = LagrangeProver::new(AUX_TRACE_WIDTH);
        b.iter_batched(
            || trace.clone(),
-            |trace| prover.prove(trace).unwrap(),
+            |trace| prover.prove(trace, None).unwrap(),
            BatchSize::SmallInput,
        )
    });
@@ -173,7 +174,7 @@ impl LagrangeProver {
    fn new(aux_trace_width: usize) -> Self {
        Self {
            aux_trace_width,
-            options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1),
+            options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false),
        }
    }
}
@@ -187,8 +188,11 @@ impl Prover for LagrangeProver {
    type RandomCoin = DefaultRandomCoin;
    type TraceLde> = DefaultTraceLde;
+    type ConstraintCommitment> =
+        DefaultConstraintCommitment;
    type ConstraintEvaluator<'a, E: FieldElement> =
        DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>;
+    type ZkPrng = MockPrng;

    fn get_pub_inputs(&self, _trace: &Self::Trace) -> <::Air as Air>::PublicInputs {
    }
@@ -203,11 +207,32 @@
    fn new_trace_lde(
        main_trace: &ColMatrix,
        domain: &StarkDomain,
        partition_option: PartitionOptions,
+        zk_parameters: Option,
+        _prng: &mut Option,
    ) -> (Self::TraceLde, TracePolyTable)
    where
        E: math::FieldElement,
    {
-        DefaultTraceLde::new(trace_info, main_trace, domain, partition_option)
+        DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, _prng)
    }
+
+    fn build_constraint_commitment>(
+        &self,
+        composition_poly_trace: CompositionPolyTrace,
+        num_constraint_composition_columns: usize,
+        domain: &StarkDomain,
+        partition_options: PartitionOptions,
+        zk_parameters: 
Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) } fn new_evaluator<'a, E>( diff --git a/prover/src/channel.rs b/prover/src/channel.rs index db82f5095..facdc2d9d 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,20 +10,23 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{ElementHasher, RandomCoin, VectorCommitment}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; +use rand::{distributions::Standard, prelude::Distribution, Rng, RngCore, SeedableRng}; #[cfg(feature = "concurrent")] use utils::iterators::*; +use utils::Serializable; // TYPES AND INTERFACES // ================================================================================================ -pub struct ProverChannel<'a, A, E, H, R, V> +pub struct ProverChannel<'a, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, V: VectorCommitment, { @@ -33,6 +36,8 @@ where commitments: Commitments, ood_frame: OodFrame, pow_nonce: u64, + salts: Vec>, + prng: Option
<P>
, _field_element: PhantomData, _vector_commitment: PhantomData, } @@ -40,19 +45,30 @@ where // PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R, V> ProverChannel<'a, A, E, H, R, V> +impl<'a, A, E, H, P, R, V> ProverChannel<'a, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore + SeedableRng, R: RandomCoin, + Standard: Distribution<::Digest>, V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates a new prover channel for the specified `air` and public inputs. - pub fn new(air: &'a A, mut pub_inputs_elements: Vec) -> Self { - let context = Context::new::(air.trace_info().clone(), air.options().clone()); + pub fn new( + air: &'a A, + mut pub_inputs_elements: Vec, + zk_blowup: usize, + seed: Option<
<P as SeedableRng>
::Seed>,
+    ) -> Self {
+        let context = Context::new::(
+            air.trace_info().clone(),
+            air.options().clone(),
+            zk_blowup,
+        );

        // build a seed for the public coin; the initial seed is a hash of the proof context and
        // the public inputs, but as the protocol progresses, the coin will be reseeded with the
@@ -60,6 +76,12 @@ where
        let mut coin_seed_elements = context.to_elements();
        coin_seed_elements.append(&mut pub_inputs_elements);

+        let prng = if air.options().is_zk() {
+            Some(P::from_seed(seed.expect("must provide the seed when zk is enabled")))
+        } else {
+            None
+        };
+
        ProverChannel {
            air,
            public_coin: RandomCoin::new(&coin_seed_elements),
@@ -67,6 +89,8 @@ where
            commitments: Commitments::default(),
            ood_frame: OodFrame::default(),
            pow_nonce: 0,
+            salts: vec![],
+            prng,
            _field_element: PhantomData,
            _vector_commitment: PhantomData,
        }
@@ -78,27 +102,79 @@ where
    /// Commits the prover to the extended execution trace.
    pub fn commit_trace(&mut self, trace_root: H::Digest) {
        self.commitments.add::(&trace_root);
-        self.public_coin.reseed(trace_root);
+
+        // sample a salt for Fiat-Shamir if zero-knowledge is enabled
+        let salt = if self.air.is_zk() {
+            let digest = self
+                .prng
+                .as_mut()
+                .expect("should have a PRNG when zk is enabled")
+                .sample(Standard);
+            Some(digest)
+        } else {
+            None
+        };
+        self.salts.push(salt);
+        self.public_coin.reseed_with_salt(trace_root, salt);
    }

    /// Commits the prover to the evaluations of the constraint composition polynomial.
    pub fn commit_constraints(&mut self, constraint_root: H::Digest) {
        self.commitments.add::(&constraint_root);
-        self.public_coin.reseed(constraint_root);
+
+        // sample a salt for Fiat-Shamir if zero-knowledge is enabled
+        let salt = if self.air.is_zk() {
+            let digest = self
+                .prng
+                .as_mut()
+                .expect("should have a PRNG when zk is enabled")
+                .sample(Standard);
+            Some(digest)
+        } else {
+            None
+        };
+        self.salts.push(salt);
+        self.public_coin.reseed_with_salt(constraint_root, salt);
    }

    /// Saves the evaluations of trace polynomials over the out-of-domain evaluation frame. This
    /// also reseeds the public coin with the hashes of the evaluation frame states.
    pub fn send_ood_trace_states(&mut self, trace_ood_frame: &TraceOodFrame) {
        let trace_states_hash = self.ood_frame.set_trace_states::(trace_ood_frame);
-        self.public_coin.reseed(trace_states_hash);
+
+        // sample a salt for Fiat-Shamir if zero-knowledge is enabled
+        let salt = if self.air.is_zk() {
+            let digest = self
+                .prng
+                .as_mut()
+                .expect("should have a PRNG when zk is enabled")
+                .sample(Standard);
+            Some(digest)
+        } else {
+            None
+        };
+        self.salts.push(salt);
+        self.public_coin.reseed_with_salt(trace_states_hash, salt);
    }

    /// Saves the evaluations of constraint composition polynomial columns at the out-of-domain
    /// point. This also reseeds the public coin with the hash of the evaluations.
    pub fn send_ood_constraint_evaluations(&mut self, evaluations: &[E]) {
        self.ood_frame.set_constraint_evaluations(evaluations);
-        self.public_coin.reseed(H::hash_elements(evaluations));
+
+        // sample a salt for Fiat-Shamir if zero-knowledge is enabled
+        let salt = if self.air.is_zk() {
+            let digest = self
+                .prng
+                .as_mut()
+                .expect("should have a PRNG when zk is enabled")
+                .sample(Standard);
+            Some(digest)
+        } else {
+            None
+        };
+        self.salts.push(salt);
+        self.public_coin.reseed_with_salt(H::hash_elements(evaluations), salt);
    }

    // PUBLIC COIN METHODS
@@ -139,7 +215,7 @@ where
    /// are removed from the returned vector.
pub fn get_query_positions(&mut self) -> Vec { let num_queries = self.context.options().num_queries(); - let lde_domain_size = self.context.lde_domain_size(); + let lde_domain_size = self.context.lde_domain_size::(); let mut positions = self .public_coin .draw_integers(num_queries, lde_domain_size, self.pow_nonce) @@ -196,6 +272,7 @@ where pow_nonce: self.pow_nonce, num_unique_queries: num_query_positions as u8, gkr_proof, + salts: self.salts.to_bytes(), } } } @@ -203,20 +280,38 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl fri::ProverChannel for ProverChannel<'_, A, E, H, R, V> +impl fri::ProverChannel for ProverChannel<'_, A, E, H, P, R, V> where A: Air, E: FieldElement, H: ElementHasher, + P: RngCore, R: RandomCoin, + Standard: Distribution<::Digest>, V: VectorCommitment, { type Hasher = H; /// Commits the prover to a FRI layer. - fn commit_fri_layer(&mut self, layer_root: H::Digest) { + fn commit_fri_layer(&mut self, layer_root: H::Digest) -> Option<::Digest> + where + P: RngCore, + { self.commitments.add::(&layer_root); - self.public_coin.reseed(layer_root); + + // sample a salt for Fiat-Shamir if zero-knowledge is enabled + let salt = if self.air.is_zk() { + let digest = self + .prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .sample(Standard); + Some(digest) + } else { + None + }; + self.public_coin.reseed_with_salt(layer_root, salt); + salt } /// Returns a new alpha drawn from the public coin. diff --git a/prover/src/composer/mod.rs b/prover/src/composer/mod.rs index 5d463d331..1d394cc63 100644 --- a/prover/src/composer/mod.rs +++ b/prover/src/composer/mod.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; -use air::{proof::TraceOodFrame, DeepCompositionCoefficients}; +use air::{proof::TraceOodFrame, Air, DeepCompositionCoefficients}; use math::{ add_in_place, fft, mul_acc, polynom::{self, syn_div_roots_in_place}, @@ -22,6 +22,8 @@ pub struct DeepCompositionPoly { coefficients: Vec, cc: DeepCompositionCoefficients, z: E, + g: E, + is_zk: bool, } impl DeepCompositionPoly { @@ -30,17 +32,27 @@ impl DeepCompositionPoly { /// Returns a new DEEP composition polynomial. Initially, this polynomial will be empty, and /// the intent is to populate the coefficients via add_trace_polys() and add_constraint_polys() /// methods. - pub fn new(z: E, cc: DeepCompositionCoefficients) -> Self { - DeepCompositionPoly { coefficients: vec![], cc, z } + pub fn new>( + air: &A, + z: E, + cc: DeepCompositionCoefficients, + ) -> Self { + DeepCompositionPoly { + coefficients: vec![], + cc, + z, + g: E::from(air.trace_domain_generator()), + is_zk: air.is_zk(), + } } // ACCESSORS // -------------------------------------------------------------------------------------------- - /// Returns the size of the DEEP composition polynomial. - pub fn poly_size(&self) -> usize { - self.coefficients.len() - } + ///// Returns the size of the DEEP composition polynomial. + //pub fn poly_size(&self) -> usize { + //self.coefficients.len() + //} /// Returns the degree of the composition polynomial. 
pub fn degree(&self) -> usize { @@ -82,8 +94,7 @@ impl DeepCompositionPoly { // compute a second out-of-domain point offset from z by exactly trace generator; this // point defines the "next" computation state in relation to point z let trace_length = trace_polys.poly_size(); - let g = E::from(E::BaseField::get_root_of_unity(trace_length.ilog2())); - let next_z = self.z * g; + let next_z = self.z * self.g; // combine trace polynomials into 2 composition polynomials T'(x) and T''(x) let mut t1_composition = vec![E::ZERO; trace_length]; @@ -185,7 +196,6 @@ impl DeepCompositionPoly { // set the coefficients of the DEEP composition polynomial self.coefficients = trace_poly; - assert_eq!(self.poly_size() - 2, self.degree()); } // CONSTRAINT POLYNOMIAL COMPOSITION @@ -194,7 +204,7 @@ impl DeepCompositionPoly { /// into the DEEP composition polynomial. This method is intended to be called only after the /// add_trace_polys() method has been executed. The composition is done as follows: /// - /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z), where H_i(x) is the + /// - For each H_i(x), compute H'_i(x) = (H_i(x) - H(z)) / (x - z^m), where H_i(x) is the /// ith composition polynomial column. /// - Then, combine all H_i(x) polynomials together by computing H(x) = sum(H_i(x) * cc_i) for /// all i, where cc_i is the coefficient for the random linear combination drawn from the @@ -208,22 +218,32 @@ impl DeepCompositionPoly { ) { assert!(!self.coefficients.is_empty()); - let z = self.z; - let mut column_polys = composition_poly.into_columns(); + let num_cols = ood_evaluations.len(); + let z = self.z; // Divide out the OOD point z from column polynomials - iter_mut!(column_polys).zip(ood_evaluations).for_each(|(poly, value_at_z)| { - // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) - poly[0] -= value_at_z; - polynom::syn_div_in_place(poly, 1, z); - }); + iter_mut!(column_polys).take(num_cols).zip(ood_evaluations).for_each( + |(poly, value_at_z)| { + // compute H'_i(x) = (H_i(x) - H_i(z)) / (x - z) + poly[0] -= value_at_z; + polynom::syn_div_in_place(poly, 1, z); + }, + ); // add H'_i(x) * cc_i for all i into the DEEP composition polynomial - for (i, poly) in column_polys.into_iter().enumerate() { - mul_acc::(&mut self.coefficients, &poly, self.cc.constraints[i]); + for (i, poly) in column_polys.iter().enumerate().take(num_cols) { + mul_acc::(&mut self.coefficients, poly, self.cc.constraints[i]); + } + + // add the randomizer codeword for FRI + if self.is_zk { + iter_mut!(self.coefficients) + .zip(&column_polys[column_polys.len() - 1]) + .for_each(|(a, b)| *a += *b); } - assert_eq!(self.poly_size() - 2, self.degree()); + + assert_eq!(self.coefficients.len() - 2, self.degree()); } // LOW-DEGREE EXTENSION diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs deleted file mode 100644 index ac71fdc94..000000000 --- a/prover/src/constraints/commitment.rs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree. - -use alloc::vec::Vec; -use core::marker::PhantomData; - -use air::proof::Queries; -use crypto::{ElementHasher, VectorCommitment}; -use math::FieldElement; - -use super::RowMatrix; - -// CONSTRAINT COMMITMENT -// ================================================================================================ - -/// Constraint evaluation commitment. 
-/// -/// The commitment consists of two components: -/// * Evaluations of composition polynomial columns over the LDE domain. -/// * Vector commitment where each vector element corresponds to the digest of a row in -/// the composition polynomial evaluation matrix. -pub struct ConstraintCommitment< - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -> { - evaluations: RowMatrix, - vector_commitment: V, - _h: PhantomData, -} - -impl ConstraintCommitment -where - E: FieldElement, - H: ElementHasher, - V: VectorCommitment, -{ - /// Creates a new constraint evaluation commitment from the provided composition polynomial - /// evaluations and the corresponding vector commitment. - pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { - assert_eq!( - evaluations.num_rows(), - commitment.domain_len(), - "number of rows in constraint evaluation matrix must be the same as the size \ - of the vector commitment domain" - ); - - ConstraintCommitment { - evaluations, - vector_commitment: commitment, - _h: PhantomData, - } - } - - /// Returns the commitment. - pub fn commitment(&self) -> H::Digest { - self.vector_commitment.commitment() - } - - /// Returns constraint evaluations at the specified positions along with a batch opening proof - /// against the vector commitment. - pub fn query(self, positions: &[usize]) -> Queries { - // build batch opening proof to the leaves specified by positions - let opening_proof = self - .vector_commitment - .open_many(positions) - .expect("failed to generate a batch opening proof for constraint queries"); - - // determine a set of evaluations corresponding to each position - let mut evaluations = Vec::new(); - for &position in positions { - let row = self.evaluations.row(position).to_vec(); - evaluations.push(row); - } - - Queries::new::(opening_proof.1, evaluations) - } -} diff --git a/prover/src/constraints/commitment/default.rs b/prover/src/constraints/commitment/default.rs new file mode 100644 index 000000000..4b8276bad --- /dev/null +++ b/prover/src/constraints/commitment/default.rs @@ -0,0 +1,175 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use alloc::vec::Vec; +use core::marker::PhantomData; + +use air::{proof::Queries, PartitionOptions, ZkParameters}; +use crypto::{ElementHasher, VectorCommitment}; +use math::FieldElement; +use rand::RngCore; +use tracing::info_span; + +use super::{ConstraintCommitment, RowMatrix}; +use crate::{CompositionPoly, CompositionPolyTrace, StarkDomain, DEFAULT_SEGMENT_WIDTH}; + +// CONSTRAINT COMMITMENT +// ================================================================================================ + +/// Constraint evaluation commitment. +/// +/// The commitment consists of two components: +/// * Evaluations of composition polynomial columns over the LDE domain. +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. 
+pub struct DefaultConstraintCommitment<
+    E: FieldElement,
+    H: ElementHasher,
+    R: RngCore,
+    V: VectorCommitment,
+> {
+    evaluations: RowMatrix,
+    vector_commitment: V,
+    _h: PhantomData,
+    _prng: PhantomData,
+}
+
+impl DefaultConstraintCommitment
+where
+    E: FieldElement,
+    H: ElementHasher,
+    R: RngCore,
+    V: VectorCommitment,
+{
+    /// Creates a new constraint evaluation commitment from the provided composition polynomial
+    /// evaluations and the corresponding vector commitment.
+    pub fn new(
+        composition_poly_trace: CompositionPolyTrace,
+        num_constraint_composition_columns: usize,
+        domain: &StarkDomain,
+        partition_options: PartitionOptions,
+        zk_parameters: Option,
+        prng: &mut Option,
+    ) -> (Self, CompositionPoly) {
+        // extend the composition polynomial trace over the LDE domain and build a commitment
+        // to the extended evaluations
+        let (evaluations, commitment, composition_poly) = build_constraint_commitment::(
+            composition_poly_trace,
+            num_constraint_composition_columns,
+            domain,
+            partition_options,
+            zk_parameters,
+            prng,
+        );
+
+        assert_eq!(
+            evaluations.num_rows(),
+            commitment.get_domain_len(),
+            "number of rows in constraint evaluation matrix must be the same as the size \
+            of the vector commitment domain"
+        );
+
+        let commitment = Self {
+            evaluations,
+            vector_commitment: commitment,
+            _h: PhantomData,
+            _prng: PhantomData,
+        };
+
+        (commitment, composition_poly)
+    }
+}
+
+impl ConstraintCommitment for DefaultConstraintCommitment
+where
+    E: FieldElement,
+    H: ElementHasher + core::marker::Sync,
+    R: RngCore,
+    V: VectorCommitment + core::marker::Sync,
+{
+    type HashFn = H;
+    type VC = V;
+
+    /// Returns the commitment.
+    fn commitment(&self) -> H::Digest {
+        self.vector_commitment.commitment()
+    }
+
+    /// Returns constraint evaluations at the specified positions along with a batch opening proof
+    /// against the vector commitment.
+ fn query(self, positions: &[usize]) -> Queries { + // build batch opening proof to the leaves specified by positions + let opening_proof = self + .vector_commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for constraint queries"); + + // determine a set of evaluations corresponding to each position + let mut evaluations = Vec::new(); + for &position in positions { + let row = self.evaluations.row(position).to_vec(); + evaluations.push(row); + } + + Queries::new::(opening_proof.1, evaluations) + } +} + +fn build_constraint_commitment( + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, +) -> (RowMatrix, V, CompositionPoly) +where + E: FieldElement, + H: ElementHasher, + R: RngCore, + V: VectorCommitment, +{ + // first, build constraint composition polynomial from its trace as follows: + // - interpolate the trace into a polynomial in coefficient form + // - "break" the polynomial into a set of column polynomials each of degree equal to + // trace_length - 1 + let composition_poly = info_span!( + "build_composition_poly_columns", + num_columns = num_constraint_composition_columns + ) + .in_scope(|| { + CompositionPoly::new( + composition_poly_trace, + domain, + num_constraint_composition_columns, + zk_parameters, + prng, + ) + }); + assert_eq!( + composition_poly.num_columns(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); + assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1); + + // then, evaluate composition polynomial columns over the LDE domain + let domain_size = domain.lde_domain_size(); + let composed_evaluations = info_span!("evaluate_composition_poly_columns").in_scope(|| { + RowMatrix::evaluate_polys_over::(composition_poly.data(), domain) + }); + assert_eq!( + composed_evaluations.num_cols(), + num_constraint_composition_columns + zk_parameters.is_some() as usize + ); + assert_eq!(composed_evaluations.num_rows(), domain_size); + + // finally, build constraint evaluation commitment + let commitment = info_span!( + "compute_constraint_evaluation_commitment", + log_domain_size = domain_size.ilog2() + ) + .in_scope(|| composed_evaluations.commit_to_rows::(partition_options)); + + (composed_evaluations, commitment, composition_poly) +} diff --git a/prover/src/constraints/commitment/mod.rs b/prover/src/constraints/commitment/mod.rs new file mode 100644 index 000000000..5ecfe8b08 --- /dev/null +++ b/prover/src/constraints/commitment/mod.rs @@ -0,0 +1,37 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use air::proof::Queries; +use crypto::{ElementHasher, Hasher, VectorCommitment}; +use math::FieldElement; + +use super::RowMatrix; + +mod default; +pub use default::DefaultConstraintCommitment; + +// CONSTRAINT COMMITMENT +// ================================================================================================ + +/// Constraint evaluation commitment. +/// +/// The commitment consists of two components: +/// * Evaluations of composition polynomial columns over the LDE domain. +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. 
+pub trait ConstraintCommitment {
+    /// The hash function used for hashing the rows of trace segment LDEs.
+    type HashFn: ElementHasher;
+
+    /// The vector commitment scheme used for committing to the trace.
+    type VC: VectorCommitment;
+
+    /// Returns the commitment.
+    fn commitment(&self) -> ::Digest;
+
+    /// Returns constraint evaluations at the specified positions along with a batch opening proof
+    /// against the vector commitment.
+    fn query(self, positions: &[usize]) -> Queries;
+}
diff --git a/prover/src/constraints/composition_poly.rs b/prover/src/constraints/composition_poly.rs
index bad52f7f5..2de032981 100644
--- a/prover/src/constraints/composition_poly.rs
+++ b/prover/src/constraints/composition_poly.rs
@@ -5,7 +5,9 @@
 use alloc::vec::Vec;

-use math::{fft, polynom::degree_of, FieldElement};
+use air::ZkParameters;
+use math::{fft, polynom, FieldElement};
+use rand::{Rng, RngCore};

 use super::{ColMatrix, StarkDomain};
@@ -47,16 +49,23 @@ impl CompositionPolyTrace {
 ///
 /// For example, if the composition polynomial has degree 2N - 1, where N is the trace length,
 /// it will be stored as two columns of size N (each of degree N - 1).
+///
+/// When zero-knowledge is enabled, the composition polynomial is split into segment polynomials
+/// such that each segment polynomial's degree is small enough to accommodate adding a randomizer
+/// polynomial without the degree of the resulting randomized segment polynomial exceeding
+/// `domain.trace_length()`.
 pub struct CompositionPoly {
     data: ColMatrix,
 }

 impl CompositionPoly {
     /// Returns a new composition polynomial.
-    pub fn new(
+    pub fn new(
         composition_trace: CompositionPolyTrace,
         domain: &StarkDomain,
         num_cols: usize,
+        zk_parameters: Option,
+        prng: &mut Option,
     ) -> Self {
         assert!(
             domain.trace_length() < composition_trace.num_rows(),
@@ -70,7 +79,33 @@ impl CompositionPoly {
         let inv_twiddles = fft::get_inv_twiddles::(trace.len());
         fft::interpolate_poly_with_offset(&mut trace, &inv_twiddles, domain.offset());

-        let polys = segment(trace, domain.trace_length(), num_cols);
+        // compute the segment quotient polynomials
+        let quotient_degree = polynom::degree_of(&trace);
+        let degree_chunked_quotient = if zk_parameters.is_some() {
+            (quotient_degree + 1).div_ceil(num_cols)
+        } else {
+            domain.trace_length()
+        };
+        let polys = segment(trace, degree_chunked_quotient, num_cols);
+        let mut polys = complement_to(polys, domain.trace_length(), prng);
+
+        // generate a randomizer polynomial for FRI
+        if zk_parameters.is_some() {
+            let extended_len = polys[0].len();
+            let mut zk_col = vec![E::ZERO; extended_len];
+
+            for a in zk_col.iter_mut() {
+                let bytes = prng
+                    .as_mut()
+                    .expect("should contain a PRNG when zk is enabled")
+                    .gen::<[u8; 32]>();
+                *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE])
+                    .expect("failed to generate randomness");
+            }
+            // reduce the degree to match that of the DEEP composition polynomial
+            zk_col[extended_len - 1] = E::ZERO;
+            polys.push(zk_col)
+        }

         CompositionPoly { data: ColMatrix::new(polys) }
     }
@@ -96,8 +131,8 @@ impl CompositionPoly {
     }

     /// Returns evaluations of all composition polynomial columns at point z.
-    pub fn evaluate_at(&self, z: E) -> Vec {
-        self.data.evaluate_columns_at(z)
+    pub fn evaluate_at(&self, z: E, is_zk: bool) -> Vec {
+        self.data.evaluate_columns_at(z, is_zk)
     }

     /// Returns a reference to the matrix of individual column polynomials.
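The hunk above splits the composition polynomial into lower-degree segment polynomials and pads them with random coefficients; the `complement_to` helper in the next hunk implements the telescoping construction of section 4.1 of https://eprint.iacr.org/2024/1037. The sketch below illustrates that construction over the toy prime field F_101. The segment sizes, mask values, and names (`eval`, `masks`, `masked`) are illustrative assumptions rather than the crate's code, but the cancellation it checks is the reason the randomized segments still recombine to the original polynomial.

```rust
// Illustration only: telescoping masks q_i(x) + x^d * rho_i(x) - rho_{i-1}(x),
// with rho_{-1} = rho_{k-1} = 0, over the toy prime field F_101.
const P: u64 = 101;

/// Horner evaluation of `poly` (ascending coefficients) at `x`, modulo P.
fn eval(poly: &[u64], x: u64) -> u64 {
    poly.iter().rev().fold(0, |acc, &c| (acc * x + c) % P)
}

fn main() {
    let d = 4; // segment (chunk) size
    let k = 3; // number of segments
    // a toy "composition" polynomial of degree k*d - 1, split into k segments of length d
    let h: Vec<u64> = (1..=(k * d) as u64).collect();
    let segments: Vec<Vec<u64>> = h.chunks(d).map(|c| c.to_vec()).collect();

    // masks rho_0 .. rho_{k-2}; in the crate these coefficients come from the PRNG
    let masks: Vec<Vec<u64>> = vec![vec![7, 13], vec![42, 99]];

    // masked segment i: q_i(x) + x^d * rho_i(x) - rho_{i-1}(x)
    let mut masked = Vec::new();
    for i in 0..k {
        let mut seg = vec![0u64; d + masks[0].len()];
        seg[..d].copy_from_slice(&segments[i]);
        if i < k - 1 {
            for (j, &m) in masks[i].iter().enumerate() {
                seg[d + j] = (seg[d + j] + m) % P; // add x^d * rho_i
            }
        }
        if i > 0 {
            for (j, &m) in masks[i - 1].iter().enumerate() {
                seg[j] = (seg[j] + P - m) % P; // subtract rho_{i-1}
            }
        }
        masked.push(seg);
    }

    // recombining the segments with offsets x^(i*d) cancels every mask:
    // sum_i x^(i*d) * masked_i(x) == h(x)
    let x = 5u64;
    let recombined = masked
        .iter()
        .enumerate()
        .fold(0u64, |acc, (i, seg)| (acc + x.pow((i * d) as u32) % P * eval(seg, x)) % P);
    assert_eq!(recombined, eval(&h, x));
    println!("telescoping identity holds at x = {x}");
}
```

Each mask is added to one segment (shifted up by `x^d`) and subtracted from the next, so the recombined sum is unchanged while every individual segment carries fresh randomness; `complement_to` applies the same idea with masks of size `l - polys[0].len()`.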
@@ -111,6 +146,59 @@ impl CompositionPoly {
     }
 }

+/// Takes a vector of coefficients representing the segment polynomials of a given composition
+/// polynomial as input, and generates coefficients of their randomized version.
+///
+/// The randomization technique is the one in section 4.1 of https://eprint.iacr.org/2024/1037.pdf.
+fn complement_to(
+    polys: Vec>,
+    l: usize,
+    prng: &mut Option,
+) -> Vec> {
+    let mut result = vec![];
+
+    let randomizer_poly_size = l - polys[0].len();
+    let mut current_poly = vec![E::ZERO; randomizer_poly_size];
+    let mut previous_poly = vec![E::ZERO; randomizer_poly_size];
+
+    for (_, poly) in polys.iter().enumerate().take_while(|(index, _)| *index != polys.len() - 1) {
+        let diff = l - poly.len();
+
+        for eval in current_poly.iter_mut().take(diff) {
+            let bytes = prng
+                .as_mut()
+                .expect("should contain a PRNG when zk is enabled")
+                .gen::<[u8; 32]>();
+            *eval = E::from_random_bytes(&bytes[..E::VALUE_SIZE])
+                .expect("failed to generate randomness");
+        }
+
+        let mut res = vec![];
+        res.extend_from_slice(poly);
+        res.extend_from_slice(&current_poly);
+
+        for i in 0..randomizer_poly_size {
+            res[i] -= previous_poly[i];
+        }
+
+        previous_poly.copy_from_slice(&current_poly[..randomizer_poly_size]);
+
+        result.push(res)
+    }
+
+    // TODO: is this always guaranteed to not panic?
+    let poly = polys.last().unwrap();
+    let mut res = vec![E::ZERO; l];
+    for (i, entry) in poly.iter().enumerate() {
+        res[i] = *entry;
+    }
+    for i in 0..randomizer_poly_size {
+        res[i] -= previous_poly[i];
+    }
+    result.push(res);
+    result
+}
+
 // HELPER FUNCTIONS
 // ================================================================================================
@@ -123,8 +211,6 @@ fn segment(
     trace_len: usize,
     num_cols: usize,
 ) -> Vec> {
-    debug_assert!(degree_of(&coefficients) < trace_len * num_cols);
-
     coefficients
         .chunks(trace_len)
         .take(num_cols)
diff --git a/prover/src/constraints/evaluation_table.rs b/prover/src/constraints/evaluation_table.rs
index 9add913f4..554136a56 100644
--- a/prover/src/constraints/evaluation_table.rs
+++ b/prover/src/constraints/evaluation_table.rs
@@ -73,7 +73,7 @@ impl<'a, E: FieldElement> ConstraintEvaluationTable<'a, E> {
         // collect expected degrees for all transition constraints to compare them against actual
         // degrees; we do this in debug mode only because this comparison is expensive
         let expected_transition_degrees =
-            build_transition_constraint_degrees(transition_constraints, domain.trace_length());
+            build_transition_constraint_degrees(transition_constraints, domain);

         ConstraintEvaluationTable {
             evaluations: uninit_matrix(num_columns, num_rows),
@@ -420,16 +420,35 @@ fn get_inv_evaluation(
 #[cfg(debug_assertions)]
 fn build_transition_constraint_degrees(
     constraints: &TransitionConstraints,
-    trace_length: usize,
+    domain: &StarkDomain,
 ) -> Vec {
+    use crate::domain::ZkInfo;
+
     let mut result = Vec::new();
+    let (trace_length, trace_len_ext) = if let Some(zk_info) = domain.zk_info() {
+        let ZkInfo {
+            original_trace_length,
+            degree_witness_randomizer,
+        }: ZkInfo = zk_info;
+
+        let ext_len = (original_trace_length + degree_witness_randomizer).next_power_of_two();
+        (original_trace_length, ext_len)
+    } else {
+        (domain.trace_length(), domain.trace_length())
+    };
     for degree in constraints.main_constraint_degrees() {
-        result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree())
+        result.push(
+            degree.get_evaluation_degree(trace_length, trace_len_ext)
+                - constraints.divisor().degree(),
+        )
    }
    for degree in 
constraints.aux_constraint_degrees() { - result.push(degree.get_evaluation_degree(trace_length) - constraints.divisor().degree()) + result.push( + degree.get_evaluation_degree(trace_length, trace_len_ext) + - constraints.divisor().degree(), + ) } result diff --git a/prover/src/constraints/evaluator/periodic_table.rs b/prover/src/constraints/evaluator/periodic_table.rs index ec72aa766..4601460e3 100644 --- a/prover/src/constraints/evaluator/periodic_table.rs +++ b/prover/src/constraints/evaluator/periodic_table.rs @@ -37,23 +37,29 @@ impl PeriodicValueTable { // them for polynomials of the same size let mut twiddle_map = BTreeMap::new(); + // zero-knowledge blowup factor + let factor = air.context().trace_length_ext() / air.trace_length(); let evaluations = polys .iter() .map(|poly| { let poly_size = poly.len(); let num_cycles = (air.trace_length() / poly_size) as u64; let offset = air.domain_offset().exp(num_cycles.into()); - let twiddles = - twiddle_map.entry(poly_size).or_insert_with(|| fft::get_twiddles(poly_size)); - fft::evaluate_poly_with_offset(poly, twiddles, offset, air.ce_blowup_factor()) + let mut new_poly = vec![B::ZERO; factor * poly_size]; + new_poly[..poly_size].copy_from_slice(&poly[..poly_size]); + let twiddles = twiddle_map + .entry(new_poly.len()) + .or_insert_with(|| fft::get_twiddles(new_poly.len())); + + fft::evaluate_poly_with_offset(&new_poly, twiddles, offset, air.ce_blowup_factor()) }) .collect::>(); // allocate memory to hold all expanded values and copy polynomial evaluations into the // table in such a way that values for the same row are adjacent to each other. let row_width = polys.len(); - let column_length = max_poly_size * air.ce_blowup_factor(); + let column_length = factor * max_poly_size * air.ce_blowup_factor(); let mut values = unsafe { uninit_vector(row_width * column_length) }; for i in 0..column_length { for (j, column) in evaluations.iter().enumerate() { diff --git a/prover/src/constraints/mod.rs b/prover/src/constraints/mod.rs index 566065f0f..054edb32c 100644 --- a/prover/src/constraints/mod.rs +++ b/prover/src/constraints/mod.rs @@ -15,4 +15,4 @@ mod evaluation_table; pub use evaluation_table::{ConstraintEvaluationTable, EvaluationTableFragment}; mod commitment; -pub use commitment::ConstraintCommitment; +pub use commitment::{ConstraintCommitment, DefaultConstraintCommitment}; diff --git a/prover/src/domain.rs b/prover/src/domain.rs index 87a54bbe5..525733a1b 100644 --- a/prover/src/domain.rs +++ b/prover/src/domain.rs @@ -30,6 +30,10 @@ pub struct StarkDomain { /// Offset of the low-degree extension domain. domain_offset: B, + + /// Extra information needed for constraint evaluation validation when zero-knowledge is enabled. + #[cfg(debug_assertions)] + zk_info: Option, } // STARK DOMAIN IMPLEMENTATION @@ -38,18 +42,30 @@ pub struct StarkDomain { impl StarkDomain { /// Returns a new STARK domain initialized with the provided `context`. 
pub fn new>(air: &A) -> Self { - let trace_twiddles = fft::get_twiddles(air.trace_length()); + let trace_twiddles = fft::get_twiddles(air.context().trace_length_ext()); // build constraint evaluation domain let domain_gen = B::get_root_of_unity(air.ce_domain_size().ilog2()); let ce_domain = get_power_series(domain_gen, air.ce_domain_size()); + #[cfg(debug_assertions)] + let zk_info = if air.is_zk() { + Some(ZkInfo { + original_trace_length: air.trace_length(), + degree_witness_randomizer: air.context().zk_witness_randomizer_degree(), + }) + } else { + None + }; + StarkDomain { trace_twiddles, ce_domain, ce_to_lde_blowup: air.lde_domain_size() / air.ce_domain_size(), ce_domain_mod_mask: air.ce_domain_size() - 1, domain_offset: air.domain_offset(), + #[cfg(debug_assertions)] + zk_info, } } @@ -72,6 +88,8 @@ impl StarkDomain { ce_to_lde_blowup: 1, ce_domain_mod_mask: ce_domain_size - 1, domain_offset, + #[cfg(debug_assertions)] + zk_info: None, } } @@ -152,4 +170,16 @@ impl StarkDomain { pub fn offset(&self) -> B { self.domain_offset } + + #[cfg(debug_assertions)] + pub(crate) fn zk_info(&self) -> Option { + self.zk_info + } +} + +#[cfg(debug_assertions)] +#[derive(Clone, Copy, Debug)] +pub struct ZkInfo { + pub(crate) original_trace_length: usize, + pub(crate) degree_witness_randomizer: usize, } diff --git a/prover/src/errors.rs b/prover/src/errors.rs index a0d01a233..6fd560582 100644 --- a/prover/src/errors.rs +++ b/prover/src/errors.rs @@ -39,3 +39,5 @@ impl fmt::Display for ProverError { } } } + +impl core::error::Error for ProverError {} diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 035d6c655..e0f1a5360 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -48,9 +48,9 @@ pub use air::{ EvaluationFrame, FieldExtension, LagrangeKernelRandElements, ProofOptions, TraceInfo, TransitionConstraintDegree, }; -use air::{AuxRandElements, GkrRandElements, PartitionOptions}; +use air::{AuxRandElements, GkrRandElements, PartitionOptions, ZkParameters}; pub use crypto; -use crypto::{ElementHasher, RandomCoin, VectorCommitment}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriProver; pub use math; use math::{ @@ -58,6 +58,7 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; +use rand::{distributions::Standard, prelude::Distribution, Error, RngCore, SeedableRng}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -73,7 +74,7 @@ use matrix::{ColMatrix, RowMatrix}; mod constraints; pub use constraints::{ CompositionPoly, CompositionPolyTrace, ConstraintCommitment, ConstraintEvaluator, - DefaultConstraintEvaluator, + DefaultConstraintCommitment, DefaultConstraintEvaluator, }; mod composer; @@ -155,6 +156,15 @@ pub trait Prover { where E: FieldElement; + /// Constraint low-degree extension for building the LDEs of composition polynomial columns and + /// their commitments. + type ConstraintCommitment: ConstraintCommitment + where + E: FieldElement; + + /// PRNG used when zero-knowledge (zk) is enabled. 
+ type ZkPrng: RngCore + SeedableRng; + // REQUIRED METHODS // -------------------------------------------------------------------------------------------- @@ -183,6 +193,8 @@ pub trait Prover { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement; @@ -199,6 +211,29 @@ pub trait Prover { where E: FieldElement; + /// Extends constraint composition polynomial over the LDE domain and builds a commitment to + /// its evaluations. + /// + /// The extension is done by first interpolating the evaluations of the polynomial so that we + /// get the composition polynomial in coefficient form; then breaking the polynomial into + /// columns each of size equal to trace length, and finally evaluating each composition + /// polynomial column over the LDE domain. + /// + /// The commitment is computed by building a vector containing the hashes of each row in + /// the evaluation matrix, and then building vector commitment of the resulting vector. + #[maybe_async] + fn build_constraint_commitment( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) + where + E: FieldElement; + // PROVIDED METHODS // -------------------------------------------------------------------------------------------- @@ -219,6 +254,7 @@ pub trait Prover { /// Builds and returns the auxiliary trace. #[allow(unused_variables)] #[maybe_async] + #[instrument(skip_all)] fn build_aux_trace( &self, main_trace: &Self::Trace, @@ -239,8 +275,13 @@ pub trait Prover { /// Public inputs must match the value returned from /// [Self::get_pub_inputs()](Prover::get_pub_inputs) for the provided trace. #[maybe_async] - fn prove(&self, trace: Self::Trace) -> Result + fn prove( + &self, + trace: Self::Trace, + seed: Option<::Seed>, + ) -> Result where + Standard: Distribution<<::HashFn as crypto::Hasher>::Digest>, ::PublicInputs: Send, ::GkrProof: Send, { @@ -248,18 +289,20 @@ pub trait Prover { // of static dispatch for selecting two generic parameter: extension field and hash // function. match self.options().field_extension() { - FieldExtension::None => maybe_await!(self.generate_proof::(trace)), + FieldExtension::None => { + maybe_await!(self.generate_proof::(trace, seed)) + }, FieldExtension::Quadratic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(2)); } - maybe_await!(self.generate_proof::>(trace)) + maybe_await!(self.generate_proof::>(trace, seed)) }, FieldExtension::Cubic => { if !>::is_supported() { return Err(ProverError::UnsupportedFieldExtension(3)); } - maybe_await!(self.generate_proof::>(trace)) + maybe_await!(self.generate_proof::>(trace, seed)) }, } } @@ -272,8 +315,13 @@ pub trait Prover { /// TODO: make this function un-callable externally? #[doc(hidden)] #[maybe_async] - fn generate_proof(&self, trace: Self::Trace) -> Result + fn generate_proof( + &self, + trace: Self::Trace, + seed: Option<::Seed>, + ) -> Result where + Standard: Distribution<<::HashFn as Hasher>::Digest>, E: FieldElement, ::PublicInputs: Send, ::GkrProof: Send, @@ -289,28 +337,45 @@ pub trait Prover { // execution of the computation for the provided public inputs. 
let air = Self::Air::new(trace.info().clone(), pub_inputs, self.options().clone());

+        // get the zk parameters, which are None unless zk is enabled
+        let zk_parameters = air.context().zk_parameters();
+
+        // create a PRNG to be used when zk is enabled, and also generate a seed to be used in
+        // generating salting values for Fiat-Shamir by `ProverChannel`
+        let (mut prng, seed) = generate_prng_and_new_seed(seed);
+
        // create a channel which is used to simulate interaction between the prover and the
        // verifier; the channel will be used to commit to values and to draw randomness that
        // should come from the verifier.
-        let mut channel =
-            ProverChannel::::new(
-                &air,
-                pub_inputs_elements,
-            );
+        let mut channel = ProverChannel::<
+            Self::Air,
+            E,
+            Self::HashFn,
+            Self::ZkPrng,
+            Self::RandomCoin,
+            Self::VC,
+        >::new(
+            &air, pub_inputs_elements, air.context().zk_blowup_factor(), seed
+        );

        // 1 ----- Commit to the execution trace --------------------------------------------------

        // build computation domain; this is used later for polynomial evaluations
        let lde_domain_size = air.lde_domain_size();
-        let trace_length = air.trace_length();
+        let trace_length = air.context().trace_length_ext();
        let domain = info_span!("build_domain", trace_length, lde_domain_size)
            .in_scope(|| StarkDomain::new(&air));
        assert_eq!(domain.lde_domain_size(), lde_domain_size);
        assert_eq!(domain.trace_length(), trace_length);

        // commit to the main trace segment
-        let (mut trace_lde, mut trace_polys) =
-            maybe_await!(self.commit_to_main_trace_segment(&trace, &domain, &mut channel));
+        let (mut trace_lde, mut trace_polys) = maybe_await!(self.commit_to_main_trace_segment(
+            &trace,
+            &domain,
+            zk_parameters,
+            &mut channel,
+            &mut prng,
+        ));

        // build the auxiliary trace segment, and append the resulting segments to trace commitment
        // and trace polynomial table structs
@@ -342,7 +407,7 @@
            // extend the auxiliary trace segment and commit to the extended trace
            let span = info_span!("commit_to_aux_trace_segment").entered();
            let (aux_segment_polys, aux_segment_commitment) =
-                trace_lde.set_aux_trace(&aux_trace, &domain);
+                trace_lde.set_aux_trace(&aux_trace, &domain, zk_parameters, &mut prng);

            // commit to the LDE of the extended auxiliary trace segment by writing its
            // commitment into the channel
@@ -391,7 +456,14 @@
        // 3 ----- commit to constraint evaluations -----------------------------------------------
        let (constraint_commitment, composition_poly) = maybe_await!(self
-            .commit_to_constraint_evaluations(&air, composition_poly_trace, &domain, &mut channel));
+            .commit_to_constraint_evaluations(
+                &air,
+                composition_poly_trace,
+                &domain,
+                &mut channel,
+                zk_parameters,
+                &mut prng
+            ));

        // 4 ----- build DEEP composition polynomial ----------------------------------------------
        let deep_composition_poly = {
@@ -410,16 +482,17 @@
        // g, where g is the generator of the trace domain. Additionally, if the Lagrange kernel
        // auxiliary column is present, we also evaluate that column over the points: z, z * g,
        // z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = log(trace_len).
- let ood_trace_states = trace_polys.get_ood_frame(z); + let ood_trace_states = + trace_polys.get_ood_frame(z, air.context().trace_info().length()); channel.send_ood_trace_states(&ood_trace_states); - let ood_evaluations = composition_poly.evaluate_at(z); + let ood_evaluations = composition_poly.evaluate_at(z, air.is_zk()); channel.send_ood_constraint_evaluations(&ood_evaluations); // draw random coefficients to use during DEEP polynomial composition, and use them to // initialize the DEEP composition polynomial let deep_coefficients = channel.get_deep_composition_coeffs(); - let mut deep_composition_poly = DeepCompositionPoly::new(z, deep_coefficients); + let mut deep_composition_poly = DeepCompositionPoly::new(&air, z, deep_coefficients); // combine all trace polynomials together and merge them into the DEEP composition // polynomial @@ -437,7 +510,7 @@ pub trait Prover { // make sure the degree of the DEEP composition polynomial is equal to trace polynomial // degree minus 1. - assert_eq!(trace_length - 2, deep_composition_poly.degree()); + assert_eq!(air.context().trace_length_ext() - 2, deep_composition_poly.degree()); // 5 ----- evaluate DEEP composition polynomial over LDE domain --------------------------- let deep_evaluations = { @@ -445,7 +518,10 @@ pub trait Prover { let deep_evaluations = deep_composition_poly.evaluate(&domain); // we check the following condition in debug mode only because infer_degree is an // expensive operation - debug_assert_eq!(trace_length - 2, infer_degree(&deep_evaluations, domain.offset())); + debug_assert_eq!( + air.context().trace_length_ext() - 2, + infer_degree(&deep_evaluations, domain.offset()) + ); drop(span); deep_evaluations @@ -507,76 +583,29 @@ pub trait Prover { Ok(proof) } - /// Extends constraint composition polynomial over the LDE domain and builds a commitment to - /// its evaluations. - /// - /// The extension is done by first interpolating the evaluations of the polynomial so that we - /// get the composition polynomial in coefficient form; then breaking the polynomial into - /// columns each of size equal to trace length, and finally evaluating each composition - /// polynomial column over the LDE domain. - /// - /// The commitment is computed by building a vector containing the hashes of each row in - /// the evaluation matrix, and then building vector commitment of the resulting vector. 
-    #[maybe_async]
-    fn build_constraint_commitment<E>(
-        &self,
-        composition_poly_trace: CompositionPolyTrace<E>,
-        num_constraint_composition_columns: usize,
-        domain: &StarkDomain<Self::BaseField>,
-    ) -> (ConstraintCommitment<E, Self::HashFn, Self::VC>, CompositionPoly<E>)
-    where
-        E: FieldElement<BaseField = Self::BaseField>,
-    {
-        // first, build constraint composition polynomial from its trace as follows:
-        // - interpolate the trace into a polynomial in coefficient form
-        // - "break" the polynomial into a set of column polynomials each of degree equal to
-        //   trace_length - 1
-        let composition_poly = info_span!(
-            "build_composition_poly_columns",
-            num_columns = num_constraint_composition_columns
-        )
-        .in_scope(|| {
-            CompositionPoly::new(composition_poly_trace, domain, num_constraint_composition_columns)
-        });
-        assert_eq!(composition_poly.num_columns(), num_constraint_composition_columns);
-        assert_eq!(composition_poly.column_degree(), domain.trace_length() - 1);
-
-        // then, evaluate composition polynomial columns over the LDE domain
-        let domain_size = domain.lde_domain_size();
-        let composed_evaluations = info_span!("evaluate_composition_poly_columns").in_scope(|| {
-            RowMatrix::evaluate_polys_over::<DEFAULT_SEGMENT_WIDTH>(composition_poly.data(), domain)
-        });
-        assert_eq!(composed_evaluations.num_cols(), num_constraint_composition_columns);
-        assert_eq!(composed_evaluations.num_rows(), domain_size);
-
-        // finally, build constraint evaluation commitment
-        let constraint_commitment = info_span!(
-            "compute_constraint_evaluation_commitment",
-            log_domain_size = domain_size.ilog2()
-        )
-        .in_scope(|| {
-            let commitment = composed_evaluations.commit_to_rows::<Self::HashFn, Self::VC>(
-                self.options()
-                    .partition_options()
-                    .partition_size::<E>(num_constraint_composition_columns),
-            );
-            ConstraintCommitment::new(composed_evaluations, commitment)
-        });
-
-        (constraint_commitment, composition_poly)
-    }
-
     #[doc(hidden)]
     #[instrument(skip_all)]
+    #[allow(clippy::type_complexity)]
     #[maybe_async]
     fn commit_to_main_trace_segment<E>(
         &self,
         trace: &Self::Trace,
         domain: &StarkDomain<Self::BaseField>,
-        channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>,
+        zk_parameters: Option<ZkParameters>,
+        channel: &mut ProverChannel<
+            '_,
+            Self::Air,
+            E,
+            Self::HashFn,
+            Self::ZkPrng,
+            Self::RandomCoin,
+            Self::VC,
+        >,
+        prng: &mut Option<Self::ZkPrng>,
     ) -> (Self::TraceLde<E>, TracePolyTable<E>)
     where
         E: FieldElement<BaseField = Self::BaseField>,
+        Standard: Distribution<<<Self as Prover>::HashFn as crypto::Hasher>::Digest>,
     {
         // extend the main execution trace and commit to the extended trace
         let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde(
             trace.info(),
             trace.main_segment(),
             domain,
             self.options().partition_options(),
+            zk_parameters,
+            prng,
         ));
 
         // get the commitment to the main trace segment LDE
@@ -598,16 +629,28 @@
 
     #[doc(hidden)]
     #[instrument(skip_all)]
+    #[allow(clippy::type_complexity)]
     #[maybe_async]
     fn commit_to_constraint_evaluations<E>(
         &self,
         air: &Self::Air,
         composition_poly_trace: CompositionPolyTrace<E>,
         domain: &StarkDomain<Self::BaseField>,
-        channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>,
-    ) -> (ConstraintCommitment<E, Self::HashFn, Self::VC>, CompositionPoly<E>)
+        channel: &mut ProverChannel<
+            '_,
+            Self::Air,
+            E,
+            Self::HashFn,
+            Self::ZkPrng,
+            Self::RandomCoin,
+            Self::VC,
+        >,
+        zk_parameters: Option<ZkParameters>,
+        prng: &mut Option<Self::ZkPrng>,
+    ) -> (Self::ConstraintCommitment<E>, CompositionPoly<E>)
     where
         E: FieldElement<BaseField = Self::BaseField>,
+        Standard: Distribution<<<Self as Prover>::HashFn as crypto::Hasher>::Digest>,
     {
         // first, build a commitment to the evaluations of the constraint composition polynomial
         // columns
@@ -616,6 +659,9 @@
             composition_poly_trace,
             air.context().num_constraint_composition_columns(),
             domain,
+            self.options().partition_options(),
+            zk_parameters,
+            prng
         ));
 
         // then, commit to the evaluations of constraints by writing the commitment string of
@@ -625,3 +671,51 @@
         (constraint_commitment, composition_poly)
     }
 }
+
+// MOCK PRNG FOR ZERO-KNOWLEDGE
+// =================================================================================================
+
+/// A mock PRNG used when zero-knowledge is not enabled.
+pub struct MockPrng;
+
+impl SeedableRng for MockPrng {
+    type Seed = [u8; 8];
+
+    fn from_seed(_seed: Self::Seed) -> Self {
+        Self
+    }
+}
+
+impl RngCore for MockPrng {
+    fn next_u32(&mut self) -> u32 {
+        0
+    }
+
+    fn next_u64(&mut self) -> u64 {
+        0
+    }
+
+    fn fill_bytes(&mut self, dest: &mut [u8]) {
+        dest.iter_mut().for_each(|d| *d = 0);
+    }
+
+    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
+        dest.iter_mut().for_each(|d| *d = 0);
+        Ok(())
+    }
+}
+
+/// A helper function that generates a PRNG from a seed, as well as a fresh seed, when
+/// zero-knowledge is enabled.
+fn generate_prng_and_new_seed<P: RngCore + SeedableRng>(
+    seed: Option<<P as SeedableRng>::Seed>,
+) -> (Option<P>, Option<<P as SeedableRng>::Seed>) {
+    match seed {
+        Some(seed) => {
+            let mut prng = P::from_seed(seed);
+            let mut seed = <P as SeedableRng>
::Seed::default(); + prng.fill_bytes(seed.as_mut()); + + (Some(prng), Some(seed)) + }, + None => (None, None), + } +} diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 8872cca71..2ec304d30 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -8,6 +8,7 @@ use core::{iter::FusedIterator, slice}; use crypto::{ElementHasher, VectorCommitment}; use math::{fft, polynom, FieldElement}; +use rand::{Rng, RngCore}; #[cfg(feature = "concurrent")] use utils::iterators::*; use utils::{batch_iter_mut, iter, iter_mut, uninit_vector}; @@ -242,11 +243,13 @@ impl ColMatrix { } /// Evaluates polynomials contained in the columns of this matrix at a single point `x`. - pub fn evaluate_columns_at(&self, x: F) -> Vec + pub fn evaluate_columns_at(&self, x: F, skip_last: bool) -> Vec where F: FieldElement + From, { - iter!(self.columns).map(|p| polynom::eval(p, x)).collect() + iter!(&self.columns[..self.columns.len() - skip_last as usize]) + .map(|p| polynom::eval(p, x)) + .collect() } // COMMITMENTS @@ -294,6 +297,52 @@ impl ColMatrix { pub fn into_columns(self) -> Vec> { self.columns } + + /// Randomizes the trace polynomials when zero-knowledge is enabled. + /// + /// Takes as input a factor that is a power of two which is used to determine the size (i.e., + /// the number of coefficients) of the randomized witness polynomial. + /// + /// The randomized witness polynomial has the form: + /// + /// ```text + /// \hat{w}(x) = w(x) + r(x) * Z_H(x) + /// ``` + /// where: + /// + /// 1. w(x) is the witness polynomial of degree trace length minus one. + /// 2. \hat{w}(x) is the randomized witness polynomial. + /// 3. r(x) is the randomizer polynomial and has degree `(zk_blowup - 1) * n`. + /// 4. Z_H(x) = (x^n - 1). + pub(crate) fn randomize(&self, zk_blowup: usize, prng: &mut Option) -> Self { + let cur_len = self.num_rows(); + let extended_len = zk_blowup * cur_len; + let pad_len = extended_len - cur_len; + + let randomized_cols: Vec> = self + .columns() + .map(|col| { + let mut added = vec![E::ZERO; pad_len]; + for a in added.iter_mut() { + let bytes = prng + .as_mut() + .expect("should have a PRNG when zk is enabled") + .gen::<[u8; 32]>(); + *a = E::from_random_bytes(&bytes[..E::VALUE_SIZE]) + .expect("failed to generate randomness"); + } + + let mut res_col = col.to_vec(); + res_col.extend_from_slice(&added); + for i in 0..pad_len { + res_col[i] -= added[i] + } + res_col + }) + .collect(); + + Self { columns: randomized_cols } + } } // COLUMN ITERATOR diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index 85b43122e..91c5c04aa 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -5,6 +5,7 @@ use alloc::vec::Vec; +use air::PartitionOptions; use crypto::{ElementHasher, VectorCommitment}; use math::{fft, FieldElement, StarkField}; #[cfg(feature = "concurrent")] @@ -180,15 +181,16 @@ impl RowMatrix { /// * A vector commitment is computed for the resulting vector using the specified vector /// commitment scheme. /// * The resulting vector commitment is returned as the commitment to the entire matrix. 
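The randomization formula documented in the `randomize` method above can be sanity-checked on toy data. The sketch below works over a small prime field; the prime, the generator, and both polynomials are illustrative choices, not values used by the crate:

```rust
// Check that \hat{w}(x) = w(x) + r(x) * Z_H(x) agrees with w(x) on the trace domain H,
// since Z_H(x) = x^n - 1 vanishes on every n-th root of unity. Toy prime p = 17,
// trace domain of size n = 4 generated by g = 4 (an element of order 4 mod 17).
fn eval(poly: &[u64], x: u64, p: u64) -> u64 {
    // Horner evaluation; coefficients in ascending order
    poly.iter().rev().fold(0, |acc, &c| (acc * x + c) % p)
}

fn main() {
    let (p, n, g) = (17u64, 4u32, 4u64);
    let w = [3, 5, 0, 2]; // witness polynomial w(x)
    let r = [7, 1]; // randomizer polynomial r(x)
    for i in 0..n {
        let x = g.pow(i) % p;
        let z_h = (x.pow(n) % p + p - 1) % p; // Z_H(x) = x^n - 1
        let w_hat = (eval(&w, x, p) + eval(&r, x, p) * z_h) % p;
        assert_eq!(w_hat, eval(&w, x, p)); // the randomizer is invisible on H
    }
}
```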
- pub fn commit_to_rows(&self, partition_size: usize) -> V + pub fn commit_to_rows(&self, partition_options: PartitionOptions) -> V where H: ElementHasher, V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; + let partition_size = partition_options.partition_size::(self.num_cols()); - if partition_size == self.num_cols() * E::EXTENSION_DEGREE { + if partition_size == self.num_cols() { // iterate though matrix rows, hashing each row batch_iter_mut!( &mut row_hashes, @@ -200,17 +202,21 @@ impl RowMatrix { } ); } else { + let num_partitions = partition_options.num_partitions::(self.num_cols()); + // iterate though matrix rows, hashing each row batch_iter_mut!( &mut row_hashes, 128, // min batch size |batch: &mut [H::Digest], batch_offset: usize| { - let mut buffer = vec![H::Digest::default(); partition_size]; + let mut buffer = vec![H::Digest::default(); num_partitions]; for (i, row_hash) in batch.iter_mut().enumerate() { self.row(batch_offset + i) .chunks(partition_size) .zip(buffer.iter_mut()) - .for_each(|(chunk, buf)| *buf = H::hash_elements(chunk)); + .for_each(|(chunk, buf)| { + *buf = H::hash_elements(chunk); + }); *row_hash = H::merge_many(&buffer); } } diff --git a/prover/src/tests/mod.rs b/prover/src/tests/mod.rs index 6b44fa0e9..a4230e3d1 100644 --- a/prover/src/tests/mod.rs +++ b/prover/src/tests/mod.rs @@ -44,7 +44,7 @@ impl MockAir { Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ) } @@ -55,7 +55,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.periodic_columns = column_values; result @@ -65,7 +65,7 @@ impl MockAir { let mut result = Self::new( TraceInfo::new(4, trace_length), (), - ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31), + ProofOptions::new(32, 8, 0, FieldExtension::None, 4, 31, false), ); result.assertions = assertions; result @@ -116,7 +116,7 @@ fn build_context( blowup_factor: usize, num_assertions: usize, ) -> AirContext { - let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31); + let options = ProofOptions::new(32, blowup_factor, 0, FieldExtension::None, 4, 31, false); let t_degrees = vec![TransitionConstraintDegree::new(2)]; AirContext::new(trace_info, t_degrees, num_assertions, options) } diff --git a/prover/src/trace/poly_table.rs b/prover/src/trace/poly_table.rs index 87fec88d4..bca5c9bad 100644 --- a/prover/src/trace/poly_table.rs +++ b/prover/src/trace/poly_table.rs @@ -69,10 +69,10 @@ impl TracePolyTable { } /// Evaluates all trace polynomials (across all trace segments) at the specified point `x`. - pub fn evaluate_at(&self, x: E) -> Vec { - let mut result = self.main_trace_polys.evaluate_columns_at(x); + pub fn evaluate_at(&self, x: E, skip_last: bool) -> Vec { + let mut result = self.main_trace_polys.evaluate_columns_at(x, skip_last); for aux_polys in self.aux_trace_polys.iter() { - result.append(&mut aux_polys.evaluate_columns_at(x)); + result.append(&mut aux_polys.evaluate_columns_at(x, false)); } result } @@ -82,11 +82,11 @@ impl TracePolyTable { /// Additionally, if the Lagrange kernel auxiliary column is present, we also evaluate that /// column over the points: z, z * g, z * g^2, z * g^4, ..., z * g^(2^(v-1)), where v = /// log(trace_len). 
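For context, the partitioned row hashing that `commit_to_rows` performs above can be modeled with `std`'s non-cryptographic hasher standing in for `H`. This is a simplified sketch of the scheme, not the crate's implementation:

```rust
// Each row is split into chunks of `partition_size` elements, each chunk is hashed
// separately, and the chunk digests are merged into one row digest (the analogue of
// H::merge_many over the buffer of partition hashes).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn hash_chunk(chunk: &[u64]) -> u64 {
    let mut h = DefaultHasher::new();
    chunk.hash(&mut h);
    h.finish()
}

fn hash_row(row: &[u64], partition_size: usize) -> u64 {
    if partition_size == row.len() {
        // single partition: hash the whole row directly
        hash_chunk(row)
    } else {
        // hash each partition, then merge the partition digests
        let digests: Vec<u64> = row.chunks(partition_size).map(hash_chunk).collect();
        hash_chunk(&digests)
    }
}

fn main() {
    let row = [1u64, 2, 3, 4, 5, 6, 7, 8];
    // the two digests differ: partitioning changes the commitment structure
    println!("{} vs {}", hash_row(&row, 8), hash_row(&row, 4));
}
```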
- pub fn get_ood_frame(&self, z: E) -> TraceOodFrame { - let log_trace_len = self.poly_size().ilog2(); + pub fn get_ood_frame(&self, z: E, trace_len: usize) -> TraceOodFrame { + let log_trace_len = trace_len.ilog2(); let g = E::from(E::BaseField::get_root_of_unity(log_trace_len)); - let current_row = self.evaluate_at(z); - let next_row = self.evaluate_at(z * g); + let current_row = self.evaluate_at(z, false); + let next_row = self.evaluate_at(z * g, false); let lagrange_kernel_frame = self.lagrange_kernel_poly.as_ref().map(|lagrange_kernel_col_poly| { diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index 26b5e3916..e2cbb1e83 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -6,8 +6,11 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, PartitionOptions, TraceInfo}; +use air::{ + proof::Queries, LagrangeKernelEvaluationFrame, PartitionOptions, TraceInfo, ZkParameters, +}; use crypto::VectorCommitment; +use rand::RngCore; use tracing::info_span; use super::{ @@ -43,7 +46,7 @@ pub struct DefaultTraceLde< aux_segment_oracles: Option, blowup: usize, trace_info: TraceInfo, - partition_option: PartitionOptions, + partition_options: PartitionOptions, _h: PhantomData, } @@ -60,18 +63,22 @@ where /// /// Returns a tuple containing a [TracePolyTable] with the trace polynomials for the main trace /// segment and the new [DefaultTraceLde]. - pub fn new( + pub fn new( trace_info: &TraceInfo, main_trace: &ColMatrix, domain: &StarkDomain, - partition_option: PartitionOptions, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace let (main_segment_lde, main_segment_vector_com, main_segment_polys) = - build_trace_commitment::( + build_trace_commitment::( main_trace, domain, - partition_option.partition_size::(main_trace.num_cols()), + partition_options, + zk_parameters, + prng, ); let trace_poly_table = TracePolyTable::new(main_segment_polys); @@ -80,9 +87,9 @@ where main_segment_oracles: main_segment_vector_com, aux_segment_lde: None, aux_segment_oracles: None, - blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), - partition_option, + partition_options, + blowup: domain.lde_domain_size() / trace_info.length(), _h: PhantomData, }; @@ -141,17 +148,21 @@ where /// This function will panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. /// - the auxiliary trace has been previously set already. 
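A small sketch of the `get_ood_frame` logic updated above, pairing each trace polynomial's evaluation at z with its evaluation at z * g; the toy prime and constants are illustrative, not the crate's field types:

```rust
// Over the trace domain, multiplying the evaluation point by the domain generator g
// corresponds to stepping from the current row to the next row, which is why the OOD
// frame evaluates each column polynomial at both z and z * g. Toy prime p = 17,
// trace_len = 4, g = 4 (an element of order 4 mod 17).
fn horner(poly: &[u64], x: u64, p: u64) -> u64 {
    poly.iter().rev().fold(0, |acc, &c| (acc * x + c) % p)
}

fn main() {
    let (p, g, z) = (17u64, 4u64, 5u64);
    let column = [1u64, 2, 3, 4]; // one trace column polynomial, ascending coefficients
    let current = horner(&column, z, p);
    let next = horner(&column, z * g % p, p);
    println!("ood frame for this column: current = {current}, next = {next}");
}
```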
- fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut Option, ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = - build_trace_commitment::( + build_trace_commitment::( aux_trace, domain, - self.partition_option.partition_size::(aux_trace.num_cols()), + self.partition_options, + zk_parameters, + prng, ); // check errors @@ -181,10 +192,9 @@ where ) { // at the end of the trace, next state wraps around and we read the first step again let next_lde_step = (lde_step + self.blowup()) % self.trace_len(); - - // copy main trace segment values into the frame - frame.current_mut().copy_from_slice(self.main_segment_lde.row(lde_step)); - frame.next_mut().copy_from_slice(self.main_segment_lde.row(next_lde_step)); + let l = frame.current().len(); + frame.current_mut().copy_from_slice(&self.main_segment_lde.row(lde_step)[..l]); + frame.next_mut().copy_from_slice(&self.main_segment_lde.row(next_lde_step)[..l]); } /// Reads current and next rows from the auxiliary trace segment into the specified frame. @@ -260,7 +270,6 @@ where &self.trace_info } } - // HELPER FUNCTIONS // ================================================================================================ @@ -273,16 +282,19 @@ where /// /// The trace commitment is computed by building a vector containing the hashes of each row of /// the extended execution trace, then building a vector commitment to the resulting vector. -fn build_trace_commitment( +fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, - partition_size: usize, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (RowMatrix, V, ColMatrix) where E: FieldElement, F: FieldElement, H: ElementHasher, V: VectorCommitment, + R: RngCore, { // extend the execution trace let (trace_lde, trace_polys) = { @@ -292,22 +304,31 @@ where blowup = domain.trace_to_lde_blowup() ) .entered(); + let trace_polys = trace.interpolate_columns(); + + // when zero-knowledge is enabled, we randomize the witness polynomials by adding a random + // polynomial times the zerofier over the trace domain. The degree of the random polynomial + // is a function of the number of FRI queries. 
+ let trace_polys = if let Some(parameters) = zk_parameters { + trace_polys.randomize(parameters.zk_blowup_witness(), prng) + } else { + trace_polys + }; + let trace_lde = RowMatrix::evaluate_polys_over::(&trace_polys, domain); drop(span); (trace_lde, trace_polys) }; - assert_eq!(trace_lde.num_cols(), trace.num_cols()); - assert_eq!(trace_polys.num_rows(), trace.num_rows()); assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment let commitment_domain_size = trace_lde.num_rows(); let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) - .in_scope(|| trace_lde.commit_to_rows::(partition_size)); - assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); + .in_scope(|| trace_lde.commit_to_rows::(partition_options)); + assert_eq!(trace_vector_com.get_domain_len(), commitment_domain_size); (trace_lde, trace_vector_com, trace_polys) } diff --git a/prover/src/trace/trace_lde/default/tests.rs b/prover/src/trace/trace_lde/default/tests.rs index 734accf68..88d1c4234 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -11,10 +11,12 @@ use math::{ fields::f128::BaseElement, get_power_series, get_power_series_with_offset, polynom, FieldElement, StarkField, }; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; use crate::{ tests::{build_fib_trace, MockAir}, - DefaultTraceLde, StarkDomain, Trace, TraceLde, + DefaultTraceLde, MockPrng, StarkDomain, Trace, TraceLde, }; type Blake3 = Blake3_256; @@ -27,6 +29,7 @@ fn extend_trace_table() { let trace = build_fib_trace(trace_length * 2); let domain = StarkDomain::new(&air); let partition_option = PartitionOptions::default(); + let prng = ChaCha20Rng::from_entropy(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl let (trace_lde, trace_polys) = DefaultTraceLde::>::new( @@ -34,6 +37,8 @@ fn extend_trace_table() { trace.main_segment(), &domain, partition_option, + None, + &mut Some(prng), ); // check the width and length of the extended trace @@ -81,11 +86,13 @@ fn commit_trace_table() { let partition_option = PartitionOptions::default(); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, _) = DefaultTraceLde::>::new( + let (trace_lde, _) = DefaultTraceLde::>::new::( trace.info(), trace.main_segment(), &domain, partition_option, + None, + &mut None, ); // build commitment, using a Merkle tree, to the trace rows diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index dbce21491..721f7733a 100644 --- a/prover/src/trace/trace_lde/mod.rs +++ b/prover/src/trace/trace_lde/mod.rs @@ -5,8 +5,9 @@ use alloc::vec::Vec; -use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; +use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo, ZkParameters}; use crypto::{ElementHasher, Hasher, VectorCommitment}; +use rand::RngCore; use super::{ColMatrix, EvaluationFrame, FieldElement, TracePolyTable}; use crate::StarkDomain; @@ -45,10 +46,12 @@ pub trait TraceLde: Sync { /// This function is expected to panic if any of the following are true: /// - the number of rows in the provided `aux_trace` does not match the main trace. /// - this segment would exceed the number of segments specified by the trace layout. 
- fn set_aux_trace( + fn set_aux_trace( &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, + zk_parameters: Option, + prng: &mut Option, ) -> (ColMatrix, ::Digest); /// Reads current and next rows from the main trace segment into the specified frame. diff --git a/prover/src/trace/trace_table.rs b/prover/src/trace/trace_table.rs index a5c10069b..9ce826ffb 100644 --- a/prover/src/trace/trace_table.rs +++ b/prover/src/trace/trace_table.rs @@ -272,6 +272,11 @@ impl TraceTable { pub fn read_row_into(&self, step: usize, target: &mut [B]) { self.trace.read_row_into(step, target); } + + /// Returns the trace meta data. + pub fn meta_data(&self) -> &[u8] { + self.info.meta() + } } // TRACE TRAIT IMPLEMENTATION diff --git a/utils/core/Cargo.toml b/utils/core/Cargo.toml index c606caa08..5679398bf 100644 --- a/utils/core/Cargo.toml +++ b/utils/core/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-utils" -version = "0.10.0" +version = "0.11.0" description = "Utilities for the Winterfell STARK prover/verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-utils/0.10.0" +documentation = "https://docs.rs/winter-utils/0.11.0" categories = ["cryptography", "no-std"] keywords = ["serialization", "transmute"] edition = "2021" diff --git a/utils/core/src/errors.rs b/utils/core/src/errors.rs index 52df2b007..1bba975bf 100644 --- a/utils/core/src/errors.rs +++ b/utils/core/src/errors.rs @@ -32,3 +32,5 @@ impl fmt::Display for DeserializationError { } } } + +impl core::error::Error for DeserializationError {} diff --git a/utils/core/src/serde/mod.rs b/utils/core/src/serde/mod.rs index edf9ba1c8..90ac01b6d 100644 --- a/utils/core/src/serde/mod.rs +++ b/utils/core/src/serde/mod.rs @@ -344,7 +344,7 @@ impl Serializable for str { } fn get_size_hint(&self) -> usize { - self.len().get_size_hint() + self.as_bytes().len() + self.len().get_size_hint() + self.len() } } @@ -355,7 +355,7 @@ impl Serializable for String { } fn get_size_hint(&self) -> usize { - self.len().get_size_hint() + self.as_bytes().len() + self.len().get_size_hint() + self.len() } } diff --git a/utils/maybe_async/Cargo.toml b/utils/maybe_async/Cargo.toml index 825d991b0..92f19151c 100644 --- a/utils/maybe_async/Cargo.toml +++ b/utils/maybe_async/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-maybe-async" -version = "0.10.1" +version = "0.11.0" description = "sync/async macro for winterfell" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/facebook/winterfell" -documentation = "https://docs.rs/winter-maybe-async/0.10.1" +documentation = "https://docs.rs/winter-maybe-async/0.11.0" keywords = ["async"] edition = "2021" rust-version = "1.82" diff --git a/utils/rand/Cargo.toml b/utils/rand/Cargo.toml index 3e05c6437..74bc24800 100644 --- a/utils/rand/Cargo.toml +++ b/utils/rand/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-rand-utils" -version = "0.10.0" +version = "0.11.0" description = "Random value generation utilities for Winterfell crates" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-rand-utils/0.10.0" +documentation = "https://docs.rs/winter-rand-utils/0.11.0" categories = ["cryptography"] keywords = ["rand"] edition = "2021" @@ -16,7 +16,7 @@ rust-version = "1.82" bench = false [dependencies] -utils = { version = 
"0.10", path = "../core", package = "winter-utils" } +utils = { version = "0.11", path = "../core", package = "winter-utils" } [target.'cfg(not(target_family = "wasm"))'.dependencies] rand = { version = "0.8" } diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml index 63d4b9c0f..cbdf1e464 100644 --- a/verifier/Cargo.toml +++ b/verifier/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winter-verifier" -version = "0.10.0" +version = "0.11.0" description = "Winterfell STARK verifier" authors = ["winterfell contributors"] readme = "README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winter-verifier/0.10.0" +documentation = "https://docs.rs/winter-verifier/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "verifier"] edition = "2021" @@ -20,11 +20,11 @@ default = ["std"] std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -crypto = { version = "0.10", path = "../crypto", package = "winter-crypto", default-features = false } -fri = { version = "0.10", path = "../fri", package = "winter-fri", default-features = false } -math = { version = "0.10", path = "../math", package = "winter-math", default-features = false } -utils = { version = "0.10", path = "../utils/core", package = "winter-utils", default-features = false } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +crypto = { version = "0.11", path = "../crypto", package = "winter-crypto", default-features = false } +fri = { version = "0.11", path = "../fri", package = "winter-fri", default-features = false } +math = { version = "0.11", path = "../math", package = "winter-math", default-features = false } +utils = { version = "0.11", path = "../utils/core", package = "winter-utils", default-features = false } # Allow math in docs [package.metadata.docs.rs] diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 9d7dbc426..90f396d69 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -13,6 +13,7 @@ use air::{ use crypto::{ElementHasher, VectorCommitment}; use fri::VerifierChannel as FriVerifierChannel; use math::{FieldElement, StarkField}; +use utils::Deserializable; use crate::VerifierError; @@ -45,12 +46,14 @@ pub struct VerifierChannel< fri_layer_queries: Vec>, fri_remainder: Option>, fri_num_partitions: usize, + fri_salts: Vec>, // out-of-domain frame ood_trace_frame: Option>, ood_constraint_evaluations: Option>, // query proof-of-work pow_nonce: u64, gkr_proof: Option>, + salts: Vec>, } impl VerifierChannel @@ -76,6 +79,7 @@ where fri_proof, pow_nonce, gkr_proof, + salts, } = proof; // make sure AIR and proof base fields are the same @@ -103,6 +107,7 @@ where constraint_queries, air, num_unique_queries as usize, + air.is_zk(), )?; // --- parse FRI proofs ------------------------------------------------------------------- @@ -110,6 +115,10 @@ where let fri_remainder = fri_proof .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; + + let fri_salts = fri_proof + .parse_salts::() + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; @@ -124,8 +133,12 @@ where 
.partition_size::(air.context().trace_info().main_trace_width()); let partition_size_aux = partition_options.partition_size::(air.context().trace_info().aux_segment_width()); - let partition_size_constraint = partition_options - .partition_size::(air.context().num_constraint_composition_columns()); + let partition_size_constraint = partition_options.partition_size::( + air.context().num_constraint_composition_columns() + air.is_zk() as usize, + ); + // --- parse Fiat-Shamir salts ----------------------------------------------- + let salts: Vec> = Vec::read_from_bytes(&salts) + .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; Ok(VerifierChannel { // trace queries @@ -144,12 +157,14 @@ where fri_layer_queries, fri_remainder: Some(fri_remainder), fri_num_partitions, + fri_salts, // out-of-domain evaluation ood_trace_frame: Some(ood_trace_frame), ood_constraint_evaluations: Some(ood_constraint_evaluations), // query seed pow_nonce, gkr_proof, + salts, }) } @@ -194,6 +209,11 @@ where self.gkr_proof.as_ref() } + /// Returns the salts needed for Fiat-Shamir. + pub fn read_salts(&self) -> Vec> { + self.salts.clone() + } + /// Returns trace states at the specified positions of the LDE domain. This also checks if /// the trace states are valid against the trace commitment sent by the prover. /// @@ -298,6 +318,10 @@ where fn take_fri_remainder(&mut self) -> Vec { self.fri_remainder.take().expect("already read") } + + fn take_salt(&mut self) -> Option<::Digest> { + self.fri_salts.remove(0) + } } // TRACE QUERIES @@ -414,8 +438,11 @@ where queries: Queries, air: &A, num_queries: usize, + is_zk: bool, ) -> Result { - let constraint_frame_width = air.context().num_constraint_composition_columns(); + // In the case zero-knowledge is enabled, we parse the randomizer polynomial as well + let constraint_frame_width = + air.context().num_constraint_composition_columns() + is_zk as usize; let (query_proofs, evaluations) = queries .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) @@ -442,10 +469,12 @@ where E: FieldElement, H: ElementHasher, { - if partition_size == row.len() * E::EXTENSION_DEGREE { + if partition_size == row.len() { H::hash_elements(row) } else { - let mut buffer = vec![H::Digest::default(); partition_size]; + let num_partitions = row.len().div_ceil(partition_size); + + let mut buffer = vec![H::Digest::default(); num_partitions]; row.chunks(partition_size) .zip(buffer.iter_mut()) diff --git a/verifier/src/composer.rs b/verifier/src/composer.rs index 5f10ef79f..4c6af9cbe 100644 --- a/verifier/src/composer.rs +++ b/verifier/src/composer.rs @@ -88,12 +88,13 @@ impl DeepComposer { let n = queried_main_trace_states.num_rows(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); - for ((_, row), &x) in (0..n).zip(queried_main_trace_states.rows()).zip(&self.x_coordinates) { let mut t1_num = E::ZERO; let mut t2_num = E::ZERO; + // we iterate over all polynomials except for the randomizer when zero-knowledge + // is enabled for (i, &value) in row.iter().enumerate() { let value = E::from(value); // compute the numerator of T'_i(x) as (T_i(x) - T_i(z)), multiply it by a @@ -122,6 +123,8 @@ impl DeepComposer { // we define this offset here because composition of the main trace columns has // consumed some number of composition coefficients already. + // In the case zero-knowledge is enabled, the offset is adjusted so as to account for + // the randomizer polynomial. 
let cc_offset = queried_main_trace_states.num_columns(); // we treat the Lagrange column separately if present @@ -215,10 +218,12 @@ impl DeepComposer { &self, queried_evaluations: Table, ood_evaluations: Vec, + is_zk: bool, ) -> Vec { assert_eq!(queried_evaluations.num_rows(), self.x_coordinates.len()); let n = queried_evaluations.num_rows(); + let num_cols = ood_evaluations.len(); let mut result_num = Vec::::with_capacity(n); let mut result_den = Vec::::with_capacity(n); @@ -228,11 +233,17 @@ impl DeepComposer { // this way we can use batch inversion in the end. for (query_values, &x) in queried_evaluations.rows().zip(&self.x_coordinates) { let mut composition_num = E::ZERO; - for (i, &evaluation) in query_values.iter().enumerate() { + for (i, &evaluation) in query_values.iter().enumerate().take(num_cols) { // compute the numerator of H'_i(x) as (H_i(x) - H_i(z)), multiply it by a // composition coefficient, and add the result to the numerator aggregator composition_num += (evaluation - ood_evaluations[i]) * self.cc.constraints[i]; } + // In the case zero-knowledge is enabled, the randomizer is added to DEEP composition + // polynomial. + if is_zk { + let randmizer_at_x = query_values[num_cols]; + composition_num += randmizer_at_x * (x - z); + } result_num.push(composition_num); result_den.push(x - z); } diff --git a/verifier/src/errors.rs b/verifier/src/errors.rs index e1b072db5..fadaee1fa 100644 --- a/verifier/src/errors.rs +++ b/verifier/src/errors.rs @@ -99,3 +99,5 @@ impl fmt::Display for VerifierError { } } } + +impl core::error::Error for VerifierError {} diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index 2c75ecd1d..9d54ac60c 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -170,8 +170,12 @@ where const AUX_TRACE_IDX: usize = 1; let trace_commitments = channel.read_trace_commitments(); + // read all the salts needed for Fiat-Shamir. These are random values sampled by the Prover + // and required for zero-knowledge i.e., if zero-knowledge is not enabled then they are `None`. + let mut salts = channel.read_salts(); + // reseed the coin with the commitment to the main trace segment - public_coin.reseed(trace_commitments[MAIN_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[MAIN_TRACE_IDX], salts.remove(0)); // process auxiliary trace segments (if any), to build a set of random elements for each segment let aux_trace_rand_elements = if air.trace_info().is_multi_segment() { @@ -193,7 +197,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new_with_gkr(rand_elements, gkr_rand_elements)) } else { @@ -201,7 +205,7 @@ where "failed to generate the random elements needed to build the auxiliary trace", ); - public_coin.reseed(trace_commitments[AUX_TRACE_IDX]); + public_coin.reseed_with_salt(trace_commitments[AUX_TRACE_IDX], salts.remove(0)); Some(AuxRandElements::new(rand_elements)) } @@ -221,7 +225,7 @@ where // to the prover, and the prover evaluates trace and constraint composition polynomials at z, // and sends the results back to the verifier. 
let constraint_commitment = channel.read_constraint_commitment(); - public_coin.reseed(constraint_commitment); + public_coin.reseed_with_salt(constraint_commitment, salts.remove(0)); let z = public_coin.draw::().map_err(|_| VerifierError::RandomCoinError)?; // 3 ----- OOD consistency check -------------------------------------------------------------- @@ -244,14 +248,14 @@ where aux_trace_rand_elements.as_ref(), z, ); - public_coin.reseed(ood_trace_frame.hash::()); + public_coin.reseed_with_salt(ood_trace_frame.hash::(), salts.remove(0)); // read evaluations of composition polynomial columns sent by the prover, and reduce them into - // a single value by computing \sum_{i=0}^{m-1}(z^(i * l) * value_i), where value_i is the - // evaluation of the ith column polynomial H_i(X) at z, l is the trace length and m is + // a single value by computing \sum_{i=0}^{m-1}(z^(i) * value_i), where value_i is the + // evaluation of the ith column polynomial H_i(X) at z^m, l is the trace length and m is // the number of composition column polynomials. This computes H(z) (i.e. // the evaluation of the composition polynomial at z) using the fact that - // H(X) = \sum_{i=0}^{m-1} X^{i * l} H_i(X). + // H(X) = \sum_{i=0}^{m-1} X^{i} H_i(X^m). // Also, reseed the public coin with the OOD constraint evaluations received from the prover. let ood_constraint_evaluations = channel.read_ood_constraint_evaluations(); let ood_constraint_evaluation_2 = @@ -259,9 +263,12 @@ where .iter() .enumerate() .fold(E::ZERO, |result, (i, &value)| { - result + z.exp_vartime(((i * (air.trace_length())) as u32).into()) * value + result + + z.exp_vartime( + ((i * air.context().num_coefficients_chunk_quotient()) as u32).into(), + ) * value }); - public_coin.reseed(H::hash_elements(&ood_constraint_evaluations)); + public_coin.reseed_with_salt(H::hash_elements(&ood_constraint_evaluations), salts.remove(0)); // finally, make sure the values are the same if ood_constraint_evaluation_1 != ood_constraint_evaluation_2 { @@ -329,8 +336,11 @@ where ood_aux_trace_frame, ood_lagrange_kernel_frame, ); - let c_composition = composer - .compose_constraint_evaluations(queried_constraint_evaluations, ood_constraint_evaluations); + let c_composition = composer.compose_constraint_evaluations( + queried_constraint_evaluations, + ood_constraint_evaluations, + air.is_zk(), + ); let deep_evaluations = composer.combine_compositions(t_composition, c_composition); // 7 ----- Verify low-degree proof ------------------------------------------------------------- diff --git a/winterfell/Cargo.toml b/winterfell/Cargo.toml index cdeeb59ec..258eb22e9 100644 --- a/winterfell/Cargo.toml +++ b/winterfell/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "winterfell" -version = "0.10.0" +version = "0.11.0" description = "Winterfell STARK prover and verifier" authors = ["winterfell contributors"] readme = "../README.md" license = "MIT" repository = "https://github.com/novifinancial/winterfell" -documentation = "https://docs.rs/winterfell/0.10.0" +documentation = "https://docs.rs/winterfell/0.11.0" categories = ["cryptography", "no-std"] keywords = ["crypto", "zkp", "stark", "prover", "verifier"] edition = "2021" @@ -22,9 +22,9 @@ default = ["std"] std = ["prover/std", "verifier/std"] [dependencies] -air = { version = "0.10", path = "../air", package = "winter-air", default-features = false } -prover = { version = "0.10", path = "../prover", package = "winter-prover", default-features = false } -verifier = { version = "0.10", path = "../verifier", package = "winter-verifier", 
default-features = false } +air = { version = "0.11", path = "../air", package = "winter-air", default-features = false } +prover = { version = "0.11", path = "../prover", package = "winter-prover", default-features = false } +verifier = { version = "0.11", path = "../verifier", package = "winter-verifier", default-features = false } # Allow math in docs [package.metadata.docs.rs] diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index e05d5ca5c..4919f157d 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -261,12 +261,14 @@ //! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! matrix::ColMatrix, -//! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, TracePolyTable, TraceTable, +//! CompositionPoly, CompositionPolyTrace, DefaultConstraintCommitment, +//! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, +//! TracePolyTable, TraceTable, ZkParameters, //! }; //! //! # use winterfell::{ //! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, -//! # EvaluationFrame, PartitionOptions, TraceInfo, TransitionConstraintDegree, +//! # EvaluationFrame, PartitionOptions, TraceInfo, TransitionConstraintDegree, MockPrng, //! # }; //! # //! # pub struct PublicInputs { @@ -350,8 +352,11 @@ //! type VC = MerkleTree; //! type RandomCoin = DefaultRandomCoin; //! type TraceLde> = DefaultTraceLde; +//! type ConstraintCommitment> = +//! DefaultConstraintCommitment; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; +//! type ZkPrng = MockPrng; //! //! // Our public inputs consist of the first and last value in the execution trace. //! fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { @@ -372,8 +377,29 @@ //! main_trace: &ColMatrix, //! domain: &StarkDomain, //! partition_option: PartitionOptions, +//! is_zk: Option, +//! prng: &mut Option, //! ) -> (Self::TraceLde, TracePolyTable) { -//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk, prng) +//! } +//! +//! fn build_constraint_commitment>( +//! &self, +//! composition_poly_trace: CompositionPolyTrace, +//! num_constraint_composition_columns: usize, +//! domain: &StarkDomain, +//! partition_options: PartitionOptions, +//! zk_parameters: Option, +//! prng: &mut Option, +//! ) -> (Self::ConstraintCommitment, CompositionPoly) { +//! DefaultConstraintCommitment::new( +//! composition_poly_trace, +//! num_constraint_composition_columns, +//! domain, +//! partition_options, +//! zk_parameters, +//! prng, +//! ) //! } //! //! fn new_evaluator<'a, E: FieldElement>( @@ -399,10 +425,11 @@ //! # crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! # math::{fields::f128::BaseElement, FieldElement, ToElements}, //! # matrix::ColMatrix, -//! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, -//! # DefaultTraceLde, EvaluationFrame, TraceInfo, -//! # TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, +//! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, CompositionPoly, CompositionPolyTrace, +//! # DefaultConstraintEvaluator, DefaultConstraintCommitment, DefaultTraceLde, EvaluationFrame, +//! # TraceInfo, TransitionConstraintDegree, TraceTable, FieldExtension, PartitionOptions, Prover, //! # ProofOptions, StarkDomain, Proof, Trace, TracePolyTable, +//! 
# ZkParameters, MockPrng, //! # }; //! # //! # pub fn build_do_work_trace(start: BaseElement, n: usize) -> TraceTable { @@ -495,8 +522,11 @@ //! # type VC = MerkleTree; //! # type RandomCoin = DefaultRandomCoin; //! # type TraceLde> = DefaultTraceLde; +//! # type ConstraintCommitment> = +//! # DefaultConstraintCommitment; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; +//! # type ZkPrng = MockPrng; //! # //! # fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { //! # let last_step = trace.length() - 1; @@ -516,8 +546,29 @@ //! # main_trace: &ColMatrix, //! # domain: &StarkDomain, //! # partition_option: PartitionOptions, +//! # is_zk: Option, +//! # prng: &mut Option, //! # ) -> (Self::TraceLde, TracePolyTable) { -//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) +//! # DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, is_zk, prng) +//! # } +//! # +//! # fn build_constraint_commitment>( +//! # &self, +//! # composition_poly_trace: CompositionPolyTrace, +//! # num_constraint_composition_columns: usize, +//! # domain: &StarkDomain, +//! # partition_options: PartitionOptions, +//! # zk_parameters: Option, +//! # prng: &mut Option, +//! # ) -> (Self::ConstraintCommitment, CompositionPoly) { +//! # DefaultConstraintCommitment::new( +//! # composition_poly_trace, +//! # num_constraint_composition_columns, +//! # domain, +//! # partition_options, +//! # zk_parameters, +//! # prng, +//! # ) //! # } //! # //! # fn new_evaluator<'a, E: FieldElement>( @@ -548,11 +599,12 @@ //! FieldExtension::None, //! 8, // FRI folding factor //! 31, // FRI max remainder polynomial degree +//! false, // Enable zero-knowledge //! ); //! //! // Instantiate the prover and generate the proof. //! let prover = WorkProver::new(options); -//! let proof = prover.prove(trace).unwrap(); +//! let proof = prover.prove(trace, None).unwrap(); //! //! // The verifier will accept proofs with parameters which guarantee 95 bits or more of //! 
// conjectured security @@ -596,15 +648,15 @@ #[cfg(test)] extern crate std; -pub use air::{AuxRandElements, GkrVerifier, PartitionOptions}; +pub use air::{AuxRandElements, GkrVerifier, PartitionOptions, ZkParameters}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, - BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, + BoundaryConstraint, BoundaryConstraintGroup, CompositionPoly, CompositionPolyTrace, ConstraintCompositionCoefficients, ConstraintDivisor, ConstraintEvaluator, - DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, EvaluationFrame, - FieldExtension, Proof, ProofOptions, Prover, ProverError, ProverGkrProof, StarkDomain, Trace, - TraceInfo, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, - TransitionConstraintDegree, + DeepCompositionCoefficients, DefaultConstraintCommitment, DefaultConstraintEvaluator, + DefaultTraceLde, EvaluationFrame, FieldExtension, MockPrng, Proof, ProofOptions, Prover, + ProverError, ProverGkrProof, StarkDomain, Trace, TraceInfo, TraceLde, TracePolyTable, + TraceTable, TraceTableFragment, TransitionConstraintDegree, }; pub use verifier::{verify, AcceptableOptions, ByteWriter, VerifierError}; diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index 3fb0c5197..12f49c010 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -5,12 +5,13 @@ use std::{vec, vec::Vec}; -use air::{GkrRandElements, LagrangeKernelRandElements}; +use air::{GkrRandElements, LagrangeKernelRandElements, ZkParameters}; use crypto::MerkleTree; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, matrix::ColMatrix, + CompositionPoly, DefaultConstraintCommitment, }; use super::*; @@ -23,7 +24,7 @@ fn test_complex_lagrange_kernel_air() { let prover = LagrangeComplexProver::new(AUX_TRACE_WIDTH); - let proof = prover.prove(trace).unwrap(); + let proof = prover.prove(trace, None).unwrap(); verify::< LagrangeKernelComplexAir, @@ -205,7 +206,7 @@ impl LagrangeComplexProver { fn new(aux_trace_width: usize) -> Self { Self { aux_trace_width, - options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1), + options: ProofOptions::new(1, 2, 0, FieldExtension::None, 2, 1, false), } } } @@ -219,8 +220,11 @@ impl Prover for LagrangeComplexProver { type RandomCoin = DefaultRandomCoin; type TraceLde> = DefaultTraceLde; + type ConstraintCommitment> = + DefaultConstraintCommitment, Self::ZkPrng, Self::VC>; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>; + type ZkPrng = MockPrng; fn get_pub_inputs(&self, _trace: &Self::Trace) -> <::Air as Air>::PublicInputs { } @@ -235,11 +239,32 @@ impl Prover for LagrangeComplexProver { main_trace: &ColMatrix, domain: &StarkDomain, partition_option: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, ) -> (Self::TraceLde, TracePolyTable) where E: math::FieldElement, { - DefaultTraceLde::new(trace_info, main_trace, domain, partition_option) + DefaultTraceLde::new(trace_info, main_trace, domain, partition_option, zk_parameters, prng) + } + + fn build_constraint_commitment>( + &self, + composition_poly_trace: CompositionPolyTrace, + num_constraint_composition_columns: usize, + domain: &StarkDomain, + partition_options: PartitionOptions, + zk_parameters: Option, + prng: &mut Option, + ) -> (Self::ConstraintCommitment, CompositionPoly) { + DefaultConstraintCommitment::new( + 
composition_poly_trace, + num_constraint_composition_columns, + domain, + partition_options, + zk_parameters, + prng, + ) } fn new_evaluator<'a, E>(