From a1f2fa7ad06eb85536a1b0852542a815b0f80b3c Mon Sep 17 00:00:00 2001
From: Sergio Chouhy <41742639+schouhy@users.noreply.github.com>
Date: Tue, 17 Oct 2023 15:39:36 -0300
Subject: [PATCH] Stark: Stone prover compatibility end to end for Fibonacci
 AIR (#596)

* add test
* make trace commitment SHARP compatible
* wip
* use powers of a single challenge for the boundary and transition coefficients
* add permutation to match sharp compatible commitments on the trace
* change trait bound from ByteConversion to Serializable
* minor refactor
* fmt, clippy
* move std feature to inner trait function in Serializable
* add IsStarkProver and IsStarkVerifier traits
* proof of concept
* composition poly breaker
* WIP: commitment composition poly works. Opens are broken.
* WIP Refactor open_trace_polys and open_composition_poly
* Refactor sample iotas
* Refactor sample iotas
* make fri a trait
* change trace ood evaluations in transcript
* wip
* sample gammas as power of a single challenge
* fix z fri sampling
* wip
* wip
* wip, broken
* Compiles but fibonacci_5 does not work
* Opens of query phase and OOD broken. Commit phase of FRI works.
* Don't append to the transcript when grinding factor is zero
* skip grinding factor when security bits is zero
* remove permutation function
* fmt
* fix standard verifier
* removes deep consistency check and openings of the first layer of fri for each query
* SHARP computes the trace and composition polynomial openings and their symmetric elements consistently
* Test symmetric elements in trace openings to compute deep composition polynomial
* Composition polynomial opening evaluations are split between symmetric and not. The authentication paths remain equal
* check openings in symmetric elements
* make verifier sharp compatible
* compute number of parts
* fix verify fri for original prover
* fix verify sym in stone prover
* rename
* rename file
* wip
* remove unnecessary variable
* wip
* move verifier
* move fri
* fix open
* move stone to prover
* remove file
* fmt
* clippy
* clippy
* remove redundant trait bounds
* remove custom serialization/deserialization and replace it with serde_cbor
* fmt
* clippy
* remove old files after merge from main
* fmt
* make field a type of IsStarkVerifier
* remove frame serialization
* separate compatibility test into individual tests
* remove redundant test
* add test case 2
* minor refactor. add docs
* minor refactor
* remove unnecessary method
* revert unintended changes to exercises
* clippy
* remove isFri trait
* move Prover definition to the top of the file
* update docs and add unit test
* minor refactors.
clippy * remove unused trait method * Move function only used for tests, to tests --------- Co-authored-by: Agustin Co-authored-by: MauroFab --- exercises/message/src/starks/verifier.rs | 2 +- math/src/fft/cpu/bit_reversing.rs | 8 +- math/src/polynomial.rs | 49 +- provers/cairo-prover-cli/Cargo.toml | 1 + provers/cairo-prover-cli/src/main.rs | 5 +- provers/cairo/Cargo.toml | 4 +- provers/cairo/benches/criterion_verifier.rs | 3 +- .../cairo/benches/criterion_verifier_70k.rs | 3 +- provers/cairo/src/air.rs | 53 +- provers/cairo/src/tests/integration_tests.rs | 40 +- provers/stark/Cargo.toml | 4 +- provers/stark/src/constraints/evaluator.rs | 5 +- provers/stark/src/domain.rs | 2 - provers/stark/src/frame.rs | 115 - provers/stark/src/fri/fri_commitment.rs | 28 +- provers/stark/src/fri/fri_decommit.rs | 232 +- provers/stark/src/fri/mod.rs | 94 +- provers/stark/src/proof/stark.rs | 699 +----- provers/stark/src/prover.rs | 1955 ++++++++++------- provers/stark/src/tests/integration_tests.rs | 79 +- provers/stark/src/verifier.rs | 1271 ++++++----- 21 files changed, 2054 insertions(+), 2598 deletions(-) diff --git a/exercises/message/src/starks/verifier.rs b/exercises/message/src/starks/verifier.rs index a23e62ef1..ab631ff0c 100644 --- a/exercises/message/src/starks/verifier.rs +++ b/exercises/message/src/starks/verifier.rs @@ -605,7 +605,7 @@ where let timer4 = Instant::now(); #[allow(clippy::let_and_return)] - if !step_4_verify_deep_composition_polynomial(&air, proof, &domain, &challenges) { + if !step_4_verify_deep_composition_polynomial(&air, proof, &challenges) { error!("DEEP Composition Polynomial verification failed"); return false; } diff --git a/math/src/fft/cpu/bit_reversing.rs b/math/src/fft/cpu/bit_reversing.rs index 139bf615a..68eb73427 100644 --- a/math/src/fft/cpu/bit_reversing.rs +++ b/math/src/fft/cpu/bit_reversing.rs @@ -1,7 +1,7 @@ /// In-place bit-reverse permutation algorithm. Requires input length to be a power of two. pub fn in_place_bit_reverse_permute(input: &mut [E]) { for i in 0..input.len() { - let bit_reversed_index = reverse_index(&i, input.len() as u64); + let bit_reversed_index = reverse_index(i, input.len() as u64); if bit_reversed_index > i { input.swap(i, bit_reversed_index); } @@ -9,9 +9,9 @@ pub fn in_place_bit_reverse_permute(input: &mut [E]) { } /// Reverses the `log2(size)` first bits of `i` -pub fn reverse_index(i: &usize, size: u64) -> usize { +pub fn reverse_index(i: usize, size: u64) -> usize { if size == 1 { - *i + i } else { i.reverse_bits() >> (usize::BITS - size.trailing_zeros()) } @@ -26,7 +26,7 @@ mod test { fn bit_reverse_permutation_works() { let mut reversed: Vec = Vec::with_capacity(16); for i in 0..reversed.capacity() { - reversed.push(reverse_index(&i, reversed.capacity() as u64)); + reversed.push(reverse_index(i, reversed.capacity() as u64)); } assert_eq!( reversed[..], diff --git a/math/src/polynomial.rs b/math/src/polynomial.rs index 34580ec2d..940e49cfc 100644 --- a/math/src/polynomial.rs +++ b/math/src/polynomial.rs @@ -227,27 +227,24 @@ impl Polynomial> { } } - /// For the given polynomial, returns a tuple `(even, odd)` of polynomials - /// with the even and odd coefficients respectively. - /// Note that `even` and `odd` ARE NOT actually even/odd polynomials themselves. + /// Returns a vector of polynomials [p₀, p₁, ..., p_{d-1}], where d is `number_of_parts`, such that `self` equals + /// p₀(Xᵈ) + Xp₁(Xᵈ) + ... + X^(d-1)p_{d-1}(Xᵈ). 
/// - /// Example: if poly = 3 X^3 + X^2 + 2X + 1, then - /// `poly.even_odd_decomposition = (even, odd)` with - /// `even` = X + 1 and `odd` = 3X + 1. - /// - /// In general, the decomposition satisfies the following: - /// `poly(x)` = `even(x^2)` + X * `odd(x^2)` - pub fn even_odd_decomposition(&self) -> (Self, Self) { + /// Example: if d = 2 and `self` is 3 X^3 + X^2 + 2X + 1, then `poly.break_in_parts(2)` + /// returns a vector with two polynomials `(p₀, p₁)`, where p₀ = X + 1 and p₁ = 3X + 2. + pub fn break_in_parts(&self, number_of_parts: usize) -> Vec { let coef = self.coefficients(); - let even_coef: Vec> = coef.iter().step_by(2).cloned().collect(); - - // odd coeficients of poly are multiplied by beta - let odd_coef: Vec> = coef.iter().skip(1).step_by(2).cloned().collect(); - - Polynomial::pad_with_zero_coefficients( - &Polynomial::new(&even_coef), - &Polynomial::new(&odd_coef), - ) + let mut parts: Vec = Vec::with_capacity(number_of_parts); + for i in 0..number_of_parts { + let coeffs: Vec<_> = coef + .iter() + .skip(i) + .step_by(number_of_parts) + .cloned() + .collect(); + parts.push(Polynomial::new(&coeffs)); + } + parts } } @@ -895,6 +892,20 @@ mod tests { ); } + #[test] + fn break_in_parts() { + // p = 3 X^3 + X^2 + 2X + 1 + let p = Polynomial::new(&[FE::new(1), FE::new(2), FE::new(1), FE::new(3)]); + let p0_expected = Polynomial::new(&[FE::new(1), FE::new(1)]); + let p1_expected = Polynomial::new(&[FE::new(2), FE::new(3)]); + let parts = p.break_in_parts(2); + assert_eq!(parts.len(), 2); + let p0 = &parts[0]; + let p1 = &parts[1]; + assert_eq!(p0, &p0_expected); + assert_eq!(p1, &p1_expected); + } + use proptest::prelude::*; proptest! { #[test] diff --git a/provers/cairo-prover-cli/Cargo.toml b/provers/cairo-prover-cli/Cargo.toml index f1f391e2e..75ab6fcbc 100644 --- a/provers/cairo-prover-cli/Cargo.toml +++ b/provers/cairo-prover-cli/Cargo.toml @@ -10,6 +10,7 @@ name = "cairo-platinum-prover-cli" path = "src/main.rs" [dependencies] +serde_cbor = { version = "0.11.1"} lambdaworks-math = { workspace = true , features = ["lambdaworks-serde"] } stark-platinum-prover = { workspace = true, features = ["wasm"] } cairo-platinum-prover = { workspace = true} diff --git a/provers/cairo-prover-cli/src/main.rs b/provers/cairo-prover-cli/src/main.rs index 35ab08794..14273700c 100644 --- a/provers/cairo-prover-cli/src/main.rs +++ b/provers/cairo-prover-cli/src/main.rs @@ -117,7 +117,7 @@ fn main() { }; let mut bytes = vec![]; - let proof_bytes = proof.serialize(); + let proof_bytes: Vec = serde_cbor::to_vec(&proof).unwrap(); bytes.extend(proof_bytes.len().to_be_bytes()); bytes.extend(proof_bytes); bytes.extend(pub_inputs.serialize()); @@ -145,8 +145,7 @@ fn main() { println!("Error reading proof from file: {}", args.proof_path); return; } - let Ok(proof) = StarkProof::::deserialize(&bytes[0..proof_len]) - else { + let Ok(proof) = serde_cbor::from_slice(&bytes[0..proof_len]) else { println!("Error reading proof from file: {}", args.proof_path); return; }; diff --git a/provers/cairo/Cargo.toml b/provers/cairo/Cargo.toml index 8b1679300..0ccd0960b 100644 --- a/provers/cairo/Cargo.toml +++ b/provers/cairo/Cargo.toml @@ -33,7 +33,7 @@ rayon = { version = "1.7.0", optional = true } wasm-bindgen = { version = "0.2", optional = true } serde-wasm-bindgen = { version = "0.5", optional = true } web-sys = { version = "0.3.64", features = ['console'], optional = true } -serde_cbor = { version = "0.11.1", optional = true } +serde_cbor = { version = "0.11.1"} [dev-dependencies] hex = "0.4.3" @@ -50,7 
+50,7 @@ test_fiat_shamir = [] instruments = [] # This enables timing prints in prover and verifier metal = ["lambdaworks-math/metal"] parallel = ["dep:rayon"] -wasm = ["dep:wasm-bindgen", "dep:serde-wasm-bindgen", "dep:web-sys", "dep:serde_cbor"] +wasm = ["dep:wasm-bindgen", "dep:serde-wasm-bindgen", "dep:web-sys"] [target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dev-dependencies] proptest = "1.2.0" diff --git a/provers/cairo/benches/criterion_verifier.rs b/provers/cairo/benches/criterion_verifier.rs index 396d5264d..999b3ced4 100644 --- a/provers/cairo/benches/criterion_verifier.rs +++ b/provers/cairo/benches/criterion_verifier.rs @@ -17,7 +17,8 @@ fn load_proof_and_pub_inputs(input_path: &str) -> (StarkProof::deserialize(&bytes[0..proof_len]).unwrap(); + let proof: StarkProof = + serde_cbor::from_slice(&bytes[0..proof_len]).unwrap(); bytes = &bytes[proof_len..]; let public_inputs = PublicInputs::deserialize(bytes).unwrap(); diff --git a/provers/cairo/benches/criterion_verifier_70k.rs b/provers/cairo/benches/criterion_verifier_70k.rs index bd9034c14..8a279da19 100644 --- a/provers/cairo/benches/criterion_verifier_70k.rs +++ b/provers/cairo/benches/criterion_verifier_70k.rs @@ -18,7 +18,8 @@ fn load_proof_and_pub_inputs(input_path: &str) -> (StarkProof::deserialize(&bytes[0..proof_len]).unwrap(); + let proof: StarkProof = + serde_cbor::from_slice(&bytes[0..proof_len]).unwrap(); bytes = &bytes[proof_len..]; let public_inputs = PublicInputs::deserialize(bytes).unwrap(); diff --git a/provers/cairo/src/air.rs b/provers/cairo/src/air.rs index 9be92f439..3210f6221 100644 --- a/provers/cairo/src/air.rs +++ b/provers/cairo/src/air.rs @@ -13,11 +13,11 @@ use stark_platinum_prover::{ context::AirContext, frame::Frame, proof::{options::ProofOptions, stark::StarkProof}, - prover::{prove, ProvingError}, + prover::{IsStarkProver, Prover, ProvingError}, trace::TraceTable, traits::AIR, transcript::{IsStarkTranscript, StoneProverTranscript}, - verifier::verify, + verifier::{IsStarkVerifier, Verifier}, }; use crate::Felt252; @@ -1254,7 +1254,7 @@ pub fn generate_cairo_proof( pub_input: &PublicInputs, proof_options: &ProofOptions, ) -> Result, ProvingError> { - prove::( + Prover::prove::( trace, pub_input, proof_options, @@ -1270,7 +1270,7 @@ pub fn verify_cairo_proof( pub_input: &PublicInputs, proof_options: &ProofOptions, ) -> bool { - verify::( + Verifier::verify::( proof, pub_input, proof_options, @@ -1567,7 +1567,6 @@ mod test { #[cfg(test)] mod prop_test { use lambdaworks_math::{ - errors::DeserializationError, field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, traits::{Deserializable, Serializable}, }; @@ -1652,7 +1651,7 @@ mod prop_test { // The proof is generated and serialized. let proof = generate_cairo_proof(&main_trace, &pub_inputs, &proof_options).unwrap(); - let proof_bytes = proof.serialize(); + let proof_bytes: Vec = serde_cbor::to_vec(&proof).unwrap(); // The trace and original proof are dropped to show that they are decoupled from // the verifying process. @@ -1661,49 +1660,9 @@ mod prop_test { // At this point, the verifier only knows about the serialized proof, the proof options // and the public inputs. - let proof = StarkProof::::deserialize(&proof_bytes).unwrap(); + let proof: StarkProof = serde_cbor::from_slice(&proof_bytes).unwrap(); // The proof is verified successfully. 
assert!(verify_cairo_proof(&proof, &pub_inputs, &proof_options)); } - - #[test] - fn deserialize_should_not_panic_with_changed_and_sliced_bytes() { - let program_content = std::fs::read(cairo0_program_path("fibonacci_10.json")).unwrap(); - let (main_trace, pub_inputs) = - generate_prover_args(&program_content, &None, CairoLayout::Plain).unwrap(); - - let proof_options = ProofOptions::default_test_options(); - - // The proof is generated and serialized. - let proof = generate_cairo_proof(&main_trace, &pub_inputs, &proof_options).unwrap(); - let mut proof_bytes = proof.serialize(); - - // The trace and original proof are dropped to show that they are decoupled from - // the verifying process. - drop(main_trace); - drop(proof); - - for byte in proof_bytes.iter_mut().take(21664) { - *byte = 255; - } - proof_bytes = proof_bytes[0..517].to_vec(); - - assert_eq!( - DeserializationError::InvalidAmountOfBytes, - StarkProof::::deserialize(&proof_bytes) - .err() - .unwrap() - ); - } - - #[test] - fn deserialize_empty_proof_should_give_error() { - assert_eq!( - DeserializationError::InvalidAmountOfBytes, - StarkProof::::deserialize(&[]) - .err() - .unwrap() - ); - } } diff --git a/provers/cairo/src/tests/integration_tests.rs b/provers/cairo/src/tests/integration_tests.rs index e7b262579..db64d0180 100644 --- a/provers/cairo/src/tests/integration_tests.rs +++ b/provers/cairo/src/tests/integration_tests.rs @@ -1,8 +1,4 @@ -use lambdaworks_math::{ - errors::DeserializationError, - field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, - traits::{Deserializable, Serializable}, -}; +use lambdaworks_math::field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField; use stark_platinum_prover::{ debug::validate_trace, domain::Domain, @@ -299,7 +295,7 @@ fn deserialize_and_verify() { // The proof is generated and serialized. let proof = generate_cairo_proof(&main_trace, &pub_inputs, &proof_options).unwrap(); - let proof_bytes = proof.serialize(); + let proof_bytes: Vec = serde_cbor::to_vec(&proof).unwrap(); // The trace and original proof are dropped to show that they are decoupled from // the verifying process. @@ -308,38 +304,8 @@ fn deserialize_and_verify() { // At this point, the verifier only knows about the serialized proof, the proof options // and the public inputs. - let proof = StarkProof::::deserialize(&proof_bytes).unwrap(); + let proof: StarkProof = serde_cbor::from_slice(&proof_bytes).unwrap(); // The proof is verified successfully. assert!(verify_cairo_proof(&proof, &pub_inputs, &proof_options)); } - -#[test] -fn deserialize_should_not_panic_with_changed_and_sliced_bytes() { - let program_content = std::fs::read(cairo0_program_path("fibonacci_10.json")).unwrap(); - let (main_trace, pub_inputs) = - generate_prover_args(&program_content, &None, CairoLayout::Plain).unwrap(); - - let proof_options = ProofOptions::default_test_options(); - - // The proof is generated and serialized. - let proof = generate_cairo_proof(&main_trace, &pub_inputs, &proof_options).unwrap(); - let mut proof_bytes = proof.serialize(); - - // The trace and original proof are dropped to show that they are decoupled from - // the verifying process. 
- drop(main_trace); - drop(proof); - - for byte in proof_bytes.iter_mut().take(21664) { - *byte = 255; - } - proof_bytes = proof_bytes[0..517].to_vec(); - - assert_eq!( - DeserializationError::InvalidAmountOfBytes, - StarkProof::::deserialize(&proof_bytes) - .err() - .unwrap() - ); -} diff --git a/provers/stark/Cargo.toml b/provers/stark/Cargo.toml index 9ae1b8434..f8de81ef5 100644 --- a/provers/stark/Cargo.toml +++ b/provers/stark/Cargo.toml @@ -29,7 +29,7 @@ rayon = { version = "1.7.0", optional = true } wasm-bindgen = { version = "0.2", optional = true } serde-wasm-bindgen = { version = "0.5", optional = true } web-sys = { version = "0.3.64", features = ['console'], optional = true } -serde_cbor = { version = "0.11.1", optional = true } +serde_cbor = { version = "0.11.1"} [dev-dependencies] hex = "0.4.3" @@ -46,7 +46,7 @@ test_fiat_shamir = [] instruments = [] # This enables timing prints in prover and verifier metal = ["lambdaworks-math/metal"] parallel = ["dep:rayon"] -wasm = ["dep:wasm-bindgen", "dep:serde-wasm-bindgen", "dep:web-sys", "dep:serde_cbor"] +wasm = ["dep:wasm-bindgen", "dep:serde-wasm-bindgen", "dep:web-sys"] [target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dev-dependencies] proptest = "1.2.0" diff --git a/provers/stark/src/constraints/evaluator.rs b/provers/stark/src/constraints/evaluator.rs index 9c07823cc..f5d699f65 100644 --- a/provers/stark/src/constraints/evaluator.rs +++ b/provers/stark/src/constraints/evaluator.rs @@ -14,10 +14,9 @@ use rayon::prelude::{ #[cfg(all(debug_assertions, not(feature = "parallel")))] use crate::debug::check_boundary_polys_divisibility; use crate::domain::Domain; -use crate::frame::Frame; -use crate::prover::evaluate_polynomial_on_lde_domain; use crate::trace::TraceTable; use crate::traits::AIR; +use crate::{frame::Frame, prover::evaluate_polynomial_on_lde_domain}; use super::{boundary::BoundaryConstraints, evaluation_table::ConstraintEvaluationTable}; @@ -272,7 +271,7 @@ fn evaluate_transition_exemptions( domain: &Domain, ) -> Vec>> where - FieldElement: Send + Sync, + FieldElement: Send + Sync + Serializable, Polynomial>: Send + Sync, { #[cfg(feature = "parallel")] diff --git a/provers/stark/src/domain.rs b/provers/stark/src/domain.rs index a287d9ad1..4744a50d2 100644 --- a/provers/stark/src/domain.rs +++ b/provers/stark/src/domain.rs @@ -8,7 +8,6 @@ use super::traits::AIR; pub struct Domain { pub(crate) root_order: u32, pub(crate) lde_roots_of_unity_coset: Vec>, - pub(crate) lde_root_order: u32, pub(crate) trace_primitive_root: FieldElement, pub(crate) trace_roots_of_unity: Vec>, pub(crate) coset_offset: FieldElement, @@ -46,7 +45,6 @@ impl Domain { Self { root_order, lde_roots_of_unity_coset, - lde_root_order, trace_primitive_root, trace_roots_of_unity, blowup_factor, diff --git a/provers/stark/src/frame.rs b/provers/stark/src/frame.rs index c51fde5d3..809929be5 100644 --- a/provers/stark/src/frame.rs +++ b/provers/stark/src/frame.rs @@ -1,9 +1,7 @@ use super::trace::TraceTable; use lambdaworks_math::{ - errors::DeserializationError, field::{element::FieldElement, traits::IsFFTField}, polynomial::Polynomial, - traits::{ByteConversion, Deserializable, Serializable}, }; #[derive(Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)] @@ -81,116 +79,3 @@ impl Frame { .collect() } } - -impl Serializable for Frame -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn serialize(&self) -> Vec { - let mut bytes = vec![]; - bytes.extend(self.data.len().to_be_bytes()); - let felt_len = if 
self.data.is_empty() { - 0 - } else { - self.data[0].to_bytes_be().len() - }; - bytes.extend(felt_len.to_be_bytes()); - for felt in &self.data { - bytes.extend(felt.to_bytes_be()); - } - bytes.extend(self.row_width.to_be_bytes()); - bytes - } -} - -impl Deserializable for Frame -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn deserialize(bytes: &[u8]) -> Result - where - Self: Sized, - { - let mut bytes = bytes; - let data_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let felt_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let mut data = vec![]; - for _ in 0..data_len { - let felt = FieldElement::::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - data.push(felt); - bytes = &bytes[felt_len..]; - } - - let row_width = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - Ok(Self::new(data, row_width)) - } -} - -#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))] -#[cfg(test)] -mod prop_test { - use lambdaworks_math::field::{ - element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, - }; - use proptest::{collection, prelude::*, prop_compose, proptest}; - - use lambdaworks_math::traits::{Deserializable, Serializable}; - - use crate::frame::Frame; - - type FE = FieldElement; - - prop_compose! { - fn some_felt()(base in any::(), exponent in any::()) -> FE { - FE::from(base).pow(exponent) - } - } - - prop_compose! { - fn field_vec()(vec in collection::vec(some_felt(), 16)) -> Vec { - vec - } - } - - proptest! { - #![proptest_config(ProptestConfig {cases: 5, .. 
ProptestConfig::default()})] - #[test] - fn test_serialize_and_deserialize(data in field_vec(), row_width in any::()) { - let frame = Frame::new(data, row_width); - let serialized = frame.serialize(); - let deserialized: Frame = Frame::deserialize(&serialized).unwrap(); - - prop_assert_eq!(frame.data, deserialized.data); - prop_assert_eq!(frame.row_width, deserialized.row_width); - } - } -} diff --git a/provers/stark/src/fri/fri_commitment.rs b/provers/stark/src/fri/fri_commitment.rs index 60df1b6e5..b39f42f3f 100644 --- a/provers/stark/src/fri/fri_commitment.rs +++ b/provers/stark/src/fri/fri_commitment.rs @@ -1,47 +1,41 @@ +use lambdaworks_crypto::merkle_tree::{merkle::MerkleTree, traits::IsMerkleTreeBackend}; use lambdaworks_math::{ - fft::polynomial::FFTPoly, field::{ element::FieldElement, traits::{IsFFTField, IsField}, }, - polynomial::Polynomial, traits::Serializable, }; -use crate::config::FriMerkleTree; - #[derive(Clone)] -pub struct FriLayer +pub struct FriLayer where F: IsField, FieldElement: Serializable, + B: IsMerkleTreeBackend, { pub evaluation: Vec>, - pub merkle_tree: FriMerkleTree, + pub merkle_tree: MerkleTree, pub coset_offset: FieldElement, pub domain_size: usize, } -impl FriLayer +impl FriLayer where F: IsField + IsFFTField, FieldElement: Serializable, + B: IsMerkleTreeBackend, { pub fn new( - poly: &Polynomial>, - coset_offset: &FieldElement, + evaluation: &[FieldElement], + merkle_tree: MerkleTree, + coset_offset: FieldElement, domain_size: usize, ) -> Self { - let evaluation = poly - .evaluate_offset_fft(1, Some(domain_size), coset_offset) - .unwrap(); // TODO: return error - - let merkle_tree = FriMerkleTree::build(&evaluation); - Self { - evaluation, + evaluation: evaluation.to_vec(), merkle_tree, - coset_offset: coset_offset.clone(), + coset_offset, domain_size, } } diff --git a/provers/stark/src/fri/fri_decommit.rs b/provers/stark/src/fri/fri_decommit.rs index 78ce63acd..dbbe61434 100644 --- a/provers/stark/src/fri/fri_decommit.rs +++ b/provers/stark/src/fri/fri_decommit.rs @@ -1,243 +1,13 @@ pub use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_crypto::merkle_tree::proof::Proof; -use lambdaworks_math::errors::DeserializationError; + use lambdaworks_math::field::element::FieldElement; use lambdaworks_math::field::traits::IsPrimeField; -use lambdaworks_math::traits::{ByteConversion, Deserializable, Serializable}; use crate::config::Commitment; -use crate::utils::{deserialize_proof, serialize_proof}; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct FriDecommitment { pub layers_auth_paths_sym: Vec>, pub layers_evaluations_sym: Vec>, - pub layers_auth_paths: Vec>, - pub layers_evaluations: Vec>, -} - -impl Serializable for FriDecommitment -where - F: IsPrimeField, - FieldElement: ByteConversion, -{ - fn serialize(&self) -> Vec { - let mut bytes = vec![]; - bytes.extend(self.layers_auth_paths_sym.len().to_be_bytes()); - for proof in &self.layers_auth_paths_sym { - bytes.extend(serialize_proof(proof)); - } - let felt_len = self.layers_evaluations[0].to_bytes_be().len(); - bytes.extend(felt_len.to_be_bytes()); - bytes.extend(self.layers_evaluations_sym.len().to_be_bytes()); - for evaluation in &self.layers_evaluations_sym { - bytes.extend(evaluation.to_bytes_be()); - } - bytes.extend(self.layers_evaluations.len().to_be_bytes()); - for evaluation in &self.layers_evaluations { - bytes.extend(evaluation.to_bytes_be()); - } - bytes.extend(self.layers_auth_paths.len().to_be_bytes()); - for proof in &self.layers_auth_paths 
{ - bytes.extend(serialize_proof(proof)); - } - bytes - } -} - -impl Deserializable for FriDecommitment -where - F: IsPrimeField, - FieldElement: ByteConversion, -{ - fn deserialize(bytes: &[u8]) -> Result - where - Self: Sized, - { - let mut bytes = bytes; - let mut layers_auth_paths_sym = vec![]; - let layers_auth_paths_sym_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - for _ in 0..layers_auth_paths_sym_len { - let proof; - (proof, bytes) = deserialize_proof(bytes)?; - layers_auth_paths_sym.push(proof); - } - - let felt_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let layers_evaluations_sym_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let mut layers_evaluations_sym = vec![]; - for _ in 0..layers_evaluations_sym_len { - let evaluation = FieldElement::::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - bytes = &bytes[felt_len..]; - layers_evaluations_sym.push(evaluation); - } - - let layer_evaluations_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let mut layers_evaluations = vec![]; - for _ in 0..layer_evaluations_len { - let evaluation = FieldElement::::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - bytes = &bytes[felt_len..]; - layers_evaluations.push(evaluation); - } - - let mut layers_auth_paths = vec![]; - let layers_auth_paths_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - for _ in 0..layers_auth_paths_len { - let proof; - (proof, bytes) = deserialize_proof(bytes)?; - layers_auth_paths.push(proof); - } - - Ok(Self { - layers_auth_paths_sym, - layers_evaluations_sym, - layers_evaluations, - layers_auth_paths, - }) - } -} -#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))] -#[cfg(test)] -mod prop_test { - use lambdaworks_crypto::merkle_tree::proof::Proof; - use lambdaworks_math::field::{ - element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, - }; - use proptest::{collection, prelude::*, prop_compose, proptest}; - - use lambdaworks_math::traits::{Deserializable, Serializable}; - - use crate::config::{Commitment, COMMITMENT_SIZE}; - - use super::FriDecommitment; - - type FE = FieldElement; - - prop_compose! { - fn some_commitment()(high in any::(), low in any::()) -> Commitment { - let mut bytes = [0u8; COMMITMENT_SIZE]; - bytes[..16].copy_from_slice(&high.to_be_bytes()); - bytes[16..].copy_from_slice(&low.to_be_bytes()); - bytes - } - } - - prop_compose! { - fn commitment_vec()(vec in collection::vec(some_commitment(), 4)) -> Vec { - vec - } - } - - prop_compose! { - fn some_proof()(merkle_path in commitment_vec()) -> Proof { - Proof{merkle_path} - } - } - - prop_compose! 
{ - fn proof_vec()(vec in collection::vec(some_proof(), 4)) -> Vec> { - vec - } - } - - prop_compose! { - fn some_felt()(base in any::(), exponent in any::()) -> FE { - FE::from(base).pow(exponent) - } - } - - prop_compose! { - fn field_vec()(vec in collection::vec(some_felt(), 16)) -> Vec { - vec - } - } - - prop_compose! { - fn some_fri_decommitment()( - layers_auth_paths_sym in proof_vec(), - layers_evaluations_sym in field_vec(), - layers_evaluations in field_vec(), - layers_auth_paths in proof_vec() - ) -> FriDecommitment { - FriDecommitment{ - layers_auth_paths_sym, - layers_evaluations_sym, - layers_evaluations, - layers_auth_paths - } - } - } - - proptest! { - #![proptest_config(ProptestConfig {cases: 5, .. ProptestConfig::default()})] - #[test] - fn test_serialize_and_deserialize(fri_decommitment in some_fri_decommitment()) { - let serialized = fri_decommitment.serialize(); - let deserialized: FriDecommitment = FriDecommitment::deserialize(&serialized).unwrap(); - - for (a, b) in fri_decommitment.layers_auth_paths_sym.iter().zip(deserialized.layers_auth_paths_sym.iter()) { - prop_assert_eq!(&a.merkle_path, &b.merkle_path); - } - - for (a, b) in fri_decommitment.layers_evaluations_sym.iter().zip(deserialized.layers_evaluations_sym.iter()) { - prop_assert_eq!(a, b); - } - - for (a, b) in fri_decommitment.layers_evaluations.iter().zip(deserialized.layers_evaluations.iter()) { - prop_assert_eq!(a, b); - } - - for (a, b) in fri_decommitment.layers_auth_paths.iter().zip(deserialized.layers_auth_paths.iter()) { - prop_assert_eq!(&a.merkle_path, &b.merkle_path); - } - } - } } diff --git a/provers/stark/src/fri/mod.rs b/provers/stark/src/fri/mod.rs index 1ee9dde90..5ec2a68c4 100644 --- a/provers/stark/src/fri/mod.rs +++ b/provers/stark/src/fri/mod.rs @@ -2,39 +2,40 @@ pub mod fri_commitment; pub mod fri_decommit; mod fri_functions; -use lambdaworks_math::field::traits::{IsFFTField, IsField}; +use lambdaworks_math::fft::cpu::bit_reversing::in_place_bit_reverse_permute; +use lambdaworks_math::fft::polynomial::FFTPoly; +use lambdaworks_math::field::traits::IsFFTField; use lambdaworks_math::traits::Serializable; pub use lambdaworks_math::{ field::{element::FieldElement, fields::u64_prime_field::U64PrimeField}, polynomial::Polynomial, }; +use crate::config::{BatchedMerkleTree, BatchedMerkleTreeBackend}; use crate::transcript::IsStarkTranscript; use self::fri_commitment::FriLayer; use self::fri_decommit::FriDecommitment; use self::fri_functions::fold_polynomial; -use super::traits::AIR; - -pub fn fri_commit_phase( +pub fn commit_phase( number_layers: usize, p_0: Polynomial>, transcript: &mut impl IsStarkTranscript, coset_offset: &FieldElement, domain_size: usize, -) -> (FieldElement, Vec>) +) -> ( + FieldElement, + Vec>>, +) where FieldElement: Serializable, { let mut domain_size = domain_size; let mut fri_layer_list = Vec::with_capacity(number_layers); - let mut current_layer = FriLayer::new(&p_0, coset_offset, domain_size); - fri_layer_list.push(current_layer.clone()); + let mut current_layer: FriLayer>; let mut current_poly = p_0; - // >>>> Send commitment: [p₀] - transcript.append_bytes(¤t_layer.merkle_tree.root); let mut coset_offset = coset_offset.clone(); @@ -45,8 +46,8 @@ where domain_size /= 2; // Compute layer polynomial and domain - current_poly = fold_polynomial(¤t_poly, &zeta); - current_layer = FriLayer::new(¤t_poly, &coset_offset, domain_size); + current_poly = fold_polynomial(¤t_poly, &zeta) * FieldElement::from(2); + current_layer = new_fri_layer(¤t_poly, &coset_offset, 
domain_size); let new_data = ¤t_layer.merkle_tree.root; fri_layer_list.push(current_layer.clone()); // TODO: remove this clone @@ -57,7 +58,7 @@ where // <<<< Receive challenge: 𝜁ₙ₋₁ let zeta = transcript.sample_field_element(); - let last_poly = fold_polynomial(¤t_poly, &zeta); + let last_poly = fold_polynomial(¤t_poly, &zeta) * FieldElement::from(2); let last_value = last_poly .coefficients() @@ -71,56 +72,65 @@ where (last_value, fri_layer_list) } -pub fn fri_query_phase( - air: &A, - domain_size: usize, - fri_layers: &Vec>, - transcript: &mut impl IsStarkTranscript, -) -> (Vec>, Vec) +pub fn query_phase( + fri_layers: &Vec>>, + iotas: &[usize], +) -> Vec> where - F: IsFFTField, - A: AIR, FieldElement: Serializable, { if !fri_layers.is_empty() { - let number_of_queries = air.options().fri_number_of_queries; - let iotas = (0..number_of_queries) - .map(|_| (transcript.sample_u64(domain_size as u64)) as usize) - .collect::>(); let query_list = iotas .iter() .map(|iota_s| { - // <<<< Receive challenge 𝜄ₛ (iota_s) - let mut layers_auth_paths_sym = vec![]; - let mut layers_evaluations_sym = vec![]; - let mut layers_evaluations = vec![]; - let mut layers_auth_paths = vec![]; + let mut layers_evaluations_sym = Vec::new(); + let mut layers_auth_paths_sym = Vec::new(); + let mut index = *iota_s; for layer in fri_layers { // symmetric element - let index = iota_s % layer.domain_size; - let index_sym = (iota_s + layer.domain_size / 2) % layer.domain_size; - let evaluation_sym = layer.evaluation[index_sym].clone(); - let auth_path_sym = layer.merkle_tree.get_proof_by_pos(index_sym).unwrap(); - let evaluation = layer.evaluation[index].clone(); - let auth_path = layer.merkle_tree.get_proof_by_pos(index).unwrap(); - layers_auth_paths_sym.push(auth_path_sym); + let evaluation_sym = layer.evaluation[index ^ 1].clone(); + let auth_path_sym = layer.merkle_tree.get_proof_by_pos(index >> 1).unwrap(); layers_evaluations_sym.push(evaluation_sym); - layers_evaluations.push(evaluation); - layers_auth_paths.push(auth_path); + layers_auth_paths_sym.push(auth_path_sym); + + index >>= 1; } FriDecommitment { layers_auth_paths_sym, layers_evaluations_sym, - layers_evaluations, - layers_auth_paths, } }) .collect(); - (query_list, iotas) + query_list } else { - (vec![], vec![]) + vec![] + } +} + +pub fn new_fri_layer( + poly: &Polynomial>, + coset_offset: &FieldElement, + domain_size: usize, +) -> crate::fri::fri_commitment::FriLayer> +where + F: IsFFTField, + FieldElement: Serializable, +{ + let mut evaluation = poly + .evaluate_offset_fft(1, Some(domain_size), coset_offset) + .unwrap(); // TODO: return error + + in_place_bit_reverse_permute(&mut evaluation); + + let mut to_commit = Vec::new(); + for chunk in evaluation.chunks(2) { + to_commit.push(vec![chunk[0].clone(), chunk[1].clone()]); } + + let merkle_tree = BatchedMerkleTree::build(&to_commit); + + FriLayer::new(&evaluation, merkle_tree, coset_offset.clone(), domain_size) } diff --git a/provers/stark/src/proof/stark.rs b/provers/stark/src/proof/stark.rs index 72d3ef53c..96f0b4ee3 100644 --- a/provers/stark/src/proof/stark.rs +++ b/provers/stark/src/proof/stark.rs @@ -1,27 +1,18 @@ use lambdaworks_crypto::merkle_tree::proof::Proof; -use lambdaworks_math::{ - errors::DeserializationError, - field::{element::FieldElement, traits::IsFFTField}, - traits::{ByteConversion, Deserializable, Serializable}, -}; +use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; -use crate::{ - config::Commitment, - frame::Frame, - fri::fri_decommit::FriDecommitment, 
- utils::{deserialize_proof, serialize_proof}, -}; -use core::mem; +use crate::{config::Commitment, frame::Frame, fri::fri_decommit::FriDecommitment}; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct DeepPolynomialOpenings { +pub struct DeepPolynomialOpening { pub lde_composition_poly_proof: Proof, - pub lde_composition_poly_even_evaluation: FieldElement, - pub lde_composition_poly_odd_evaluation: FieldElement, + pub lde_composition_poly_parts_evaluation: Vec>, pub lde_trace_merkle_proofs: Vec>, pub lde_trace_evaluations: Vec>, } +pub type DeepPolynomialOpenings = Vec>; + #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct StarkProof { // Length of the execution trace @@ -31,682 +22,20 @@ pub struct StarkProof { pub lde_trace_merkle_roots: Vec, // tⱼ(zgᵏ) pub trace_ood_frame_evaluations: Frame, - // [H₁] and [H₂] + // Commitments to Hᵢ pub composition_poly_root: Commitment, - // H₁(z²) - pub composition_poly_even_ood_evaluation: FieldElement, - // H₂(z²) - pub composition_poly_odd_ood_evaluation: FieldElement, + // Hᵢ(z^N) + pub composition_poly_parts_ood_evaluation: Vec>, // [pₖ] pub fri_layers_merkle_roots: Vec, // pₙ pub fri_last_value: FieldElement, - // Open(p₀(D₀), 𝜐ₛ), Opwn(pₖ(Dₖ), −𝜐ₛ^(2ᵏ)) + // Open(pₖ(Dₖ), −𝜐ₛ^(2ᵏ)) pub query_list: Vec>, - // Open(H₁(D_LDE, 𝜐₀), Open(H₂(D_LDE, 𝜐₀), Open(tⱼ(D_LDE), 𝜐₀) - pub deep_poly_openings: Vec>, + // Open(H₁(D_LDE, 𝜐ᵢ), Open(H₂(D_LDE, 𝜐ᵢ), Open(tⱼ(D_LDE), 𝜐ᵢ) + pub deep_poly_openings: DeepPolynomialOpenings, + // Open(H₁(D_LDE, -𝜐ᵢ), Open(H₂(D_LDE, -𝜐ᵢ), Open(tⱼ(D_LDE), -𝜐ᵢ) + pub deep_poly_openings_sym: DeepPolynomialOpenings, // nonce obtained from grinding pub nonce: u64, } - -impl Serializable for DeepPolynomialOpenings -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn serialize(&self) -> Vec { - let mut bytes = vec![]; - bytes.extend(serialize_proof(&self.lde_composition_poly_proof)); - let lde_composition_poly_even_evaluation_bytes = - self.lde_composition_poly_even_evaluation.to_bytes_be(); - let felt_len = lde_composition_poly_even_evaluation_bytes.len(); - bytes.extend(felt_len.to_be_bytes()); - bytes.extend(lde_composition_poly_even_evaluation_bytes); - bytes.extend(self.lde_composition_poly_odd_evaluation.to_bytes_be()); - bytes.extend(self.lde_trace_merkle_proofs.len().to_be_bytes()); - for proof in &self.lde_trace_merkle_proofs { - bytes.extend(serialize_proof(proof)); - } - bytes.extend(self.lde_trace_evaluations.len().to_be_bytes()); - for evaluation in &self.lde_trace_evaluations { - bytes.extend(evaluation.to_bytes_be()); - } - bytes - } -} - -impl Deserializable for DeepPolynomialOpenings -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn deserialize(bytes: &[u8]) -> Result - where - Self: Sized, - { - let mut bytes = bytes; - let lde_composition_poly_proof; - (lde_composition_poly_proof, bytes) = deserialize_proof(bytes)?; - - let felt_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? 
- .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let lde_composition_poly_even_evaluation = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - bytes = &bytes[felt_len..]; - - let lde_composition_poly_odd_evaluation = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - bytes = &bytes[felt_len..]; - - let lde_trace_merkle_proofs_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let mut lde_trace_merkle_proofs = vec![]; - for _ in 0..lde_trace_merkle_proofs_len { - let proof; - (proof, bytes) = deserialize_proof(bytes)?; - lde_trace_merkle_proofs.push(proof); - } - - let lde_trace_evaluations_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - bytes = &bytes[8..]; - - let mut lde_trace_evaluations = vec![]; - for _ in 0..lde_trace_evaluations_len { - let evaluation = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - bytes = &bytes[felt_len..]; - lde_trace_evaluations.push(evaluation); - } - - Ok(DeepPolynomialOpenings { - lde_composition_poly_proof, - lde_composition_poly_even_evaluation, - lde_composition_poly_odd_evaluation, - lde_trace_merkle_proofs, - lde_trace_evaluations, - }) - } -} - -impl Serializable for StarkProof -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn serialize(&self) -> Vec { - let mut bytes = vec![]; - - // Serialize trace length - bytes.extend(self.trace_length.to_be_bytes()); - - bytes.extend(self.lde_trace_merkle_roots.len().to_be_bytes()); - for commitment in &self.lde_trace_merkle_roots { - bytes.extend(commitment); - } - let trace_ood_frame_evaluations_bytes = self.trace_ood_frame_evaluations.serialize(); - bytes.extend(trace_ood_frame_evaluations_bytes.len().to_be_bytes()); - bytes.extend(trace_ood_frame_evaluations_bytes); - - bytes.extend(self.composition_poly_root); - - let composition_poly_even_ood_evaluation_bytes = - self.composition_poly_even_ood_evaluation.to_bytes_be(); - bytes.extend( - composition_poly_even_ood_evaluation_bytes - .len() - .to_be_bytes(), - ); - bytes.extend(composition_poly_even_ood_evaluation_bytes); - bytes.extend(self.composition_poly_odd_ood_evaluation.to_bytes_be()); - - bytes.extend(self.fri_layers_merkle_roots.len().to_be_bytes()); - for commitment in &self.fri_layers_merkle_roots { - bytes.extend(commitment); - } - - bytes.extend(self.fri_last_value.to_bytes_be()); - - bytes.extend(self.query_list.len().to_be_bytes()); - for query in &self.query_list { - let query_bytes = query.serialize(); - bytes.extend(query_bytes.len().to_be_bytes()); - bytes.extend(query_bytes); - } - - bytes.extend(self.deep_poly_openings.len().to_be_bytes()); - for opening in &self.deep_poly_openings { - let opening_bytes = opening.serialize(); - bytes.extend(opening_bytes.len().to_be_bytes()); - bytes.extend(opening_bytes); - } - - // serialize nonce - bytes.extend(self.nonce.to_be_bytes()); - - bytes - } -} - -impl Deserializable for StarkProof -where - F: IsFFTField, - FieldElement: ByteConversion, -{ - fn deserialize(bytes: &[u8]) -> Result - where - Self: Sized, - { - let mut bytes = bytes; 
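// NOTE (illustrative, not part of the patch): all of the hand-rolled,
// length-prefixed parsing below is deleted in favor of serde_cbor, which the
// proof type supports through its serde derives. A minimal sketch of the
// replacement round trip, assuming the Stark252PrimeField instantiation used
// by the benches above:
//
//     let bytes: Vec<u8> = serde_cbor::to_vec(&proof).unwrap();
//     let restored: StarkProof<Stark252PrimeField> =
//         serde_cbor::from_slice(&bytes).unwrap();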
- let trace_length_buffer_size = mem::size_of::(); - let trace_length = usize::from_be_bytes( - bytes - .get(..trace_length_buffer_size) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let lde_trace_merkle_roots_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let mut lde_trace_merkle_roots: Vec<[u8; 32]> = vec![]; - for _ in 0..lde_trace_merkle_roots_len { - let commitment = bytes - .get(..32) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?; - - lde_trace_merkle_roots.push(commitment); - bytes = &bytes[32..]; - } - - let trace_ood_frame_evaluations_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let trace_ood_frame_evaluations: Frame = Frame::deserialize( - bytes - .get(..trace_ood_frame_evaluations_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[trace_ood_frame_evaluations_len..]; - - let composition_poly_root = bytes - .get(..32) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?; - - bytes = &bytes[32..]; - - let felt_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let composition_poly_even_ood_evaluation = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[felt_len..]; - - let composition_poly_odd_ood_evaluation = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[felt_len..]; - - let fri_layers_merkle_roots_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let mut fri_layers_merkle_roots: Vec<[u8; 32]> = vec![]; - for _ in 0..fri_layers_merkle_roots_len { - let commitment = bytes - .get(..32) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?; - fri_layers_merkle_roots.push(commitment); - bytes = &bytes[32..]; - } - - let fri_last_value = FieldElement::from_bytes_be( - bytes - .get(..felt_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[felt_len..]; - - let query_list_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let mut query_list = vec![]; - for _ in 0..query_list_len { - let query_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? 
- .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let query = FriDecommitment::deserialize( - bytes - .get(..query_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[query_len..]; - - query_list.push(query); - } - - let deep_poly_openings_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let mut deep_poly_openings = vec![]; - for _ in 0..deep_poly_openings_len { - let opening_len = usize::from_be_bytes( - bytes - .get(..8) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - bytes = &bytes[8..]; - - let opening = DeepPolynomialOpenings::deserialize( - bytes - .get(..opening_len) - .ok_or(DeserializationError::InvalidAmountOfBytes)?, - )?; - - bytes = &bytes[opening_len..]; - - deep_poly_openings.push(opening); - } - - // deserialize nonce - let start_nonce = bytes - .len() - .checked_sub(core::mem::size_of::()) - .ok_or(DeserializationError::InvalidAmountOfBytes)?; - - let nonce = u64::from_be_bytes( - bytes - .get(start_nonce..) - .ok_or(DeserializationError::InvalidAmountOfBytes)? - .try_into() - .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, - ); - - Ok(StarkProof { - trace_length, - lde_trace_merkle_roots, - trace_ood_frame_evaluations, - composition_poly_root, - composition_poly_even_ood_evaluation, - composition_poly_odd_ood_evaluation, - fri_layers_merkle_roots, - fri_last_value, - query_list, - deep_poly_openings, - nonce, - }) - } -} - -#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))] -#[cfg(test)] -mod prop_test { - - use lambdaworks_crypto::merkle_tree::proof::Proof; - use lambdaworks_math::{ - errors::DeserializationError, - field::{ - element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, - }, - }; - use proptest::{collection, prelude::*, prop_compose, proptest}; - - use crate::{ - config::{Commitment, COMMITMENT_SIZE}, - frame::Frame, - fri::fri_decommit::FriDecommitment, - }; - use lambdaworks_math::traits::{Deserializable, Serializable}; - - use super::{DeepPolynomialOpenings, StarkProof}; - - type FE = FieldElement; - - prop_compose! { - fn some_commitment()(high in any::(), low in any::()) -> Commitment { - let mut bytes = [0u8; COMMITMENT_SIZE]; - bytes[..16].copy_from_slice(&high.to_be_bytes()); - bytes[16..].copy_from_slice(&low.to_be_bytes()); - bytes - } - } - - prop_compose! { - fn commitment_vec()(vec in collection::vec(some_commitment(), (16_usize, 32_usize))) -> Vec { - vec - } - } - - prop_compose! { - fn some_proof()(merkle_path in commitment_vec()) -> Proof { - Proof{merkle_path} - } - } - - prop_compose! { - fn proof_vec()(vec in collection::vec(some_proof(), (4_usize, 8_usize))) -> Vec> { - vec - } - } - - prop_compose! { - fn some_felt()(base in any::(), exponent in any::()) -> FE { - FE::from(base).pow(exponent) - } - } - - prop_compose! { - fn field_vec()(vec in collection::vec(some_felt(), (8_usize, 16_usize))) -> Vec { - vec - } - } - - prop_compose! 
{ - fn some_fri_decommitment()( - layers_auth_paths_sym in proof_vec(), - layers_evaluations_sym in field_vec(), - layers_evaluations in field_vec(), - layers_auth_paths in proof_vec() - ) -> FriDecommitment { - FriDecommitment{ - layers_auth_paths_sym, - layers_evaluations_sym, - layers_evaluations, - layers_auth_paths - } - } - } - - prop_compose! { - fn fri_decommitment_vec()(vec in collection::vec(some_fri_decommitment(), (16_usize, 32_usize))) -> Vec> { - vec - } - } - - prop_compose! { - fn some_deep_polynomial_openings()( - lde_composition_poly_proof in some_proof(), - lde_composition_poly_even_evaluation in some_felt(), - lde_composition_poly_odd_evaluation in some_felt(), - lde_trace_merkle_proofs in proof_vec(), - lde_trace_evaluations in field_vec() - ) -> DeepPolynomialOpenings { - DeepPolynomialOpenings { - lde_composition_poly_proof, - lde_composition_poly_even_evaluation, - lde_composition_poly_odd_evaluation, - lde_trace_merkle_proofs, - lde_trace_evaluations - } - } - } - - prop_compose! { - fn deep_polynomial_openings_vec()(vec in collection::vec(some_deep_polynomial_openings(), (8_usize, 16_usize))) -> Vec> { - vec - } - } - - prop_compose! { - fn some_frame()(data in field_vec(), row_width in any::()) -> Frame { - Frame::new(data, row_width) - } - } - - prop_compose! { - fn some_usize()(len in any::()) -> usize { - len - } - } - - prop_compose! { - fn some_stark_proof()( - trace_length in some_usize(), - lde_trace_merkle_roots in commitment_vec(), - trace_ood_frame_evaluations in some_frame(), - composition_poly_root in some_commitment(), - composition_poly_even_ood_evaluation in some_felt(), - composition_poly_odd_ood_evaluation in some_felt(), - fri_layers_merkle_roots in commitment_vec(), - fri_last_value in some_felt(), - query_list in fri_decommitment_vec(), - deep_poly_openings in deep_polynomial_openings_vec() - - ) -> StarkProof { - StarkProof { - trace_length, - lde_trace_merkle_roots, - trace_ood_frame_evaluations, - composition_poly_root, - composition_poly_even_ood_evaluation, - composition_poly_odd_ood_evaluation, - fri_layers_merkle_roots, - fri_last_value, - query_list, - deep_poly_openings, - nonce: 0 - } - } - } - - proptest! { - #[test] - fn test_deep_polynomial_openings_serialization( - deep_polynomial_openings in some_deep_polynomial_openings() - ) { - let serialized = deep_polynomial_openings.serialize(); - let deserialized = DeepPolynomialOpenings::::deserialize(&serialized).unwrap(); - - for (a, b) in deep_polynomial_openings.lde_trace_merkle_proofs.iter().zip(deserialized.lde_trace_merkle_proofs.iter()) { - prop_assert_eq!(&a.merkle_path, &b.merkle_path); - }; - - prop_assert_eq!(deep_polynomial_openings.lde_composition_poly_even_evaluation, deserialized.lde_composition_poly_even_evaluation); - prop_assert_eq!(deep_polynomial_openings.lde_composition_poly_odd_evaluation, deserialized.lde_composition_poly_odd_evaluation); - prop_assert_eq!(deep_polynomial_openings.lde_composition_poly_proof.merkle_path, deserialized.lde_composition_poly_proof.merkle_path); - prop_assert_eq!(deep_polynomial_openings.lde_trace_evaluations, deserialized.lde_trace_evaluations); - } - } - - proptest! { - #![proptest_config(ProptestConfig {cases: 5, .. 
ProptestConfig::default()})] - #[test] - fn test_stark_proof_serialization( - stark_proof in some_stark_proof() - ) { - let serialized = stark_proof.serialize(); - let deserialized = StarkProof::::deserialize(&serialized).unwrap(); - - prop_assert_eq!( - stark_proof.lde_trace_merkle_roots, - deserialized.lde_trace_merkle_roots - ); - prop_assert_eq!( - stark_proof.trace_ood_frame_evaluations.num_columns(), - deserialized.trace_ood_frame_evaluations.num_columns() - ); - prop_assert_eq!( - stark_proof.trace_ood_frame_evaluations.num_rows(), - deserialized.trace_ood_frame_evaluations.num_rows() - ); - prop_assert_eq!( - stark_proof.composition_poly_root, - deserialized.composition_poly_root - ); - prop_assert_eq!( - stark_proof.composition_poly_even_ood_evaluation, - deserialized.composition_poly_even_ood_evaluation - ); - prop_assert_eq!( - stark_proof.composition_poly_odd_ood_evaluation, - deserialized.composition_poly_odd_ood_evaluation - ); - prop_assert_eq!( - stark_proof.fri_layers_merkle_roots, - deserialized.fri_layers_merkle_roots - ); - prop_assert_eq!(stark_proof.fri_last_value, deserialized.fri_last_value); - - for (a, b) in stark_proof - .query_list - .iter() - .zip(deserialized.query_list.iter()) - { - for (x, y) in a - .clone() - .layers_auth_paths_sym - .iter() - .zip(b.clone().layers_auth_paths_sym.iter()) - { - prop_assert_eq!(&x.merkle_path, &y.merkle_path); - } - prop_assert_eq!(&a.layers_evaluations_sym, &b.layers_evaluations_sym); - prop_assert_eq!(&a.layers_evaluations, &b.layers_evaluations); - for (x, y) in a - .clone() - .layers_auth_paths - .iter() - .zip(b.clone().layers_auth_paths.iter()) - { - prop_assert_eq!(&x.merkle_path, &y.merkle_path); - } - } - - for (a, b) in stark_proof - .deep_poly_openings - .iter() - .zip(deserialized.deep_poly_openings.iter()) - { - for (x, y) in a - .clone() - .lde_trace_merkle_proofs - .iter() - .zip(b.clone().lde_trace_merkle_proofs.iter()) - { - prop_assert_eq!(&x.merkle_path, &y.merkle_path); - } - prop_assert_eq!( - &a.lde_composition_poly_even_evaluation, - &b.lde_composition_poly_even_evaluation - ); - prop_assert_eq!( - &a.lde_composition_poly_odd_evaluation, - &b.lde_composition_poly_odd_evaluation - ); - prop_assert_eq!( - &a.lde_composition_poly_proof.merkle_path, - &b.lde_composition_poly_proof.merkle_path - ); - prop_assert_eq!(&a.lde_trace_evaluations, &b.lde_trace_evaluations); - } - } - } - - #[test] - fn deserialize_empty_proof_should_give_error() { - assert_eq!( - DeserializationError::InvalidAmountOfBytes, - StarkProof::::deserialize(&[]) - .err() - .unwrap() - ); - } -} diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs index d177621af..e03e2238f 100644 --- a/provers/stark/src/prover.rs +++ b/provers/stark/src/prover.rs @@ -2,8 +2,9 @@ use std::time::Instant; use lambdaworks_crypto::merkle_tree::proof::Proof; -use lambdaworks_math::fft::cpu::bit_reversing::in_place_bit_reverse_permute; +use lambdaworks_math::fft::cpu::bit_reversing::{in_place_bit_reverse_permute, reverse_index}; use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly}; +use lambdaworks_math::field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField; use lambdaworks_math::traits::Serializable; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, @@ -16,6 +17,8 @@ use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelI #[cfg(debug_assertions)] use crate::debug::validate_trace; +use crate::fri; +use crate::proof::stark::DeepPolynomialOpenings; use 
crate::transcript::IsStarkTranscript; use super::config::{BatchedMerkleTree, Commitment}; @@ -23,69 +26,60 @@ use super::constraints::evaluator::ConstraintEvaluator; use super::domain::Domain; use super::frame::Frame; use super::fri::fri_decommit::FriDecommitment; -use super::fri::{fri_commit_phase, fri_query_phase}; use super::grinding::generate_nonce_with_grinding; use super::proof::options::ProofOptions; -use super::proof::stark::{DeepPolynomialOpenings, StarkProof}; +use super::proof::stark::{DeepPolynomialOpening, StarkProof}; use super::trace::TraceTable; use super::traits::AIR; -use super::transcript::batch_sample_challenges; + +pub struct Prover; + +impl IsStarkProver for Prover { + type Field = Stark252PrimeField; +} #[derive(Debug)] pub enum ProvingError { WrongParameter(String), } -struct Round1 +pub struct Round1 where F: IsFFTField, A: AIR, FieldElement: Serializable, { - trace_polys: Vec>>, - lde_trace: TraceTable, - lde_trace_merkle_trees: Vec>, - lde_trace_merkle_roots: Vec, - rap_challenges: A::RAPChallenges, + pub(crate) trace_polys: Vec>>, + pub(crate) lde_trace: TraceTable, + pub(crate) lde_trace_merkle_trees: Vec>, + pub(crate) lde_trace_merkle_roots: Vec, + pub(crate) rap_challenges: A::RAPChallenges, } -struct Round2 +pub struct Round2 where F: IsFFTField, FieldElement: Serializable, { - composition_poly_even: Polynomial>, - lde_composition_poly_even_evaluations: Vec>, - composition_poly_merkle_tree: BatchedMerkleTree, - composition_poly_root: Commitment, - composition_poly_odd: Polynomial>, - lde_composition_poly_odd_evaluations: Vec>, + pub(crate) composition_poly_parts: Vec>>, + pub(crate) lde_composition_poly_evaluations: Vec>>, + pub(crate) composition_poly_merkle_tree: BatchedMerkleTree, + pub(crate) composition_poly_root: Commitment, } -struct Round3 { +pub struct Round3 { trace_ood_evaluations: Vec>>, - composition_poly_even_ood_evaluation: FieldElement, - composition_poly_odd_ood_evaluation: FieldElement, + composition_poly_parts_ood_evaluation: Vec>, } -struct Round4 { +pub struct Round4 { fri_last_value: FieldElement, fri_layers_merkle_roots: Vec, - deep_poly_openings: Vec>, + deep_poly_openings: DeepPolynomialOpenings, + deep_poly_openings_sym: DeepPolynomialOpenings, query_list: Vec>, nonce: u64, } - -fn batch_commit(vectors: &[Vec>]) -> (BatchedMerkleTree, Commitment) -where - F: IsFFTField, - FieldElement: Serializable, -{ - let tree = BatchedMerkleTree::::build(vectors); - let commitment = tree.root; - (tree, commitment) -} - pub fn evaluate_polynomial_on_lde_domain( p: &Polynomial>, blowup_factor: usize, @@ -104,731 +98,807 @@ where } } -fn apply_permutation(vector: &mut Vec>, permutation: &[usize]) { - assert_eq!( - vector.len(), - permutation.len(), - "Vector and permutation must have the same length" - ); +pub trait IsStarkProver { + type Field: IsFFTField; - let mut temp = Vec::with_capacity(vector.len()); - for &index in permutation { - temp.push(vector[index].clone()); + fn batch_commit( + vectors: &[Vec>], + ) -> (BatchedMerkleTree, Commitment) + where + FieldElement: Serializable, + { + let tree = BatchedMerkleTree::::build(vectors); + let commitment = tree.root; + (tree, commitment) } - vector.clear(); - vector.extend(temp); -} + #[allow(clippy::type_complexity)] + fn interpolate_and_commit( + trace: &TraceTable, + domain: &Domain, + transcript: &mut impl IsStarkTranscript, + ) -> ( + Vec>>, + Vec>>, + BatchedMerkleTree, + Commitment, + ) + where + FieldElement: Serializable + Send + Sync, + { + let trace_polys = trace.compute_trace_polys(); 
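// NOTE (illustrative, not part of the patch): in the code that follows, each
// trace column is evaluated over the LDE domain and bit-reverse permuted
// before committing, so row i of the committed table holds the evaluations
// at index reverse_index(i, domain_size). For a slice of length 8:
//
//     let mut col = vec![0, 1, 2, 3, 4, 5, 6, 7];
//     in_place_bit_reverse_permute(&mut col);
//     assert_eq!(col, vec![0, 4, 2, 6, 1, 5, 3, 7]);
//
// This plain bit-reversal replaces the explicit Stone-ordering permutation
// removed below (`get_stone_prover_domain_permutation`).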
-/// This function returns the permutation that converts lambdaworks ordering of rows to the one used in the stone prover -pub fn get_stone_prover_domain_permutation(domain_size: usize, blowup_factor: usize) -> Vec { - let mut permutation = Vec::new(); - let n = domain_size; + // Evaluate those polynomials t_j on the large domain D_LDE. + let lde_trace_evaluations = Self::compute_lde_trace_evaluations(&trace_polys, domain); - let mut indices: Vec = (0..blowup_factor).collect(); - in_place_bit_reverse_permute(&mut indices); + let mut lde_trace_permuted = lde_trace_evaluations.clone(); - for i in indices.iter() { - for j in 0..n { - permutation.push(i + j * blowup_factor) + for col in lde_trace_permuted.iter_mut() { + in_place_bit_reverse_permute(col); } + + // Compute commitments [t_j]. + let lde_trace = TraceTable::new_from_cols(&lde_trace_permuted); + let (lde_trace_merkle_tree, lde_trace_merkle_root) = Self::batch_commit(&lde_trace.rows()); + + // >>>> Send commitments: [tⱼ] + transcript.append_bytes(&lde_trace_merkle_root); + + ( + trace_polys, + lde_trace_evaluations, + lde_trace_merkle_tree, + lde_trace_merkle_root, + ) } - for coset_indices in permutation.chunks_mut(n) { - let mut temp = coset_indices.to_owned(); - in_place_bit_reverse_permute(&mut temp); - for (j, elem) in coset_indices.iter_mut().enumerate() { - *elem = temp[j]; + fn compute_lde_trace_evaluations( + trace_polys: &[Polynomial>], + domain: &Domain, + ) -> Vec>> + where + FieldElement: Send + Sync, + { + #[cfg(not(feature = "parallel"))] + let trace_polys_iter = trace_polys.iter(); + #[cfg(feature = "parallel")] + let trace_polys_iter = trace_polys.par_iter(); + + trace_polys_iter + .map(|poly| { + evaluate_polynomial_on_lde_domain( + poly, + domain.blowup_factor, + domain.interpolation_domain_size, + &domain.coset_offset, + ) + }) + .collect::>>, FFTError>>() + .unwrap() + } + + fn round_1_randomized_air_with_preprocessing>( + air: &A, + main_trace: &TraceTable, + domain: &Domain, + transcript: &mut impl IsStarkTranscript, + ) -> Result, ProvingError> + where + FieldElement: Serializable + Send + Sync, + { + let (mut trace_polys, mut evaluations, main_merkle_tree, main_merkle_root) = + Self::interpolate_and_commit(main_trace, domain, transcript); + + let rap_challenges = air.build_rap_challenges(transcript); + + let aux_trace = air.build_auxiliary_trace(main_trace, &rap_challenges); + + let mut lde_trace_merkle_trees = vec![main_merkle_tree]; + let mut lde_trace_merkle_roots = vec![main_merkle_root]; + if !aux_trace.is_empty() { + // Check that this is valid for interpolation + let (aux_trace_polys, aux_trace_polys_evaluations, aux_merkle_tree, aux_merkle_root) = + Self::interpolate_and_commit(&aux_trace, domain, transcript); + trace_polys.extend_from_slice(&aux_trace_polys); + evaluations.extend_from_slice(&aux_trace_polys_evaluations); + lde_trace_merkle_trees.push(aux_merkle_tree); + lde_trace_merkle_roots.push(aux_merkle_root); } + + let lde_trace = TraceTable::new_from_cols(&evaluations); + + Ok(Round1 { + trace_polys, + lde_trace, + lde_trace_merkle_roots, + lde_trace_merkle_trees, + rap_challenges, + }) } - permutation.to_vec() -} + fn commit_composition_polynomial( + lde_composition_poly_parts_evaluations: &[Vec>], + ) -> (BatchedMerkleTree, Commitment) + where + FieldElement: Serializable, + { + // TODO: Remove clones + let mut lde_composition_poly_evaluations = Vec::new(); + for i in 0..lde_composition_poly_parts_evaluations[0].len() { + let mut row = Vec::new(); + for evaluation in 
lde_composition_poly_parts_evaluations.iter() { + row.push(evaluation[i].clone()); + } + lde_composition_poly_evaluations.push(row); + } -#[allow(clippy::type_complexity)] -fn interpolate_and_commit( - trace: &TraceTable, - domain: &Domain, - transcript: &mut impl IsStarkTranscript, -) -> ( - Vec>>, - Vec>>, - BatchedMerkleTree, - Commitment, -) -where - F: IsFFTField, - FieldElement: Serializable + Send + Sync, -{ - let trace_polys = trace.compute_trace_polys(); + in_place_bit_reverse_permute(&mut lde_composition_poly_evaluations); + + let mut lde_composition_poly_evaluations_merged = Vec::new(); + for chunk in lde_composition_poly_evaluations.chunks(2) { + let (mut chunk0, chunk1) = (chunk[0].clone(), &chunk[1]); + chunk0.extend_from_slice(chunk1); + lde_composition_poly_evaluations_merged.push(chunk0); + } + + Self::batch_commit(&lde_composition_poly_evaluations_merged) + } + + fn round_2_compute_composition_polynomial( + air: &A, + domain: &Domain, + round_1_result: &Round1, + transition_coefficients: &[FieldElement], + boundary_coefficients: &[FieldElement], + ) -> Round2 + where + A: AIR + Send + Sync, + A::RAPChallenges: Send + Sync, + FieldElement: Serializable + Send + Sync, + { + // Create evaluation table + let evaluator = ConstraintEvaluator::new(air, &round_1_result.rap_challenges); + + let constraint_evaluations = evaluator.evaluate( + &round_1_result.lde_trace, + domain, + transition_coefficients, + boundary_coefficients, + &round_1_result.rap_challenges, + ); - // Evaluate those polynomials t_j on the large domain D_LDE. - let lde_trace_evaluations = compute_lde_trace_evaluations(&trace_polys, domain); + // Get the composition poly H + let composition_poly = + constraint_evaluations.compute_composition_poly(&domain.coset_offset); - let permutation = - get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor); + let number_of_parts = air.composition_poly_degree_bound() / air.trace_length(); + let composition_poly_parts = composition_poly.break_in_parts(number_of_parts); - let mut lde_trace_permuted = lde_trace_evaluations.clone(); + let lde_composition_poly_parts_evaluations: Vec<_> = composition_poly_parts + .iter() + .map(|part| { + evaluate_polynomial_on_lde_domain( + part, + domain.blowup_factor, + domain.interpolation_domain_size, + &domain.coset_offset, + ) + .unwrap() + }) + .collect(); - for col in lde_trace_permuted.iter_mut() { - apply_permutation(col, &permutation); + let (composition_poly_merkle_tree, composition_poly_root) = + Self::commit_composition_polynomial(&lde_composition_poly_parts_evaluations); + + Round2 { + lde_composition_poly_evaluations: lde_composition_poly_parts_evaluations, + composition_poly_parts, + composition_poly_merkle_tree, + composition_poly_root, + } } - // Compute commitments [t_j]. - let lde_trace = TraceTable::new_from_cols(&lde_trace_permuted); - let (lde_trace_merkle_tree, lde_trace_merkle_root) = batch_commit(&lde_trace.rows()); + fn round_3_evaluate_polynomials_in_out_of_domain_element>( + air: &A, + domain: &Domain, + round_1_result: &Round1, + round_2_result: &Round2, + z: &FieldElement, + ) -> Round3 + where + FieldElement: Serializable, + { + let z_power = z.pow(round_2_result.composition_poly_parts.len()); - // >>>> Send commitments: [tⱼ] - transcript.append_bytes(&lde_trace_merkle_root); + // Evaluate H_i in z^N for all i, where N is the number of parts the composition poly was + // broken into. 
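+        // Since H(X) = H₀(Xᴺ) + X·H₁(Xᴺ) + ... + Xᴺ⁻¹·H_{N-1}(Xᴺ), the evaluations Hᵢ(z^N)
+        // are enough for the verifier to reconstruct H(z) from z.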
+ let composition_poly_parts_ood_evaluation: Vec<_> = round_2_result + .composition_poly_parts + .iter() + .map(|part| part.evaluate(&z_power)) + .collect(); + + // Returns the Out of Domain Frame for the given trace polynomials, out of domain evaluation point (called `z` in the literature), + // frame offsets given by the AIR and primitive root used for interpolating the trace polynomials. + // An out of domain frame is nothing more than the evaluation of the trace polynomials in the points required by the + // verifier to check the consistency between the trace and the composition polynomial. + // + // In the fibonacci example, the ood frame is simply the evaluations `[t(z), t(z * g), t(z * g^2)]`, where `t` is the trace + // polynomial and `g` is the primitive root of unity used when interpolating `t`. + let trace_ood_evaluations = Frame::get_trace_evaluations( + &round_1_result.trace_polys, + z, + &air.context().transition_offsets, + &domain.trace_primitive_root, + ); - ( - trace_polys, - lde_trace_evaluations, - lde_trace_merkle_tree, - lde_trace_merkle_root, - ) -} + Round3 { + trace_ood_evaluations, + composition_poly_parts_ood_evaluation, + } + } -fn compute_lde_trace_evaluations( - trace_polys: &[Polynomial>], - domain: &Domain, -) -> Vec>> -where - F: IsFFTField, - FieldElement: Send + Sync, -{ - #[cfg(not(feature = "parallel"))] - let trace_polys_iter = trace_polys.iter(); - #[cfg(feature = "parallel")] - let trace_polys_iter = trace_polys.par_iter(); - - trace_polys_iter - .map(|poly| { - evaluate_polynomial_on_lde_domain( - poly, - domain.blowup_factor, - domain.interpolation_domain_size, - &domain.coset_offset, - ) - }) - .collect::>>, FFTError>>() - .unwrap() -} + fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial>( + air: &A, + domain: &Domain, + round_1_result: &Round1, + round_2_result: &Round2, + round_3_result: &Round3, + z: &FieldElement, + transcript: &mut impl IsStarkTranscript, + ) -> Round4 + where + FieldElement: Serializable + Send + Sync, + { + let coset_offset_u64 = air.context().proof_options.coset_offset; + let coset_offset = FieldElement::::from(coset_offset_u64); -fn round_1_randomized_air_with_preprocessing>( - air: &A, - main_trace: &TraceTable, - domain: &Domain, - transcript: &mut impl IsStarkTranscript, -) -> Result, ProvingError> -where - FieldElement: Serializable + Send + Sync, -{ - let (mut trace_polys, mut evaluations, main_merkle_tree, main_merkle_root) = - interpolate_and_commit(main_trace, domain, transcript); - - let rap_challenges = air.build_rap_challenges(transcript); - - let aux_trace = air.build_auxiliary_trace(main_trace, &rap_challenges); - - let mut lde_trace_merkle_trees = vec![main_merkle_tree]; - let mut lde_trace_merkle_roots = vec![main_merkle_root]; - if !aux_trace.is_empty() { - // Check that this is valid for interpolation - let (aux_trace_polys, aux_trace_polys_evaluations, aux_merkle_tree, aux_merkle_root) = - interpolate_and_commit(&aux_trace, domain, transcript); - trace_polys.extend_from_slice(&aux_trace_polys); - evaluations.extend_from_slice(&aux_trace_polys_evaluations); - lde_trace_merkle_trees.push(aux_merkle_tree); - lde_trace_merkle_roots.push(aux_merkle_root); - } - - let lde_trace = TraceTable::new_from_cols(&evaluations); - - Ok(Round1 { - trace_polys, - lde_trace, - lde_trace_merkle_roots, - lde_trace_merkle_trees, - rap_challenges, - }) -} + let gamma = transcript.sample_field_element(); + let n_terms_composition_poly = round_2_result.lde_composition_poly_evaluations.len(); + let n_terms_trace = 
air.context().transition_offsets.len() * air.context().trace_columns;

-fn round_2_compute_composition_polynomial<F, A>(
-    air: &A,
-    domain: &Domain<F>,
-    round_1_result: &Round1<F, A>,
-    transition_coefficients: &[FieldElement<F>],
-    boundary_coefficients: &[FieldElement<F>],
-) -> Round2<F>
-where
-    F: IsFFTField,
-    A: AIR<Field = F> + Send + Sync,
-    A::RAPChallenges: Send + Sync,
-    FieldElement<F>: Serializable + Send + Sync,
-{
-    // Create evaluation table
-    let evaluator = ConstraintEvaluator::new(air, &round_1_result.rap_challenges);
-
-    let constraint_evaluations = evaluator.evaluate(
-        &round_1_result.lde_trace,
-        domain,
-        transition_coefficients,
-        boundary_coefficients,
-        &round_1_result.rap_challenges,
-    );
-
-    // Get the composition poly H
-    let composition_poly = constraint_evaluations.compute_composition_poly(&domain.coset_offset);
-    let (composition_poly_even, composition_poly_odd) = composition_poly.even_odd_decomposition();
-
-    let lde_composition_poly_even_evaluations = evaluate_polynomial_on_lde_domain(
-        &composition_poly_even,
-        domain.blowup_factor,
-        domain.interpolation_domain_size,
-        &domain.coset_offset,
-    )
-    .unwrap();
-    let lde_composition_poly_odd_evaluations = evaluate_polynomial_on_lde_domain(
-        &composition_poly_odd,
-        domain.blowup_factor,
-        domain.interpolation_domain_size,
-        &domain.coset_offset,
-    )
-    .unwrap();
-
-    // TODO: Remove clones
-    let composition_poly_evaluations: Vec<Vec<_>> = lde_composition_poly_even_evaluations
-        .iter()
-        .zip(&lde_composition_poly_odd_evaluations)
-        .map(|(a, b)| vec![a.clone(), b.clone()])
-        .collect();
-    let (composition_poly_merkle_tree, composition_poly_root) =
-        batch_commit(&composition_poly_evaluations);
-
-    Round2 {
-        composition_poly_even,
-        lde_composition_poly_even_evaluations,
-        composition_poly_merkle_tree,
-        composition_poly_root,
-        composition_poly_odd,
-        lde_composition_poly_odd_evaluations,
+        // All DEEP composition coefficients (𝛾, 𝛾', 𝛾ⱼ, 𝛾ⱼ') are consecutive powers of the
+        // single challenge 𝛾 sampled above, following the Stone prover convention.
+        let mut deep_composition_coefficients: Vec<_> =
+            core::iter::successors(Some(FieldElement::one()), |x| Some(x * &gamma))
+                .take(n_terms_composition_poly + n_terms_trace)
+                .collect();
+
+        let trace_poly_coefficients: Vec<_> = deep_composition_coefficients
+            .drain(..n_terms_trace)
+            .collect();
+
+        // The remaining powers are the coefficients for the composition polynomial parts.
+        let gammas = deep_composition_coefficients;
+
+        // Compute p₀ (deep composition polynomial)
+        let deep_composition_poly = Self::compute_deep_composition_poly(
+            air,
+            &round_1_result.trace_polys,
+            round_2_result,
+            round_3_result,
+            z,
+            &domain.trace_primitive_root,
+            &gammas,
+            &trace_poly_coefficients,
+        );
+
+        let domain_size = domain.lde_roots_of_unity_coset.len();
+
+        // FRI commit and query phases
+        let (fri_last_value, fri_layers) = fri::commit_phase(
+            domain.root_order as usize,
+            deep_composition_poly,
+            transcript,
+            &coset_offset,
+            domain_size,
+        );
+
+        // grinding: generate a nonce and append it to the transcript, unless the
+        // grinding factor is zero, in which case the transcript is left untouched.
+        let security_bits = air.context().proof_options.grinding_factor;
+        let mut nonce = 0;
+        if security_bits > 0 {
+            let transcript_challenge = transcript.state();
+            nonce = generate_nonce_with_grinding(&transcript_challenge, security_bits)
+                .expect("nonce not found");
+            transcript.append_bytes(&nonce.to_be_bytes());
+        }
+
+        let number_of_queries = air.options().fri_number_of_queries;
+        let iotas = Self::sample_query_indexes(number_of_queries, domain, transcript);
+        let query_list = fri::query_phase(&fri_layers, &iotas);
+
+        let fri_layers_merkle_roots: Vec<_> = fri_layers
+            .iter()
+            .map(|layer| layer.merkle_tree.root)
+            .collect();
+
+        let (deep_poly_openings,
deep_poly_openings_sym) = + Self::open_deep_composition_poly(domain, round_1_result, round_2_result, &iotas); + + Round4 { + fri_last_value, + fri_layers_merkle_roots, + deep_poly_openings, + deep_poly_openings_sym, + query_list, + nonce, + } } -} -fn round_3_evaluate_polynomials_in_out_of_domain_element>( - air: &A, - domain: &Domain, - round_1_result: &Round1, - round_2_result: &Round2, - z: &FieldElement, -) -> Round3 -where - FieldElement: Serializable, -{ - let z_squared = z.square(); - - // Evaluate H_1 and H_2 in z^2. - let composition_poly_even_ood_evaluation = - round_2_result.composition_poly_even.evaluate(&z_squared); - let composition_poly_odd_ood_evaluation = - round_2_result.composition_poly_odd.evaluate(&z_squared); - - // Returns the Out of Domain Frame for the given trace polynomials, out of domain evaluation point (called `z` in the literature), - // frame offsets given by the AIR and primitive root used for interpolating the trace polynomials. - // An out of domain frame is nothing more than the evaluation of the trace polynomials in the points required by the - // verifier to check the consistency between the trace and the composition polynomial. - // - // In the fibonacci example, the ood frame is simply the evaluations `[t(z), t(z * g), t(z * g^2)]`, where `t` is the trace - // polynomial and `g` is the primitive root of unity used when interpolating `t`. - let trace_ood_evaluations = Frame::get_trace_evaluations( - &round_1_result.trace_polys, - z, - &air.context().transition_offsets, - &domain.trace_primitive_root, - ); - - Round3 { - trace_ood_evaluations, - composition_poly_even_ood_evaluation, - composition_poly_odd_ood_evaluation, + fn sample_query_indexes( + number_of_queries: usize, + domain: &Domain, + transcript: &mut impl IsStarkTranscript, + ) -> Vec { + let domain_size = domain.lde_roots_of_unity_coset.len() as u64; + (0..number_of_queries) + .map(|_| (transcript.sample_u64(domain_size >> 1)) as usize) + .collect::>() } -} -fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial< - F: IsFFTField, - A: AIR, ->( - air: &A, - domain: &Domain, - round_1_result: &Round1, - round_2_result: &Round2, - round_3_result: &Round3, - z: &FieldElement, - transcript: &mut impl IsStarkTranscript, -) -> Round4 -where - FieldElement: Serializable + Send + Sync, -{ - let coset_offset_u64 = air.context().proof_options.coset_offset; - let coset_offset = FieldElement::::from(coset_offset_u64); - - // <<<< Receive challenges: 𝛾, 𝛾' - let composition_poly_coeffients = [ - transcript.sample_field_element(), - transcript.sample_field_element(), - ]; - // <<<< Receive challenges: 𝛾ⱼ, 𝛾ⱼ' - let trace_poly_coeffients = batch_sample_challenges::( - air.context().transition_offsets.len() * air.context().trace_columns, - transcript, - ); - - // Compute p₀ (deep composition polynomial) - let deep_composition_poly = compute_deep_composition_poly( - air, - &round_1_result.trace_polys, - round_2_result, - round_3_result, - z, - &domain.trace_primitive_root, - &composition_poly_coeffients, - &trace_poly_coeffients, - ); - - let domain_size = domain.lde_roots_of_unity_coset.len(); - - // FRI commit and query phases - let (fri_last_value, fri_layers) = fri_commit_phase( - domain.root_order as usize, - deep_composition_poly, - transcript, - &coset_offset, - domain_size, - ); - - // grinding: generate nonce and append it to the transcript - let grinding_factor = air.context().proof_options.grinding_factor; - let transcript_challenge = transcript.state(); - let nonce = 
generate_nonce_with_grinding(&transcript_challenge, grinding_factor)
-        .expect("nonce not found");
-    transcript.append_bytes(&nonce.to_be_bytes());
-
-    let (query_list, iotas) = fri_query_phase(air, domain_size, &fri_layers, transcript);
-
-    let fri_layers_merkle_roots: Vec<_> = fri_layers
-        .iter()
-        .map(|layer| layer.merkle_tree.root)
-        .collect();
-
-    let deep_poly_openings =
-        open_deep_composition_poly(domain, round_1_result, round_2_result, &iotas);
-
-    Round4 {
-        fri_last_value,
-        fri_layers_merkle_roots,
-        deep_poly_openings,
-        query_list,
-        nonce,
+    /// Returns the DEEP composition polynomial that the prover then commits to using
+    /// FRI. This polynomial is a linear combination of the trace polynomials and the
+    /// parts of the composition polynomial, with coefficients sampled by the verifier
+    /// (i.e. using Fiat-Shamir).
+    #[allow(clippy::too_many_arguments)]
+    fn compute_deep_composition_poly<A>(
+        air: &A,
+        trace_polys: &[Polynomial<FieldElement<Self::Field>>],
+        round_2_result: &Round2<Self::Field>,
+        round_3_result: &Round3<Self::Field>,
+        z: &FieldElement<Self::Field>,
+        primitive_root: &FieldElement<Self::Field>,
+        composition_poly_gammas: &[FieldElement<Self::Field>],
+        trace_terms_gammas: &[FieldElement<Self::Field>],
+    ) -> Polynomial<FieldElement<Self::Field>>
+    where
+        A: AIR<Field = Self::Field>,
+        FieldElement<Self::Field>: Serializable + Send + Sync,
+    {
+        let z_power = z.pow(round_2_result.composition_poly_parts.len());
+
+        // ∑ᵢ 𝛾ᵢ ( Hᵢ − Hᵢ(z^N) ) / ( X − z^N )
+        let mut h_terms = Polynomial::zero();
+        for (i, part) in round_2_result.composition_poly_parts.iter().enumerate() {
+            // h_i_eval is the evaluation of the i-th part of the composition polynomial at z^N,
+            // where N is the number of parts of the composition polynomial.
+            let h_i_eval = &round_3_result.composition_poly_parts_ood_evaluation[i];
+            let h_i_term = &composition_poly_gammas[i] * (part - h_i_eval);
+            h_terms = h_terms + h_i_term;
+        }
+        assert_eq!(h_terms.evaluate(&z_power), FieldElement::zero());
+        h_terms.ruffini_division_inplace(&z_power);
+
+        // Get trace evaluations needed for the trace terms of the deep composition polynomial
+        let transition_offsets = &air.context().transition_offsets;
+        let trace_frame_evaluations = &round_3_result.trace_ood_evaluations;
+
+        // Compute the sum of all the trace terms of the deep composition polynomial.
+        // There is one term for every trace polynomial and for every row in the frame.
+        // ∑ ⱼₖ [ 𝛾ₖ ( tⱼ − tⱼ(z) ) / ( X − zgᵏ )]
+
+        // @@@ this could be const
+        let trace_frame_length = trace_frame_evaluations.len();
+
+        #[cfg(feature = "parallel")]
+        let trace_terms = trace_polys
+            .par_iter()
+            .enumerate()
+            .fold(
+                || Polynomial::zero(),
+                |trace_terms, (i, t_j)| {
+                    Self::compute_trace_term(
+                        &trace_terms,
+                        (i, t_j),
+                        trace_frame_length,
+                        trace_terms_gammas,
+                        trace_frame_evaluations,
+                        transition_offsets,
+                        (z, primitive_root),
+                    )
+                },
+            )
+            .reduce(|| Polynomial::zero(), |a, b| a + b);
+
+        #[cfg(not(feature = "parallel"))]
+        let trace_terms =
+            trace_polys
+                .iter()
+                .enumerate()
+                .fold(Polynomial::zero(), |trace_terms, (i, t_j)| {
+                    Self::compute_trace_term(
+                        &trace_terms,
+                        (i, t_j),
+                        trace_frame_length,
+                        trace_terms_gammas,
+                        trace_frame_evaluations,
+                        transition_offsets,
+                        (z, primitive_root),
+                    )
+                });
+
+        h_terms + trace_terms
     }
-
-/// Returns the DEEP composition polynomial that the prover then commits to using
-/// FRI. This polynomial is a linear combination of the trace polynomial and the
-/// composition polynomial, with coefficients sampled by the verifier (i.e. using Fiat-Shamir).
-#[allow(clippy::too_many_arguments)] -fn compute_deep_composition_poly( - air: &A, - trace_polys: &[Polynomial>], - round_2_result: &Round2, - round_3_result: &Round3, - z: &FieldElement, - primitive_root: &FieldElement, - composition_poly_gammas: &[FieldElement; 2], - trace_terms_gammas: &[FieldElement], -) -> Polynomial> -where - A: AIR, - F: IsFFTField, - FieldElement: Serializable + Send + Sync, -{ - // Compute composition polynomial terms of the deep composition polynomial. - let h_1 = &round_2_result.composition_poly_even; - let h_1_z2 = &round_3_result.composition_poly_even_ood_evaluation; - let h_2 = &round_2_result.composition_poly_odd; - let h_2_z2 = &round_3_result.composition_poly_odd_ood_evaluation; - let gamma = &composition_poly_gammas[0]; - let gamma_p = &composition_poly_gammas[1]; - let z_squared = z.square(); - - // 𝛾 ( H₁ − H₁(z²) ) / ( X − z² ) - let mut h_1_term = gamma * (h_1 - h_1_z2); - h_1_term.ruffini_division_inplace(&z_squared); - - // 𝛾' ( H₂ − H₂(z²) ) / ( X − z² ) - let mut h_2_term = gamma_p * (h_2 - h_2_z2); - h_2_term.ruffini_division_inplace(&z_squared); - - // Get trace evaluations needed for the trace terms of the deep composition polynomial - let transition_offsets = &air.context().transition_offsets; - let trace_frame_evaluations = &round_3_result.trace_ood_evaluations; - - // Compute the sum of all the trace terms of the deep composition polynomial. - // There is one term for every trace polynomial and for every row in the frame. - // ∑ ⱼₖ [ 𝛾ₖ ( tⱼ − tⱼ(z) ) / ( X − zgᵏ )] - - // @@@ this could be const - let trace_frame_length = trace_frame_evaluations.len(); - - #[cfg(feature = "parallel")] - let trace_term = trace_polys - .par_iter() - .enumerate() - .fold( - || Polynomial::zero(), - |trace_terms, (i, t_j)| { - compute_trace_term( - &trace_terms, - (i, t_j), - trace_frame_length, - trace_terms_gammas, - trace_frame_evaluations, - transition_offsets, - (z, primitive_root), - ) - }, - ) - .reduce(|| Polynomial::zero(), |a, b| a + b); + fn compute_trace_term( + trace_terms: &Polynomial>, + (i, t_j): (usize, &Polynomial>), + trace_frame_length: usize, + trace_terms_gammas: &[FieldElement], + trace_frame_evaluations: &[Vec>], + transition_offsets: &[usize], + (z, primitive_root): (&FieldElement, &FieldElement), + ) -> Polynomial> + where + FieldElement: Serializable + Send + Sync, + { + let i_times_trace_frame_evaluation = i * trace_frame_length; + let iter_trace_gammas = trace_terms_gammas + .iter() + .skip(i_times_trace_frame_evaluation); + let trace_int = trace_frame_evaluations + .iter() + .zip(transition_offsets) + .zip(iter_trace_gammas) + .fold( + Polynomial::zero(), + |trace_agg, ((eval, offset), trace_gamma)| { + // @@@ we can avoid this clone + let t_j_z = &eval[i]; + // @@@ this can be pre-computed + let z_shifted = z * primitive_root.pow(*offset); + let mut poly = t_j - t_j_z; + poly.ruffini_division_inplace(&z_shifted); + trace_agg + poly * trace_gamma + }, + ); + + trace_terms + trace_int + } - #[cfg(not(feature = "parallel"))] - let trace_term = - trace_polys + fn open_composition_poly( + composition_poly_merkle_tree: &BatchedMerkleTree, + lde_composition_poly_evaluations: &[Vec>], + index: usize, + ) -> (Proof, Vec>) + where + FieldElement: Serializable, + { + let proof = composition_poly_merkle_tree + .get_proof_by_pos(index) + .unwrap(); + + let lde_composition_poly_parts_evaluation: Vec<_> = lde_composition_poly_evaluations .iter() - .enumerate() - .fold(Polynomial::zero(), |trace_terms, (i, t_j)| { - compute_trace_term( - 
&trace_terms, - (i, t_j), - trace_frame_length, - trace_terms_gammas, - trace_frame_evaluations, - transition_offsets, - (z, primitive_root), - ) - }); + .flat_map(|part| { + vec![ + part[reverse_index(index * 2, part.len() as u64)].clone(), + part[reverse_index(index * 2 + 1, part.len() as u64)].clone(), + ] + }) + .collect(); + + (proof, lde_composition_poly_parts_evaluation) + } - h_1_term + h_2_term + trace_term -} + fn open_trace_polys( + domain: &Domain, + lde_trace_merkle_trees: &[BatchedMerkleTree], + lde_trace: &TraceTable, + index: usize, + ) -> (Vec>, Vec>) + where + FieldElement: Serializable, + { + let domain_size = domain.lde_roots_of_unity_coset.len(); + let lde_trace_evaluations = lde_trace + .get_row(reverse_index(index, domain_size as u64)) + .to_vec(); + + // Trace polynomials openings + #[cfg(feature = "parallel")] + let merkle_trees_iter = lde_trace_merkle_trees.par_iter(); + #[cfg(not(feature = "parallel"))] + let merkle_trees_iter = lde_trace_merkle_trees.iter(); + + let lde_trace_merkle_proofs: Vec> = merkle_trees_iter + .map(|tree| tree.get_proof_by_pos(index).unwrap()) + .collect(); + + (lde_trace_merkle_proofs, lde_trace_evaluations) + } -fn compute_trace_term( - trace_terms: &Polynomial>, - (i, t_j): (usize, &Polynomial>), - trace_frame_length: usize, - trace_terms_gammas: &[FieldElement], - trace_frame_evaluations: &[Vec>], - transition_offsets: &[usize], - (z, primitive_root): (&FieldElement, &FieldElement), -) -> Polynomial> -where - F: IsFFTField, - FieldElement: Serializable + Send + Sync, -{ - let i_times_trace_frame_evaluation = i * trace_frame_length; - let iter_trace_gammas = trace_terms_gammas - .iter() - .skip(i_times_trace_frame_evaluation); - let trace_int = trace_frame_evaluations - .iter() - .zip(transition_offsets) - .zip(iter_trace_gammas) - .fold( - Polynomial::zero(), - |trace_agg, ((eval, offset), trace_gamma)| { - // @@@ we can avoid this clone - let t_j_z = &eval[i]; - // @@@ this can be pre-computed - let z_shifted = z * primitive_root.pow(*offset); - let mut poly = t_j - t_j_z; - poly.ruffini_division_inplace(&z_shifted); - trace_agg + poly * trace_gamma - }, - ); - - trace_terms + trace_int -} + /// Open the deep composition polynomial on a list of indexes + /// and their symmetric elements. 
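+    /// For a queried index 𝜄, the opening is taken at position 2𝜄 of the bit-reversed
+    /// LDE domain and the symmetric opening at position 2𝜄 + 1, which hold the
+    /// evaluations at a point x and at −x respectively, matching the pairs committed
+    /// per Merkle leaf.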
+ fn open_deep_composition_poly>( + domain: &Domain, + round_1_result: &Round1, + round_2_result: &Round2, + indexes_to_open: &[usize], + ) -> ( + DeepPolynomialOpenings, + DeepPolynomialOpenings, + ) + where + FieldElement: Serializable, + { + let mut openings = Vec::new(); + let mut openings_symmetric = Vec::new(); + + for index in indexes_to_open.iter() { + let (lde_trace_merkle_proofs, lde_trace_evaluations) = Self::open_trace_polys( + domain, + &round_1_result.lde_trace_merkle_trees, + &round_1_result.lde_trace, + index * 2, + ); -fn open_deep_composition_poly>( - domain: &Domain, - round_1_result: &Round1, - round_2_result: &Round2, - indexes_to_open: &[usize], // list of iotas -) -> Vec> -where - FieldElement: Serializable, -{ - let permutation = - get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor); - indexes_to_open - .iter() - .map(|index_to_open| { - let index = index_to_open % domain.lde_roots_of_unity_coset.len(); - - let lde_composition_poly_proof = round_2_result - .composition_poly_merkle_tree - .get_proof_by_pos(index) - .unwrap(); - - // H₁ openings - let lde_composition_poly_even_evaluation = - round_2_result.lde_composition_poly_even_evaluations[index].clone(); - - // H₂ openings - let lde_composition_poly_odd_evaluation = - round_2_result.lde_composition_poly_odd_evaluations[index].clone(); - - let lde_trace_evaluations = round_1_result.lde_trace.get_row(index).to_vec(); - - let index = permutation[index]; - // Trace polynomials openings - #[cfg(feature = "parallel")] - let merkle_trees_iter = round_1_result.lde_trace_merkle_trees.par_iter(); - #[cfg(not(feature = "parallel"))] - let merkle_trees_iter = round_1_result.lde_trace_merkle_trees.iter(); - - let lde_trace_merkle_proofs: Vec> = merkle_trees_iter - .map(|tree| tree.get_proof_by_pos(index).unwrap()) - .collect(); + let (lde_trace_sym_merkle_proofs, lde_trace_sym_evaluations) = Self::open_trace_polys( + domain, + &round_1_result.lde_trace_merkle_trees, + &round_1_result.lde_trace, + index * 2 + 1, + ); - DeepPolynomialOpenings { - lde_composition_poly_proof, - lde_composition_poly_even_evaluation, - lde_composition_poly_odd_evaluation, + let (lde_composition_poly_proof, lde_composition_poly_parts_evaluation) = + Self::open_composition_poly( + &round_2_result.composition_poly_merkle_tree, + &round_2_result.lde_composition_poly_evaluations, + *index, + ); + + openings.push(DeepPolynomialOpening { + lde_composition_poly_proof: lde_composition_poly_proof.clone(), + lde_composition_poly_parts_evaluation: lde_composition_poly_parts_evaluation + .clone() + .into_iter() + .step_by(2) + .collect(), lde_trace_merkle_proofs, lde_trace_evaluations, - } - }) - .collect() -} + }); -// FIXME remove unwrap() calls and return errors -pub fn prove( - main_trace: &TraceTable, - pub_inputs: &A::PublicInputs, - proof_options: &ProofOptions, - mut transcript: impl IsStarkTranscript, -) -> Result, ProvingError> -where - F: IsFFTField, - A: AIR + Send + Sync, - A::RAPChallenges: Send + Sync, - FieldElement: Serializable + Send + Sync, -{ - info!("Started proof generation..."); - #[cfg(feature = "instruments")] - println!("- Started round 0: Air Initialization"); - #[cfg(feature = "instruments")] - let timer0 = Instant::now(); - - let air = A::new(main_trace.n_rows(), pub_inputs, proof_options); - let domain = Domain::new(&air); - - #[cfg(feature = "instruments")] - let elapsed0 = timer0.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed0); - - // 
=================================== - // ==========| Round 1 |========== - // =================================== - - #[cfg(feature = "instruments")] - println!("- Started round 1: RAP"); - #[cfg(feature = "instruments")] - let timer1 = Instant::now(); - - let round_1_result = round_1_randomized_air_with_preprocessing::( - &air, - main_trace, - &domain, - &mut transcript, - )?; - - #[cfg(debug_assertions)] - validate_trace( - &air, - &round_1_result.trace_polys, - &domain, - &round_1_result.rap_challenges, - ); - - #[cfg(feature = "instruments")] - let elapsed1 = timer1.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed1); - - // =================================== - // ==========| Round 2 |========== - // =================================== - - #[cfg(feature = "instruments")] - println!("- Started round 2: Compute composition polynomial"); - #[cfg(feature = "instruments")] - let timer2 = Instant::now(); - - // <<<< Receive challenge: 𝛽 - let beta = transcript.sample_field_element(); - let num_boundary_constraints = air - .boundary_constraints(&round_1_result.rap_challenges) - .constraints - .len(); - - let num_transition_constraints = air.context().num_transition_constraints; - - let mut coefficients: Vec<_> = (1..num_boundary_constraints + num_transition_constraints + 1) - .map(|i| beta.pow(i)) - .collect(); - - let transition_coefficients: Vec<_> = - coefficients.drain(..num_transition_constraints).collect(); - let boundary_coefficients = coefficients; - - let round_2_result = round_2_compute_composition_polynomial( - &air, - &domain, - &round_1_result, - &transition_coefficients, - &boundary_coefficients, - ); - - // >>>> Send commitments: [H₁], [H₂] - transcript.append_bytes(&round_2_result.composition_poly_root); - - #[cfg(feature = "instruments")] - let elapsed2 = timer2.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed2); - - // =================================== - // ==========| Round 3 |========== - // =================================== - - #[cfg(feature = "instruments")] - println!("- Started round 3: Evaluate polynomial in out of domain elements"); - #[cfg(feature = "instruments")] - let timer3 = Instant::now(); - - // <<<< Receive challenge: z - let z = transcript.sample_z_ood( - &domain.lde_roots_of_unity_coset, - &domain.trace_roots_of_unity, - ); - - let round_3_result = round_3_evaluate_polynomials_in_out_of_domain_element( - &air, - &domain, - &round_1_result, - &round_2_result, - &z, - ); - - // >>>> Send value: H₁(z²) - transcript.append_field_element(&round_3_result.composition_poly_even_ood_evaluation); - - // >>>> Send value: H₂(z²) - transcript.append_field_element(&round_3_result.composition_poly_odd_ood_evaluation); - // >>>> Send values: tⱼ(zgᵏ) - for row in round_3_result.trace_ood_evaluations.iter() { - for element in row.iter() { - transcript.append_field_element(element); + openings_symmetric.push(DeepPolynomialOpening { + lde_composition_poly_proof, + lde_composition_poly_parts_evaluation: lde_composition_poly_parts_evaluation + .into_iter() + .skip(1) + .step_by(2) + .collect(), + lde_trace_merkle_proofs: lde_trace_sym_merkle_proofs, + lde_trace_evaluations: lde_trace_sym_evaluations, + }); } + + (openings, openings_symmetric) } - #[cfg(feature = "instruments")] - let elapsed3 = timer3.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed3); - - // =================================== - // ==========| Round 4 |========== - // 
=================================== - - #[cfg(feature = "instruments")] - println!("- Started round 4: FRI"); - #[cfg(feature = "instruments")] - let timer4 = Instant::now(); - - // Part of this round is running FRI, which is an interactive - // protocol on its own. Therefore we pass it the transcript - // to simulate the interactions with the verifier. - let round_4_result = round_4_compute_and_run_fri_on_the_deep_composition_polynomial( - &air, - &domain, - &round_1_result, - &round_2_result, - &round_3_result, - &z, - &mut transcript, - ); - - #[cfg(feature = "instruments")] - let elapsed4 = timer4.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed4); - - #[cfg(feature = "instruments")] + // FIXME remove unwrap() calls and return errors + fn prove( + main_trace: &TraceTable, + pub_inputs: &A::PublicInputs, + proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, + ) -> Result, ProvingError> + where + A: AIR + Send + Sync, + A::RAPChallenges: Send + Sync, + FieldElement: Serializable + Send + Sync, { - let total_time = elapsed1 + elapsed2 + elapsed3 + elapsed4; - println!( - " Fraction of proving time per round: {:.4} {:.4} {:.4} {:.4} {:.4}", - elapsed0.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed1.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed2.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed3.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed4.as_nanos() as f64 / total_time.as_nanos() as f64 - ); - } - - info!("End proof generation"); - - let trace_ood_frame_evaluations = Frame::new( - round_3_result - .trace_ood_evaluations - .into_iter() - .flatten() - .collect(), - round_1_result.trace_polys.len(), - ); - - Ok(StarkProof { - // [tⱼ] - lde_trace_merkle_roots: round_1_result.lde_trace_merkle_roots, - // tⱼ(zgᵏ) - trace_ood_frame_evaluations, - // [H₁] and [H₂] - composition_poly_root: round_2_result.composition_poly_root, - // H₁(z²) - composition_poly_even_ood_evaluation: round_3_result.composition_poly_even_ood_evaluation, - // H₂(z²) - composition_poly_odd_ood_evaluation: round_3_result.composition_poly_odd_ood_evaluation, - // [pₖ] - fri_layers_merkle_roots: round_4_result.fri_layers_merkle_roots, - // pₙ - fri_last_value: round_4_result.fri_last_value, - // Open(p₀(D₀), 𝜐ₛ), Open(pₖ(Dₖ), −𝜐ₛ^(2ᵏ)) - query_list: round_4_result.query_list, - // Open(H₁(D_LDE, 𝜐₀), Open(H₂(D_LDE, 𝜐₀), Open(tⱼ(D_LDE), 𝜐₀) - deep_poly_openings: round_4_result.deep_poly_openings, - // nonce obtained from grinding - nonce: round_4_result.nonce, - - trace_length: air.trace_length(), - }) -} + info!("Started proof generation..."); + #[cfg(feature = "instruments")] + println!("- Started round 0: Air Initialization"); + #[cfg(feature = "instruments")] + let timer0 = Instant::now(); + + let air = A::new(main_trace.n_rows(), pub_inputs, proof_options); + let domain = Domain::new(&air); + + #[cfg(feature = "instruments")] + let elapsed0 = timer0.elapsed(); + #[cfg(feature = "instruments")] + println!(" Time spent: {:?}", elapsed0); + + // =================================== + // ==========| Round 1 |========== + // =================================== + + #[cfg(feature = "instruments")] + println!("- Started round 1: RAP"); + #[cfg(feature = "instruments")] + let timer1 = Instant::now(); + + let round_1_result = Self::round_1_randomized_air_with_preprocessing::( + &air, + main_trace, + &domain, + &mut transcript, + )?; + + #[cfg(debug_assertions)] + validate_trace( + &air, + &round_1_result.trace_polys, + 
&domain,
+            &round_1_result.rap_challenges,
+        );
+
+        #[cfg(feature = "instruments")]
+        let elapsed1 = timer1.elapsed();
+        #[cfg(feature = "instruments")]
+        println!(" Time spent: {:?}", elapsed1);
+
+        // ===================================
+        // ==========| Round 2 |==========
+        // ===================================
+        #[cfg(feature = "instruments")]
+        println!("- Started round 2: Compute composition polynomial");
+        #[cfg(feature = "instruments")]
+        let timer2 = Instant::now();
+
+        // <<<< Receive challenge: 𝛽
+        let beta = transcript.sample_field_element();
+        let num_boundary_constraints = air
+            .boundary_constraints(&round_1_result.rap_challenges)
+            .constraints
+            .len();
+
+        let num_transition_constraints = air.context().num_transition_constraints;
+
+        let mut coefficients: Vec<_> =
+            core::iter::successors(Some(FieldElement::one()), |x| Some(x * &beta))
+                .take(num_boundary_constraints + num_transition_constraints)
+                .collect();
+
+        let transition_coefficients: Vec<_> =
+            coefficients.drain(..num_transition_constraints).collect();
+        let boundary_coefficients = coefficients;
+
+        let round_2_result = Self::round_2_compute_composition_polynomial(
+            &air,
+            &domain,
+            &round_1_result,
+            &transition_coefficients,
+            &boundary_coefficients,
+        );
+
+        // >>>> Send commitment: [H], a single Merkle root covering all parts Hᵢ
+        transcript.append_bytes(&round_2_result.composition_poly_root);
+
+        #[cfg(feature = "instruments")]
+        let elapsed2 = timer2.elapsed();
+        #[cfg(feature = "instruments")]
+        println!(" Time spent: {:?}", elapsed2);
+
+        // ===================================
+        // ==========| Round 3 |==========
+        // ===================================
+
+        #[cfg(feature = "instruments")]
+        println!("- Started round 3: Evaluate polynomial in out of domain elements");
+        #[cfg(feature = "instruments")]
+        let timer3 = Instant::now();
+
+        // <<<< Receive challenge: z
+        let z = transcript.sample_z_ood(
+            &domain.lde_roots_of_unity_coset,
+            &domain.trace_roots_of_unity,
+        );
+
+        let round_3_result = Self::round_3_evaluate_polynomials_in_out_of_domain_element(
+            &air,
+            &domain,
+            &round_1_result,
+            &round_2_result,
+            &z,
+        );
+
+        // >>>> Send values: tⱼ(zgᵏ)
+        for i in 0..round_3_result.trace_ood_evaluations[0].len() {
+            for j in 0..round_3_result.trace_ood_evaluations.len() {
+                transcript.append_field_element(&round_3_result.trace_ood_evaluations[j][i]);
+            }
+        }
+
+        // >>>> Send values: Hᵢ(z^N)
+        for element in round_3_result.composition_poly_parts_ood_evaluation.iter() {
+            transcript.append_field_element(element);
+        }
+
+        #[cfg(feature = "instruments")]
+        let elapsed3 = timer3.elapsed();
+        #[cfg(feature = "instruments")]
+        println!(" Time spent: {:?}", elapsed3);
+
+        // ===================================
+        // ==========| Round 4 |==========
+        // ===================================
+
+        #[cfg(feature = "instruments")]
+        println!("- Started round 4: FRI");
+        #[cfg(feature = "instruments")]
+        let timer4 = Instant::now();
+
+        // Part of this round is running FRI, which is an interactive
+        // protocol on its own. Therefore we pass it the transcript
+        // to simulate the interactions with the verifier.
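+        // Besides running FRI, round 4 produces the openings of the trace and composition
+        // polynomials at the queried indexes and at their symmetric counterparts.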
+        let round_4_result = Self::round_4_compute_and_run_fri_on_the_deep_composition_polynomial(
+            &air,
+            &domain,
+            &round_1_result,
+            &round_2_result,
+            &round_3_result,
+            &z,
+            &mut transcript,
+        );
+
+        #[cfg(feature = "instruments")]
+        let elapsed4 = timer4.elapsed();
+        #[cfg(feature = "instruments")]
+        println!(" Time spent: {:?}", elapsed4);
+
+        #[cfg(feature = "instruments")]
+        {
+            let total_time = elapsed1 + elapsed2 + elapsed3 + elapsed4;
+            println!(
+                " Fraction of proving time per round: {:.4} {:.4} {:.4} {:.4} {:.4}",
+                elapsed0.as_nanos() as f64 / total_time.as_nanos() as f64,
+                elapsed1.as_nanos() as f64 / total_time.as_nanos() as f64,
+                elapsed2.as_nanos() as f64 / total_time.as_nanos() as f64,
+                elapsed3.as_nanos() as f64 / total_time.as_nanos() as f64,
+                elapsed4.as_nanos() as f64 / total_time.as_nanos() as f64
+            );
+        }
+
+        info!("End proof generation");
+
+        let trace_ood_frame_evaluations = Frame::new(
+            round_3_result
+                .trace_ood_evaluations
+                .into_iter()
+                .flatten()
+                .collect(),
+            round_1_result.trace_polys.len(),
+        );
+
+        Ok(StarkProof {
+            // [tⱼ]
+            lde_trace_merkle_roots: round_1_result.lde_trace_merkle_roots,
+            // tⱼ(zgᵏ)
+            trace_ood_frame_evaluations,
+            // [H], the commitment to all composition polynomial parts Hᵢ
+            composition_poly_root: round_2_result.composition_poly_root,
+            // Hᵢ(z^N)
+            composition_poly_parts_ood_evaluation: round_3_result
+                .composition_poly_parts_ood_evaluation,
+            // [pₖ]
+            fri_layers_merkle_roots: round_4_result.fri_layers_merkle_roots,
+            // pₙ
+            fri_last_value: round_4_result.fri_last_value,
+            // Open(p₀(D₀), 𝜐ₛ), Open(pₖ(Dₖ), −𝜐ₛ^(2ᵏ))
+            query_list: round_4_result.query_list,
+            // Open(Hᵢ(D_LDE), 𝜐₀), Open(tⱼ(D_LDE), 𝜐₀)
+            deep_poly_openings: round_4_result.deep_poly_openings,
+            // Open(Hᵢ(D_LDE), −𝜐₀), Open(tⱼ(D_LDE), −𝜐₀), the openings at the symmetric points
+            deep_poly_openings_sym: round_4_result.deep_poly_openings_sym,
+            // nonce obtained from grinding
+            nonce: round_4_result.nonce,
+
+            trace_length: air.trace_length(),
+        })
+    }
+}

 #[cfg(test)]
 mod tests {
     use std::num::ParseIntError;

+    fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
+        (0..s.len())
+            .step_by(2)
+            .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
+            .collect()
+    }
+
     use crate::{
         examples::{
             fibonacci_2_cols_shifted::{self, Fibonacci2ColsShifted},
@@ -836,6 +906,7 @@ mod tests {
         },
         proof::options::ProofOptions,
         transcript::StoneProverTranscript,
+        verifier::{Challenges, IsStarkVerifier, Verifier},
         Felt252,
     };

@@ -875,10 +946,6 @@ mod tests {
         assert_eq!(domain.blowup_factor, 2);
         assert_eq!(domain.interpolation_domain_size, trace_length);
         assert_eq!(domain.root_order, trace_length.trailing_zeros());
-        assert_eq!(
-            domain.lde_root_order,
-            (trace_length * blowup_factor).trailing_zeros()
-        );
         assert_eq!(domain.coset_offset, FieldElement::from(coset_offset));

         let primitive_root = Stark252PrimeField::get_primitive_root_of_unity(
@@ -945,15 +1012,12 @@ mod tests {
         }
     }

-    pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
-        (0..s.len())
-            .step_by(2)
-            .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
-            .collect()
-    }
-
-    #[test]
-    fn test_trace_commitment_is_compatible_with_stone_prover_1() {
+    fn proof_parts_stone_compatibility_case_1() -> (
+        StarkProof<Stark252PrimeField>,
+        fibonacci_2_cols_shifted::PublicInputs<Stark252PrimeField>,
+        ProofOptions,
+        [u8; 4],
+    ) {
         let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4);

         let claimed_index = 3;
@@ -961,6 +1025,8 @@ mod tests {
         let mut proof_options = ProofOptions::default_test_options();
         proof_options.blowup_factor = 4;
         proof_options.coset_offset = 3;
+        proof_options.grinding_factor = 0;
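+        // With the grinding factor set to zero the prover skips the nonce search and
+        // appends nothing to the transcript, so the challenges derived below line up
+        // with the Stone prover test vectors.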
+ proof_options.fri_number_of_queries = 1; let pub_inputs = fibonacci_2_cols_shifted::PublicInputs { claimed_value, @@ -969,30 +1035,381 @@ mod tests { let transcript_init_seed = [0xca, 0xfe, 0xca, 0xfe]; - let air = Fibonacci2ColsShifted::new(trace.n_rows(), &pub_inputs, &proof_options); - let domain = Domain::new(&air); - - let (_, _, _, trace_commitment) = interpolate_and_commit( + let proof = Prover::prove::>( &trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&transcript_init_seed), + ) + .unwrap(); + (proof, pub_inputs, proof_options, transcript_init_seed) + } + + fn stone_compatibility_case_1_proof() -> StarkProof { + let (proof, _, _, _) = proof_parts_stone_compatibility_case_1(); + proof + } + + fn stone_compatibility_case_1_challenges( + ) -> Challenges> { + let (proof, public_inputs, options, seed) = proof_parts_stone_compatibility_case_1(); + + let air = Fibonacci2ColsShifted::new(proof.trace_length, &public_inputs, &options); + let domain = Domain::new(&air); + Verifier::step_1_replay_rounds_and_recover_challenges( + &air, + &proof, &domain, - &mut StoneProverTranscript::new(&transcript_init_seed), + &mut StoneProverTranscript::new(&seed), + ) + } + + #[test] + fn stone_compatibility_case_1_proof_is_valid() { + let (proof, public_inputs, options, seed) = proof_parts_stone_compatibility_case_1(); + assert!(Verifier::verify::>( + &proof, + &public_inputs, + &options, + StoneProverTranscript::new(&seed) + )); + } + + #[test] + fn stone_compatibility_case_1_trace_commitment() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!( + proof.lde_trace_merkle_roots[0].to_vec(), + decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594").unwrap() ); + } + + #[test] + fn stone_compatibility_case_1_composition_poly_challenges() { + let challenges = stone_compatibility_case_1_challenges(); + assert_eq!(challenges.transition_coeffs[0], FieldElement::one()); + let beta = challenges.transition_coeffs[1]; assert_eq!( - &trace_commitment.to_vec(), - &decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594") - .unwrap() + beta, + FieldElement::from_hex_unchecked( + "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7" + ), + ); + + assert_eq!(challenges.boundary_coeffs[0], beta.pow(2u64)); + assert_eq!(challenges.boundary_coeffs[1], beta.pow(3u64)); + } + + #[test] + fn stone_compatibility_case_1_composition_poly_commitment() { + let proof = stone_compatibility_case_1_proof(); + // Composition polynomial commitment + assert_eq!( + proof.composition_poly_root.to_vec(), + decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38").unwrap() ); } + #[test] - fn test_trace_commitment_is_compatible_with_stone_prover_2() { - let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4); + fn stone_compatibility_case_1_out_of_domain_challenge() { + let challenges = stone_compatibility_case_1_challenges(); + assert_eq!( + challenges.z, + FieldElement::from_hex_unchecked( + "317629e783794b52cd27ac3a5e418c057fec9dd42f2b537cdb3f24c95b3e550" + ) + ); + } - let claimed_index = 3; + #[test] + fn stone_compatibility_case_1_out_of_domain_trace_evaluation() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!( + proof.trace_ood_frame_evaluations.get_row(0)[0], + FieldElement::from_hex_unchecked( + "70d8181785336cc7e0a0a1078a79ee6541ca0803ed3ff716de5a13c41684037", + ) + ); + assert_eq!( + proof.trace_ood_frame_evaluations.get_row(1)[0], + FieldElement::from_hex_unchecked( + 
"29808fc8b7480a69295e4b61600480ae574ca55f8d118100940501b789c1630", + ) + ); + assert_eq!( + proof.trace_ood_frame_evaluations.get_row(0)[1], + FieldElement::from_hex_unchecked( + "7d8110f21d1543324cc5e472ab82037eaad785707f8cae3d64c5b9034f0abd2", + ) + ); + assert_eq!( + proof.trace_ood_frame_evaluations.get_row(1)[1], + FieldElement::from_hex_unchecked( + "1b58470130218c122f71399bf1e04cf75a6e8556c4751629d5ce8c02cc4e62d", + ) + ); + } + + #[test] + fn stone_compatibility_case_1_out_of_domain_composition_poly_evaluation() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!( + proof.composition_poly_parts_ood_evaluation[0], + FieldElement::from_hex_unchecked( + "1c0b7c2275e36d62dfb48c791be122169dcc00c616c63f8efb2c2a504687e85", + ) + ); + } + + #[test] + fn stone_compatibility_case_1_deep_composition_poly_challenges() { + let challenges = stone_compatibility_case_1_challenges(); + + // Trace terms coefficients + assert_eq!(challenges.trace_term_coeffs[0][0], FieldElement::one()); + let gamma = challenges.trace_term_coeffs[0][1]; + assert_eq!( + &gamma, + &FieldElement::from_hex_unchecked( + "a0c79c1c77ded19520873d9c2440451974d23302e451d13e8124cf82fc15dd" + ) + ); + assert_eq!(&challenges.trace_term_coeffs[1][0], &gamma.pow(2_u64)); + assert_eq!(&challenges.trace_term_coeffs[1][1], &gamma.pow(3_u64)); + + // Composition polynomial parts terms coefficient + assert_eq!(&challenges.gammas[0], &gamma.pow(4_u64)); + } + + #[test] + fn stone_compatibility_case_1_fri_commit_phase_challenge_0() { + let challenges = stone_compatibility_case_1_challenges(); + + // Challenge to fold FRI polynomial + assert_eq!( + challenges.zetas[0], + FieldElement::from_hex_unchecked( + "5c6b5a66c9fda19f583f0b10edbaade98d0e458288e62c2fa40e3da2b293cef" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_commit_phase_layer_1_commitment() { + let proof = stone_compatibility_case_1_proof(); + + // Commitment of first layer of FRI + assert_eq!( + proof.fri_layers_merkle_roots[0].to_vec(), + decode_hex("327d47da86f5961ee012b2b0e412de16023ffba97c82bfe85102f00daabd49fb").unwrap() + ); + } + + #[test] + fn stone_compatibility_case_1_fri_commit_phase_challenge_1() { + let challenges = stone_compatibility_case_1_challenges(); + assert_eq!( + challenges.zetas[1], + FieldElement::from_hex_unchecked( + "13c337c9dc727bea9eef1f82cab86739f17acdcef562f9e5151708f12891295" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_commit_phase_last_value() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!( + proof.fri_last_value, + FieldElement::from_hex_unchecked( + "43fedf9f9e3d1469309862065c7d7ca0e7e9ce451906e9c01553056f695aec9" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_iota_challenge() { + let challenges = stone_compatibility_case_1_challenges(); + assert_eq!(challenges.iotas[0], 1); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_trace_openings() { + let proof = stone_compatibility_case_1_proof(); + + // Trace Col 0 + assert_eq!( + proof.deep_poly_openings[0].lde_trace_evaluations[0], + FieldElement::from_hex_unchecked( + "4de0d56f9cf97dff326c26592fbd4ae9ee756080b12c51cfe4864e9b8734f43" + ) + ); + + // Trace Col 1 + assert_eq!( + proof.deep_poly_openings[0].lde_trace_evaluations[1], + FieldElement::from_hex_unchecked( + "1bc1aadf39f2faee64d84cb25f7a95d3dceac1016258a39fc90c9d370e69e8e" + ) + ); + + // Trace Col 0 symmetric + assert_eq!( + proof.deep_poly_openings_sym[0].lde_trace_evaluations[0], + FieldElement::from_hex_unchecked( + 
"321f2a9063068310cd93d9a6d042b516118a9f7f4ed3ae301b79b16478cb0c6" + ) + ); + + // Trace Col 1 symmetric + assert_eq!( + proof.deep_poly_openings_sym[0].lde_trace_evaluations[1], + FieldElement::from_hex_unchecked( + "643e5520c60d06219b27b34da0856a2c23153efe9da75c6036f362c8f196186" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_trace_terms_authentication_path() { + let proof = stone_compatibility_case_1_proof(); + + // Trace poly auth path level 1 + assert_eq!( + proof.deep_poly_openings[0].lde_trace_merkle_proofs[0].merkle_path[1].to_vec(), + decode_hex("91b0c0b24b9d00067b0efab50832b76cf97192091624d42b86740666c5d369e6").unwrap() + ); + + // Trace poly auth path level 2 + assert_eq!( + proof.deep_poly_openings[0].lde_trace_merkle_proofs[0].merkle_path[2].to_vec(), + decode_hex("993b044db22444c0c0ebf1095b9a51faeb001c9b4dea36abe905f7162620dbbd").unwrap() + ); + + // Trace poly auth path level 3 + assert_eq!( + proof.deep_poly_openings[0].lde_trace_merkle_proofs[0].merkle_path[3].to_vec(), + decode_hex("5017abeca33fa82576b5c5c2c61792693b48c9d4414a407eef66b6029dae07ea").unwrap() + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_composition_poly_openings() { + let proof = stone_compatibility_case_1_proof(); + + // Composition poly + assert_eq!( + proof.deep_poly_openings[0].lde_composition_poly_parts_evaluation[0], + FieldElement::from_hex_unchecked( + "2b54852557db698e97253e9d110d60e9bf09f1d358b4c1a96f9f3cf9d2e8755" + ) + ); + // Composition poly sym + assert_eq!( + proof.deep_poly_openings_sym[0].lde_composition_poly_parts_evaluation[0], + FieldElement::from_hex_unchecked( + "190f1b0acb7858bd3f5285b68befcf32b436a5f1e3a280e1f42565c1f35c2c3" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_composition_poly_authentication_path() { + let proof = stone_compatibility_case_1_proof(); + + // Composition poly auth path level 0 + assert_eq!( + proof.deep_poly_openings[0] + .lde_composition_poly_proof + .merkle_path[0] + .to_vec(), + decode_hex("403b75a122eaf90a298e5d3db2cc7ca096db478078122379a6e3616e72da7546").unwrap() + ); + + // Composition poly auth path level 1 + assert_eq!( + proof.deep_poly_openings[0] + .lde_composition_poly_proof + .merkle_path[1] + .to_vec(), + decode_hex("07950888c0355c204a1e83ecbee77a0a6a89f93d41cc2be6b39ddd1e727cc965").unwrap() + ); + + // Composition poly auth path level 2 + assert_eq!( + proof.deep_poly_openings[0] + .lde_composition_poly_proof + .merkle_path[2] + .to_vec(), + decode_hex("58befe2c5de74cc5a002aa82ea219c5b242e761b45fd266eb95521e9f53f44eb").unwrap() + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_query_lengths() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!(proof.query_list.len(), 1); + + assert_eq!(proof.query_list[0].layers_evaluations_sym.len(), 1); + + assert_eq!( + proof.query_list[0].layers_auth_paths_sym[0] + .merkle_path + .len(), + 2 + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_layer_1_evaluation_symmetric() { + let proof = stone_compatibility_case_1_proof(); + + assert_eq!( + proof.query_list[0].layers_evaluations_sym[0], + FieldElement::from_hex_unchecked( + "0684991e76e5c08db17f33ea7840596be876d92c143f863e77cad10548289fd0" + ) + ); + } + + #[test] + fn stone_compatibility_case_1_fri_query_phase_layer_1_authentication_path() { + let proof = stone_compatibility_case_1_proof(); + + // FRI layer 1 auth path level 0 + assert_eq!( + proof.query_list[0].layers_auth_paths_sym[0].merkle_path[0].to_vec(), + 
decode_hex("0683622478e9e93cc2d18754872f043619f030b494d7ec8e003b1cbafe83b67b").unwrap() + ); + + // FRI layer 1 auth path level 1 + assert_eq!( + proof.query_list[0].layers_auth_paths_sym[0].merkle_path[1].to_vec(), + decode_hex("7985d945abe659a7502698051ec739508ed6bab594984c7f25e095a0a57a2e55").unwrap() + ); + } + + fn proof_parts_stone_compatibility_case_2() -> ( + StarkProof, + fibonacci_2_cols_shifted::PublicInputs, + ProofOptions, + [u8; 4], + ) { + let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::from(12345), 512); + + let claimed_index = 420; let claimed_value = trace.get_row(claimed_index)[0]; let mut proof_options = ProofOptions::default_test_options(); - proof_options.blowup_factor = 64; + proof_options.blowup_factor = 1 << 6; proof_options.coset_offset = 3; + proof_options.grinding_factor = 0; + proof_options.fri_number_of_queries = 1; let pub_inputs = fibonacci_2_cols_shifted::PublicInputs { claimed_value, @@ -1001,19 +1418,71 @@ mod tests { let transcript_init_seed = [0xfa, 0xfa, 0xfa, 0xee]; - let air = Fibonacci2ColsShifted::new(trace.n_rows(), &pub_inputs, &proof_options); - let domain = Domain::new(&air); - - let (_, _, _, trace_commitment) = interpolate_and_commit( + let proof = Prover::prove::>( &trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&transcript_init_seed), + ) + .unwrap(); + (proof, pub_inputs, proof_options, transcript_init_seed) + } + + fn stone_compatibility_case_2_proof() -> StarkProof { + let (proof, _, _, _) = proof_parts_stone_compatibility_case_2(); + proof + } + + fn stone_compatibility_case_2_challenges( + ) -> Challenges> { + let (proof, public_inputs, options, seed) = proof_parts_stone_compatibility_case_2(); + + let air = Fibonacci2ColsShifted::new(proof.trace_length, &public_inputs, &options); + let domain = Domain::new(&air); + Verifier::step_1_replay_rounds_and_recover_challenges( + &air, + &proof, &domain, - &mut StoneProverTranscript::new(&transcript_init_seed), + &mut StoneProverTranscript::new(&seed), + ) + } + + #[test] + fn stone_compatibility_case_2_trace_commitment() { + let proof = stone_compatibility_case_2_proof(); + + assert_eq!( + proof.lde_trace_merkle_roots[0].to_vec(), + decode_hex("6d31dd00038974bde5fe0c5e3a765f8ddc822a5df3254fca85a1950ae0208cbe").unwrap() + ); + } + + #[test] + fn stone_compatibility_case_2_fri_query_iota_challenge() { + let challenges = stone_compatibility_case_2_challenges(); + assert_eq!(challenges.iotas[0], 4239); + } + + #[test] + fn stone_compatibility_case_2_fri_query_phase_layer_7_evaluation_symmetric() { + let proof = stone_compatibility_case_2_proof(); + + assert_eq!( + proof.query_list[0].layers_evaluations_sym[7], + FieldElement::from_hex_unchecked( + "7aa40c5a4e30b44fee5bcc47c54072a435aa35c1a31b805cad8126118cc6860" + ) ); + } + #[test] + fn stone_compatibility_case_2_fri_query_phase_layer_8_authentication_path() { + let proof = stone_compatibility_case_2_proof(); + + // FRI layer 7 auth path level 5 assert_eq!( - &trace_commitment.to_vec(), - &decode_hex("99d8d4342895c4e35a084f8ea993036be06f51e7fa965734ed9c7d41104f0848") - .unwrap() + proof.query_list[0].layers_auth_paths_sym[7].merkle_path[5].to_vec(), + decode_hex("f12f159b548ca2c571a270870d43e7ec2ead78b3e93b635738c31eb9bcda3dda").unwrap() ); } } diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 77f244658..53b6fdfc1 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -12,9 +12,9 @@ use 
crate::{ simple_fibonacci::{self, FibonacciAIR, FibonacciPublicInputs}, }, proof::options::ProofOptions, - prover::prove, + prover::{IsStarkProver, Prover}, transcript::StoneProverTranscript, - verifier::verify, + verifier::{IsStarkVerifier, Verifier}, Felt252, }; @@ -29,21 +29,19 @@ fn test_prove_fib() { a1: Felt252::one(), }; - let proof = prove::>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!( - verify::>( - &proof, - &pub_inputs, - &proof_options, - StoneProverTranscript::new(&[]), - ) - ); + assert!(Verifier::verify::>( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + )); } #[test_log::test] @@ -63,14 +61,14 @@ fn test_prove_fib17() { a1: FE::one(), }; - let proof = prove::<_, FibonacciAIR<_>>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!(verify::<_, FibonacciAIR<_>>( + assert!(Verifier::verify::>( &proof, &pub_inputs, &proof_options, @@ -89,17 +87,14 @@ fn test_prove_fib_2_cols() { a1: Felt252::one(), }; - let proof = prove::>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!(verify::< - Stark252PrimeField, - Fibonacci2ColsAIR, - >( + assert!(Verifier::verify::>( &proof, &pub_inputs, &proof_options, @@ -120,14 +115,14 @@ fn test_prove_fib_2_cols_shifted() { claimed_index, }; - let proof = prove::>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!(verify::>( + assert!(Verifier::verify::>( &proof, &pub_inputs, &proof_options, @@ -145,21 +140,19 @@ fn test_prove_quadratic() { a0: Felt252::from(3), }; - let proof = prove::>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!( - verify::>( - &proof, - &pub_inputs, - &proof_options, - StoneProverTranscript::new(&[]) - ) - ); + assert!(Verifier::verify::>( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); } #[test_log::test] @@ -175,21 +168,19 @@ fn test_prove_rap_fib() { a1: Felt252::one(), }; - let proof = prove::>( + let proof = Prover::prove::>( &trace, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), ) .unwrap(); - assert!( - verify::>( - &proof, - &pub_inputs, - &proof_options, - StoneProverTranscript::new(&[]) - ) - ); + assert!(Verifier::verify::>( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); } #[test_log::test] @@ -199,14 +190,10 @@ fn test_prove_dummy() { let proof_options = ProofOptions::default_test_options(); - let proof = prove::( - &trace, - &(), - &proof_options, - StoneProverTranscript::new(&[]), - ) - .unwrap(); - assert!(verify::( + let proof = + Prover::prove::(&trace, &(), &proof_options, StoneProverTranscript::new(&[])) + .unwrap(); + assert!(Verifier::verify::( &proof, &(), &proof_options, diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs index 38a505602..a67c11666 100644 --- a/provers/stark/src/verifier.rs +++ b/provers/stark/src/verifier.rs @@ -1,22 +1,26 @@ #[cfg(feature = "instruments")] use std::time::Instant; +use lambdaworks_crypto::merkle_tree::proof::Proof; //use itertools::multizip; #[cfg(not(feature = "test_fiat_shamir"))] use log::error; use lambdaworks_math::{ + fft::cpu::bit_reversing::reverse_index, field::{ - element::FieldElement, - traits::{IsFFTField, IsField}, + 
element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, + traits::IsFFTField, }, traits::Serializable, }; -use crate::{prover::get_stone_prover_domain_permutation, transcript::IsStarkTranscript}; +use crate::{ + config::Commitment, proof::stark::DeepPolynomialOpening, transcript::IsStarkTranscript, +}; use super::{ - config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend}, + config::BatchedMerkleTreeBackend, domain::Domain, fri::fri_decommit::FriDecommitment, grinding::hash_transcript_with_int_and_get_leading_zeros, @@ -24,662 +28,735 @@ use super::{ traits::AIR, }; -struct Challenges -where - F: IsFFTField, - A: AIR, -{ - z: FieldElement, - boundary_coeffs: Vec>, - transition_coeffs: Vec>, - trace_term_coeffs: Vec>>, - gamma_even: FieldElement, - gamma_odd: FieldElement, - zetas: Vec>, - iotas: Vec, - rap_challenges: A::RAPChallenges, - leading_zeros_count: u8, // number of leading zeros in the grinding +pub struct Verifier {} + +impl IsStarkVerifier for Verifier { + type Field = Stark252PrimeField; } -fn step_1_replay_rounds_and_recover_challenges( - air: &A, - proof: &StarkProof, - domain: &Domain, - transcript: &mut impl IsStarkTranscript, -) -> Challenges +pub struct Challenges where F: IsFFTField, - FieldElement: Serializable, A: AIR, { - // =================================== - // ==========| Round 1 |========== - // =================================== - - // <<<< Receive commitments:[tⱼ] - let total_columns = air.context().trace_columns; + pub z: FieldElement, + pub boundary_coeffs: Vec>, + pub transition_coeffs: Vec>, + pub trace_term_coeffs: Vec>>, + pub gammas: Vec>, + pub zetas: Vec>, + pub iotas: Vec, + pub rap_challenges: A::RAPChallenges, + pub leading_zeros_count: u8, // number of leading zeros in the grinding +} - transcript.append_bytes(&proof.lde_trace_merkle_roots[0]); +pub type DeepPolynomialEvaluations = (Vec>, Vec>); - let rap_challenges = air.build_rap_challenges(transcript); +pub trait IsStarkVerifier { + type Field: IsFFTField; - if let Some(root) = proof.lde_trace_merkle_roots.get(1) { - transcript.append_bytes(root); + fn sample_query_indexes( + number_of_queries: usize, + domain: &Domain, + transcript: &mut impl IsStarkTranscript, + ) -> Vec { + let domain_size = domain.lde_roots_of_unity_coset.len() as u64; + (0..number_of_queries) + .map(|_| (transcript.sample_u64(domain_size >> 1)) as usize) + .collect::>() } - // =================================== - // ==========| Round 2 |========== - // =================================== + fn step_1_replay_rounds_and_recover_challenges( + air: &A, + proof: &StarkProof, + domain: &Domain, + transcript: &mut impl IsStarkTranscript, + ) -> Challenges + where + FieldElement: Serializable, + A: AIR, + { + // =================================== + // ==========| Round 1 |========== + // =================================== + + // <<<< Receive commitments:[tⱼ] + transcript.append_bytes(&proof.lde_trace_merkle_roots[0]); + + let rap_challenges = air.build_rap_challenges(transcript); + + if let Some(root) = proof.lde_trace_merkle_roots.get(1) { + transcript.append_bytes(root); + } + + // =================================== + // ==========| Round 2 |========== + // =================================== - // <<<< Receive challenge: 𝛽 - let beta = transcript.sample_field_element(); - let num_boundary_constraints = air.boundary_constraints(&rap_challenges).constraints.len(); + // <<<< Receive challenge: 𝛽 + let beta = transcript.sample_field_element(); + let num_boundary_constraints = 
air.boundary_constraints(&rap_challenges).constraints.len(); - let num_transition_constraints = air.context().num_transition_constraints; + let num_transition_constraints = air.context().num_transition_constraints; - let mut coefficients: Vec<_> = (1..num_boundary_constraints + num_transition_constraints + 1) - .map(|i| beta.pow(i)) - .collect(); + let mut coefficients: Vec<_> = (0..num_boundary_constraints + num_transition_constraints) + .map(|i| beta.pow(i)) + .collect(); - let transition_coeffs: Vec<_> = coefficients.drain(..num_transition_constraints).collect(); - let boundary_coeffs = coefficients; + let transition_coeffs: Vec<_> = coefficients.drain(..num_transition_constraints).collect(); + let boundary_coeffs = coefficients; - // <<<< Receive commitments: [H₁], [H₂] - transcript.append_bytes(&proof.composition_poly_root); + // <<<< Receive commitments: [H₁], [H₂] + transcript.append_bytes(&proof.composition_poly_root); - // =================================== - // ==========| Round 3 |========== - // =================================== + // =================================== + // ==========| Round 3 |========== + // =================================== - // >>>> Send challenge: z - let z = transcript.sample_z_ood( - &domain.lde_roots_of_unity_coset, - &domain.trace_roots_of_unity, - ); + // >>>> Send challenge: z + let z = transcript.sample_z_ood( + &domain.lde_roots_of_unity_coset, + &domain.trace_roots_of_unity, + ); - // <<<< Receive value: H₁(z²) - transcript.append_field_element(&proof.composition_poly_even_ood_evaluation); - // <<<< Receive value: H₂(z²) - transcript.append_field_element(&proof.composition_poly_odd_ood_evaluation); - // <<<< Receive values: tⱼ(zgᵏ) - for i in 0..proof.trace_ood_frame_evaluations.num_rows() { - for element in proof.trace_ood_frame_evaluations.get_row(i).iter() { + // <<<< Receive values: tⱼ(zgᵏ) + for i in 0..proof.trace_ood_frame_evaluations.num_columns() { + for j in 0..proof.trace_ood_frame_evaluations.num_rows() { + transcript.append_field_element(&proof.trace_ood_frame_evaluations.get_row(j)[i]); + } + } + // <<<< Receive value: Hᵢ(z^N) + for element in proof.composition_poly_parts_ood_evaluation.iter() { transcript.append_field_element(element); } - } - // =================================== - // ==========| Round 4 |========== - // =================================== - - // >>>> Send challenges: 𝛾, 𝛾' - let gamma_even = transcript.sample_field_element(); - let gamma_odd = transcript.sample_field_element(); - - // >>>> Send challenges: 𝛾ⱼ, 𝛾ⱼ' - // Get the number of trace terms the DEEP composition poly will have. - // One coefficient will be sampled for each of them. 
-    // TODO: try remove this, call transcript inside for and move gamma declarations
-    let trace_term_coeffs = (0..total_columns)
-        .map(|_| {
-            (0..air.context().transition_offsets.len())
-                .map(|_| transcript.sample_field_element())
-                .collect()
-        })
-        .collect::>>>();
-
-    // FRI commit phase
-
-    let merkle_roots = &proof.fri_layers_merkle_roots;
-    let zetas = merkle_roots
-        .iter()
-        .map(|root| {
-            // <<<< Receive commitment: [pₖ] (the first one is [p₀])
-            transcript.append_bytes(root);
+        // ===================================
+        // ==========|   Round 4   |==========
+        // ===================================
+
+        let n_terms_composition_poly = proof.composition_poly_parts_ood_evaluation.len();
+        let n_terms_trace = air.context().transition_offsets.len() * air.context().trace_columns;
+
+        // >>>> Send challenge: 𝛾
+        let gamma = transcript.sample_field_element();
+
+        // The DEEP composition polynomial coefficients are all powers of the single
+        // challenge 𝛾: first the trace-term coefficients 𝛾ⱼ, then the coefficients 𝛾ᵢ
+        // for the composition polynomial parts
+        let mut deep_composition_coefficients: Vec<_> =
+            core::iter::successors(Some(FieldElement::one()), |x| Some(x * &gamma))
+                .take(n_terms_composition_poly + n_terms_trace)
+                .collect();
+
+        let trace_term_coeffs: Vec<_> = deep_composition_coefficients
+            .drain(..n_terms_trace)
+            .collect::>()
+            .chunks(air.context().transition_offsets.len())
+            .map(|chunk| chunk.to_vec())
+            .collect();
+
+        // Coefficients 𝛾ᵢ for the composition polynomial parts
+        let gammas = deep_composition_coefficients;
+
+        // FRI commit phase
+        let merkle_roots = &proof.fri_layers_merkle_roots;
+        let mut zetas = merkle_roots
+            .iter()
+            .map(|root| {
+                // >>>> Send challenge 𝜁ₖ
+                let element = transcript.sample_field_element();
+                // <<<< Receive commitment: [pₖ] (the first one is [p₀])
+                transcript.append_bytes(root);
+                element
+            })
+            .collect::>>();
+
+        // >>>> Send challenge 𝜁ₙ₋₁
+        zetas.push(transcript.sample_field_element());
+
+        // <<<< Receive value: pₙ
+        transcript.append_field_element(&proof.fri_last_value);
+
+        // Grinding: receive the nonce and check the leading zeros of its hash with the
+        // transcript challenge (skipped when the grinding factor is zero)
+        let security_bits = air.context().proof_options.grinding_factor;
+        let mut leading_zeros_count = 0;
+        if security_bits > 0 {
+            let transcript_challenge = transcript.state();
+            let nonce = proof.nonce;
+            leading_zeros_count =
+                hash_transcript_with_int_and_get_leading_zeros(&transcript_challenge, nonce);
+            transcript.append_bytes(&nonce.to_be_bytes());
+        }

-            // >>>> Send challenge 𝜁ₖ
-            transcript.sample_field_element()
-        })
-        .collect::>>();
-
-    // <<<< Receive value: pₙ
-    transcript.append_field_element(&proof.fri_last_value);
-
-    // Receive grinding value
-    // 1) Receive challenge from the transcript
-    let transcript_challenge = transcript.state();
-    let nonce = proof.nonce;
-    let leading_zeros_count =
-        hash_transcript_with_int_and_get_leading_zeros(&transcript_challenge, nonce);
-    transcript.append_bytes(&nonce.to_be_bytes());
-
-    // FRI query phase
-    // <<<< Send challenges 𝜄ₛ (iota_s)
-    let iota_max: usize = 2_usize.pow(domain.lde_root_order);
-    let iotas: Vec = (0..air.options().fri_number_of_queries)
-        .map(|_| (transcript.sample_u64(iota_max as u64) as usize) % iota_max)
-        .collect();
-
-    Challenges {
-        z,
-        boundary_coeffs,
-        transition_coeffs,
-        trace_term_coeffs,
-        gamma_even,
-        gamma_odd,
-        zetas,
-        iotas,
-        rap_challenges,
-        leading_zeros_count,
+        // FRI query phase
+        // >>>> Send challenges 𝜄ₛ (iota_s)
+        let number_of_queries = air.options().fri_number_of_queries;
+        let iotas = Self::sample_query_indexes(number_of_queries, domain, transcript);
+
+        Challenges {
+            z,
+            boundary_coeffs,
+            transition_coeffs,
+            trace_term_coeffs,
+            gammas,
+            zetas,
+            iotas,
+            rap_challenges,
+            
leading_zeros_count, + } } -} -fn step_2_verify_claimed_composition_polynomial>( - air: &A, - proof: &StarkProof, - domain: &Domain, - challenges: &Challenges, -) -> bool { - // BEGIN TRACE <-> Composition poly consistency evaluation check - // These are H_1(z^2) and H_2(z^2) - let composition_poly_even_ood_evaluation = &proof.composition_poly_even_ood_evaluation; - let composition_poly_odd_ood_evaluation = &proof.composition_poly_odd_ood_evaluation; - - let boundary_constraints = air.boundary_constraints(&challenges.rap_challenges); - - //let n_trace_cols = air.context().trace_columns; - // special cases. - let trace_length = air.trace_length(); - let number_of_b_constraints = boundary_constraints.constraints.len(); - - // Following naming conventions from https://www.notamonadtutorial.com/diving-deep-fri/ - let (boundary_c_i_evaluations_num, mut boundary_c_i_evaluations_den): ( - Vec>, - Vec>, - ) = (0..number_of_b_constraints) - .map(|index| { - let step = boundary_constraints.constraints[index].step; - let point = &domain.trace_primitive_root.pow(step as u64); - let trace_idx = boundary_constraints.constraints[index].col; - let trace_evaluation = &proof.trace_ood_frame_evaluations.get_row(0)[trace_idx]; - let boundary_zerofier_challenges_z_den = &challenges.z - point; - - let boundary_quotient_ood_evaluation_num = - trace_evaluation - &boundary_constraints.constraints[index].value; - - ( - boundary_quotient_ood_evaluation_num, - boundary_zerofier_challenges_z_den, - ) - }) - .collect::>() - .into_iter() - .unzip(); - - FieldElement::inplace_batch_inverse(&mut boundary_c_i_evaluations_den).unwrap(); - - let boundary_quotient_ood_evaluation: FieldElement = boundary_c_i_evaluations_num - .iter() - .zip(&boundary_c_i_evaluations_den) - .zip(&challenges.boundary_coeffs) - .map(|((num, den), beta)| num * den * beta) - .fold(FieldElement::::zero(), |acc, x| acc + x); - - let transition_ood_frame_evaluations = air.compute_transition( - &proof.trace_ood_frame_evaluations, - &challenges.rap_challenges, - ); - - let denominator = (&challenges.z.pow(trace_length) - FieldElement::::one()) - .inv() - .unwrap(); - - let exemption = air - .transition_exemptions_verifier( - domain.trace_roots_of_unity.iter().last().expect("has last"), - ) - .iter() - .map(|poly| poly.evaluate(&challenges.z)) - .collect::>>(); - - let unity = &FieldElement::one(); - let transition_c_i_evaluations_sum = transition_ood_frame_evaluations - .iter() - .zip(&air.context().transition_degrees) - .zip(&air.context().transition_exemptions) - .zip(&challenges.transition_coeffs) - .fold(FieldElement::zero(), |acc, (((eval, _), except), beta)| { - let except = except - .checked_sub(1) - .map(|i| &exemption[i]) - .unwrap_or(unity); - acc + &denominator * eval * beta * except - }); - - let composition_poly_ood_evaluation = - &boundary_quotient_ood_evaluation + transition_c_i_evaluations_sum; - - let composition_poly_claimed_ood_evaluation = - composition_poly_even_ood_evaluation + &challenges.z * composition_poly_odd_ood_evaluation; - - composition_poly_claimed_ood_evaluation == composition_poly_ood_evaluation -} - -fn step_3_verify_fri( - proof: &StarkProof, - domain: &Domain, - challenges: &Challenges, -) -> bool -where - F: IsFFTField, - FieldElement: Serializable, - A: AIR, -{ - // verify FRI - let two_inv = &FieldElement::from(2).inv().unwrap(); - let mut evaluation_point_inverse = challenges - .iotas - .iter() - .map(|iota| &domain.lde_roots_of_unity_coset[*iota]) - .cloned() - .collect::>>(); - 
FieldElement::inplace_batch_inverse(&mut evaluation_point_inverse).unwrap(); - proof - .query_list - .iter() - .zip(&challenges.iotas) - .zip(evaluation_point_inverse) - .fold(true, |mut result, ((proof_s, iota_s), eval)| { - // this is done in constant time - result &= verify_query_and_sym_openings( - proof, - &challenges.zetas, - *iota_s, - proof_s, - domain, - eval, - two_inv, - ); - result - }) -} - -fn step_4_verify_deep_composition_polynomial>( - air: &A, - proof: &StarkProof, - domain: &Domain, - challenges: &Challenges, -) -> bool -where - FieldElement: Serializable, -{ - let permutation = - get_stone_prover_domain_permutation(domain.interpolation_domain_size, domain.blowup_factor); - let primitive_root = &F::get_primitive_root_of_unity(domain.root_order as u64).unwrap(); - let z_squared = &challenges.z.square(); - let mut denom_inv = challenges - .iotas - .iter() - .map(|iota_n| &domain.lde_roots_of_unity_coset[*iota_n] - z_squared) - .collect::>>(); - FieldElement::inplace_batch_inverse(&mut denom_inv).unwrap(); - - challenges - .iotas - .iter() - .zip(&proof.deep_poly_openings) - .zip(&denom_inv) - .enumerate() - .fold( - true, - |mut result, (i, ((iota_n, deep_poly_opening), denom_inv))| { - let evaluations = vec![ - deep_poly_opening - .lde_composition_poly_even_evaluation - .clone(), - deep_poly_opening - .lde_composition_poly_odd_evaluation - .clone(), - ]; - - // Verify opening Open(H₁(D_LDE, 𝜐₀) and Open(H₂(D_LDE, 𝜐₀), - result &= deep_poly_opening - .lde_composition_poly_proof - .verify::>( - &proof.composition_poly_root, - *iota_n, - &evaluations, - ); + fn step_2_verify_claimed_composition_polynomial( + air: &A, + proof: &StarkProof, + domain: &Domain, + challenges: &Challenges, + ) -> bool + where + A: AIR, + { + let boundary_constraints = air.boundary_constraints(&challenges.rap_challenges); + + let trace_length = air.trace_length(); + let number_of_b_constraints = boundary_constraints.constraints.len(); + + #[allow(clippy::type_complexity)] + let (boundary_c_i_evaluations_num, mut boundary_c_i_evaluations_den): ( + Vec>, + Vec>, + ) = (0..number_of_b_constraints) + .map(|index| { + let step = boundary_constraints.constraints[index].step; + let point = &domain.trace_primitive_root.pow(step as u64); + let trace_idx = boundary_constraints.constraints[index].col; + let trace_evaluation = &proof.trace_ood_frame_evaluations.get_row(0)[trace_idx]; + let boundary_zerofier_challenges_z_den = &challenges.z - point; + + let boundary_quotient_ood_evaluation_num = + trace_evaluation - &boundary_constraints.constraints[index].value; + + ( + boundary_quotient_ood_evaluation_num, + boundary_zerofier_challenges_z_den, + ) + }) + .collect::>() + .into_iter() + .unzip(); + + FieldElement::inplace_batch_inverse(&mut boundary_c_i_evaluations_den).unwrap(); + + let boundary_quotient_ood_evaluation: FieldElement = + boundary_c_i_evaluations_num + .iter() + .zip(&boundary_c_i_evaluations_den) + .zip(&challenges.boundary_coeffs) + .map(|((num, den), beta)| num * den * beta) + .fold(FieldElement::::zero(), |acc, x| acc + x); + + let transition_ood_frame_evaluations = air.compute_transition( + &proof.trace_ood_frame_evaluations, + &challenges.rap_challenges, + ); - let num_main_columns = - air.context().trace_columns - air.number_auxiliary_rap_columns(); - let lde_trace_evaluations = vec![ - deep_poly_opening.lde_trace_evaluations[..num_main_columns].to_vec(), - deep_poly_opening.lde_trace_evaluations[num_main_columns..].to_vec(), - ]; - - // Verify openings Open(tⱼ(D_LDE), 𝜐₀) - result &= 
proof - .lde_trace_merkle_roots - .iter() - .zip(&deep_poly_opening.lde_trace_merkle_proofs) - .zip(lde_trace_evaluations) - .fold(result, |acc, ((merkle_root, merkle_proof), evaluation)| { - acc & merkle_proof.verify::>( - merkle_root, - permutation[*iota_n], - &evaluation, - ) - }); + let denominator = (&challenges.z.pow(trace_length) - FieldElement::::one()) + .inv() + .unwrap(); - // DEEP consistency check - // Verify that Deep(x) is constructed correctly - let mut divisors = (0..proof.trace_ood_frame_evaluations.num_rows()) - .map(|row_idx| { - &domain.lde_roots_of_unity_coset[*iota_n] - - &challenges.z * primitive_root.pow(row_idx as u64) - }) - .collect::>>(); - FieldElement::inplace_batch_inverse(&mut divisors).unwrap(); - let deep_poly_evaluation = reconstruct_deep_composition_poly_evaluation( - proof, challenges, denom_inv, &divisors, i, - ); + let exemption = air + .transition_exemptions_verifier( + domain.trace_roots_of_unity.iter().last().expect("has last"), + ) + .iter() + .map(|poly| poly.evaluate(&challenges.z)) + .collect::>>(); + + let unity = &FieldElement::one(); + let transition_c_i_evaluations_sum = transition_ood_frame_evaluations + .iter() + .zip(&air.context().transition_degrees) + .zip(&air.context().transition_exemptions) + .zip(&challenges.transition_coeffs) + .fold(FieldElement::zero(), |acc, (((eval, _), except), beta)| { + let except = except + .checked_sub(1) + .map(|i| &exemption[i]) + .unwrap_or(unity); + acc + &denominator * eval * beta * except + }); + + let composition_poly_ood_evaluation = + &boundary_quotient_ood_evaluation + transition_c_i_evaluations_sum; + + let composition_poly_claimed_ood_evaluation = proof + .composition_poly_parts_ood_evaluation + .iter() + .rev() + .fold(FieldElement::zero(), |acc, coeff| { + acc * &challenges.z + coeff + }); + + composition_poly_claimed_ood_evaluation == composition_poly_ood_evaluation + } - let deep_poly_claimed_evaluation = &proof.query_list[i].layers_evaluations[0]; - result & (deep_poly_claimed_evaluation == &deep_poly_evaluation) - }, - ) -} + fn step_3_verify_fri( + proof: &StarkProof, + domain: &Domain, + challenges: &Challenges, + ) -> bool + where + FieldElement: Serializable, + A: AIR, + { + let (deep_poly_evaluations, deep_poly_evaluations_sym) = + Self::reconstruct_deep_composition_poly_evaluations_for_all_queries( + challenges, domain, proof, + ); -fn verify_query_and_sym_openings( - proof: &StarkProof, - zetas: &[FieldElement], - iota: usize, - fri_decommitment: &FriDecommitment, - domain: &Domain, - evaluation_point: FieldElement, - two_inv: &FieldElement, -) -> bool -where - FieldElement: Serializable, -{ - let fri_layers_merkle_roots = &proof.fri_layers_merkle_roots; - let evaluation_point_vec: Vec> = - core::iter::successors(Some(evaluation_point), |evaluation_point| { - Some(evaluation_point.square()) - }) - .take(fri_layers_merkle_roots.len()) - .collect(); - - let mut v = fri_decommitment.layers_evaluations[0].clone(); - // For each fri layer merkle proof check: - // That each merkle path verifies - - // Sample beta with fiat shamir - // Compute v = [P_i(z_i) + P_i(-z_i)] / 2 + beta * [P_i(z_i) - P_i(-z_i)] / (2 * z_i) - // Where P_i is the folded polynomial of the i-th fiat shamir round - // z_i is obtained from the first z (that was derived through Fiat-Shamir) through a known calculation - // The calculation is, given the index, index % length_of_evaluation_domain - - // Check that v = P_{i+1}(z_i) - - // For each (merkle_root, merkle_auth_path) / fold - // With the auth path 
containining the element that the path proves it's existence - fri_layers_merkle_roots - .iter() - .enumerate() - .zip(&fri_decommitment.layers_auth_paths) - .zip(&fri_decommitment.layers_evaluations) - .zip(&fri_decommitment.layers_auth_paths_sym) - .zip(&fri_decommitment.layers_evaluations_sym) - .zip(evaluation_point_vec) - .fold( - true, - |result, - ( - (((((k, merkle_root), auth_path), evaluation), auth_path_sym), evaluation_sym), - evaluation_point_inv, - )| { - let domain_length = 1 << (domain.lde_root_order - k as u32); - let layer_evaluation_index_sym = (iota + domain_length / 2) % domain_length; - // Since we always derive the current layer from the previous layer - // We start with the second one, skipping the first, so previous is layer is the first one - // This is the current layer's evaluation domain length. - // We need it to know what the decommitment index for the current - // layer is, so we can check the merkle paths at the right index. - - // Verify opening Open(pₖ(Dₖ), −𝜐ₛ^(2ᵏ)) - let auth_sym = &auth_path_sym.verify::>( - merkle_root, - layer_evaluation_index_sym, - evaluation_sym, + // verify FRI + let mut evaluation_point_inverse = challenges + .iotas + .iter() + .map(|iota| Self::query_challenge_to_evaluation_point(*iota, domain)) + .collect::>>(); + FieldElement::inplace_batch_inverse(&mut evaluation_point_inverse).unwrap(); + proof + .query_list + .iter() + .zip(&challenges.iotas) + .zip(evaluation_point_inverse) + .enumerate() + .fold(true, |mut result, (i, ((proof_s, iota_s), eval))| { + // this is done in constant time + result &= Self::verify_query_and_sym_openings( + proof, + &challenges.zetas, + *iota_s, + proof_s, + eval, + &deep_poly_evaluations[i], + &deep_poly_evaluations_sym[i], ); - // Verify opening Open(pₖ(Dₖ), 𝜐ₛ) - let auth_point = - auth_path.verify::>(merkle_root, iota, evaluation); - let beta = &zetas[k]; - // v is the calculated element for the co linearity check - v = (&v + evaluation_sym) * two_inv - + beta * (&v - evaluation_sym) * two_inv * evaluation_point_inv; - - // Check that next value is the given by the prover - if k < fri_decommitment.layers_evaluations.len() - 1 { - let next_layer_evaluation = &fri_decommitment.layers_evaluations[k + 1]; - result & (v == *next_layer_evaluation) & auth_point & auth_sym - } else { - result & (v == proof.fri_last_value) & auth_point & auth_sym - } - }, - ) -} + result + }) + } -// Reconstruct Deep(\upsilon_0) off the values in the proof -fn reconstruct_deep_composition_poly_evaluation>( - proof: &StarkProof, - challenges: &Challenges, - denom_inv: &FieldElement, - divisors: &[FieldElement], - i: usize, -) -> FieldElement { - let trace_term = (0..proof.trace_ood_frame_evaluations.num_columns()) - .zip(&challenges.trace_term_coeffs) - .fold(FieldElement::zero(), |trace_terms, (col_idx, coeff_row)| { - let trace_i = (0..proof.trace_ood_frame_evaluations.num_rows()) - .zip(coeff_row) - .fold(FieldElement::zero(), |trace_t, (row_idx, coeff)| { - let poly_evaluation = - (proof.deep_poly_openings[i].lde_trace_evaluations[col_idx].clone() - - proof.trace_ood_frame_evaluations.get_row(row_idx)[col_idx].clone()) - * &divisors[row_idx]; - trace_t + &poly_evaluation * coeff - }); - trace_terms + trace_i - }); + fn query_challenge_to_evaluation_point( + iota: usize, + domain: &Domain, + ) -> FieldElement { + domain.lde_roots_of_unity_coset + [reverse_index(iota * 2, domain.lde_roots_of_unity_coset.len() as u64)] + .clone() + } - let h_1_upsilon_0 = 
&proof.deep_poly_openings[i].lde_composition_poly_even_evaluation;
-    let h_1_zsquared = &proof.composition_poly_even_ood_evaluation;
-    let h_2_upsilon_0 = &proof.deep_poly_openings[i].lde_composition_poly_odd_evaluation;
-    let h_2_zsquared = &proof.composition_poly_odd_ood_evaluation;
+    fn query_challenge_to_evaluation_point_sym(
+        iota: usize,
+        domain: &Domain,
+    ) -> FieldElement {
+        domain.lde_roots_of_unity_coset
+            [reverse_index(iota * 2 + 1, domain.lde_roots_of_unity_coset.len() as u64)]
+        .clone()
+    }

-    let h_1_term = (h_1_upsilon_0 - h_1_zsquared) * denom_inv;
-    let h_2_term = (h_2_upsilon_0 - h_2_zsquared) * denom_inv;
+    fn verify_opening(
+        proof: &Proof,
+        root: &Commitment,
+        index: usize,
+        value: &[FieldElement],
+    ) -> bool
+    where
+        FieldElement: Serializable,
+    {
+        proof.verify::>(root, index, &value.to_owned())
+    }

-    trace_term + h_1_term * &challenges.gamma_even + h_2_term * &challenges.gamma_odd
-}
+    /// Verify opening Open(tⱼ(D_LDE), 𝜐) and Open(tⱼ(D_LDE), -𝜐) for all trace polynomials tⱼ,
+    /// where 𝜐 and -𝜐 are the elements corresponding to the index challenge `iota`.
+    fn verify_trace_openings(
+        num_main_columns: usize,
+        proof: &StarkProof,
+        deep_poly_openings: &DeepPolynomialOpening,
+        deep_poly_openings_sym: &DeepPolynomialOpening,
+        iota: usize,
+    ) -> bool
+    where
+        FieldElement: Serializable,
+    {
+        let lde_trace_evaluations = vec![
+            deep_poly_openings.lde_trace_evaluations[..num_main_columns].to_vec(),
+            deep_poly_openings.lde_trace_evaluations[num_main_columns..].to_vec(),
+        ];
+
+        let index = iota * 2;
+        let openings_are_valid = proof
+            .lde_trace_merkle_roots
+            .iter()
+            .zip(&deep_poly_openings.lde_trace_merkle_proofs)
+            .zip(lde_trace_evaluations)
+            .fold(true, |acc, ((merkle_root, merkle_proof), evaluation)| {
+                acc & Self::verify_opening(merkle_proof, merkle_root, index, &evaluation)
+            });
+
+        let lde_trace_evaluations_sym = vec![
+            deep_poly_openings_sym.lde_trace_evaluations[..num_main_columns].to_vec(),
+            deep_poly_openings_sym.lde_trace_evaluations[num_main_columns..].to_vec(),
+        ];
+
+        let index_sym = iota * 2 + 1;
+        let openings_sym_are_valid = proof
+            .lde_trace_merkle_roots
+            .iter()
+            .zip(&deep_poly_openings_sym.lde_trace_merkle_proofs)
+            .zip(lde_trace_evaluations_sym)
+            .fold(true, |acc, ((merkle_root, merkle_proof), evaluation)| {
+                acc & Self::verify_opening(merkle_proof, merkle_root, index_sym, &evaluation)
+            });
+        openings_are_valid & openings_sym_are_valid
+    }

-pub fn verify(
-    proof: &StarkProof,
-    pub_input: &A::PublicInputs,
-    proof_options: &ProofOptions,
-    mut transcript: impl IsStarkTranscript,
-) -> bool
-where
-    F: IsFFTField,
-    A: AIR,
-    FieldElement: Serializable,
-{
-    // Verify there are enough queries
-    if proof.query_list.len() < proof_options.fri_number_of_queries {
-        return false;
+    /// Verify opening Open(Hᵢ(D_LDE), 𝜐) and Open(Hᵢ(D_LDE), -𝜐) for all parts Hᵢ of the composition
+    /// polynomial, where 𝜐 and -𝜐 are the elements corresponding to the index challenge `iota`.
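+    ///
+    /// All the parts Hᵢ(𝜐) together with their symmetric values Hᵢ(-𝜐) are stored in a
+    /// single Merkle leaf, so one authentication path (at leaf index `iota`) proves both
+    /// sets of openings at once.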
+ fn verify_composition_poly_opening( + deep_poly_openings: &DeepPolynomialOpening, + deep_poly_openings_sym: &DeepPolynomialOpening, + composition_poly_merkle_root: &Commitment, + iota: &usize, + ) -> bool + where + FieldElement: Serializable, + { + let mut value = deep_poly_openings + .lde_composition_poly_parts_evaluation + .clone(); + value.extend_from_slice(&deep_poly_openings_sym.lde_composition_poly_parts_evaluation); + + deep_poly_openings + .lde_composition_poly_proof + .verify::>( + composition_poly_merkle_root, + *iota, + &value, + ) } - #[cfg(feature = "instruments")] - println!("- Started step 1: Recover challenges"); - #[cfg(feature = "instruments")] - let timer1 = Instant::now(); + fn step_4_verify_trace_and_composition_openings>( + air: &A, + proof: &StarkProof, + challenges: &Challenges, + ) -> bool + where + FieldElement: Serializable, + { + challenges + .iotas + .iter() + .zip(&proof.deep_poly_openings) + .zip(&proof.deep_poly_openings_sym) + .fold( + true, + |mut result, ((iota_n, deep_poly_opening), deep_poly_openings_sym)| { + result &= Self::verify_composition_poly_opening( + deep_poly_opening, + deep_poly_openings_sym, + &proof.composition_poly_root, + iota_n, + ); - let air = A::new(proof.trace_length, pub_input, proof_options); - let domain = Domain::new(&air); + let num_main_columns = + air.context().trace_columns - air.number_auxiliary_rap_columns(); + result &= Self::verify_trace_openings( + num_main_columns, + proof, + deep_poly_opening, + deep_poly_openings_sym, + *iota_n, + ); + result + }, + ) + } - let challenges = - step_1_replay_rounds_and_recover_challenges(&air, proof, &domain, &mut transcript); + fn verify_fri_layer_openings( + merkle_root: &Commitment, + auth_path_sym: &Proof, + evaluation: &FieldElement, + evaluation_sym: &FieldElement, + iota: usize, + ) -> bool + where + FieldElement: Serializable, + { + let evaluations = if iota % 2 == 1 { + vec![evaluation_sym.clone(), evaluation.clone()] + } else { + vec![evaluation.clone(), evaluation_sym.clone()] + }; - // verify grinding - let grinding_factor = air.context().proof_options.grinding_factor; - if challenges.leading_zeros_count < grinding_factor { - error!("Grinding factor not satisfied"); - return false; + auth_path_sym.verify::>( + merkle_root, + iota >> 1, + &evaluations, + ) } - #[cfg(feature = "instruments")] - let elapsed1 = timer1.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed1); - - #[cfg(feature = "instruments")] - println!("- Started step 2: Verify claimed polynomial"); - #[cfg(feature = "instruments")] - let timer2 = Instant::now(); + /// Verify a single FRI query + /// `zetas`: the vector of all challenges sent by the verifier to the prover at the commit + /// phase to fold polynomials. + /// `iota`: the index challenge of this FRI query. This index uniquely determines two elements 𝜐 and -𝜐 + /// of the evaluation domain of FRI layer 0. + /// `evaluation_point_inv`: precomputed value of 𝜐⁻¹. + /// `deep_composition_evaluation`: precomputed value of p₀(𝜐), where p₀ is the deep composition polynomial. + /// `deep_composition_evaluation_sym`: precomputed value of p₀(-𝜐), where p₀ is the deep composition polynomial. 
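+    ///
+    /// The folding rule applied at every layer is
+    /// pᵢ₊₁(x²) = (pᵢ(x) + pᵢ(−x)) + 𝜁ᵢ·(pᵢ(x) − pᵢ(−x))·x⁻¹,
+    /// i.e. without the factor 1/2 of the textbook fold; the prover must fold with the
+    /// same convention for the final comparison against `fri_last_value` to hold.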
+    fn verify_query_and_sym_openings(
+        proof: &StarkProof,
+        zetas: &[FieldElement],
+        iota: usize,
+        fri_decommitment: &FriDecommitment,
+        evaluation_point_inv: FieldElement,
+        deep_composition_evaluation: &FieldElement,
+        deep_composition_evaluation_sym: &FieldElement,
+    ) -> bool
+    where
+        FieldElement: Serializable,
+    {
+        let fri_layers_merkle_roots = &proof.fri_layers_merkle_roots;
+        let evaluation_point_vec: Vec> =
+            core::iter::successors(Some(evaluation_point_inv.square()), |evaluation_point| {
+                Some(evaluation_point.square())
+            })
+            .take(fri_layers_merkle_roots.len())
+            .collect();
+
+        let p0_eval = deep_composition_evaluation;
+        let p0_eval_sym = deep_composition_evaluation_sym;
+
+        // Reconstruct p₁(𝜐²)
+        let mut v =
+            (p0_eval + p0_eval_sym) + &zetas[0] * (p0_eval - p0_eval_sym) * evaluation_point_inv;
+        let mut index = iota;
+
+        // For each FRI layer, starting from layer 1: use the proof to verify the validity of the
+        // values pᵢ(−𝜐^(2ⁱ)) (given by the prover) and pᵢ(𝜐^(2ⁱ)) (computed by the verifier in
+        // the previous iteration). Then use them to obtain pᵢ₊₁(𝜐^(2ⁱ⁺¹)).
+        // Finally, check that the last value coincides with the one given by the prover.
+        fri_layers_merkle_roots
+            .iter()
+            .enumerate()
+            .zip(&fri_decommitment.layers_auth_paths_sym)
+            .zip(&fri_decommitment.layers_evaluations_sym)
+            .zip(evaluation_point_vec)
+            .fold(
+                true,
+                |result,
+                 (
+                    (((i, merkle_root), auth_path_sym), evaluation_sym),
+                    evaluation_point_inv,
+                )| {
+                    // Verify openings Open(pᵢ(Dᵢ), −𝜐^(2ⁱ)) and Open(pᵢ(Dᵢ), 𝜐^(2ⁱ)).
+                    // `v` is pᵢ(𝜐^(2ⁱ)).
+                    // `evaluation_sym` is pᵢ(−𝜐^(2ⁱ)).
+                    let openings_ok = Self::verify_fri_layer_openings(
+                        merkle_root,
+                        auth_path_sym,
+                        &v,
+                        evaluation_sym,
+                        index,
+                    );
+
+                    // Update `v` with the next value pᵢ₊₁(𝜐^(2ⁱ⁺¹)).
+                    v = (&v + evaluation_sym)
+                        + &zetas[i + 1] * (&v - evaluation_sym) * evaluation_point_inv;
+
+                    // Update the index for the next iteration. The index of the squares in the
+                    // next layer is obtained by halving the current index. This is due to the
+                    // bit-reverse ordering of the elements in the Merkle tree.
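+                    // For example, the pair of openings at indexes 6 and 7 of layer i
+                    // shares the leaf 3 (= 6 >> 1 = 7 >> 1) of that layer's tree, and the
+                    // folded value pᵢ₊₁(x²) is found at index 3 of layer i + 1.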
+                    index >>= 1;
+
+                    if i < fri_decommitment.layers_evaluations_sym.len() - 1 {
+                        result & openings_ok
+                    } else {
+                        // Check that the final value is the one given by the prover
+                        result & (v == proof.fri_last_value) & openings_ok
+                    }
+                },
+            )
    }

-    #[cfg(feature = "instruments")]
-    let elapsed2 = timer2.elapsed();
-    #[cfg(feature = "instruments")]
-    println!("      Time spent: {:?}", elapsed2);
-    #[cfg(feature = "instruments")]
+    fn reconstruct_deep_composition_poly_evaluations_for_all_queries(
+        challenges: &Challenges,
+        domain: &Domain,
+        proof: &StarkProof,
+    ) -> DeepPolynomialEvaluations
+    where
+        A: AIR,
+    {
+        let mut deep_poly_evaluations = Vec::new();
+        let mut deep_poly_evaluations_sym = Vec::new();
+        for (i, iota) in challenges.iotas.iter().enumerate() {
+            let primitive_root =
+                &Self::Field::get_primitive_root_of_unity(domain.root_order as u64).unwrap();
+
+            let evaluation_point = Self::query_challenge_to_evaluation_point(*iota, domain);
+            deep_poly_evaluations.push(Self::reconstruct_deep_composition_poly_evaluation(
+                proof,
+                &evaluation_point,
+                primitive_root,
+                challenges,
+                &proof.deep_poly_openings[i].lde_trace_evaluations,
+                &proof.deep_poly_openings[i].lde_composition_poly_parts_evaluation,
+            ));
+
+            let evaluation_point = Self::query_challenge_to_evaluation_point_sym(*iota, domain);
+            deep_poly_evaluations_sym.push(Self::reconstruct_deep_composition_poly_evaluation(
+                proof,
+                &evaluation_point,
+                primitive_root,
+                challenges,
+                &proof.deep_poly_openings_sym[i].lde_trace_evaluations,
+                &proof.deep_poly_openings_sym[i].lde_composition_poly_parts_evaluation,
+            ));
+        }
+        (deep_poly_evaluations, deep_poly_evaluations_sym)
+    }

-    println!("- Started step 3: Verify FRI");
-    #[cfg(feature = "instruments")]
-    let timer3 = Instant::now();
+    fn reconstruct_deep_composition_poly_evaluation>(
+        proof: &StarkProof,
+        evaluation_point: &FieldElement,
+        primitive_root: &FieldElement,
+        challenges: &Challenges,
+        lde_trace_evaluations: &[FieldElement],
+        lde_composition_poly_parts_evaluation: &[FieldElement],
+    ) -> FieldElement {
+        let mut denoms_trace = (0..proof.trace_ood_frame_evaluations.num_rows())
+            .map(|row_idx| evaluation_point - &challenges.z * primitive_root.pow(row_idx as u64))
+            .collect::>>();
+        FieldElement::inplace_batch_inverse(&mut denoms_trace).unwrap();
+
+        let trace_term = (0..proof.trace_ood_frame_evaluations.num_columns())
+            .zip(&challenges.trace_term_coeffs)
+            .fold(FieldElement::zero(), |trace_terms, (col_idx, coeff_row)| {
+                let trace_i = (0..proof.trace_ood_frame_evaluations.num_rows())
+                    .zip(coeff_row)
+                    .fold(FieldElement::zero(), |trace_t, (row_idx, coeff)| {
+                        let poly_evaluation = (lde_trace_evaluations[col_idx].clone()
+                            - proof.trace_ood_frame_evaluations.get_row(row_idx)[col_idx].clone())
+                            * &denoms_trace[row_idx];
+                        trace_t + &poly_evaluation * coeff
+                    });
+                trace_terms + trace_i
+            });
+
+        let number_of_parts = lde_composition_poly_parts_evaluation.len();
+        let z_pow = &challenges.z.pow(number_of_parts);
+
+        let denom_composition = (evaluation_point - z_pow).inv().unwrap();
+        let mut h_terms = FieldElement::zero();
+        for (j, h_i_upsilon) in lde_composition_poly_parts_evaluation.iter().enumerate() {
+            let h_i_zpower = &proof.composition_poly_parts_ood_evaluation[j];
+            let h_i_term = (h_i_upsilon - h_i_zpower) * &challenges.gammas[j];
+            h_terms += h_i_term;
+        }
+        h_terms = h_terms * denom_composition;

-    if !step_3_verify_fri(proof, &domain, &challenges) {
-        error!("FRI verification failed");
-        return false;
+        trace_term + h_terms
    }

-    #[cfg(feature = 
"instruments")] - let elapsed3 = timer3.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed3); - - #[cfg(feature = "instruments")] - println!("- Started step 4: Verify deep composition polynomial"); - #[cfg(feature = "instruments")] - let timer4 = Instant::now(); + fn verify( + proof: &StarkProof, + pub_input: &A::PublicInputs, + proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, + ) -> bool + where + A: AIR, + FieldElement: Serializable, + { + // Verify there are enough queries + if proof.query_list.len() < proof_options.fri_number_of_queries { + return false; + } - #[allow(clippy::let_and_return)] - if !step_4_verify_deep_composition_polynomial(&air, proof, &domain, &challenges) { - error!("DEEP Composition Polynomial verification failed"); - return false; - } + #[cfg(feature = "instruments")] + println!("- Started step 1: Recover challenges"); + #[cfg(feature = "instruments")] + let timer1 = Instant::now(); - #[cfg(feature = "instruments")] - let elapsed4 = timer4.elapsed(); - #[cfg(feature = "instruments")] - println!(" Time spent: {:?}", elapsed4); + let air = A::new(proof.trace_length, pub_input, proof_options); + let domain = Domain::new(&air); - #[cfg(feature = "instruments")] - { - let total_time = elapsed1 + elapsed2 + elapsed3 + elapsed4; - println!( - " Fraction of verifying time per step: {:.4} {:.4} {:.4} {:.4}", - elapsed1.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed2.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed3.as_nanos() as f64 / total_time.as_nanos() as f64, - elapsed4.as_nanos() as f64 / total_time.as_nanos() as f64 + let challenges = Self::step_1_replay_rounds_and_recover_challenges( + &air, + proof, + &domain, + &mut transcript, ); - } - true -} + // verify grinding + let grinding_factor = air.context().proof_options.grinding_factor; + if challenges.leading_zeros_count < grinding_factor { + error!("Grinding factor not satisfied"); + return false; + } -#[cfg(test)] -pub mod tests { - use std::num::ParseIntError; + #[cfg(feature = "instruments")] + let elapsed1 = timer1.elapsed(); + #[cfg(feature = "instruments")] + println!(" Time spent: {:?}", elapsed1); - use lambdaworks_math::field::{ - element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, - }; - - use crate::{ - domain::Domain, - examples::fibonacci_2_cols_shifted::{self, Fibonacci2ColsShifted}, - proof::options::ProofOptions, - prover::prove, - traits::AIR, - transcript::StoneProverTranscript, - verifier::step_1_replay_rounds_and_recover_challenges, - }; - - pub fn decode_hex(s: &str) -> Result, ParseIntError> { - (0..s.len()) - .step_by(2) - .map(|i| u8::from_str_radix(&s[i..i + 2], 16)) - .collect() - } + #[cfg(feature = "instruments")] + println!("- Started step 2: Verify claimed polynomial"); + #[cfg(feature = "instruments")] + let timer2 = Instant::now(); - #[test] - fn test_sharp_compatibility() { - let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::one(), 4); + if !Self::step_2_verify_claimed_composition_polynomial(&air, proof, &domain, &challenges) { + error!("Composition Polynomial verification failed"); + return false; + } - let claimed_index = 3; - let claimed_value = trace.get_row(claimed_index)[0]; - let mut proof_options = ProofOptions::default_test_options(); - proof_options.blowup_factor = 4; - proof_options.coset_offset = 3; + #[cfg(feature = "instruments")] + let elapsed2 = timer2.elapsed(); + #[cfg(feature = "instruments")] + println!(" Time spent: {:?}", 
elapsed2); + #[cfg(feature = "instruments")] - let pub_inputs = fibonacci_2_cols_shifted::PublicInputs { - claimed_value, - claimed_index, - }; + println!("- Started step 3: Verify FRI"); + #[cfg(feature = "instruments")] + let timer3 = Instant::now(); - let transcript_init_seed = [0xca, 0xfe, 0xca, 0xfe]; + if !Self::step_3_verify_fri(proof, &domain, &challenges) { + error!("FRI verification failed"); + return false; + } - let proof = prove::>( - &trace, - &pub_inputs, - &proof_options, - StoneProverTranscript::new(&transcript_init_seed), - ) - .unwrap(); + #[cfg(feature = "instruments")] + let elapsed3 = timer3.elapsed(); + #[cfg(feature = "instruments")] + println!(" Time spent: {:?}", elapsed3); - let air = Fibonacci2ColsShifted::new(proof.trace_length, &pub_inputs, &proof_options); - let domain = Domain::new(&air); - let challenges = step_1_replay_rounds_and_recover_challenges( - &air, - &proof, - &domain, - &mut StoneProverTranscript::new(&transcript_init_seed), - ); + #[cfg(feature = "instruments")] + println!("- Started step 4: Verify deep composition polynomial"); + #[cfg(feature = "instruments")] + let timer4 = Instant::now(); - assert_eq!( - proof.lde_trace_merkle_roots[0].to_vec(), - decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594").unwrap() - ); + #[allow(clippy::let_and_return)] + if !Self::step_4_verify_trace_and_composition_openings(&air, proof, &challenges) { + error!("DEEP Composition Polynomial verification failed"); + return false; + } - let beta = challenges.transition_coeffs[0]; - assert_eq!( - beta, - FieldElement::from_hex_unchecked( - "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7" - ), - ); - assert_eq!(challenges.transition_coeffs[1], beta.pow(2u64)); - assert_eq!(challenges.boundary_coeffs[0], beta.pow(3u64)); - assert_eq!(challenges.boundary_coeffs[1], beta.pow(4u64)); + #[cfg(feature = "instruments")] + let elapsed4 = timer4.elapsed(); + #[cfg(feature = "instruments")] + println!(" Time spent: {:?}", elapsed4); + + #[cfg(feature = "instruments")] + { + let total_time = elapsed1 + elapsed2 + elapsed3 + elapsed4; + println!( + " Fraction of verifying time per step: {:.4} {:.4} {:.4} {:.4}", + elapsed1.as_nanos() as f64 / total_time.as_nanos() as f64, + elapsed2.as_nanos() as f64 / total_time.as_nanos() as f64, + elapsed3.as_nanos() as f64 / total_time.as_nanos() as f64, + elapsed4.as_nanos() as f64 / total_time.as_nanos() as f64 + ); + } + + true } }
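Usage sketch (a minimal round trip with the refactored API; not part of the patch). The
free functions `prove`/`verify` are replaced by the `IsStarkProver`/`IsStarkVerifier`
traits, implemented by the `Prover` and `Verifier` structs. The `use` paths below are
the crate-internal ones from the integration tests above, and the generic arguments
that this patch's rendering elides are written out explicitly:

    use crate::{
        examples::fibonacci_2_cols_shifted::{self, Fibonacci2ColsShifted},
        proof::options::ProofOptions,
        prover::{IsStarkProver, Prover},
        transcript::StoneProverTranscript,
        verifier::{IsStarkVerifier, Verifier},
    };
    use lambdaworks_math::field::element::FieldElement;

    fn stone_compatible_round_trip() {
        // Trace for the shifted two-column Fibonacci AIR, as in compatibility case 2.
        let trace = fibonacci_2_cols_shifted::compute_trace(FieldElement::from(12345), 512);

        let claimed_index = 420;
        let pub_inputs = fibonacci_2_cols_shifted::PublicInputs {
            claimed_value: trace.get_row(claimed_index)[0],
            claimed_index,
        };
        let proof_options = ProofOptions::default_test_options();

        // Prover and verifier must replay the same transcript seed.
        let seed = [0xfa, 0xfa, 0xfa, 0xee];
        let proof = Prover::prove::<Fibonacci2ColsShifted<_>>(
            &trace,
            &pub_inputs,
            &proof_options,
            StoneProverTranscript::new(&seed),
        )
        .unwrap();
        assert!(Verifier::verify::<Fibonacci2ColsShifted<_>>(
            &proof,
            &pub_inputs,
            &proof_options,
            StoneProverTranscript::new(&seed),
        ));
    }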