diff --git a/provers/cairo/src/air.rs b/provers/cairo/src/air.rs
index 5fb91794d..84f6fb6df 100644
--- a/provers/cairo/src/air.rs
+++ b/provers/cairo/src/air.rs
@@ -1,7 +1,6 @@
 use std::ops::Range;
 
 use cairo_vm::without_std::collections::HashMap;
-use lambdaworks_crypto::fiat_shamir::transcript::Transcript;
 use lambdaworks_math::{
     errors::DeserializationError,
     field::{
@@ -17,7 +16,7 @@ use stark_platinum_prover::{
     prover::{prove, ProvingError},
     trace::TraceTable,
     traits::AIR,
-    transcript::transcript_to_field,
+    transcript::{IsStarkTranscript, StoneProverTranscript},
     verifier::verify,
 };
 
@@ -790,11 +789,14 @@ impl AIR for CairoAIR {
         TraceTable::new(aux_table, self.number_auxiliary_rap_columns())
     }
 
-    fn build_rap_challenges<T: Transcript>(&self, transcript: &mut T) -> Self::RAPChallenges {
+    fn build_rap_challenges(
+        &self,
+        transcript: &mut impl IsStarkTranscript<Self::Field>,
+    ) -> Self::RAPChallenges {
         CairoRAPChallenges {
-            alpha_memory: transcript_to_field(transcript),
-            z_memory: transcript_to_field(transcript),
-            z_range_check: transcript_to_field(transcript),
+            alpha_memory: transcript.sample_field_element(),
+            z_memory: transcript.sample_field_element(),
+            z_range_check: transcript.sample_field_element(),
         }
     }
 
@@ -1252,7 +1254,12 @@ pub fn generate_cairo_proof(
     pub_input: &PublicInputs,
     proof_options: &ProofOptions,
 ) -> Result<StarkProof<Stark252PrimeField>, ProvingError> {
-    prove::<Stark252PrimeField, CairoAIR>(trace, pub_input, proof_options)
+    prove::<Stark252PrimeField, CairoAIR>(
+        trace,
+        pub_input,
+        proof_options,
+        StoneProverTranscript::new(&[]),
+    )
 }
 
 /// Wrapper function for verifying Cairo proofs without the need to specify
@@ -1263,7 +1270,12 @@ pub fn verify_cairo_proof(
     pub_input: &PublicInputs,
     proof_options: &ProofOptions,
 ) -> bool {
-    verify::<Stark252PrimeField, CairoAIR>(proof, pub_input, proof_options)
+    verify::<Stark252PrimeField, CairoAIR>(
+        proof,
+        pub_input,
+        proof_options,
+        StoneProverTranscript::new(&[]),
+    )
 }
 
 #[cfg(test)]
diff --git a/provers/cairo/src/tests/integration_tests.rs b/provers/cairo/src/tests/integration_tests.rs
index e96260257..70acac1b1 100644
--- a/provers/cairo/src/tests/integration_tests.rs
+++ b/provers/cairo/src/tests/integration_tests.rs
@@ -1,4 +1,3 @@
-use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript;
 use lambdaworks_math::{
     errors::DeserializationError,
     field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
@@ -13,6 +12,7 @@ use stark_platinum_prover::{
     },
     trace::TraceTable,
     traits::AIR,
+    transcript::StoneProverTranscript,
 };
 
 use crate::{
@@ -240,7 +240,7 @@ fn check_simple_cairo_trace_evaluates_to_zero() {
     let (main_trace, public_input) =
         generate_prover_args(&program_content, &None, CairoLayout::Plain).unwrap();
     let mut trace_polys = main_trace.compute_trace_polys();
-    let mut transcript = DefaultTranscript::new();
+    let mut transcript = StoneProverTranscript::new(&[]);
     let proof_options = ProofOptions::default_test_options();
 
     let cairo_air = CairoAIR::new(main_trace.n_rows(), &public_input, &proof_options);
diff --git a/provers/stark/src/examples/dummy_air.rs b/provers/stark/src/examples/dummy_air.rs
index 40918eb8a..1fe41f46c 100644
--- a/provers/stark/src/examples/dummy_air.rs
+++ b/provers/stark/src/examples/dummy_air.rs
@@ -1,4 +1,3 @@
-use lambdaworks_crypto::fiat_shamir::transcript::Transcript;
 use lambdaworks_math::field::{
     element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
     traits::IsFFTField,
@@ -11,6 +10,7 @@ use crate::{
     proof::options::ProofOptions,
     trace::TraceTable,
     traits::AIR,
+    transcript::IsStarkTranscript,
 };
 
 #[derive(Clone)]
@@ -53,7 +53,11 @@ impl AIR for DummyAIR {
         TraceTable::empty()
     }
 
-
fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, frame: &Frame, diff --git a/provers/stark/src/examples/fibonacci_2_cols_shifted.rs b/provers/stark/src/examples/fibonacci_2_cols_shifted.rs index f5201bb31..66199d337 100644 --- a/provers/stark/src/examples/fibonacci_2_cols_shifted.rs +++ b/provers/stark/src/examples/fibonacci_2_cols_shifted.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone, Debug)] @@ -71,7 +71,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/fibonacci_2_columns.rs b/provers/stark/src/examples/fibonacci_2_columns.rs index 5cbf31e23..b058f6a90 100644 --- a/provers/stark/src/examples/fibonacci_2_columns.rs +++ b/provers/stark/src/examples/fibonacci_2_columns.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; use super::simple_fibonacci::FibonacciPublicInputs; @@ -62,7 +62,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/fibonacci_rap.rs b/provers/stark/src/examples/fibonacci_rap.rs index c5f634caf..08db064b1 100644 --- a/provers/stark/src/examples/fibonacci_rap.rs +++ b/provers/stark/src/examples/fibonacci_rap.rs @@ -1,6 +1,5 @@ use std::ops::Div; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, helpers::resize_to_next_power_of_two, @@ -14,7 +13,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, - transcript::transcript_to_field, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -96,8 +95,11 @@ where TraceTable::new_from_cols(&[aux_col]) } - fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges { - transcript_to_field(transcript) + fn build_rap_challenges( + &self, + transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + transcript.sample_field_element() } fn number_auxiliary_rap_columns(&self) -> usize { diff --git a/provers/stark/src/examples/quadratic_air.rs b/provers/stark/src/examples/quadratic_air.rs index a52244dff..d0fcc35b3 100644 --- a/provers/stark/src/examples/quadratic_air.rs +++ b/provers/stark/src/examples/quadratic_air.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -66,7 +66,11 @@ where 
TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/simple_fibonacci.rs b/provers/stark/src/examples/simple_fibonacci.rs index f766d6b4e..3a7998d5e 100644 --- a/provers/stark/src/examples/simple_fibonacci.rs +++ b/provers/stark/src/examples/simple_fibonacci.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -71,7 +71,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/fri/mod.rs b/provers/stark/src/fri/mod.rs index f677ea00e..2feb10b95 100644 --- a/provers/stark/src/fri/mod.rs +++ b/provers/stark/src/fri/mod.rs @@ -2,7 +2,6 @@ pub mod fri_commitment; pub mod fri_decommit; mod fri_functions; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::traits::{IsFFTField, IsField}; use lambdaworks_math::traits::ByteConversion; pub use lambdaworks_math::{ @@ -10,17 +9,18 @@ pub use lambdaworks_math::{ polynomial::Polynomial, }; +use crate::transcript::IsStarkTranscript; + use self::fri_commitment::FriLayer; use self::fri_decommit::FriDecommitment; use self::fri_functions::fold_polynomial; use super::traits::AIR; -use super::transcript::{transcript_to_field, transcript_to_u32}; -pub fn fri_commit_phase( +pub fn fri_commit_phase( number_layers: usize, p_0: Polynomial>, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, coset_offset: &FieldElement, domain_size: usize, ) -> (FieldElement, Vec>) @@ -34,13 +34,13 @@ where fri_layer_list.push(current_layer.clone()); let mut current_poly = p_0; // >>>> Send commitment: [p₀] - transcript.append(¤t_layer.merkle_tree.root); + transcript.append_bytes(¤t_layer.merkle_tree.root); let mut coset_offset = coset_offset.clone(); for _ in 1..number_layers { // <<<< Receive challenge 𝜁ₖ₋₁ - let zeta = transcript_to_field(transcript); + let zeta = transcript.sample_field_element(); coset_offset = coset_offset.square(); domain_size /= 2; @@ -51,11 +51,11 @@ where fri_layer_list.push(current_layer.clone()); // TODO: remove this clone // >>>> Send commitment: [pₖ] - transcript.append(new_data); + transcript.append_bytes(new_data); } // <<<< Receive challenge: 𝜁ₙ₋₁ - let zeta = transcript_to_field(transcript); + let zeta = transcript.sample_field_element(); let last_poly = fold_polynomial(¤t_poly, &zeta); @@ -66,27 +66,26 @@ where .clone(); // >>>> Send value: pₙ - transcript.append(&last_value.to_bytes_be()); + transcript.append_field_element(&last_value); (last_value, fri_layer_list) } -pub fn fri_query_phase( +pub fn fri_query_phase( air: &A, domain_size: usize, fri_layers: &Vec>, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> (Vec>, Vec) where F: IsFFTField, A: AIR, - T: Transcript, FieldElement: ByteConversion, { if !fri_layers.is_empty() { let number_of_queries = air.options().fri_number_of_queries; let iotas = (0..number_of_queries) - .map(|_| (transcript_to_u32(transcript) 
as usize) % domain_size) + .map(|_| (transcript.sample_u64(domain_size as u64)) as usize) .collect::>(); let query_list = iotas .iter() diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs index b687f5084..1c7e43fb1 100644 --- a/provers/stark/src/prover.rs +++ b/provers/stark/src/prover.rs @@ -1,13 +1,6 @@ #[cfg(feature = "instruments")] use std::time::Instant; -#[cfg(not(feature = "test_fiat_shamir"))] -use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; - -#[cfg(feature = "test_fiat_shamir")] -use lambdaworks_crypto::fiat_shamir::test_transcript::TestTranscript; - use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly}; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, @@ -21,7 +14,7 @@ use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelI #[cfg(debug_assertions)] use crate::debug::validate_trace; -use crate::transcript::sample_z_ood; +use crate::transcript::{sample_z_ood, IsStarkTranscript}; use super::config::{BatchedMerkleTree, Commitment}; use super::constraints::evaluator::ConstraintEvaluator; @@ -34,7 +27,7 @@ use super::proof::options::ProofOptions; use super::proof::stark::{DeepPolynomialOpenings, StarkProof}; use super::trace::TraceTable; use super::traits::AIR; -use super::transcript::{batch_sample_challenges, transcript_to_field}; +use super::transcript::batch_sample_challenges; #[derive(Debug)] pub enum ProvingError { @@ -81,17 +74,6 @@ struct Round4 { nonce: u64, } -#[cfg(feature = "test_fiat_shamir")] -fn round_0_transcript_initialization() -> TestTranscript { - TestTranscript::new() -} - -#[cfg(not(feature = "test_fiat_shamir"))] -fn round_0_transcript_initialization() -> DefaultTranscript { - // TODO: add strong fiat shamir - DefaultTranscript::new() -} - fn batch_commit(vectors: &[Vec>]) -> (BatchedMerkleTree, Commitment) where F: IsFFTField, @@ -122,10 +104,10 @@ where } #[allow(clippy::type_complexity)] -fn interpolate_and_commit( +fn interpolate_and_commit( trace: &TraceTable, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> ( Vec>>, Vec>>, @@ -133,7 +115,6 @@ fn interpolate_and_commit( Commitment, ) where - T: Transcript, F: IsFFTField, FieldElement: ByteConversion + Send + Sync, { @@ -147,7 +128,7 @@ where let (lde_trace_merkle_tree, lde_trace_merkle_root) = batch_commit(&lde_trace.rows()); // >>>> Send commitments: [tⱼ] - transcript.append(&lde_trace_merkle_root); + transcript.append_bytes(&lde_trace_merkle_root); ( trace_polys, @@ -183,11 +164,11 @@ where .unwrap() } -fn round_1_randomized_air_with_preprocessing, T: Transcript>( +fn round_1_randomized_air_with_preprocessing>( air: &A, main_trace: &TraceTable, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Result, ProvingError> where FieldElement: ByteConversion + Send + Sync, @@ -326,7 +307,6 @@ where fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial< F: IsFFTField, A: AIR, - T: Transcript, >( air: &A, domain: &Domain, @@ -334,7 +314,7 @@ fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial< round_2_result: &Round2, round_3_result: &Round3, z: &FieldElement, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Round4 where FieldElement: ByteConversion + Send + Sync, @@ -344,11 +324,11 @@ where // <<<< Receive challenges: 𝛾, 𝛾' let composition_poly_coeffients = [ - transcript_to_field(transcript), - 
transcript_to_field(transcript), + transcript.sample_field_element(), + transcript.sample_field_element(), ]; // <<<< Receive challenges: 𝛾ⱼ, 𝛾ⱼ' - let trace_poly_coeffients = batch_sample_challenges::( + let trace_poly_coeffients = batch_sample_challenges::( air.context().transition_offsets.len() * air.context().trace_columns, transcript, ); @@ -378,10 +358,10 @@ where // grinding: generate nonce and append it to the transcript let grinding_factor = air.context().proof_options.grinding_factor; - let transcript_challenge = transcript.challenge(); + let transcript_challenge = transcript.state(); let nonce = generate_nonce_with_grinding(&transcript_challenge, grinding_factor) .expect("nonce not found"); - transcript.append(&nonce.to_be_bytes()); + transcript.append_bytes(&nonce.to_be_bytes()); let (query_list, iotas) = fri_query_phase(air, domain_size, &fri_layers, transcript); @@ -581,6 +561,7 @@ pub fn prove( main_trace: &TraceTable, pub_inputs: &A::PublicInputs, proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, ) -> Result, ProvingError> where F: IsFFTField, @@ -590,13 +571,12 @@ where { info!("Started proof generation..."); #[cfg(feature = "instruments")] - println!("- Started round 0: Transcript Initialization"); + println!("- Started round 0: Air Initialization"); #[cfg(feature = "instruments")] let timer0 = Instant::now(); let air = A::new(main_trace.n_rows(), pub_inputs, proof_options); let domain = Domain::new(&air); - let mut transcript = round_0_transcript_initialization(); #[cfg(feature = "instruments")] let elapsed0 = timer0.elapsed(); @@ -612,7 +592,7 @@ where #[cfg(feature = "instruments")] let timer1 = Instant::now(); - let round_1_result = round_1_randomized_air_with_preprocessing::( + let round_1_result = round_1_randomized_air_with_preprocessing::( &air, main_trace, &domain, @@ -661,7 +641,7 @@ where ); // >>>> Send commitments: [H₁], [H₂] - transcript.append(&round_2_result.composition_poly_root); + transcript.append_bytes(&round_2_result.composition_poly_root); #[cfg(feature = "instruments")] let elapsed2 = timer2.elapsed(); @@ -693,22 +673,14 @@ where ); // >>>> Send value: H₁(z²) - transcript.append( - &round_3_result - .composition_poly_even_ood_evaluation - .to_bytes_be(), - ); + transcript.append_field_element(&round_3_result.composition_poly_even_ood_evaluation); // >>>> Send value: H₂(z²) - transcript.append( - &round_3_result - .composition_poly_odd_ood_evaluation - .to_bytes_be(), - ); + transcript.append_field_element(&round_3_result.composition_poly_odd_ood_evaluation); // >>>> Send values: tⱼ(zgᵏ) for row in round_3_result.trace_ood_evaluations.iter() { for element in row.iter() { - transcript.append(&element.to_bytes_be()); + transcript.append_field_element(element); } } diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index f45ca5f1a..77f244658 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -1,9 +1,5 @@ use lambdaworks_math::field::{ - element::FieldElement, - fields::{ - fft_friendly::stark_252_prime_field::Stark252PrimeField, - u64_prime_field::{F17, FE17}, - }, + element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, }; use crate::{ @@ -17,6 +13,7 @@ use crate::{ }, proof::options::ProofOptions, prover::prove, + transcript::StoneProverTranscript, verifier::verify, Felt252, }; @@ -36,20 +33,23 @@ fn test_prove_fib() { &trace, &pub_inputs, &proof_options, + 
StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]), ) ); } #[test_log::test] fn test_prove_fib17() { - let trace = simple_fibonacci::fibonacci_trace([FE17::from(1), FE17::from(1)], 4); + type FE = FieldElement; + let trace = simple_fibonacci::fibonacci_trace([FE::from(1), FE::from(1)], 4); let proof_options = ProofOptions { blowup_factor: 2, @@ -59,15 +59,22 @@ fn test_prove_fib17() { }; let pub_inputs = FibonacciPublicInputs { - a0: FE17::one(), - a1: FE17::one(), + a0: FE::one(), + a1: FE::one(), }; - let proof = prove::>(&trace, &pub_inputs, &proof_options).unwrap(); - assert!(verify::>( + let proof = prove::<_, FibonacciAIR<_>>( + &trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); + assert!(verify::<_, FibonacciAIR<_>>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]), )); } @@ -86,12 +93,18 @@ fn test_prove_fib_2_cols() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!(verify::< Stark252PrimeField, Fibonacci2ColsAIR, - >(&proof, &pub_inputs, &proof_options)); + >( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); } #[test_log::test] @@ -107,13 +120,18 @@ fn test_prove_fib_2_cols_shifted() { claimed_index, }; - let proof = - prove::>(&trace, &pub_inputs, &proof_options) - .unwrap(); + let proof = prove::>( + &trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); assert!(verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) )); } @@ -131,13 +149,15 @@ fn test_prove_quadratic() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) ) ); } @@ -159,13 +179,15 @@ fn test_prove_rap_fib() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) ) ); } @@ -177,10 +199,17 @@ fn test_prove_dummy() { let proof_options = ProofOptions::default_test_options(); - let proof = prove::(&trace, &(), &proof_options).unwrap(); + let proof = prove::( + &trace, + &(), + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); assert!(verify::( &proof, &(), - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) )); } diff --git a/provers/stark/src/traits.rs b/provers/stark/src/traits.rs index e3189312a..251b89ea3 100644 --- a/provers/stark/src/traits.rs +++ b/provers/stark/src/traits.rs @@ -1,11 +1,12 @@ use itertools::Itertools; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset, field::{element::FieldElement, traits::IsFFTField}, polynomial::Polynomial, }; +use crate::transcript::IsStarkTranscript; + use super::{ constraints::boundary::BoundaryConstraints, context::AirContext, frame::Frame, proof::options::ProofOptions, trace::TraceTable, @@ -29,7 +30,10 @@ pub trait AIR: Clone { rap_challenges: &Self::RAPChallenges, ) -> TraceTable; - fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges; + fn build_rap_challenges( + &self, + transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges; fn number_auxiliary_rap_columns(&self) -> usize; diff 
--git a/provers/stark/src/transcript.rs b/provers/stark/src/transcript.rs
index 635c8c418..6a8d7bebd 100644
--- a/provers/stark/src/transcript.rs
+++ b/provers/stark/src/transcript.rs
@@ -1,63 +1,148 @@
-use lambdaworks_crypto::fiat_shamir::transcript::Transcript;
 use lambdaworks_math::{
     field::{
         element::FieldElement,
-        traits::{IsFFTField, IsPrimeField},
+        fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
+        traits::{IsFFTField, IsField, IsPrimeField},
     },
     traits::ByteConversion,
+    unsigned_integer::element::U256,
 };
+use sha3::{Digest, Keccak256};
 
-/// Uses randomness from the transcript to create a FieldElement
-/// One bit less than the max used by the FieldElement is used as randomness. For StarkFields, this would be 251 bits randomness.
-/// Randomness is interpreted as limbs in BigEndian, and each Limb is ordered in BigEndian
-pub fn transcript_to_field<F: IsFFTField, T: Transcript>(transcript: &mut T) -> FieldElement<F>
-where
-    FieldElement<F>: lambdaworks_math::traits::ByteConversion,
-{
-    let mut randomness = transcript.challenge();
-    randomness_to_field(&mut randomness)
+pub trait IsStarkTranscript<F: IsField> {
+    fn append_field_element(&mut self, element: &FieldElement<F>);
+    fn append_bytes(&mut self, new_bytes: &[u8]);
+    fn state(&self) -> [u8; 32];
+    fn sample_field_element(&mut self) -> FieldElement<F>;
+    fn sample_u64(&mut self, upper_bound: u64) -> u64;
 }
 
-/// Transforms some random bytes to a field
-/// Slicing the randomness to one bit less than what the max number of the field is to ensure each random element has the same probability of appearing
-fn randomness_to_field<F: IsFFTField>(randomness: &mut [u8; 32]) -> FieldElement<F>
-where
-    FieldElement<F>: ByteConversion,
-{
-    let random_bits_required = F::field_bit_size() - 1;
-    let random_bits_created = randomness.len() * 8;
-    let mut bits_to_clear = random_bits_created - random_bits_required;
+fn keccak_hash(data: &[u8]) -> Keccak256 {
+    let mut hasher = Keccak256::new();
+    hasher.update(data);
+    hasher
+}
+
+const MODULUS_MAX_MULTIPLE: U256 =
+    U256::from_hex_unchecked("f80000000000020f00000000000000000000000000000000000000000000001f");
+const R_INV: U256 =
+    U256::from_hex_unchecked("0x40000000000001100000000000012100000000000000000000000000000000");
+
+pub struct StoneProverTranscript {
+    hash: Keccak256,
+    seed_increment: U256,
+    counter: u32,
+    spare_bytes: Vec<u8>,
+}
+
+impl StoneProverTranscript {
+    pub fn new(public_input_data: &[u8]) -> Self {
+        let hash = keccak_hash(public_input_data);
+        StoneProverTranscript {
+            hash,
+            seed_increment: U256::from_hex_unchecked("1"),
+            counter: 0,
+            spare_bytes: vec![],
+        }
+    }
 
-    let mut i = 0;
-    while bits_to_clear >= 8 {
-        randomness[i] = 0;
-        bits_to_clear -= 8;
-        i += 1;
+    pub fn sample_block(&mut self, used_bytes: usize) -> Vec<u8> {
+        let mut first_part: Vec<u8> = self.hash.clone().finalize().to_vec();
+        let mut counter_bytes: Vec<u8> = vec![0; 28]
+            .into_iter()
+            .chain(self.counter.to_be_bytes().to_vec())
+            .collect();
+        self.counter += 1;
+        first_part.append(&mut counter_bytes);
+        let block = keccak_hash(&first_part).finalize().to_vec();
+        self.spare_bytes.extend(&block[used_bytes..]);
+        block[..used_bytes].to_vec()
     }
 
-    let pre_mask: u8 = 1u8.checked_shl(8 - bits_to_clear as u32).unwrap_or(0);
-    let mask: u8 = pre_mask.wrapping_sub(1);
-    randomness[i] &= mask;
+    pub fn sample(&mut self, num_bytes: usize) -> Vec<u8> {
+        let num_blocks = num_bytes / 32;
+        let mut result: Vec<u8> = Vec::new();
+
+        for _ in 0..num_blocks {
+            let mut block = self.sample_block(32);
+            result.append(&mut block);
+        }
+
+        let rest = num_bytes % 32;
+        if rest <= self.spare_bytes.len() {
+            result.append(&mut self.spare_bytes[..rest].to_vec());
+            self.spare_bytes.drain(..rest);
+        } else {
+            let mut block = self.sample_block(rest);
+            result.append(&mut block);
+        }
+        result
+    }
 
-    FieldElement::from_bytes_be(randomness).unwrap()
+    pub fn sample_big_int(&mut self) -> U256 {
+        U256::from_bytes_be(&self.sample(32)).unwrap()
+    }
 }
 
-pub fn transcript_to_u32<T: Transcript>(transcript: &mut T) -> u32 {
-    const CANT_BYTES_U32: usize = (u32::BITS / 8) as usize;
-    let value = transcript.challenge()[..CANT_BYTES_U32].try_into().unwrap();
-    u32::from_be_bytes(value)
+impl IsStarkTranscript<Stark252PrimeField> for StoneProverTranscript {
+    fn append_field_element(&mut self, element: &FieldElement<Stark252PrimeField>) {
+        let limbs = element.value().limbs;
+        let mut bytes: [u8; 32] = [0; 32];
+
+        for i in (0..4).rev() {
+            let limb_bytes = limbs[i].to_be_bytes();
+            for j in 0..8 {
+                bytes[i * 8 + j] = limb_bytes[j]
+            }
+        }
+        self.append_bytes(&bytes);
+    }
+
+    fn append_bytes(&mut self, new_bytes: &[u8]) {
+        let mut result_hash = [0_u8; 32];
+        result_hash.copy_from_slice(&self.hash.clone().finalize_reset());
+        result_hash.reverse();
+
+        let digest = U256::from_bytes_be(&self.hash.clone().finalize()).unwrap();
+        let new_seed = (digest + self.seed_increment).to_bytes_be();
+        self.hash = keccak_hash(&[&new_seed, new_bytes].concat());
+        self.counter = 0;
+        self.spare_bytes.clear();
+    }
+
+    fn state(&self) -> [u8; 32] {
+        let mut state = [0u8; 32];
+        state.copy_from_slice(&self.hash.clone().finalize());
+        state
+    }
+
+    fn sample_field_element(&mut self) -> FieldElement<Stark252PrimeField> {
+        let mut result = self.sample_big_int();
+        while result >= MODULUS_MAX_MULTIPLE {
+            result = self.sample_big_int();
+        }
+        FieldElement::new(result) * FieldElement::new(R_INV)
+    }
+
+    fn sample_u64(&mut self, upper_bound: u64) -> u64 {
+        // assert!(upper_bound < (1 << 12));
+        let mut bytes = [0u8; 8];
+        bytes.copy_from_slice(&self.sample(8));
+        let u64_val: u64 = u64::from_be_bytes(bytes);
+        u64_val % upper_bound
+    }
 }
 
-pub fn sample_z_ood<F: IsPrimeField, T: Transcript>(
+pub fn sample_z_ood<F: IsPrimeField>(
     lde_roots_of_unity_coset: &[FieldElement<F>],
     trace_roots_of_unity: &[FieldElement<F>],
-    transcript: &mut T,
+    transcript: &mut impl IsStarkTranscript<F>,
 ) -> FieldElement<F>
 where
     FieldElement<F>: ByteConversion,
 {
     loop {
-        let value: FieldElement<F> = transcript_to_field(transcript);
+        let value: FieldElement<F> = transcript.sample_field_element();
         if !lde_roots_of_unity_coset.iter().any(|x| x == &value)
             && !trace_roots_of_unity.iter().any(|x| x == &value)
         {
@@ -66,149 +151,307 @@ where
     }
 }
 
-pub fn batch_sample_challenges<F: IsFFTField, T: Transcript>(
+pub fn batch_sample_challenges<F: IsFFTField>(
     size: usize,
-    transcript: &mut T,
+    transcript: &mut impl IsStarkTranscript<F>,
 ) -> Vec<FieldElement<F>>
 where
     FieldElement<F>: ByteConversion,
 {
-    (0..size).map(|_| transcript_to_field(transcript)).collect()
+    (0..size)
+        .map(|_| transcript.sample_field_element())
+        .collect()
 }
 
 #[cfg(test)]
 mod tests {
-    use lambdaworks_math::{
-        field::{
-            element::FieldElement,
-            fields::{
-                fft_friendly::stark_252_prime_field::Stark252PrimeField,
-                montgomery_backed_prime_fields::{IsModulus, U256PrimeField},
-            },
-        },
-        unsigned_integer::element::U256,
+    use lambdaworks_math::field::{
+        element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
     };
 
-    use crate::transcript::randomness_to_field;
+    use crate::transcript::{IsStarkTranscript, StoneProverTranscript};
 
-    #[test]
-    fn test_stark_prime_field_random_to_field_32() {
-        #[rustfmt::skip]
-        let mut randomness: [u8; 32] = [
-            248, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 0, 0, 0,
-            0, 0, 0, 0, 0, 
0, 0, 32, - ]; + use std::num::ParseIntError; + + type FE = FieldElement; - type FE = FieldElement; - let field_element: FE = randomness_to_field(&mut randomness); - let expected_fe = FE::from(32u64); - assert_eq!(field_element, expected_fe) + pub fn decode_hex(s: &str) -> Result, ParseIntError> { + (0..s.len()) + .step_by(2) + .map(|i| u8::from_str_radix(&s[i..i + 2], 16)) + .collect() } #[test] - fn test_stark_prime_field_random_to_fiel_repeated_f_and_zero() { - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - ]; - - type FE = FieldElement; - - // 251 bits should be used (252 of StarkField - 1) to avoid duplicates - // This leaves a 7 - let expected_fe = FE::from_hex_unchecked( - "\ - 0700FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00", + fn sample_bytes_from_stone_prover_channel() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02, 0x03]); + transcript.append_bytes(&[0x04, 0x05, 0x06]); + assert_eq!( + transcript.sample(32), + vec![ + 0x8a, 0x3a, 0x67, 0xd1, 0x25, 0xa5, 0xa5, 0xea, 0x57, 0xc3, 0xfb, 0xe2, 0xc2, 0x55, + 0xb6, 0x0d, 0x0c, 0x89, 0x13, 0xa6, 0x27, 0x13, 0xe0, 0x99, 0xb3, 0x77, 0xc6, 0xc2, + 0x9a, 0x21, 0x85, 0x97 + ] + ); + assert_eq!( + transcript.sample(64), + vec![ + 0x56, 0xde, 0x56, 0x2a, 0xfd, 0x98, 0x19, 0xb9, 0xaa, 0xa0, 0x1b, 0x16, 0xf4, 0xeb, + 0x33, 0x71, 0xd5, 0xd8, 0x0f, 0x35, 0x29, 0xd8, 0xc1, 0x7a, 0x4b, 0xf4, 0x10, 0xe3, + 0x19, 0xb7, 0x64, 0x4a, 0xd2, 0x1c, 0xff, 0x14, 0x3d, 0xfd, 0xca, 0x32, 0x2c, 0x59, + 0xa3, 0x47, 0x5d, 0xd0, 0x34, 0xdf, 0x6d, 0xa7, 0x0c, 0xf5, 0xd2, 0x6a, 0xdd, 0x65, + 0xe0, 0x6d, 0x1e, 0x4f, 0xc7, 0x39, 0x52, 0x32 + ] + ); + assert_eq!( + transcript.sample(48), + vec![ + 0xe4, 0xb6, 0x3c, 0xfc, 0x03, 0xc9, 0x82, 0x8b, 0x63, 0x53, 0xb9, 0xad, 0x73, 0x6d, + 0x23, 0x88, 0x4c, 0x07, 0xb4, 0x9d, 0xf1, 0x1d, 0xef, 0xb9, 0x53, 0xfa, 0x02, 0xb5, + 0x3c, 0x43, 0xcf, 0xa3, 0x30, 0x5a, 0x02, 0x7e, 0xa6, 0x5e, 0x3c, 0x86, 0x3d, 0xdb, + 0x48, 0xea, 0x73, 0xbf, 0xdf, 0xab + ] + ); + assert_eq!( + transcript.sample(32), + vec![ + 0x82, 0xe1, 0xd4, 0xf8, 0xf0, 0x61, 0xa4, 0x17, 0x4b, 0xed, 0x58, 0x4e, 0xb5, 0x73, + 0x26, 0xb7, 0x63, 0x10, 0x37, 0x97, 0xbe, 0x0b, 0x57, 0xaf, 0x74, 0xfe, 0x33, 0x19, + 0xbd, 0xe5, 0x53, 0x21, + ] + ); + assert_eq!( + transcript.sample(16), + vec![ + 0xb0, 0xc6, 0x7a, 0x04, 0x19, 0x0a, 0x25, 0x72, 0xa8, 0x2e, 0xfa, 0x97, 0x92, 0x44, + 0x73, 0xe9 + ] + ); + assert_eq!( + transcript.sample(8), + vec![0xbd, 0x41, 0x28, 0xdd, 0x3a, 0xbc, 0x66, 0x18] + ); + assert_eq!( + transcript.sample(32), + vec![ + 0xcb, 0x66, 0xc9, 0x72, 0x39, 0x85, 0xe8, 0x7c, 0x30, 0xe1, 0xc7, 0x1d, 0x2f, 0x83, + 0x4a, 0xcd, 0x33, 0x85, 0xfb, 0xd5, 0x40, 0x69, 0x22, 0x6e, 0xc0, 0xf1, 0x8c, 0x40, + 0x26, 0x2f, 0x5f, 0x7c, + ] + ); + transcript.append_bytes(&[0x03, 0x02]); + assert_eq!( + transcript.sample(32), + vec![ + 0x69, 0x63, 0x72, 0x01, 0x84, 0x8b, 0x22, 0x82, 0xa6, 0x14, 0x6d, 0x47, 0xbb, 0xa9, + 0xa3, 0xc8, 0xdc, 0x1b, 0x8e, 0x2e, 0x2e, 0x21, 0x87, 0x77, 0xac, 0xe0, 0x3e, 0xce, + 0x6e, 0xa7, 0x9e, 0xb0, + ] ); - - let field_element: FE = randomness_to_field(&mut randomness); - - assert_eq!(field_element, expected_fe) } #[test] - fn test_241_bit_random_to_field() { - #[derive(Clone, Debug)] - pub struct TestModulus; - impl IsModulus for TestModulus { - const MODULUS: U256 = U256::from_hex_unchecked( - "\ - 0001000000000011\ - 0000000000000000\ - 0000000000000000\ - 
0000000000000001", - ); - } - - pub type TestField = U256PrimeField; - - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 255, 255, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - ]; - - type FE = FieldElement; - - let expected_fe = FE::from_hex_unchecked( - "\ - 0000FF0102030405\ - 0607080102030405\ - 0607080102030405\ - 0607080102030405", + fn test_sample_bytes() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]); + assert_eq!( + transcript.sample(8), + vec![89, 27, 84, 161, 127, 200, 195, 181] ); - - let field_element: FE = randomness_to_field(&mut randomness); - - assert_eq!(field_element, expected_fe); } #[test] - fn test_249_bit_random_to_field() { - #[derive(Clone, Debug)] - pub struct TestModulus; - impl IsModulus for TestModulus { - const MODULUS: U256 = U256::from_hex_unchecked( - "\ - 0200000000000011\ - 0000000000000000\ - 0000000000000000\ - 0000000000000001", - ); - } + fn test_sample_field_element() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]); + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "20b962ed1a29c942e11dc63c00b51de816bcd8bf9acd221f3fa55e5201d69be" + ) + ); + } - pub type TestField = U256PrimeField; + #[test] + fn test_sample_u64_element() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]); + assert_eq!(transcript.sample_u64(1024), 949); + } - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - ]; + #[test] + fn test_sample_u64_after_appending_and_sampling_bytes() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]); + transcript.append_bytes(&[0x01, 0x02]); + assert_eq!(transcript.sample(4), vec![0x06, 0xe5, 0x36, 0xf5]); + assert_eq!(transcript.sample_u64(16), 5); + } - let expected_fe = FE::from_hex_unchecked( - "\ - 0100FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00", + #[test] + fn test_transcript_compatibility_with_stone_prover_1() { + // This corresponds to the following run. 
+ // Air: `Fibonacci2ColsShifted` + // `trace_length`: 4 + // `blowup_factor`: 2 + // `fri_number_of_queries`: 1 + let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]); + // Send hash of trace commitment + transcript.append_bytes( + &decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594") + .unwrap(), ); + // Sample challenge to collapse the constraints for the composition polynomial + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7" + ) + ); + // Send hash of composition poly commitment H(z) + transcript.append_bytes( + &decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38") + .unwrap(), + ); + // Sample challenge Z to compute t_j(z), H(z) + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "317629e783794b52cd27ac3a5e418c057fec9dd42f2b537cdb3f24c95b3e550" + ) + ); + // Append t_j(z), H(z) + transcript.append_field_element(&FE::from_hex_unchecked( + "70d8181785336cc7e0a0a1078a79ee6541ca0803ed3ff716de5a13c41684037", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "29808fc8b7480a69295e4b61600480ae574ca55f8d118100940501b789c1630", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "7d8110f21d1543324cc5e472ab82037eaad785707f8cae3d64c5b9034f0abd2", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "1b58470130218c122f71399bf1e04cf75a6e8556c4751629d5ce8c02cc4e62d", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "1c0b7c2275e36d62dfb48c791be122169dcc00c616c63f8efb2c2a504687e85", + )); + // Sample challenge Gamma to collapse the terms of the deep composition polynomial (batch open). + // Powers of this challenge are used if more than two terms. + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "a0c79c1c77ded19520873d9c2440451974d23302e451d13e8124cf82fc15dd" + ) + ); + // FRI: Sample challenge Zeta to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "5c6b5a66c9fda19f583f0b10edbaade98d0e458288e62c2fa40e3da2b293cef" + ) + ); + // FRI: Send hash of commitment at Layer 1 + transcript.append_bytes( + &decode_hex("49c5672520e20eccc72aa28d6fa0d7ef446f1ede38d7c64fbb95d0f34a281803") + .unwrap(), + ); + // FRI: Sample challenge to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "4243ca9a618e2127590af8e1b38c63a156863fe95e4211cc1ade9b50667bbfa" + ) + ); + // Send field element at final layer of FRI + transcript.append_field_element(&FE::from_hex_unchecked( + "702ddae5809ad82a82556eed2d202202d770962b7d4d82581e183df3efa2da6", + )); + // Send proof of work + transcript.append_bytes(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x30, 0x4d]); // Eight bytes + // Sample query indices + assert_eq!(transcript.sample_u64(8), 0); + } - type FE = FieldElement; - - let field_element: FE = randomness_to_field(&mut randomness); - - assert_eq!(field_element, expected_fe) + #[test] + fn test_transcript_compatibility_with_stone_prover_2() { + // This corresponds to the following run. 
+ // Air: `Fibonacci2ColsShifted` + // `trace_length`: 4 + // `blowup_factor`: 6 + // `fri_number_of_queries`: 2 + let mut transcript = StoneProverTranscript::new(&[0xfa, 0xfa, 0xfa, 0xee]); + // Send hash of trace commitment + transcript.append_bytes( + &decode_hex("99d8d4342895c4e35a084f8ea993036be06f51e7fa965734ed9c7d41104f0848") + .unwrap(), + ); + // Sample challenge to collapse the constraints for the composition polynomial + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "3fc675742e0692558bb95f36bd34bdfe050697ed0d849e5369808685e548441" + ) + ); + // Send hash of composition poly commitment H(z) + transcript.append_bytes( + &decode_hex("2f4b599828a3f1ac458202ce06ec223bc9f4ad9ac758030109d40eebcf5776fd") + .unwrap(), + ); + // Sample challenge Z to compute t_j(z), H(z) + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "7298af9e2574933e62e51b107b8ef52f253d20644fc7250e9af118b02bc8a71" + ) + ); + // Append t_j(z), H(z) + transcript.append_field_element(&FE::from_hex_unchecked( + "6791c8cdbd981f7db9786d702b21b87f4128a6941f35683d8b10faafcab83d5", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "3cd6d8a23d01db66ea4911d6d7b09595b674f0507278fbf1f15cd85aa4ba72d", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "3123deded538b40c1faa7988310f315860a43e320ae70f8f86eaeadf3828a10", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "4d2edcc28870d79cbbb87181ffcb5942f7fa1c7b5f5bd5794c43452700e00d7", + )); + transcript.append_field_element(&FE::from_hex_unchecked( + "5c244407085950973147074ee245bd1c7ed6d8a019df997aab1928a4a9a1e19", + )); + // Sample challenge Gamma to collapse the terms of the deep composition polynomial (batch open). + // Powers of this challenge are used if more than two terms. 
+ assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "12f2b9edda6bb334bdf340d99eb0e6815e57aabffb48359117f71e7d0159d93" + ) + ); + // FRI: Sample challenge Zeta to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "7549307d78354156552667acf19a0ae978d4ec4954d210e23d9979672987dc" + ) + ); + // FRI: Send hash of commitment at Layer 1 + transcript.append_bytes( + &decode_hex("97decf0ad3cd590e7e5a4f85b3d4fa8c02c6d4b5343388c4536127dc8ef0fbf2") + .unwrap(), + ); + // FRI: Sample challenge to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "4b79e806108567fd0f670ded2be5468009aaefeb993b346579c4f295fa3ddd0" + ) + ); + // Send field element at final layer of FRI + transcript.append_field_element(&FE::from_hex_unchecked( + "7b8aa43aef4d3f2d476608251cffc9fa1c655bedecbcac49e4cafb012c7edf4", + )); + // Send proof of work + transcript.append_bytes(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3b, 0xb8]); + assert_eq!(transcript.sample_u64(128), 28); + assert_eq!(transcript.sample_u64(128), 31); } } diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs index 028241a30..5b948607f 100644 --- a/provers/stark/src/verifier.rs +++ b/provers/stark/src/verifier.rs @@ -3,13 +3,8 @@ use std::time::Instant; //use itertools::multizip; #[cfg(not(feature = "test_fiat_shamir"))] -use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use log::error; -#[cfg(feature = "test_fiat_shamir")] -use lambdaworks_crypto::fiat_shamir::test_transcript::TestTranscript; - use lambdaworks_math::{ field::{ element::FieldElement, @@ -18,6 +13,8 @@ use lambdaworks_math::{ traits::ByteConversion, }; +use crate::transcript::IsStarkTranscript; + use super::{ config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend}, domain::Domain, @@ -25,20 +22,9 @@ use super::{ grinding::hash_transcript_with_int_and_get_leading_zeros, proof::{options::ProofOptions, stark::StarkProof}, traits::AIR, - transcript::{batch_sample_challenges, sample_z_ood, transcript_to_field, transcript_to_u32}, + transcript::{batch_sample_challenges, sample_z_ood}, }; -#[cfg(feature = "test_fiat_shamir")] -fn step_1_transcript_initialization() -> TestTranscript { - TestTranscript::new() -} - -#[cfg(not(feature = "test_fiat_shamir"))] -fn step_1_transcript_initialization() -> DefaultTranscript { - // TODO: add strong fiat shamir - DefaultTranscript::new() -} - struct Challenges where F: IsFFTField, @@ -56,17 +42,16 @@ where leading_zeros_count: u8, // number of leading zeros in the grinding } -fn step_1_replay_rounds_and_recover_challenges( +fn step_1_replay_rounds_and_recover_challenges( air: &A, proof: &StarkProof, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Challenges where F: IsFFTField, FieldElement: ByteConversion, A: AIR, - T: Transcript, { // =================================== // ==========| Round 1 |========== @@ -75,12 +60,12 @@ where // <<<< Receive commitments:[tⱼ] let total_columns = air.context().trace_columns; - transcript.append(&proof.lde_trace_merkle_roots[0]); + transcript.append_bytes(&proof.lde_trace_merkle_roots[0]); let rap_challenges = air.build_rap_challenges(transcript); if let Some(root) = proof.lde_trace_merkle_roots.get(1) { - transcript.append(root); + transcript.append_bytes(root); } // =================================== @@ -98,7 +83,7 @@ where 
batch_sample_challenges(air.context().num_transition_constraints, transcript); // <<<< Receive commitments: [H₁], [H₂] - transcript.append(&proof.composition_poly_root); + transcript.append_bytes(&proof.composition_poly_root); // =================================== // ==========| Round 3 |========== @@ -112,13 +97,13 @@ where ); // <<<< Receive value: H₁(z²) - transcript.append(&proof.composition_poly_even_ood_evaluation.to_bytes_be()); + transcript.append_field_element(&proof.composition_poly_even_ood_evaluation); // <<<< Receive value: H₂(z²) - transcript.append(&proof.composition_poly_odd_ood_evaluation.to_bytes_be()); + transcript.append_field_element(&proof.composition_poly_odd_ood_evaluation); // <<<< Receive values: tⱼ(zgᵏ) for i in 0..proof.trace_ood_frame_evaluations.num_rows() { for element in proof.trace_ood_frame_evaluations.get_row(i).iter() { - transcript.append(&element.to_bytes_be()); + transcript.append_field_element(element); } } @@ -127,8 +112,8 @@ where // =================================== // >>>> Send challenges: 𝛾, 𝛾' - let gamma_even = transcript_to_field(transcript); - let gamma_odd = transcript_to_field(transcript); + let gamma_even = transcript.sample_field_element(); + let gamma_odd = transcript.sample_field_element(); // >>>> Send challenges: 𝛾ⱼ, 𝛾ⱼ' // Get the number of trace terms the DEEP composition poly will have. @@ -137,7 +122,7 @@ where let trace_term_coeffs = (0..total_columns) .map(|_| { (0..air.context().transition_offsets.len()) - .map(|_| transcript_to_field(transcript)) + .map(|_| transcript.sample_field_element()) .collect() }) .collect::>>>(); @@ -149,29 +134,29 @@ where .iter() .map(|root| { // <<<< Receive commitment: [pₖ] (the first one is [p₀]) - transcript.append(root); + transcript.append_bytes(root); // >>>> Send challenge 𝜁ₖ - transcript_to_field(transcript) + transcript.sample_field_element() }) .collect::>>(); // <<<< Receive value: pₙ - transcript.append(&proof.fri_last_value.to_bytes_be()); + transcript.append_field_element(&proof.fri_last_value); // Receive grinding value // 1) Receive challenge from the transcript - let transcript_challenge = transcript.challenge(); + let transcript_challenge = transcript.state(); let nonce = proof.nonce; let leading_zeros_count = hash_transcript_with_int_and_get_leading_zeros(&transcript_challenge, nonce); - transcript.append(&nonce.to_be_bytes()); + transcript.append_bytes(&nonce.to_be_bytes()); // FRI query phase // <<<< Send challenges 𝜄ₛ (iota_s) let iota_max: usize = 2_usize.pow(domain.lde_root_order); let iotas: Vec = (0..air.options().fri_number_of_queries) - .map(|_| (transcript_to_u32(transcript) as usize) % iota_max) + .map(|_| (transcript.sample_u64(iota_max as u64) as usize) % iota_max) .collect(); Challenges { @@ -523,6 +508,7 @@ pub fn verify( proof: &StarkProof, pub_input: &A::PublicInputs, proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, ) -> bool where F: IsFFTField, @@ -539,7 +525,6 @@ where #[cfg(feature = "instruments")] let timer1 = Instant::now(); - let mut transcript = step_1_transcript_initialization(); let air = A::new(proof.trace_length, pub_input, proof_options); let domain = Domain::new(&air);