From e6005d878a2c8453c7907e5ee54f70db1b7e5524 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Wed, 20 Sep 2023 23:25:13 -0300 Subject: [PATCH] add StarkTranscript trait and implementation --- exercises/message/src/starks/prover.rs | 6 + provers/cairo/src/air.rs | 28 +- provers/cairo/src/tests/integration_tests.rs | 4 +- provers/stark/src/examples/dummy_air.rs | 8 +- .../src/examples/fibonacci_2_cols_shifted.rs | 8 +- .../stark/src/examples/fibonacci_2_columns.rs | 8 +- provers/stark/src/examples/fibonacci_rap.rs | 10 +- provers/stark/src/examples/quadratic_air.rs | 8 +- .../stark/src/examples/simple_fibonacci.rs | 8 +- provers/stark/src/fri/mod.rs | 19 +- provers/stark/src/prover.rs | 48 +- provers/stark/src/tests/integration_tests.rs | 103 +-- provers/stark/src/traits.rs | 8 +- provers/stark/src/transcript.rs | 613 +++++++++++++----- provers/stark/src/verifier.rs | 39 +- 15 files changed, 631 insertions(+), 287 deletions(-) diff --git a/exercises/message/src/starks/prover.rs b/exercises/message/src/starks/prover.rs index 2e0fb19171..12fbcad471 100644 --- a/exercises/message/src/starks/prover.rs +++ b/exercises/message/src/starks/prover.rs @@ -744,6 +744,12 @@ where .collect() } +struct StoneProverTranscript { + counter: usize, + spare_bytes: Vec<u8>, + hash: Box<dyn Transcript>, +} + // FIXME remove unwrap() calls and return errors pub fn prove( main_trace: &TraceTable, diff --git a/provers/cairo/src/air.rs b/provers/cairo/src/air.rs index 5fb91794d7..84f6fb6dfe 100644 --- a/provers/cairo/src/air.rs +++ b/provers/cairo/src/air.rs @@ -1,7 +1,6 @@ use std::ops::Range; use cairo_vm::without_std::collections::HashMap; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ errors::DeserializationError, field::{ @@ -17,7 +16,7 @@ use stark_platinum_prover::{ prover::{prove, ProvingError}, trace::TraceTable, traits::AIR, - transcript::transcript_to_field, + transcript::{IsStarkTranscript, StoneProverTranscript}, verifier::verify, }; @@ -790,11 +789,14 @@ impl AIR for CairoAIR { TraceTable::new(aux_table, self.number_auxiliary_rap_columns()) } - fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges { + fn build_rap_challenges( + &self, + transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { CairoRAPChallenges { - alpha_memory: transcript_to_field(transcript), - z_memory: transcript_to_field(transcript), - z_range_check: transcript_to_field(transcript), + alpha_memory: transcript.sample_field_element(), + z_memory: transcript.sample_field_element(), + z_range_check: transcript.sample_field_element(), } } @@ -1252,7 +1254,12 @@ pub fn generate_cairo_proof( pub_input: &PublicInputs, proof_options: &ProofOptions, ) -> Result, ProvingError> { - prove::(trace, pub_input, proof_options) + prove::( + trace, + pub_input, + proof_options, + StoneProverTranscript::new(&[]), + ) } /// Wrapper function for verifying Cairo proofs without the need to specify @@ -1263,7 +1270,12 @@ pub fn verify_cairo_proof( pub_input: &PublicInputs, proof_options: &ProofOptions, ) -> bool { - verify::(proof, pub_input, proof_options) + verify::( + proof, + pub_input, + proof_options, + StoneProverTranscript::new(&[]), + ) } #[cfg(test)] diff --git a/provers/cairo/src/tests/integration_tests.rs b/provers/cairo/src/tests/integration_tests.rs index e962602570..70acac1b13 100644 --- a/provers/cairo/src/tests/integration_tests.rs +++ b/provers/cairo/src/tests/integration_tests.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript;
use lambdaworks_math::{ errors::DeserializationError, field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, @@ -13,6 +12,7 @@ use stark_platinum_prover::{ }, trace::TraceTable, traits::AIR, + transcript::StoneProverTranscript, }; use crate::{ @@ -240,7 +240,7 @@ fn check_simple_cairo_trace_evaluates_to_zero() { let (main_trace, public_input) = generate_prover_args(&program_content, &None, CairoLayout::Plain).unwrap(); let mut trace_polys = main_trace.compute_trace_polys(); - let mut transcript = DefaultTranscript::new(); + let mut transcript = StoneProverTranscript::new(&[]); let proof_options = ProofOptions::default_test_options(); let cairo_air = CairoAIR::new(main_trace.n_rows(), &public_input, &proof_options); diff --git a/provers/stark/src/examples/dummy_air.rs b/provers/stark/src/examples/dummy_air.rs index 40918eb8af..1fe41f46cb 100644 --- a/provers/stark/src/examples/dummy_air.rs +++ b/provers/stark/src/examples/dummy_air.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{ element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, traits::IsFFTField, @@ -11,6 +10,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -53,7 +53,11 @@ impl AIR for DummyAIR { TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, frame: &Frame, diff --git a/provers/stark/src/examples/fibonacci_2_cols_shifted.rs b/provers/stark/src/examples/fibonacci_2_cols_shifted.rs index f5201bb315..66199d337f 100644 --- a/provers/stark/src/examples/fibonacci_2_cols_shifted.rs +++ b/provers/stark/src/examples/fibonacci_2_cols_shifted.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone, Debug)] @@ -71,7 +71,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/fibonacci_2_columns.rs b/provers/stark/src/examples/fibonacci_2_columns.rs index 5cbf31e23a..b058f6a903 100644 --- a/provers/stark/src/examples/fibonacci_2_columns.rs +++ b/provers/stark/src/examples/fibonacci_2_columns.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; use super::simple_fibonacci::FibonacciPublicInputs; @@ -62,7 +62,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/fibonacci_rap.rs b/provers/stark/src/examples/fibonacci_rap.rs index c5f634cafb..08db064b18 100644 --- 
a/provers/stark/src/examples/fibonacci_rap.rs +++ b/provers/stark/src/examples/fibonacci_rap.rs @@ -1,6 +1,5 @@ use std::ops::Div; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, helpers::resize_to_next_power_of_two, @@ -14,7 +13,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, - transcript::transcript_to_field, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -96,8 +95,11 @@ where TraceTable::new_from_cols(&[aux_col]) } - fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges { - transcript_to_field(transcript) + fn build_rap_challenges( + &self, + transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + transcript.sample_field_element() } fn number_auxiliary_rap_columns(&self) -> usize { diff --git a/provers/stark/src/examples/quadratic_air.rs b/provers/stark/src/examples/quadratic_air.rs index a52244dff0..d0fcc35b35 100644 --- a/provers/stark/src/examples/quadratic_air.rs +++ b/provers/stark/src/examples/quadratic_air.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -66,7 +66,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/examples/simple_fibonacci.rs b/provers/stark/src/examples/simple_fibonacci.rs index f766d6b4e5..3a7998d5e6 100644 --- a/provers/stark/src/examples/simple_fibonacci.rs +++ b/provers/stark/src/examples/simple_fibonacci.rs @@ -1,4 +1,3 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; use crate::{ @@ -8,6 +7,7 @@ use crate::{ proof::options::ProofOptions, trace::TraceTable, traits::AIR, + transcript::IsStarkTranscript, }; #[derive(Clone)] @@ -71,7 +71,11 @@ where TraceTable::empty() } - fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} + fn build_rap_challenges( + &self, + _transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges { + } fn compute_transition( &self, diff --git a/provers/stark/src/fri/mod.rs b/provers/stark/src/fri/mod.rs index f677ea00eb..840b496f09 100644 --- a/provers/stark/src/fri/mod.rs +++ b/provers/stark/src/fri/mod.rs @@ -2,7 +2,6 @@ pub mod fri_commitment; pub mod fri_decommit; mod fri_functions; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::field::traits::{IsFFTField, IsField}; use lambdaworks_math::traits::ByteConversion; pub use lambdaworks_math::{ @@ -10,17 +9,18 @@ pub use lambdaworks_math::{ polynomial::Polynomial, }; +use crate::transcript::IsStarkTranscript; + use self::fri_commitment::FriLayer; use self::fri_decommit::FriDecommitment; use self::fri_functions::fold_polynomial; use super::traits::AIR; -use super::transcript::{transcript_to_field, transcript_to_u32}; -pub fn fri_commit_phase( +pub fn fri_commit_phase( number_layers: usize, p_0: Polynomial>, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, coset_offset: &FieldElement, domain_size: usize, ) -> (FieldElement, Vec>) @@ -40,7 +40,7 @@ where for _ 
in 1..number_layers { // <<<< Receive challenge 𝜁ₖ₋₁ - let zeta = transcript_to_field(transcript); + let zeta = transcript.sample_field_element(); coset_offset = coset_offset.square(); domain_size /= 2; @@ -55,7 +55,7 @@ where } // <<<< Receive challenge: 𝜁ₙ₋₁ - let zeta = transcript_to_field(transcript); + let zeta = transcript.sample_field_element(); let last_poly = fold_polynomial(¤t_poly, &zeta); @@ -71,22 +71,21 @@ where (last_value, fri_layer_list) } -pub fn fri_query_phase( +pub fn fri_query_phase( air: &A, domain_size: usize, fri_layers: &Vec>, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> (Vec>, Vec) where F: IsFFTField, A: AIR, - T: Transcript, FieldElement: ByteConversion, { if !fri_layers.is_empty() { let number_of_queries = air.options().fri_number_of_queries; let iotas = (0..number_of_queries) - .map(|_| (transcript_to_u32(transcript) as usize) % domain_size) + .map(|_| (transcript.sample_u64(domain_size as u64)) as usize) .collect::>(); let query_list = iotas .iter() diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs index ff9102e8e1..001d4f4d21 100644 --- a/provers/stark/src/prover.rs +++ b/provers/stark/src/prover.rs @@ -1,13 +1,6 @@ #[cfg(feature = "instruments")] use std::time::Instant; -#[cfg(not(feature = "test_fiat_shamir"))] -use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; - -#[cfg(feature = "test_fiat_shamir")] -use lambdaworks_crypto::fiat_shamir::test_transcript::TestTranscript; - use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly}; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, @@ -21,7 +14,7 @@ use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelI #[cfg(debug_assertions)] use crate::debug::validate_trace; -use crate::transcript::sample_z_ood; +use crate::transcript::{sample_z_ood, IsStarkTranscript}; use super::config::{BatchedMerkleTree, Commitment}; use super::constraints::evaluator::ConstraintEvaluator; @@ -34,7 +27,7 @@ use super::proof::options::ProofOptions; use super::proof::stark::{DeepPolynomialOpenings, StarkProof}; use super::trace::TraceTable; use super::traits::AIR; -use super::transcript::{batch_sample_challenges, transcript_to_field}; +use super::transcript::batch_sample_challenges; #[derive(Debug)] pub enum ProvingError { @@ -81,17 +74,6 @@ struct Round4 { nonce: u64, } -#[cfg(feature = "test_fiat_shamir")] -fn round_0_transcript_initialization() -> TestTranscript { - TestTranscript::new() -} - -#[cfg(not(feature = "test_fiat_shamir"))] -fn round_0_transcript_initialization() -> DefaultTranscript { - // TODO: add strong fiat shamir - DefaultTranscript::new() -} - fn batch_commit(vectors: &[Vec>]) -> (BatchedMerkleTree, Commitment) where F: IsFFTField, @@ -122,10 +104,10 @@ where } #[allow(clippy::type_complexity)] -fn interpolate_and_commit( +fn interpolate_and_commit( trace: &TraceTable, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> ( Vec>>, Vec>>, @@ -133,7 +115,6 @@ fn interpolate_and_commit( Commitment, ) where - T: Transcript, F: IsFFTField, FieldElement: ByteConversion + Send + Sync, { @@ -183,11 +164,11 @@ where .unwrap() } -fn round_1_randomized_air_with_preprocessing, T: Transcript>( +fn round_1_randomized_air_with_preprocessing>( air: &A, main_trace: &TraceTable, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Result, ProvingError> where FieldElement: 
ByteConversion + Send + Sync, @@ -326,7 +307,6 @@ where fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial< F: IsFFTField, A: AIR, - T: Transcript, >( air: &A, domain: &Domain, @@ -334,7 +314,7 @@ fn round_4_compute_and_run_fri_on_the_deep_composition_polynomial< round_2_result: &Round2, round_3_result: &Round3, z: &FieldElement, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Round4 where FieldElement: ByteConversion + Send + Sync, @@ -344,11 +324,11 @@ where // <<<< Receive challenges: 𝛾, 𝛾' let composition_poly_coeffients = [ - transcript_to_field(transcript), - transcript_to_field(transcript), + transcript.sample_field_element(), + transcript.sample_field_element(), ]; // <<<< Receive challenges: 𝛾ⱼ, 𝛾ⱼ' - let trace_poly_coeffients = batch_sample_challenges::( + let trace_poly_coeffients = batch_sample_challenges::( air.context().transition_offsets.len() * air.context().trace_columns, transcript, ); @@ -378,7 +358,7 @@ where // grinding: generate nonce and append it to the transcript let grinding_factor = air.context().proof_options.grinding_factor; - let transcript_challenge = transcript.challenge(); + let transcript_challenge = transcript.state(); let nonce = generate_nonce_with_grinding(&transcript_challenge, grinding_factor) .expect("nonce not found"); transcript.append(&nonce.to_be_bytes()); @@ -581,6 +561,7 @@ pub fn prove( main_trace: &TraceTable, pub_inputs: &A::PublicInputs, proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, ) -> Result, ProvingError> where F: IsFFTField, @@ -590,13 +571,12 @@ where { info!("Started proof generation..."); #[cfg(feature = "instruments")] - println!("- Started round 0: Transcript Initialization"); + println!("- Started round 0: Air Initialization"); #[cfg(feature = "instruments")] let timer0 = Instant::now(); let air = A::new(main_trace.n_rows(), pub_inputs, proof_options); let domain = Domain::new(&air); - let mut transcript = round_0_transcript_initialization(); #[cfg(feature = "instruments")] let elapsed0 = timer0.elapsed(); @@ -612,7 +592,7 @@ where #[cfg(feature = "instruments")] let timer1 = Instant::now(); - let round_1_result = round_1_randomized_air_with_preprocessing::( + let round_1_result = round_1_randomized_air_with_preprocessing::( &air, main_trace, &domain, diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index f45ca5f1af..a4d76bbb98 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -1,9 +1,5 @@ use lambdaworks_math::field::{ - element::FieldElement, - fields::{ - fft_friendly::stark_252_prime_field::Stark252PrimeField, - u64_prime_field::{F17, FE17}, - }, + element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, }; use crate::{ @@ -17,6 +13,7 @@ use crate::{ }, proof::options::ProofOptions, prover::prove, + transcript::StoneProverTranscript, verifier::verify, Felt252, }; @@ -36,40 +33,48 @@ fn test_prove_fib() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]), ) ); } -#[test_log::test] -fn test_prove_fib17() { - let trace = simple_fibonacci::fibonacci_trace([FE17::from(1), FE17::from(1)], 4); - - let proof_options = ProofOptions { - blowup_factor: 2, - fri_number_of_queries: 7, - coset_offset: 3, - grinding_factor: 1, - }; - - let pub_inputs = FibonacciPublicInputs { - a0: 
FE17::one(), - a1: FE17::one(), - }; - - let proof = prove::>(&trace, &pub_inputs, &proof_options).unwrap(); - assert!(verify::>( - &proof, - &pub_inputs, - &proof_options - )); -} +// #[test_log::test] +// fn test_prove_fib17() { +// let trace = simple_fibonacci::fibonacci_trace([FE17::from(1), FE17::from(1)], 4); +// +// let proof_options = ProofOptions { +// blowup_factor: 2, +// fri_number_of_queries: 7, +// coset_offset: 3, +// grinding_factor: 1, +// }; +// +// let pub_inputs = FibonacciPublicInputs { +// a0: FE17::one(), +// a1: FE17::one(), +// }; +// +// let proof = prove::>( +// &trace, +// &pub_inputs, +// &proof_options, +// StoneProverTranscript::new(&[]), +// ) +// .unwrap(); +// assert!(verify::>( +// &proof, +// &pub_inputs, +// &proof_options, +// )); +// } #[test_log::test] fn test_prove_fib_2_cols() { @@ -86,12 +91,18 @@ fn test_prove_fib_2_cols() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!(verify::< Stark252PrimeField, Fibonacci2ColsAIR, - >(&proof, &pub_inputs, &proof_options)); + >( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); } #[test_log::test] @@ -107,13 +118,18 @@ fn test_prove_fib_2_cols_shifted() { claimed_index, }; - let proof = - prove::>(&trace, &pub_inputs, &proof_options) - .unwrap(); + let proof = prove::>( + &trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); assert!(verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) )); } @@ -131,13 +147,15 @@ fn test_prove_quadratic() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) ) ); } @@ -159,13 +177,15 @@ fn test_prove_rap_fib() { &trace, &pub_inputs, &proof_options, + StoneProverTranscript::new(&[]), ) .unwrap(); assert!( verify::>( &proof, &pub_inputs, - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) ) ); } @@ -177,10 +197,17 @@ fn test_prove_dummy() { let proof_options = ProofOptions::default_test_options(); - let proof = prove::(&trace, &(), &proof_options).unwrap(); + let proof = prove::( + &trace, + &(), + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); assert!(verify::( &proof, &(), - &proof_options + &proof_options, + StoneProverTranscript::new(&[]) )); } diff --git a/provers/stark/src/traits.rs b/provers/stark/src/traits.rs index e3189312a5..251b89ea37 100644 --- a/provers/stark/src/traits.rs +++ b/provers/stark/src/traits.rs @@ -1,11 +1,12 @@ use itertools::Itertools; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset, field::{element::FieldElement, traits::IsFFTField}, polynomial::Polynomial, }; +use crate::transcript::IsStarkTranscript; + use super::{ constraints::boundary::BoundaryConstraints, context::AirContext, frame::Frame, proof::options::ProofOptions, trace::TraceTable, @@ -29,7 +30,10 @@ pub trait AIR: Clone { rap_challenges: &Self::RAPChallenges, ) -> TraceTable; - fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges; + fn build_rap_challenges( + &self, + transcript: &mut impl IsStarkTranscript, + ) -> Self::RAPChallenges; fn number_auxiliary_rap_columns(&self) -> usize; diff --git a/provers/stark/src/transcript.rs b/provers/stark/src/transcript.rs index 635c8c4188..ef5a174410 100644 --- 
a/provers/stark/src/transcript.rs +++ b/provers/stark/src/transcript.rs @@ -1,63 +1,134 @@ -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use lambdaworks_math::{ field::{ element::FieldElement, - traits::{IsFFTField, IsPrimeField}, + fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, + traits::{IsFFTField, IsField, IsPrimeField}, }, traits::ByteConversion, + unsigned_integer::element::U256, }; +use sha3::{Digest, Keccak256}; -/// Uses randomness from the transcript to create a FieldElement -/// One bit less than the max used by the FieldElement is used as randomness. For StarkFields, this would be 251 bits randomness. -/// Randomness is interpreted as limbs in BigEndian, and each Limb is ordered in BigEndian -pub fn transcript_to_field(transcript: &mut T) -> FieldElement -where - FieldElement: lambdaworks_math::traits::ByteConversion, -{ - let mut randomness = transcript.challenge(); - randomness_to_field(&mut randomness) +const MODULUS_MAX_MULTIPLE: U256 = + U256::from_hex_unchecked("f80000000000020f00000000000000000000000000000000000000000000001f"); +const R_INV: U256 = + U256::from_hex_unchecked("0x40000000000001100000000000012100000000000000000000000000000000"); + +fn keccak_hash(data: &[u8]) -> Keccak256 { + let mut hasher = Keccak256::new(); + hasher.update(data); + hasher } -/// Transforms some random bytes to a field -/// Slicing the randomness to one bit less than what the max number of the field is to ensure each random element has the same probability of appearing -fn randomness_to_field(randomness: &mut [u8; 32]) -> FieldElement -where - FieldElement: ByteConversion, -{ - let random_bits_required = F::field_bit_size() - 1; - let random_bits_created = randomness.len() * 8; - let mut bits_to_clear = random_bits_created - random_bits_required; - - let mut i = 0; - while bits_to_clear >= 8 { - randomness[i] = 0; - bits_to_clear -= 8; - i += 1; +pub trait IsStarkTranscript { + fn append(&mut self, new_bytes: &[u8]); + fn state(&self) -> [u8; 32]; + fn sample_field_element(&mut self) -> FieldElement; + fn sample_u64(&mut self, upper_bound: u64) -> u64; +} + +pub struct StoneProverTranscript { + hash: Keccak256, + seed_increment: U256, + counter: usize, + spare_bytes: Vec, +} + +impl StoneProverTranscript { + pub fn new(public_input_data: &[u8]) -> Self { + let hash = keccak_hash(public_input_data); + StoneProverTranscript { + hash, + seed_increment: U256::from_hex_unchecked("1"), + counter: 0, + spare_bytes: vec![], + } + } + + pub fn sample_block(&mut self, used_bytes: usize) -> Vec { + let mut first_part: Vec = self.hash.clone().finalize().to_vec(); + let mut counter_bytes: Vec = vec![0; 24] + .into_iter() + .chain(self.counter.to_be_bytes().to_vec()) + .collect(); + self.counter += 1; + first_part.append(&mut counter_bytes); + let block = keccak_hash(&first_part).finalize().to_vec(); + self.spare_bytes.extend(&block[used_bytes..]); + block[..used_bytes].to_vec() } - let pre_mask: u8 = 1u8.checked_shl(8 - bits_to_clear as u32).unwrap_or(0); - let mask: u8 = pre_mask.wrapping_sub(1); - randomness[i] &= mask; + pub fn sample(&mut self, num_bytes: usize) -> Vec { + let num_blocks = num_bytes / 32; + let mut result: Vec = Vec::new(); - FieldElement::from_bytes_be(randomness).unwrap() + for _ in 0..num_blocks { + let mut block = self.sample_block(32); + result.append(&mut block); + } + + let rest = num_bytes % 32; + if rest <= self.spare_bytes.len() { + result.append(&mut self.spare_bytes[..rest].to_vec()); + self.spare_bytes.drain(..rest); + } else { + let 
mut block = self.sample_block(rest); + result.append(&mut block); + } + result + } + + pub fn sample_big_int(&mut self) -> U256 { + U256::from_bytes_be(&self.sample(32)).unwrap() + } } -pub fn transcript_to_u32(transcript: &mut T) -> u32 { - const CANT_BYTES_U32: usize = (u32::BITS / 8) as usize; - let value = transcript.challenge()[..CANT_BYTES_U32].try_into().unwrap(); - u32::from_be_bytes(value) +impl IsStarkTranscript for StoneProverTranscript { + fn append(&mut self, new_bytes: &[u8]) { + let mut result_hash = [0_u8; 32]; + result_hash.copy_from_slice(&self.hash.clone().finalize_reset()); + result_hash.reverse(); + + let digest = U256::from_bytes_be(&self.hash.clone().finalize()).unwrap(); + let new_seed = (digest + self.seed_increment).to_bytes_be(); + self.hash = keccak_hash(&[&new_seed, new_bytes].concat()); + self.counter = 0; + self.spare_bytes.clear(); + } + + fn sample_field_element(&mut self) -> FieldElement { + let mut result = self.sample_big_int(); + while result >= MODULUS_MAX_MULTIPLE { + result = self.sample_big_int(); + } + FieldElement::new(result) * FieldElement::new(R_INV) + } + + fn sample_u64(&mut self, upper_bound: u64) -> u64 { + // assert!(upper_bound < (1 << 12)); + let mut bytes = [0u8; 8]; + bytes.copy_from_slice(&self.sample(8)); + let u64_val: u64 = u64::from_be_bytes(bytes); + u64_val % upper_bound + } + + fn state(&self) -> [u8; 32] { + let mut state = [0u8; 32]; + state.copy_from_slice(&self.hash.clone().finalize()); + state + } } -pub fn sample_z_ood( +pub fn sample_z_ood( lde_roots_of_unity_coset: &[FieldElement], trace_roots_of_unity: &[FieldElement], - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> FieldElement where FieldElement: ByteConversion, { loop { - let value: FieldElement = transcript_to_field(transcript); + let value: FieldElement = transcript.sample_field_element(); if !lde_roots_of_unity_coset.iter().any(|x| x == &value) && !trace_roots_of_unity.iter().any(|x| x == &value) { @@ -66,149 +137,387 @@ where } } -pub fn batch_sample_challenges( +pub fn batch_sample_challenges( size: usize, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Vec> where FieldElement: ByteConversion, { - (0..size).map(|_| transcript_to_field(transcript)).collect() + (0..size) + .map(|_| transcript.sample_field_element()) + .collect() } #[cfg(test)] mod tests { - use lambdaworks_math::{ - field::{ - element::FieldElement, - fields::{ - fft_friendly::stark_252_prime_field::Stark252PrimeField, - montgomery_backed_prime_fields::{IsModulus, U256PrimeField}, - }, - }, - unsigned_integer::element::U256, + use lambdaworks_math::field::{ + element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, }; - use crate::transcript::randomness_to_field; + use crate::transcript::{IsStarkTranscript, StoneProverTranscript}; - #[test] - fn test_stark_prime_field_random_to_field_32() { - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 248, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 32, - ]; - - type FE = FieldElement; - let field_element: FE = randomness_to_field(&mut randomness); - let expected_fe = FE::from(32u64); - assert_eq!(field_element, expected_fe) - } + // #[test] + // fn test_stark_prime_field_random_to_field_32() { + // #[rustfmt::skip] + // let mut randomness: [u8; 32] = [ + // 248, 0, 0, 0, 0, 0, 0, 0, + // 0, 0, 0, 0, 0, 0, 0, 0, + // 0, 0, 0, 0, 0, 0, 0, 0, + // 0, 0, 0, 0, 0, 0, 0, 32, + // ]; + // + // type FE = FieldElement; + // let 
field_element: FE = randomness_to_field(&mut randomness); + // let expected_fe = FE::from(32u64); + // assert_eq!(field_element, expected_fe) + // } + // + // #[test] + // fn test_stark_prime_field_random_to_fiel_repeated_f_and_zero() { + // #[rustfmt::skip] + // let mut randomness: [u8; 32] = [ + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // ]; + // + // type FE = FieldElement; + // + // // 251 bits should be used (252 of StarkField - 1) to avoid duplicates + // // This leaves a 7 + // let expected_fe = FE::from_hex_unchecked( + // "\ + // 0700FF00FF00FF00\ + // FF00FF00FF00FF00\ + // FF00FF00FF00FF00\ + // FF00FF00FF00FF00", + // ); + // + // let field_element: FE = randomness_to_field(&mut randomness); + // + // assert_eq!(field_element, expected_fe) + // } + // + // #[test] + // fn test_241_bit_random_to_field() { + // #[derive(Clone, Debug)] + // pub struct TestModulus; + // impl IsModulus for TestModulus { + // const MODULUS: U256 = U256::from_hex_unchecked( + // "\ + // 0001000000000011\ + // 0000000000000000\ + // 0000000000000000\ + // 0000000000000001", + // ); + // } + // + // pub type TestField = U256PrimeField; + // + // #[rustfmt::skip] + // let mut randomness: [u8; 32] = [ + // 255, 255, 255, 1, 2, 3, 4, 5, + // 6, 7, 8, 1, 2, 3, 4, 5, + // 6, 7, 8, 1, 2, 3, 4, 5, + // 6, 7, 8, 1, 2, 3, 4, 5, + // ]; + // + // type FE = FieldElement; + // + // let expected_fe = FE::from_hex_unchecked( + // "\ + // 0000FF0102030405\ + // 0607080102030405\ + // 0607080102030405\ + // 0607080102030405", + // ); + // + // let field_element: FE = randomness_to_field(&mut randomness); + // + // assert_eq!(field_element, expected_fe); + // } + // + // #[test] + // fn test_249_bit_random_to_field() { + // #[derive(Clone, Debug)] + // pub struct TestModulus; + // impl IsModulus for TestModulus { + // const MODULUS: U256 = U256::from_hex_unchecked( + // "\ + // 0200000000000011\ + // 0000000000000000\ + // 0000000000000000\ + // 0000000000000001", + // ); + // } + // + // pub type TestField = U256PrimeField; + // + // #[rustfmt::skip] + // let mut randomness: [u8; 32] = [ + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // 255, 0, 255, 0, 255, 0, 255, 0, + // ]; + // + // let expected_fe = FE::from_hex_unchecked( + // "\ + // 0100FF00FF00FF00\ + // FF00FF00FF00FF00\ + // FF00FF00FF00FF00\ + // FF00FF00FF00FF00", + // ); + // + // type FE = FieldElement; + // + // let field_element: FE = randomness_to_field(&mut randomness); + // + // assert_eq!(field_element, expected_fe) + // } - #[test] - fn test_stark_prime_field_random_to_fiel_repeated_f_and_zero() { - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - ]; - - type FE = FieldElement; - - // 251 bits should be used (252 of StarkField - 1) to avoid duplicates - // This leaves a 7 - let expected_fe = FE::from_hex_unchecked( - "\ - 0700FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00", - ); - - let field_element: FE = randomness_to_field(&mut randomness); - - assert_eq!(field_element, expected_fe) - } + use std::num::ParseIntError; - #[test] - fn test_241_bit_random_to_field() { - #[derive(Clone, Debug)] - pub struct TestModulus; - impl IsModulus for TestModulus { - const MODULUS: U256 = U256::from_hex_unchecked( - "\ - 
0001000000000011\ - 0000000000000000\ - 0000000000000000\ - 0000000000000001", - ); - } + type FE = FieldElement; - pub type TestField = U256PrimeField; + pub fn decode_hex(s: &str) -> Result, ParseIntError> { + (0..s.len()) + .step_by(2) + .map(|i| u8::from_str_radix(&s[i..i + 2], 16)) + .collect() + } - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 255, 255, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, - ]; + pub fn send_field_element(s: &str) -> Vec { + // Taken from serialize_be method, but reverses the limbs for + // compatibility with the stone prover. + let a = FE::from_hex_unchecked(s); + let limbs = a.value().limbs; + let mut bytes: [u8; 32] = [0; 32]; - type FE = FieldElement; + for i in (0..4).rev() { + let limb_bytes = limbs[i].to_be_bytes(); + for j in 0..8 { + bytes[i * 8 + j] = limb_bytes[j] + } + } + bytes.to_vec() + } - let expected_fe = FE::from_hex_unchecked( - "\ - 0000FF0102030405\ - 0607080102030405\ - 0607080102030405\ - 0607080102030405", + #[test] + fn sample_bytes_from_stone_prover_channel() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02, 0x03]); + transcript.append(&[0x04, 0x05, 0x06]); + assert_eq!( + transcript.sample(32), + vec![ + 0x8a, 0x3a, 0x67, 0xd1, 0x25, 0xa5, 0xa5, 0xea, 0x57, 0xc3, 0xfb, 0xe2, 0xc2, 0x55, + 0xb6, 0x0d, 0x0c, 0x89, 0x13, 0xa6, 0x27, 0x13, 0xe0, 0x99, 0xb3, 0x77, 0xc6, 0xc2, + 0x9a, 0x21, 0x85, 0x97 + ] + ); + assert_eq!( + transcript.sample(64), + vec![ + 0x56, 0xde, 0x56, 0x2a, 0xfd, 0x98, 0x19, 0xb9, 0xaa, 0xa0, 0x1b, 0x16, 0xf4, 0xeb, + 0x33, 0x71, 0xd5, 0xd8, 0x0f, 0x35, 0x29, 0xd8, 0xc1, 0x7a, 0x4b, 0xf4, 0x10, 0xe3, + 0x19, 0xb7, 0x64, 0x4a, 0xd2, 0x1c, 0xff, 0x14, 0x3d, 0xfd, 0xca, 0x32, 0x2c, 0x59, + 0xa3, 0x47, 0x5d, 0xd0, 0x34, 0xdf, 0x6d, 0xa7, 0x0c, 0xf5, 0xd2, 0x6a, 0xdd, 0x65, + 0xe0, 0x6d, 0x1e, 0x4f, 0xc7, 0x39, 0x52, 0x32 + ] + ); + assert_eq!( + transcript.sample(48), + vec![ + 0xe4, 0xb6, 0x3c, 0xfc, 0x03, 0xc9, 0x82, 0x8b, 0x63, 0x53, 0xb9, 0xad, 0x73, 0x6d, + 0x23, 0x88, 0x4c, 0x07, 0xb4, 0x9d, 0xf1, 0x1d, 0xef, 0xb9, 0x53, 0xfa, 0x02, 0xb5, + 0x3c, 0x43, 0xcf, 0xa3, 0x30, 0x5a, 0x02, 0x7e, 0xa6, 0x5e, 0x3c, 0x86, 0x3d, 0xdb, + 0x48, 0xea, 0x73, 0xbf, 0xdf, 0xab + ] + ); + assert_eq!( + transcript.sample(32), + vec![ + 0x82, 0xe1, 0xd4, 0xf8, 0xf0, 0x61, 0xa4, 0x17, 0x4b, 0xed, 0x58, 0x4e, 0xb5, 0x73, + 0x26, 0xb7, 0x63, 0x10, 0x37, 0x97, 0xbe, 0x0b, 0x57, 0xaf, 0x74, 0xfe, 0x33, 0x19, + 0xbd, 0xe5, 0x53, 0x21, + ] + ); + assert_eq!( + transcript.sample(16), + vec![ + 0xb0, 0xc6, 0x7a, 0x04, 0x19, 0x0a, 0x25, 0x72, 0xa8, 0x2e, 0xfa, 0x97, 0x92, 0x44, + 0x73, 0xe9 + ] + ); + assert_eq!( + transcript.sample(8), + vec![0xbd, 0x41, 0x28, 0xdd, 0x3a, 0xbc, 0x66, 0x18] + ); + assert_eq!( + transcript.sample(32), + vec![ + 0xcb, 0x66, 0xc9, 0x72, 0x39, 0x85, 0xe8, 0x7c, 0x30, 0xe1, 0xc7, 0x1d, 0x2f, 0x83, + 0x4a, 0xcd, 0x33, 0x85, 0xfb, 0xd5, 0x40, 0x69, 0x22, 0x6e, 0xc0, 0xf1, 0x8c, 0x40, + 0x26, 0x2f, 0x5f, 0x7c, + ] + ); + transcript.append(&[0x03, 0x02]); + assert_eq!( + transcript.sample(32), + vec![ + 0x69, 0x63, 0x72, 0x01, 0x84, 0x8b, 0x22, 0x82, 0xa6, 0x14, 0x6d, 0x47, 0xbb, 0xa9, + 0xa3, 0xc8, 0xdc, 0x1b, 0x8e, 0x2e, 0x2e, 0x21, 0x87, 0x77, 0xac, 0xe0, 0x3e, 0xce, + 0x6e, 0xa7, 0x9e, 0xb0, + ] ); - - let field_element: FE = randomness_to_field(&mut randomness); - - assert_eq!(field_element, expected_fe); } #[test] - fn test_249_bit_random_to_field() { - #[derive(Clone, Debug)] - pub struct TestModulus; - impl IsModulus for TestModulus 
{ - const MODULUS: U256 = U256::from_hex_unchecked( - "\ - 0200000000000011\ - 0000000000000000\ - 0000000000000000\ - 0000000000000001", - ); - } + fn sample_numbers_and_field_elements_from_stone_prover_channel() { + let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]); + transcript.append(&[0x01, 0x02]); + assert_eq!(transcript.sample(4), vec![0x06, 0xe5, 0x36, 0xf5]); + assert_eq!(transcript.sample_u64(16), 5); + } - pub type TestField = U256PrimeField; + #[test] + fn fibonacci_transcript_replicate() { + let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]); + // Send hash of trace commitment + transcript.append( + &decode_hex("0eb9dcc0fb1854572a01236753ce05139d392aa3aeafe72abff150fe21175594") + .unwrap(), + ); + // Sample challenge to collapse the constraints for the composition polynomial + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7" + ) + ); + // Send hash of composition poly commitment H(z) + transcript.append( + &decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38") + .unwrap(), + ); + // Sample challenge Z to compute t_j(z), H(z) + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "317629e783794b52cd27ac3a5e418c057fec9dd42f2b537cdb3f24c95b3e550" + ) + ); + // Append t_j(z), H(z) + transcript.append(&send_field_element( + "70d8181785336cc7e0a0a1078a79ee6541ca0803ed3ff716de5a13c41684037", + )); + transcript.append(&send_field_element( + "29808fc8b7480a69295e4b61600480ae574ca55f8d118100940501b789c1630", + )); + transcript.append(&send_field_element( + "7d8110f21d1543324cc5e472ab82037eaad785707f8cae3d64c5b9034f0abd2", + )); + transcript.append(&send_field_element( + "1b58470130218c122f71399bf1e04cf75a6e8556c4751629d5ce8c02cc4e62d", + )); + transcript.append(&send_field_element( + "1c0b7c2275e36d62dfb48c791be122169dcc00c616c63f8efb2c2a504687e85", + )); + // Sample challenge Gamma to collapse the terms of the deep composition polynomial (batch open). + // Powers of this challenge are used if more than two terms. 
+ assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "a0c79c1c77ded19520873d9c2440451974d23302e451d13e8124cf82fc15dd" + ) + ); + // FRI: Sample challenge Zeta to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "5c6b5a66c9fda19f583f0b10edbaade98d0e458288e62c2fa40e3da2b293cef" + ) + ); + // FRI: Send hash of commitment at Layer 1 + transcript.append( + &decode_hex("49c5672520e20eccc72aa28d6fa0d7ef446f1ede38d7c64fbb95d0f34a281803") + .unwrap(), + ); + // FRI: Sample challenge to split the polynomial in half + assert_eq!( + transcript.sample_field_element(), + FE::from_hex_unchecked( + "4243ca9a618e2127590af8e1b38c63a156863fe95e4211cc1ade9b50667bbfa" + ) + ); + // Send field element at final layer of FRI + transcript.append(&send_field_element( + "702ddae5809ad82a82556eed2d202202d770962b7d4d82581e183df3efa2da6", + )); + // Send proof of work + transcript.append(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x30, 0x4d]); // Eight bytes + // Sample query indices + assert_eq!(transcript.sample_u64(8), 0); - #[rustfmt::skip] - let mut randomness: [u8; 32] = [ - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - 255, 0, 255, 0, 255, 0, 255, 0, - ]; + transcript.append(&send_field_element( + "643e5520c60d06219b27b34da0856a2c23153efe9da75c6036f362c8f19615e", + )); + transcript.append(&send_field_element( + "165d7fb12913882268bb8cf470c81f42349fde7dec7b0a90526d142d6a61205", + )); + transcript.append(&send_field_element( + "1bc1aadf39f2faee64d84cb25f7a95d3dceac1016258a39fc90c9d370e69ea2", + )); + transcript.append(&send_field_element( + "69a2804ed6ec78ed9744730b8f37e0bdcb6021821384f56fad92ebd2959edf4", + )); - let expected_fe = FE::from_hex_unchecked( - "\ - 0100FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00\ - FF00FF00FF00FF00", + transcript.append( + &decode_hex("0160a780da72e50c596b9b6712bd040475d30777a4fef2c9f9be3a7fbaa98072") + .unwrap(), + ); + transcript.append( + &decode_hex("993b044db22444c0c0ebf1095b9a51faeb001c9b4dea36abe905f7162620dbbd") + .unwrap(), + ); + transcript.append( + &decode_hex("5017abeca33fa82576b5c5c2c61792693b48c9d4414a407eef66b6029dae07ea") + .unwrap(), ); - type FE = FieldElement; + transcript.append(&send_field_element( + "483069de80bf48a1b5ca2f55bdeb9ec3ed1b7bf9c794c3c8832f14928124cbb", + )); + transcript.append(&send_field_element( + "1cf5d5ed8348c3dee617bceff2d59cb14099d2978b1f7f928027dbbded1d66f", + )); + + transcript.append( + &decode_hex("6a23307160a636ea45c08f6b56e7585a850b5e14170a6c63f4d166a2220a7c2f") + .unwrap(), + ); + transcript.append( + &decode_hex("7950888c0355c204a1e83ecbee77a0a6a89f93d41cc2be6b39ddd1e727cc9650") + .unwrap(), + ); + transcript.append( + &decode_hex("58befe2c5de74cc5a002aa82ea219c5b242e761b45fd266eb95521e9f53f44eb") + .unwrap(), + ); - let field_element: FE = randomness_to_field(&mut randomness); + transcript.append(&send_field_element( + "724fcd17f8649ed5e180d4e98ba7e8900c8da2643f5ed548773b145230cf12d", + )); - assert_eq!(field_element, expected_fe) + transcript.append( + &decode_hex("f1f135fc9228ae46afe83d108b256dda8a6ad63e05d630be1f8b461bf2dccf3d") + .unwrap(), + ); + transcript.append( + &decode_hex("3fdabd3f5fae2bf405d423417141678f4b9afa5666b00790baac61116c5ea8af") + .unwrap(), + ); } } diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs index b1d3dcab42..ef489fa0c0 100644 --- a/provers/stark/src/verifier.rs +++ b/provers/stark/src/verifier.rs @@ -3,13 +3,8 @@ use 
std::time::Instant; //use itertools::multizip; #[cfg(not(feature = "test_fiat_shamir"))] -use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript; -use lambdaworks_crypto::fiat_shamir::transcript::Transcript; use log::error; -#[cfg(feature = "test_fiat_shamir")] -use lambdaworks_crypto::fiat_shamir::test_transcript::TestTranscript; - use lambdaworks_math::{ field::{ element::FieldElement, @@ -18,6 +13,8 @@ use lambdaworks_math::{ traits::ByteConversion, }; +use crate::transcript::IsStarkTranscript; + use super::{ config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend}, domain::Domain, @@ -25,20 +22,9 @@ use super::{ grinding::hash_transcript_with_int_and_get_leading_zeros, proof::{options::ProofOptions, stark::StarkProof}, traits::AIR, - transcript::{batch_sample_challenges, sample_z_ood, transcript_to_field, transcript_to_u32}, + transcript::{batch_sample_challenges, sample_z_ood}, }; -#[cfg(feature = "test_fiat_shamir")] -fn step_1_transcript_initialization() -> TestTranscript { - TestTranscript::new() -} - -#[cfg(not(feature = "test_fiat_shamir"))] -fn step_1_transcript_initialization() -> DefaultTranscript { - // TODO: add strong fiat shamir - DefaultTranscript::new() -} - struct Challenges where F: IsFFTField, @@ -56,17 +42,16 @@ where leading_zeros_count: u8, // number of leading zeros in the grinding } -fn step_1_replay_rounds_and_recover_challenges( +fn step_1_replay_rounds_and_recover_challenges( air: &A, proof: &StarkProof, domain: &Domain, - transcript: &mut T, + transcript: &mut impl IsStarkTranscript, ) -> Challenges where F: IsFFTField, FieldElement: ByteConversion, A: AIR, - T: Transcript, { // =================================== // ==========| Round 1 |========== @@ -144,8 +129,8 @@ where // =================================== // >>>> Send challenges: 𝛾, 𝛾' - let gamma_even = transcript_to_field(transcript); - let gamma_odd = transcript_to_field(transcript); + let gamma_even = transcript.sample_field_element(); + let gamma_odd = transcript.sample_field_element(); // >>>> Send challenges: 𝛾ⱼ, 𝛾ⱼ' // Get the number of trace terms the DEEP composition poly will have. 
@@ -154,7 +139,7 @@ where let trace_term_coeffs = (0..total_columns) .map(|_| { (0..air.context().transition_offsets.len()) - .map(|_| transcript_to_field(transcript)) + .map(|_| transcript.sample_field_element()) .collect() }) .collect::>>>(); @@ -169,7 +154,7 @@ where transcript.append(root); // >>>> Send challenge 𝜁ₖ - transcript_to_field(transcript) + transcript.sample_field_element() }) .collect::>>(); @@ -178,7 +163,7 @@ where // Receive grinding value // 1) Receive challenge from the transcript - let transcript_challenge = transcript.challenge(); + let transcript_challenge = transcript.state(); let nonce = proof.nonce; let leading_zeros_count = hash_transcript_with_int_and_get_leading_zeros(&transcript_challenge, nonce); @@ -188,7 +173,7 @@ where // <<<< Send challenges 𝜄ₛ (iota_s) let iota_max: usize = 2_usize.pow(domain.lde_root_order); let iotas: Vec = (0..air.options().fri_number_of_queries) - .map(|_| (transcript_to_u32(transcript) as usize) % iota_max) + .map(|_| (transcript.sample_u64(iota_max as u64) as usize) % iota_max) .collect(); Challenges { @@ -562,6 +547,7 @@ pub fn verify( proof: &StarkProof, pub_input: &A::PublicInputs, proof_options: &ProofOptions, + mut transcript: impl IsStarkTranscript, ) -> bool where F: IsFFTField, @@ -578,7 +564,6 @@ where #[cfg(feature = "instruments")] let timer1 = Instant::now(); - let mut transcript = step_1_transcript_initialization(); let air = A::new(proof.trace_length, pub_input, proof_options); let domain = Domain::new(&air);
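
Reviewer note: with this patch, prove and verify no longer build a DefaultTranscript/TestTranscript internally; the Fiat-Shamir transcript is an explicit argument, and prover and verifier must construct a StoneProverTranscript from the same seed bytes. A minimal usage sketch under the new API, assuming the simple Fibonacci example AIR and the helpers used in the tests above (fibonacci_trace, FibonacciPublicInputs, ProofOptions::default_test_options) are publicly reachable from outside the crate:

use lambdaworks_math::field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField;
use stark_platinum_prover::{
    examples::simple_fibonacci::{self, FibonacciAIR, FibonacciPublicInputs},
    proof::options::ProofOptions,
    prover::prove,
    transcript::StoneProverTranscript,
    verifier::verify,
    Felt252,
};

fn main() {
    // An 8-row Fibonacci trace over the Stark-252 field, as in test_prove_fib.
    let trace = simple_fibonacci::fibonacci_trace([Felt252::from(1), Felt252::from(1)], 8);
    let pub_inputs = FibonacciPublicInputs {
        a0: Felt252::one(),
        a1: Felt252::one(),
    };
    let proof_options = ProofOptions::default_test_options();

    // The transcript is passed in explicitly. Both sides must seed it with the same
    // bytes (empty here; the Cairo wrappers in this patch also seed it with &[]).
    let proof = prove::<Stark252PrimeField, FibonacciAIR<Stark252PrimeField>>(
        &trace,
        &pub_inputs,
        &proof_options,
        StoneProverTranscript::new(&[]),
    )
    .unwrap();

    assert!(verify::<Stark252PrimeField, FibonacciAIR<Stark252PrimeField>>(
        &proof,
        &pub_inputs,
        &proof_options,
        StoneProverTranscript::new(&[]),
    ));
}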
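
The free helpers transcript_to_field and transcript_to_u32 are gone; challenges are drawn through the trait methods sample_field_element and sample_u64(upper_bound), and state() replaces the old challenge() call that the grinding check hashes together with the nonce. A short interaction sketch, reusing the seed and append bytes from the sample_numbers_and_field_elements_from_stone_prover_channel unit test:

use stark_platinum_prover::transcript::{IsStarkTranscript, StoneProverTranscript};

fn main() {
    // Strong Fiat-Shamir: the transcript is seeded with public-input bytes on construction.
    let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]);

    // The prover appends every message it sends (commitments, evaluations, nonce)...
    transcript.append(&[0x01, 0x02]);

    // ...and both sides then draw identical challenges from the updated state.
    let _zeta = transcript.sample_field_element(); // was transcript_to_field(&mut transcript)
    let _iota = transcript.sample_u64(16); // was (transcript_to_u32(&mut transcript) as usize) % 16

    // state() exposes the running digest; the grinding check hashes it with the nonce.
    let _digest: [u8; 32] = transcript.state();
}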
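
On the two constants in StoneProverTranscript::sample_field_element (the following reading is inferred from the values, not stated in the patch): MODULUS_MAX_MULTIPLE equals 31 * p, the largest multiple of the Stark-252 prime p = 2^251 + 17 * 2^192 + 1 that fits in 256 bits, so rejecting draws at or above it keeps the reduced value unbiased; R_INV looks like R^-1 mod p for the Montgomery constant R = 2^256 of the four-limb backend, so the sampled bytes are interpreted as a Montgomery-form value, matching the limb-reversed encoding used by send_field_element in the tests. An annotated sketch of the same logic with those assumptions spelled out:

use lambdaworks_math::{
    field::{
        element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
    },
    unsigned_integer::element::U256,
};

// Constants copied from the patch. Assumed meanings: 31 * p and R^-1 mod p (R = 2^256).
const MODULUS_MAX_MULTIPLE: U256 =
    U256::from_hex_unchecked("f80000000000020f00000000000000000000000000000000000000000000001f");
const R_INV: U256 =
    U256::from_hex_unchecked("0x40000000000001100000000000012100000000000000000000000000000000");

// `draw_32_bytes` stands in for the transcript's sample_big_int(); it is a hypothetical
// parameter so the sketch stays self-contained.
fn sample_field_element(
    mut draw_32_bytes: impl FnMut() -> U256,
) -> FieldElement<Stark252PrimeField> {
    // Rejection sampling: retry while the draw lands in the short tail above 31 * p,
    // so that reducing modulo p afterwards stays uniform.
    let mut draw = draw_32_bytes();
    while draw >= MODULUS_MAX_MULTIPLE {
        draw = draw_32_bytes();
    }
    // Undo the Montgomery factor: the bytes are read as x * R mod p, so multiply by R^-1.
    FieldElement::new(draw) * FieldElement::new(R_INV)
}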
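
The trait also admits other instantiations, for example a deterministic stand-in for tests in the spirit of the removed TestTranscript. A non-cryptographic sketch, assuming the trait takes the field as a type parameter (IsStarkTranscript<F>), as the Stark252PrimeField implementation above suggests; the CountingTranscript type and its behaviour are hypothetical:

use lambdaworks_math::field::{
    element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
};
use stark_platinum_prover::transcript::IsStarkTranscript;

// Deterministic counter-based transcript: useful only to exercise the prover/verifier
// plumbing, not sound as a Fiat-Shamir instantiation.
pub struct CountingTranscript {
    counter: u64,
}

impl IsStarkTranscript<Stark252PrimeField> for CountingTranscript {
    fn append(&mut self, new_bytes: &[u8]) {
        // Absorbs messages only through their length; a real transcript hashes the bytes.
        self.counter = self.counter.wrapping_add(new_bytes.len() as u64);
    }

    fn state(&self) -> [u8; 32] {
        let mut state = [0u8; 32];
        state[24..].copy_from_slice(&self.counter.to_be_bytes());
        state
    }

    fn sample_field_element(&mut self) -> FieldElement<Stark252PrimeField> {
        self.counter += 1;
        FieldElement::from(self.counter)
    }

    fn sample_u64(&mut self, upper_bound: u64) -> u64 {
        self.counter += 1;
        self.counter % upper_bound
    }
}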