diff --git a/provers/stark/src/fri/mod.rs b/provers/stark/src/fri/mod.rs
index 1ee9dde908..5ca535c583 100644
--- a/provers/stark/src/fri/mod.rs
+++ b/provers/stark/src/fri/mod.rs
@@ -80,7 +80,8 @@ pub fn fri_query_phase(
 where
     F: IsFFTField,
     A: AIR,
-    FieldElement<F>: Serializable,
+    T: Transcript,
+    FieldElement<F>: ByteConversion,
 {
     if !fri_layers.is_empty() {
         let number_of_queries = air.options().fri_number_of_queries;
diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs
index d177621af4..4dff4d0c26 100644
--- a/provers/stark/src/prover.rs
+++ b/provers/stark/src/prover.rs
@@ -1,8 +1,6 @@
 #[cfg(feature = "instruments")]
 use std::time::Instant;
 
-use lambdaworks_crypto::merkle_tree::proof::Proof;
-use lambdaworks_math::fft::cpu::bit_reversing::in_place_bit_reverse_permute;
 use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly};
 use lambdaworks_math::traits::Serializable;
 use lambdaworks_math::{
@@ -16,7 +14,7 @@ use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelI
 
 #[cfg(debug_assertions)]
 use crate::debug::validate_trace;
-use crate::transcript::IsStarkTranscript;
+use crate::transcript::{sample_z_ood, IsStarkTranscript};
 
 use super::config::{BatchedMerkleTree, Commitment};
 use super::constraints::evaluator::ConstraintEvaluator;
diff --git a/provers/stark/src/transcript.rs b/provers/stark/src/transcript.rs
index b6952b70bb..12a7848e36 100644
--- a/provers/stark/src/transcript.rs
+++ b/provers/stark/src/transcript.rs
@@ -2,9 +2,9 @@ use lambdaworks_math::{
     field::{
         element::FieldElement,
         fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
-        traits::{IsFFTField, IsField},
+        traits::{IsFFTField, IsField, IsPrimeField},
     },
-    traits::{ByteConversion, Serializable},
+    traits::ByteConversion,
     unsigned_integer::element::U256,
 };
 use sha3::{Digest, Keccak256};
@@ -15,65 +15,46 @@ pub trait IsStarkTranscript<F: IsField> {
     fn state(&self) -> [u8; 32];
     fn sample_field_element(&mut self) -> FieldElement<F>;
     fn sample_u64(&mut self, upper_bound: u64) -> u64;
-    fn sample_z_ood(
-        &mut self,
-        lde_roots_of_unity_coset: &[FieldElement<F>],
-        trace_roots_of_unity: &[FieldElement<F>],
-    ) -> FieldElement<F>
-    where
-        FieldElement<F>: Serializable,
-    {
-        loop {
-            let value: FieldElement<F> = self.sample_field_element();
-            if !lde_roots_of_unity_coset.iter().any(|x| x == &value)
-                && !trace_roots_of_unity.iter().any(|x| x == &value)
-            {
-                return value;
-            }
-        }
-    }
 }
 
+fn keccak_hash(data: &[u8]) -> Keccak256 {
+    let mut hasher = Keccak256::new();
+    hasher.update(data);
+    hasher
+}
+
+const MODULUS_MAX_MULTIPLE: U256 =
+    U256::from_hex_unchecked("f80000000000020f00000000000000000000000000000000000000000000001f");
+const R_INV: U256 =
+    U256::from_hex_unchecked("0x40000000000001100000000000012100000000000000000000000000000000");
+
 pub struct StoneProverTranscript {
-    state: [u8; 32],
+    hash: Keccak256,
     seed_increment: U256,
     counter: u32,
     spare_bytes: Vec<u8>,
 }
 
 impl StoneProverTranscript {
-    const MODULUS_MAX_MULTIPLE: U256 = U256::from_hex_unchecked(
-        "f80000000000020f00000000000000000000000000000000000000000000001f",
-    );
-    const R_INV: U256 = U256::from_hex_unchecked(
-        "0x40000000000001100000000000012100000000000000000000000000000000",
-    );
     pub fn new(public_input_data: &[u8]) -> Self {
+        let hash = keccak_hash(public_input_data);
         StoneProverTranscript {
-            state: Self::keccak_hash(public_input_data),
+            hash,
            seed_increment: U256::from_hex_unchecked("1"),
             counter: 0,
             spare_bytes: vec![],
         }
     }
 
-    fn keccak_hash(data: &[u8]) -> [u8; 32] {
-        let mut hasher = Keccak256::new();
-        hasher.update(data);
-        let mut result_hash = [0_u8; 32];
-        result_hash.copy_from_slice(&hasher.finalize_reset());
-        result_hash
-    }
-
     pub fn sample_block(&mut self, used_bytes: usize) -> Vec<u8> {
-        let mut first_part: Vec<u8> = self.state.to_vec();
+        let mut first_part: Vec<u8> = self.hash.clone().finalize().to_vec();
         let mut counter_bytes: Vec<u8> = vec![0; 28]
             .into_iter()
             .chain(self.counter.to_be_bytes().to_vec())
             .collect();
         self.counter += 1;
         first_part.append(&mut counter_bytes);
-        let block = Self::keccak_hash(&first_part);
+        let block = keccak_hash(&first_part).finalize().to_vec();
         self.spare_bytes.extend(&block[used_bytes..]);
         block[..used_bytes].to_vec()
     }
@@ -119,26 +100,28 @@ impl IsStarkTranscript<Stark252PrimeField> for StoneProverTranscript {
 
     fn append_bytes(&mut self, new_bytes: &[u8]) {
         let mut result_hash = [0_u8; 32];
-        result_hash.copy_from_slice(&self.state);
+        result_hash.copy_from_slice(&self.hash.clone().finalize_reset());
         result_hash.reverse();
 
-        let digest = U256::from_bytes_be(&self.state).unwrap();
+        let digest = U256::from_bytes_be(&self.hash.clone().finalize()).unwrap();
         let new_seed = (digest + self.seed_increment).to_bytes_be();
-        self.state = Self::keccak_hash(&[&new_seed, new_bytes].concat());
+        self.hash = keccak_hash(&[&new_seed, new_bytes].concat());
         self.counter = 0;
         self.spare_bytes.clear();
     }
 
     fn state(&self) -> [u8; 32] {
-        self.state
+        let mut state = [0u8; 32];
+        state.copy_from_slice(&self.hash.clone().finalize());
+        state
     }
 
     fn sample_field_element(&mut self) -> FieldElement<Stark252PrimeField> {
         let mut result = self.sample_big_int();
-        while result >= Self::MODULUS_MAX_MULTIPLE {
+        while result >= MODULUS_MAX_MULTIPLE {
             result = self.sample_big_int();
         }
-        FieldElement::new(result) * FieldElement::new(Self::R_INV)
+        FieldElement::new(result) * FieldElement::new(R_INV)
     }
 
     fn sample_u64(&mut self, upper_bound: u64) -> u64 {
@@ -150,6 +133,24 @@ impl IsStarkTranscript<Stark252PrimeField> for StoneProverTranscript {
     }
 }
+pub fn sample_z_ood<F: IsPrimeField>(
+    lde_roots_of_unity_coset: &[FieldElement<F>],
+    trace_roots_of_unity: &[FieldElement<F>],
+    transcript: &mut impl IsStarkTranscript<F>,
+) -> FieldElement<F>
+where
+    FieldElement<F>: ByteConversion,
+{
+    loop {
+        let value: FieldElement<F> = transcript.sample_field_element();
+        if !lde_roots_of_unity_coset.iter().any(|x| x == &value)
+            && !trace_roots_of_unity.iter().any(|x| x == &value)
+        {
+            return value;
+        }
+    }
+}
+
 pub fn batch_sample_challenges<F: IsFFTField>(
     size: usize,
     transcript: &mut impl IsStarkTranscript<F>,
 ) -> Vec<FieldElement<F>>
@@ -174,6 +175,22 @@ mod tests {
     type FE = FieldElement<Stark252PrimeField>;
+    pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
+        (0..s.len())
+            .step_by(2)
+            .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
+            .collect()
+    }
+    use lambdaworks_math::field::{
+        element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
+    };
+
+    use crate::transcript::{IsStarkTranscript, StoneProverTranscript};
+
+    use std::num::ParseIntError;
+
+    type FE = FieldElement<Stark252PrimeField>;
+
     pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
         (0..s.len())
             .step_by(2)
             .map(|i| u8::from_str_radix(&s[i..i + 2], 16))
             .collect()
@@ -251,6 +268,12 @@ mod tests {
     }
 
     #[test]
+    fn test_sample_bytes() {
+        let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]);
+        assert_eq!(
+            transcript.sample(8),
+            vec![89, 27, 84, 161, 127, 200, 195, 181]
+        );
     fn test_sample_bytes() {
         let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]);
         assert_eq!(
@@ -289,7 +312,7 @@ mod tests {
         // This corresponds to the following run.
         // Air: `Fibonacci2ColsShifted`
         // `trace_length`: 4
-        // `blowup_factor`: 4
+        // `blowup_factor`: 2
         // `fri_number_of_queries`: 1
         let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]);
         // Send hash of trace commitment
@@ -304,7 +327,7 @@ mod tests {
                 "86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7"
             )
         );
-        // Send hash of composition poly commitment H
+        // Send hash of composition poly commitment H(z)
         transcript.append_bytes(
             &decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38")
                 .unwrap(),
@@ -374,7 +397,7 @@ mod tests {
         // This corresponds to the following run.
         // Air: `Fibonacci2ColsShifted`
         // `trace_length`: 4
-        // `blowup_factor`: 64
+        // `blowup_factor`: 6
         // `fri_number_of_queries`: 2
         let mut transcript = StoneProverTranscript::new(&[0xfa, 0xfa, 0xfa, 0xee]);
         // Send hash of trace commitment
diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs
index 38a505602a..5c275f6838 100644
--- a/provers/stark/src/verifier.rs
+++ b/provers/stark/src/verifier.rs
@@ -13,7 +13,7 @@ use lambdaworks_math::{
     traits::Serializable,
 };
 
-use crate::{prover::get_stone_prover_domain_permutation, transcript::IsStarkTranscript};
+use crate::transcript::IsStarkTranscript;
 
 use super::{
     config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend},
@@ -22,6 +22,7 @@ use super::{
     grinding::hash_transcript_with_int_and_get_leading_zeros,
     proof::{options::ProofOptions, stark::StarkProof},
     traits::AIR,
+    transcript::{batch_sample_challenges, sample_z_ood},
 };
 
 struct Challenges
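A minimal, illustrative sketch of how the relocated transcript helpers fit together after this change. It assumes only the signatures shown in the diff (`StoneProverTranscript::new`, `IsStarkTranscript::append_bytes`, and the free functions `sample_z_ood` and `batch_sample_challenges`); the seed bytes, the example function name, and the two "domain" vectors are hypothetical stand-ins, not values taken from the prover.

use lambdaworks_math::field::{
    element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
};

use crate::transcript::{
    batch_sample_challenges, sample_z_ood, IsStarkTranscript, StoneProverTranscript,
};

type FE = FieldElement<Stark252PrimeField>;

fn transcript_usage_sketch() {
    // Seed the Fiat-Shamir transcript with (hypothetical) public input bytes.
    let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]);

    // Absorb a 32-byte commitment, as prover and verifier do after each round.
    transcript.append_bytes(&[0x01; 32]);

    // Stand-in "domains": in the real prover these would be the LDE coset and
    // the trace roots of unity; here they are just a few field elements.
    let lde_coset = vec![FE::from(1), FE::from(2), FE::from(3)];
    let trace_roots = vec![FE::from(4), FE::from(5), FE::from(6)];

    // Draw the out-of-domain point z; sample_z_ood retries until the sampled
    // element lies outside both sets.
    let z = sample_z_ood(&lde_coset, &trace_roots, &mut transcript);
    assert!(!lde_coset.contains(&z) && !trace_roots.contains(&z));

    // Draw a batch of challenges, e.g. for the composition polynomial.
    let betas = batch_sample_challenges(8, &mut transcript);
    assert_eq!(betas.len(), 8);
}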