Skip to content

Commit

Permalink
Stark: make transcript compatible with Stone Prover (lambdaclass#570)
Browse files Browse the repository at this point in the history
* add StarkTranscript trait and implementation

* make append field element compatible with stone prover

* add test

* add tests

* uncomment test

* remove code added by mistake to exercises

* make counter of type u32
  • Loading branch information
schouhy authored and PatStiles committed Oct 6, 2023
1 parent fd6b985 commit 6db6010
Show file tree
Hide file tree
Showing 4 changed files with 74 additions and 51 deletions.
3 changes: 2 additions & 1 deletion provers/stark/src/fri/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ pub fn fri_query_phase<F, A>(
where
F: IsFFTField,
A: AIR<Field = F>,
FieldElement<F>: Serializable,
T: Transcript,
FieldElement<F>: ByteConversion,
{
if !fri_layers.is_empty() {
let number_of_queries = air.options().fri_number_of_queries;
Expand Down
4 changes: 1 addition & 3 deletions provers/stark/src/prover.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
#[cfg(feature = "instruments")]
use std::time::Instant;

use lambdaworks_crypto::merkle_tree::proof::Proof;
use lambdaworks_math::fft::cpu::bit_reversing::in_place_bit_reverse_permute;
use lambdaworks_math::fft::{errors::FFTError, polynomial::FFTPoly};
use lambdaworks_math::traits::Serializable;
use lambdaworks_math::{
Expand All @@ -16,7 +14,7 @@ use rayon::prelude::{IndexedParallelIterator, IntoParallelRefIterator, ParallelI

#[cfg(debug_assertions)]
use crate::debug::validate_trace;
use crate::transcript::IsStarkTranscript;
use crate::transcript::{sample_z_ood, IsStarkTranscript};

use super::config::{BatchedMerkleTree, Commitment};
use super::constraints::evaluator::ConstraintEvaluator;
Expand Down
115 changes: 69 additions & 46 deletions provers/stark/src/transcript.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ use lambdaworks_math::{
field::{
element::FieldElement,
fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
traits::{IsFFTField, IsField},
traits::{IsFFTField, IsField, IsPrimeField},
},
traits::{ByteConversion, Serializable},
traits::ByteConversion,
unsigned_integer::element::U256,
};
use sha3::{Digest, Keccak256};
Expand All @@ -15,65 +15,46 @@ pub trait IsStarkTranscript<F: IsField> {
fn state(&self) -> [u8; 32];
fn sample_field_element(&mut self) -> FieldElement<F>;
fn sample_u64(&mut self, upper_bound: u64) -> u64;
fn sample_z_ood(
&mut self,
lde_roots_of_unity_coset: &[FieldElement<F>],
trace_roots_of_unity: &[FieldElement<F>],
) -> FieldElement<F>
where
FieldElement<F>: Serializable,
{
loop {
let value: FieldElement<F> = self.sample_field_element();
if !lde_roots_of_unity_coset.iter().any(|x| x == &value)
&& !trace_roots_of_unity.iter().any(|x| x == &value)
{
return value;
}
}
}
}

/// Builds a Keccak-256 hasher pre-loaded with `data`.
///
/// The hasher is returned without being finalized, so the caller can
/// keep absorbing bytes or call `finalize` when ready.
fn keccak_hash(data: &[u8]) -> Keccak256 {
    let mut state = Keccak256::new();
    state.update(data);
    state
}

const MODULUS_MAX_MULTIPLE: U256 =
U256::from_hex_unchecked("f80000000000020f00000000000000000000000000000000000000000000001f");
const R_INV: U256 =
U256::from_hex_unchecked("0x40000000000001100000000000012100000000000000000000000000000000");

pub struct StoneProverTranscript {
state: [u8; 32],
hash: Keccak256,
seed_increment: U256,
counter: u32,
spare_bytes: Vec<u8>,
}

impl StoneProverTranscript {
const MODULUS_MAX_MULTIPLE: U256 = U256::from_hex_unchecked(
"f80000000000020f00000000000000000000000000000000000000000000001f",
);
const R_INV: U256 = U256::from_hex_unchecked(
"0x40000000000001100000000000012100000000000000000000000000000000",
);
pub fn new(public_input_data: &[u8]) -> Self {
let hash = keccak_hash(public_input_data);
StoneProverTranscript {
state: Self::keccak_hash(public_input_data),
hash,
seed_increment: U256::from_hex_unchecked("1"),
counter: 0,
spare_bytes: vec![],
}
}

fn keccak_hash(data: &[u8]) -> [u8; 32] {
let mut hasher = Keccak256::new();
hasher.update(data);
let mut result_hash = [0_u8; 32];
result_hash.copy_from_slice(&hasher.finalize_reset());
result_hash
}

pub fn sample_block(&mut self, used_bytes: usize) -> Vec<u8> {
let mut first_part: Vec<u8> = self.state.to_vec();
let mut first_part: Vec<u8> = self.hash.clone().finalize().to_vec();
let mut counter_bytes: Vec<u8> = vec![0; 28]
.into_iter()
.chain(self.counter.to_be_bytes().to_vec())
.collect();
self.counter += 1;
first_part.append(&mut counter_bytes);
let block = Self::keccak_hash(&first_part);
let block = keccak_hash(&first_part).finalize().to_vec();
self.spare_bytes.extend(&block[used_bytes..]);
block[..used_bytes].to_vec()
}
Expand Down Expand Up @@ -119,26 +100,28 @@ impl IsStarkTranscript<Stark252PrimeField> for StoneProverTranscript {

fn append_bytes(&mut self, new_bytes: &[u8]) {
let mut result_hash = [0_u8; 32];
result_hash.copy_from_slice(&self.state);
result_hash.copy_from_slice(&self.hash.clone().finalize_reset());
result_hash.reverse();

let digest = U256::from_bytes_be(&self.state).unwrap();
let digest = U256::from_bytes_be(&self.hash.clone().finalize()).unwrap();
let new_seed = (digest + self.seed_increment).to_bytes_be();
self.state = Self::keccak_hash(&[&new_seed, new_bytes].concat());
self.hash = keccak_hash(&[&new_seed, new_bytes].concat());
self.counter = 0;
self.spare_bytes.clear();
}

fn state(&self) -> [u8; 32] {
self.state
let mut state = [0u8; 32];
state.copy_from_slice(&self.hash.clone().finalize());
state
}

fn sample_field_element(&mut self) -> FieldElement<Stark252PrimeField> {
let mut result = self.sample_big_int();
while result >= Self::MODULUS_MAX_MULTIPLE {
while result >= MODULUS_MAX_MULTIPLE {
result = self.sample_big_int();
}
FieldElement::new(result) * FieldElement::new(Self::R_INV)
FieldElement::new(result) * FieldElement::new(R_INV)
}

fn sample_u64(&mut self, upper_bound: u64) -> u64 {
Expand All @@ -150,6 +133,24 @@ impl IsStarkTranscript<Stark252PrimeField> for StoneProverTranscript {
}
}

/// Samples the out-of-domain point `z` from the transcript.
///
/// Draws field elements from `transcript` until one is found that lies in
/// neither `lde_roots_of_unity_coset` nor `trace_roots_of_unity`, and
/// returns that element.
pub fn sample_z_ood<F: IsPrimeField>(
    lde_roots_of_unity_coset: &[FieldElement<F>],
    trace_roots_of_unity: &[FieldElement<F>],
    transcript: &mut impl IsStarkTranscript<F>,
) -> FieldElement<F>
where
    FieldElement<F>: ByteConversion,
{
    loop {
        let candidate = transcript.sample_field_element();
        let in_lde_coset = lde_roots_of_unity_coset.contains(&candidate);
        let in_trace_domain = trace_roots_of_unity.contains(&candidate);
        if !in_lde_coset && !in_trace_domain {
            break candidate;
        }
    }
}

pub fn batch_sample_challenges<F: IsFFTField>(
size: usize,
transcript: &mut impl IsStarkTranscript<F>,
Expand All @@ -174,6 +175,22 @@ mod tests {

type FE = FieldElement<Stark252PrimeField>;

/// Decodes a hex string into its byte values.
///
/// Consumes `s` two characters at a time, parsing each pair as one byte;
/// returns the first `ParseIntError` encountered if a pair is not valid
/// hexadecimal. Like the original, an odd-length input panics when the
/// final two-character slice runs past the end of the string.
pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
    let mut bytes = Vec::with_capacity(s.len() / 2);
    for start in (0..s.len()).step_by(2) {
        bytes.push(u8::from_str_radix(&s[start..start + 2], 16)?);
    }
    Ok(bytes)
}
use lambdaworks_math::field::{
element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
};

use crate::transcript::{IsStarkTranscript, StoneProverTranscript};

use std::num::ParseIntError;

type FE = FieldElement<Stark252PrimeField>;

pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {
(0..s.len())
.step_by(2)
Expand Down Expand Up @@ -251,6 +268,12 @@ mod tests {
}

#[test]
fn test_sample_bytes() {
let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]);
assert_eq!(
transcript.sample(8),
vec![89, 27, 84, 161, 127, 200, 195, 181]
);
fn test_sample_bytes() {
let mut transcript = StoneProverTranscript::new(&[0x01, 0x02]);
assert_eq!(
Expand Down Expand Up @@ -289,7 +312,7 @@ mod tests {
// This corresponds to the following run.
// Air: `Fibonacci2ColsShifted`
// `trace_length`: 4
// `blowup_factor`: 4
// `blowup_factor`: 2
// `fri_number_of_queries`: 1
let mut transcript = StoneProverTranscript::new(&[0xca, 0xfe, 0xca, 0xfe]);
// Send hash of trace commitment
Expand All @@ -304,7 +327,7 @@ mod tests {
"86105fff7b04ed4068ecccb8dbf1ed223bd45cd26c3532d6c80a818dbd4fa7"
)
);
// Send hash of composition poly commitment H
// Send hash of composition poly commitment H(z)
transcript.append_bytes(
&decode_hex("7cdd8d5fe3bd62254a417e2e260e0fed4fccdb6c9005e828446f645879394f38")
.unwrap(),
Expand Down Expand Up @@ -374,7 +397,7 @@ mod tests {
// This corresponds to the following run.
// Air: `Fibonacci2ColsShifted`
// `trace_length`: 4
// `blowup_factor`: 64
// `blowup_factor`: 6
// `fri_number_of_queries`: 2
let mut transcript = StoneProverTranscript::new(&[0xfa, 0xfa, 0xfa, 0xee]);
// Send hash of trace commitment
Expand Down
3 changes: 2 additions & 1 deletion provers/stark/src/verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use lambdaworks_math::{
traits::Serializable,
};

use crate::{prover::get_stone_prover_domain_permutation, transcript::IsStarkTranscript};
use crate::transcript::IsStarkTranscript;

use super::{
config::{BatchedMerkleTreeBackend, FriMerkleTreeBackend},
Expand All @@ -22,6 +22,7 @@ use super::{
grinding::hash_transcript_with_int_and_get_leading_zeros,
proof::{options::ProofOptions, stark::StarkProof},
traits::AIR,
transcript::{batch_sample_challenges, sample_z_ood},
};

struct Challenges<F, A>
Expand Down

0 comments on commit 6db6010

Please sign in to comment.