From d5460804d6605b32475685ae8b09b437b44e8a21 Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 13 Nov 2024 16:55:50 -0300 Subject: [PATCH 01/29] create file --- provers/stark/src/examples/mod.rs | 1 + .../stark/src/examples/read_only_memory.rs | 370 ++++++++++++++++++ 2 files changed, 371 insertions(+) create mode 100644 provers/stark/src/examples/read_only_memory.rs diff --git a/provers/stark/src/examples/mod.rs b/provers/stark/src/examples/mod.rs index 6a8949f7a..ba4f6586e 100644 --- a/provers/stark/src/examples/mod.rs +++ b/provers/stark/src/examples/mod.rs @@ -4,5 +4,6 @@ pub mod fibonacci_2_cols_shifted; pub mod fibonacci_2_columns; pub mod fibonacci_rap; pub mod quadratic_air; +pub mod read_only_memory; pub mod simple_fibonacci; pub mod simple_periodic_cols; diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs new file mode 100644 index 000000000..18084f86f --- /dev/null +++ b/provers/stark/src/examples/read_only_memory.rs @@ -0,0 +1,370 @@ +use std::{marker::PhantomData, ops::Div}; + +use crate::{ + constraints::{ + boundary::{BoundaryConstraint, BoundaryConstraints}, + transition::TransitionConstraint, + }, + context::AirContext, + frame::Frame, + proof::options::ProofOptions, + trace::TraceTable, + traits::AIR, +}; +use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; +use lambdaworks_math::{ + field::{element::FieldElement, traits::IsFFTField}, + helpers::resize_to_next_power_of_two, + traits::ByteConversion, +}; + +#[derive(Clone)] +struct FibConstraint { + phantom: PhantomData, +} + +impl FibConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for FibConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 1 + } + + fn constraint_idx(&self) -> usize { + 0 + } + + fn end_exemptions(&self) -> usize { + // NOTE: This is hard-coded for the example of steps = 16 in the integration tests. 
+ // If that number changes in the test, this should be changed too or the test will fail. + 3 + 32 - 16 - 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + _rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + let third_step = frame.get_evaluation_step(2); + + let a0 = first_step.get_main_evaluation_element(0, 0); + let a1 = second_step.get_main_evaluation_element(0, 0); + let a2 = third_step.get_main_evaluation_element(0, 0); + + let res = a2 - a1 - a0; + + transition_evaluations[self.constraint_idx()] = res; + } +} + +#[derive(Clone)] +struct PermutationConstraint { + phantom: PhantomData, +} + +impl PermutationConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for PermutationConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 2 + } + + fn constraint_idx(&self) -> usize { + 1 + } + + fn end_exemptions(&self) -> usize { + 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + // Auxiliary constraints + let z_i = first_step.get_aux_evaluation_element(0, 0); + let z_i_plus_one = second_step.get_aux_evaluation_element(0, 0); + let gamma = &rap_challenges[0]; + + let a_i = first_step.get_main_evaluation_element(0, 0); + let b_i = first_step.get_main_evaluation_element(0, 1); + + let res = z_i_plus_one * (b_i + gamma) - z_i * (a_i + gamma); + + transition_evaluations[self.constraint_idx()] = res; + } +} + +pub struct FibonacciRAP +where + F: IsFFTField, +{ + context: AirContext, + trace_length: usize, + pub_inputs: FibonacciRAPPublicInputs, + transition_constraints: Vec>>, +} + 
+#[derive(Clone, Debug)] +pub struct FibonacciRAPPublicInputs +where + F: IsFFTField, +{ + pub steps: usize, + pub a0: FieldElement, + pub a1: FieldElement, +} + +impl AIR for FibonacciRAP +where + F: IsFFTField + Send + Sync + 'static, + FieldElement: ByteConversion, +{ + type Field = F; + type FieldExtension = F; + type PublicInputs = FibonacciRAPPublicInputs; + + const STEP_SIZE: usize = 1; + + fn new( + trace_length: usize, + pub_inputs: &Self::PublicInputs, + proof_options: &ProofOptions, + ) -> Self { + let transition_constraints: Vec< + Box>, + > = vec![ + Box::new(FibConstraint::new()), + Box::new(PermutationConstraint::new()), + ]; + + let exemptions = 3 + trace_length - pub_inputs.steps - 1; + + let context = AirContext { + proof_options: proof_options.clone(), + trace_columns: 3, + transition_offsets: vec![0, 1, 2], + transition_exemptions: vec![exemptions, 1], + num_transition_constraints: transition_constraints.len(), + }; + + Self { + context, + trace_length, + pub_inputs: pub_inputs.clone(), + transition_constraints, + } + } + + fn build_auxiliary_trace( + &self, + main_trace: &TraceTable, + challenges: &[FieldElement], + ) -> TraceTable { + let main_segment_cols = main_trace.columns(); + let not_perm = &main_segment_cols[0]; + let perm = &main_segment_cols[1]; + let gamma = &challenges[0]; + + let trace_len = main_trace.n_rows(); + + let mut aux_col = Vec::new(); + for i in 0..trace_len { + if i == 0 { + aux_col.push(FieldElement::::one()); + } else { + let z_i = &aux_col[i - 1]; + let n_p_term = not_perm[i - 1].clone() + gamma; + let p_term = &perm[i - 1] + gamma; + + aux_col.push(z_i * n_p_term.div(p_term)); + } + } + TraceTable::from_columns(vec![aux_col], 0, 1) + } + + fn build_rap_challenges( + &self, + transcript: &mut impl IsTranscript, + ) -> Vec> { + vec![transcript.sample_field_element()] + } + + fn trace_layout(&self) -> (usize, usize) { + (2, 1) + } + + fn boundary_constraints( + &self, + _rap_challenges: &[FieldElement], + ) -> 
BoundaryConstraints { + // Main boundary constraints + let a0 = + BoundaryConstraint::new_simple_main(0, FieldElement::::one()); + let a1 = + BoundaryConstraint::new_simple_main(1, FieldElement::::one()); + + // Auxiliary boundary constraints + let a0_aux = BoundaryConstraint::new_aux(0, 0, FieldElement::::one()); + + BoundaryConstraints::from_constraints(vec![a0, a1, a0_aux]) + // BoundaryConstraints::from_constraints(vec![a0, a1]) + } + + fn transition_constraints( + &self, + ) -> &Vec>> { + &self.transition_constraints + } + + fn context(&self) -> &AirContext { + &self.context + } + + fn composition_poly_degree_bound(&self) -> usize { + self.trace_length() + } + + fn trace_length(&self) -> usize { + self.trace_length + } + + fn pub_inputs(&self) -> &Self::PublicInputs { + &self.pub_inputs + } + + fn compute_transition_verifier( + &self, + frame: &Frame, + periodic_values: &[FieldElement], + rap_challenges: &[FieldElement], + ) -> Vec> { + self.compute_transition_prover(frame, periodic_values, rap_challenges) + } +} + +pub fn fibonacci_rap_trace( + initial_values: [FieldElement; 2], + trace_length: usize, +) -> TraceTable { + let mut fib_seq: Vec> = vec![]; + + fib_seq.push(initial_values[0].clone()); + fib_seq.push(initial_values[1].clone()); + + for i in 2..(trace_length) { + fib_seq.push(fib_seq[i - 1].clone() + fib_seq[i - 2].clone()); + } + + let last_value = fib_seq[trace_length - 1].clone(); + let mut fib_permuted = fib_seq.clone(); + fib_permuted[0] = last_value; + fib_permuted[trace_length - 1] = initial_values[0].clone(); + + fib_seq.push(FieldElement::::zero()); + fib_permuted.push(FieldElement::::zero()); + let mut trace_cols = vec![fib_seq, fib_permuted]; + resize_to_next_power_of_two(&mut trace_cols); + + TraceTable::from_columns(trace_cols, 2, 1) +} + +#[cfg(test)] +mod test { + use super::*; + use lambdaworks_math::field::fields::u64_prime_field::FE17; + + #[test] + fn test_build_fibonacci_rap_trace() { + // The fibonacci RAP trace should have two 
columns: + // * The usual fibonacci sequence column + // * The permuted fibonacci sequence column. The first and last elements are permuted. + // Also, a 0 is appended at the end of both columns. The reason for this can be read in + // https://hackmd.io/@aztec-network/plonk-arithmetiization-air#RAPs---PAIRs-with-interjected-verifier-randomness + + let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 8); + let mut expected_trace = vec![ + vec![ + FE17::one(), + FE17::one(), + FE17::from(2), + FE17::from(3), + FE17::from(5), + FE17::from(8), + FE17::from(13), + FE17::from(21), + FE17::zero(), + ], + vec![ + FE17::from(21), + FE17::one(), + FE17::from(2), + FE17::from(3), + FE17::from(5), + FE17::from(8), + FE17::from(13), + FE17::one(), + FE17::zero(), + ], + ]; + resize_to_next_power_of_two(&mut expected_trace); + + assert_eq!(trace.columns(), expected_trace); + } + + #[test] + fn aux_col() { + let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 64); + let trace_cols = trace.columns(); + + let not_perm = trace_cols[0].clone(); + let perm = trace_cols[1].clone(); + let gamma = FE17::from(10); + + assert_eq!(perm.len(), not_perm.len()); + let trace_len = not_perm.len(); + + let mut aux_col = Vec::new(); + for i in 0..trace_len { + if i == 0 { + aux_col.push(FE17::one()); + } else { + let z_i = aux_col[i - 1]; + let n_p_term = not_perm[i - 1] + gamma; + let p_term = perm[i - 1] + gamma; + + aux_col.push(z_i * n_p_term.div(p_term)); + } + } + + assert_eq!(aux_col.last().unwrap(), &FE17::one()); + } +} From 7ff029063ac4caf7ef6eac2e5189a30630c569b8 Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 13 Nov 2024 17:21:25 -0300 Subject: [PATCH 02/29] continuity and single value constraint --- .../stark/src/examples/read_only_memory.rs | 73 +++++++++++++++---- 1 file changed, 60 insertions(+), 13 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 18084f86f..51e32c2ca 100644 --- 
a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -19,11 +19,11 @@ use lambdaworks_math::{ }; #[derive(Clone)] -struct FibConstraint { +struct ContinuityConstraint { phantom: PhantomData, } -impl FibConstraint { +impl ContinuityConstraint { pub fn new() -> Self { Self { phantom: PhantomData, @@ -31,12 +31,12 @@ impl FibConstraint { } } -impl TransitionConstraint for FibConstraint +impl TransitionConstraint for ContinuityConstraint where F: IsFFTField + Send + Sync, { fn degree(&self) -> usize { - 1 + 2 } fn constraint_idx(&self) -> usize { @@ -44,9 +44,56 @@ where } fn end_exemptions(&self) -> usize { - // NOTE: This is hard-coded for the example of steps = 16 in the integration tests. - // If that number changes in the test, this should be changed too or the test will fail. - 3 + 32 - 16 - 1 + // NOTE: We are assuming that hte trace has as length a power of 2. + 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + _rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a0 = first_step.get_main_evaluation_element(0, 2); + let a1 = second_step.get_main_evaluation_element(0, 2); + let res = (a1 - a0) * (a1 - a0 - FieldElement::::one()); + + transition_evaluations[self.constraint_idx()] = res; + } +} + +#[derive(Clone)] +struct SingleValueConstraint { + phantom: PhantomData, +} + +impl SingleValueConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for SingleValueConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 2 + } + + fn constraint_idx(&self) -> usize { + 1 + } + + fn end_exemptions(&self) -> usize { + // NOTE: We are assuming that hte trace has as length a power of 2. 
+ 1 } fn evaluate( @@ -58,13 +105,13 @@ where ) { let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let third_step = frame.get_evaluation_step(2); - let a0 = first_step.get_main_evaluation_element(0, 0); - let a1 = second_step.get_main_evaluation_element(0, 0); - let a2 = third_step.get_main_evaluation_element(0, 0); + let a0 = first_step.get_main_evaluation_element(0, 2); + let a1 = second_step.get_main_evaluation_element(0, 2); + let v0 = first_step.get_main_evaluation_element(0, 3); + let v1 = second_step.get_main_evaluation_element(0, 3); - let res = a2 - a1 - a0; + let res = (v1 - v0) * (a1 - a0 - FieldElement::::one()); transition_evaluations[self.constraint_idx()] = res; } @@ -162,7 +209,7 @@ where let transition_constraints: Vec< Box>, > = vec![ - Box::new(FibConstraint::new()), + Box::new(ContinuityConstraint::new()), Box::new(PermutationConstraint::new()), ]; From e9929700ef034c0613a2401bbe26ee734878141e Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Wed, 13 Nov 2024 18:00:22 -0300 Subject: [PATCH 03/29] imp air --- .../stark/src/examples/read_only_memory.rs | 51 +++++++++---------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 51e32c2ca..1332fadf4 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -213,13 +213,11 @@ where Box::new(PermutationConstraint::new()), ]; - let exemptions = 3 + trace_length - pub_inputs.steps - 1; - let context = AirContext { proof_options: proof_options.clone(), - trace_columns: 3, - transition_offsets: vec![0, 1, 2], - transition_exemptions: vec![exemptions, 1], + trace_columns: 5, + transition_offsets: vec![0, 1], + transition_exemptions: vec![1], num_transition_constraints: transition_constraints.len(), }; @@ -237,24 +235,26 @@ where challenges: &[FieldElement], ) -> TraceTable { let 
main_segment_cols = main_trace.columns(); - let not_perm = &main_segment_cols[0]; - let perm = &main_segment_cols[1]; - let gamma = &challenges[0]; + let a = &main_segment_cols[0]; + let v = &main_segment_cols[1]; + let a_perm = &main_segment_cols[2]; + let v_perm = &main_segment_cols[3]; + let z = &challenges[0]; + let alpha = &challenges[1]; let trace_len = main_trace.n_rows(); let mut aux_col = Vec::new(); - for i in 0..trace_len { - if i == 0 { - aux_col.push(FieldElement::::one()); - } else { - let z_i = &aux_col[i - 1]; - let n_p_term = not_perm[i - 1].clone() + gamma; - let p_term = &perm[i - 1] + gamma; - - aux_col.push(z_i * n_p_term.div(p_term)); - } + let num = z - (&a[0] + alpha * &v[0]); + let den = z - (&a_perm[0] + alpha * &v_perm[0]); + aux_col.push(num / den); + + for i in 0..trace_len - 1 { + let num = (z - (&a[i + 1] + alpha * &v[i + 1])) * &aux_col[i]; + let den = z - (&a_perm[i + 1] + alpha * &v_perm[i + 1]); + aux_col.push(num / den); } + TraceTable::from_columns(vec![aux_col], 0, 1) } @@ -262,27 +262,24 @@ where &self, transcript: &mut impl IsTranscript, ) -> Vec> { - vec![transcript.sample_field_element()] + vec![ + transcript.sample_field_element(), + transcript.sample_field_element(), + ] } fn trace_layout(&self) -> (usize, usize) { - (2, 1) + (4, 1) } fn boundary_constraints( &self, _rap_challenges: &[FieldElement], ) -> BoundaryConstraints { - // Main boundary constraints - let a0 = - BoundaryConstraint::new_simple_main(0, FieldElement::::one()); - let a1 = - BoundaryConstraint::new_simple_main(1, FieldElement::::one()); - // Auxiliary boundary constraints let a0_aux = BoundaryConstraint::new_aux(0, 0, FieldElement::::one()); - BoundaryConstraints::from_constraints(vec![a0, a1, a0_aux]) + BoundaryConstraints::from_constraints(vec![a0_aux]) // BoundaryConstraints::from_constraints(vec![a0, a1]) } From 821e1f628672c0a0cb1c670d0d1b7fdb1fea6d95 Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 13 Nov 2024 18:01:50 -0300 Subject: [PATCH 04/29] 
permutation constraint --- .../stark/src/examples/read_only_memory.rs | 51 ++++++++----------- 1 file changed, 21 insertions(+), 30 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 1332fadf4..f2d2d2938 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -58,9 +58,9 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a0 = first_step.get_main_evaluation_element(0, 2); - let a1 = second_step.get_main_evaluation_element(0, 2); - let res = (a1 - a0) * (a1 - a0 - FieldElement::::one()); + let a_perm0 = first_step.get_main_evaluation_element(0, 2); + let a_perm1 = second_step.get_main_evaluation_element(0, 2); + let res = (a_perm1 - a_perm0) * (a_perm1 - a_perm0 - FieldElement::::one()); transition_evaluations[self.constraint_idx()] = res; } @@ -106,12 +106,12 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a0 = first_step.get_main_evaluation_element(0, 2); - let a1 = second_step.get_main_evaluation_element(0, 2); - let v0 = first_step.get_main_evaluation_element(0, 3); - let v1 = second_step.get_main_evaluation_element(0, 3); + let a_perm0 = first_step.get_main_evaluation_element(0, 2); + let a_perm1 = second_step.get_main_evaluation_element(0, 2); + let v_perm0 = first_step.get_main_evaluation_element(0, 3); + let v_perm1 = second_step.get_main_evaluation_element(0, 3); - let res = (v1 - v0) * (a1 - a0 - FieldElement::::one()); + let res = (v_perm1 - v_perm0) * (a_perm1 - a_perm0 - FieldElement::::one()); transition_evaluations[self.constraint_idx()] = res; } @@ -139,7 +139,7 @@ where } fn constraint_idx(&self) -> usize { - 1 + 2 } fn end_exemptions(&self) -> usize { @@ -157,47 +157,38 @@ where let second_step = frame.get_evaluation_step(1); // Auxiliary constraints - let z_i = 
first_step.get_aux_evaluation_element(0, 0); - let z_i_plus_one = second_step.get_aux_evaluation_element(0, 0); - let gamma = &rap_challenges[0]; + let p0 = first_step.get_aux_evaluation_element(0, 0); + let p1 = second_step.get_aux_evaluation_element(0, 0); + let alpha = &rap_challenges[0]; + let z = &rap_challenges[1]; + let a1 = second_step.get_main_evaluation_element(0, 0); + let v1 = second_step.get_main_evaluation_element(0, 1); + let a_perm_1 = second_step.get_main_evaluation_element(0, 2); + let v_perm_1 = second_step.get_main_evaluation_element(0, 3); - let a_i = first_step.get_main_evaluation_element(0, 0); - let b_i = first_step.get_main_evaluation_element(0, 1); - - let res = z_i_plus_one * (b_i + gamma) - z_i * (a_i + gamma); + let res = (z - (a_perm_1 + alpha * v_perm_1)) * p1 - (z - (a1 + alpha * v1)) * p0; transition_evaluations[self.constraint_idx()] = res; } } -pub struct FibonacciRAP +pub struct ReadOnlyRAP where F: IsFFTField, { context: AirContext, trace_length: usize, - pub_inputs: FibonacciRAPPublicInputs, transition_constraints: Vec>>, } -#[derive(Clone, Debug)] -pub struct FibonacciRAPPublicInputs -where - F: IsFFTField, -{ - pub steps: usize, - pub a0: FieldElement, - pub a1: FieldElement, -} - -impl AIR for FibonacciRAP +impl AIR for ReadOnlyRAP where F: IsFFTField + Send + Sync + 'static, FieldElement: ByteConversion, { type Field = F; type FieldExtension = F; - type PublicInputs = FibonacciRAPPublicInputs; + type PublicInputs = (); const STEP_SIZE: usize = 1; From 56de567d84be95d6a7bc3ef3d21d88eb9cd5b071 Mon Sep 17 00:00:00 2001 From: Nicole Date: Thu, 14 Nov 2024 12:06:53 -0300 Subject: [PATCH 05/29] evaluate function for SingleValueConstraint --- provers/stark/src/examples/read_only_memory.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index f2d2d2938..4b6c1fe4f 100644 --- 
a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -215,7 +215,6 @@ where Self { context, trace_length, - pub_inputs: pub_inputs.clone(), transition_constraints, } } @@ -293,7 +292,7 @@ where } fn pub_inputs(&self) -> &Self::PublicInputs { - &self.pub_inputs + &() } fn compute_transition_verifier( From 52912f12884819f2271203ba1b47c5b4b3f1752a Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 14 Nov 2024 12:18:27 -0300 Subject: [PATCH 06/29] add last element constraint --- provers/stark/src/examples/read_only_memory.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 4b6c1fe4f..c9fb9eb9b 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -267,7 +267,11 @@ where _rap_challenges: &[FieldElement], ) -> BoundaryConstraints { // Auxiliary boundary constraints - let a0_aux = BoundaryConstraint::new_aux(0, 0, FieldElement::::one()); + let a0_aux = BoundaryConstraint::new_aux( + 0, + self.trace_length - 1, + FieldElement::::one(), + ); BoundaryConstraints::from_constraints(vec![a0_aux]) // BoundaryConstraints::from_constraints(vec![a0, a1]) From 0b5ac615e878ccda7b6583630b22e24b45d2ca0b Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 14 Nov 2024 15:20:39 -0300 Subject: [PATCH 07/29] add public inputs --- .../stark/src/examples/read_only_memory.rs | 53 ++++++++++++++----- 1 file changed, 41 insertions(+), 12 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index c9fb9eb9b..505b99749 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -178,9 +178,21 @@ where { context: AirContext, trace_length: usize, + pub_inputs: ReadOnlyPublicInputs, transition_constraints: Vec>>, } 
+#[derive(Clone, Debug)] +pub struct ReadOnlyPublicInputs +where + F: IsFFTField, +{ + pub a0: FieldElement, + pub v0: FieldElement, + pub a_perm0: FieldElement, + pub v_perm0: FieldElement, +} + impl AIR for ReadOnlyRAP where F: IsFFTField + Send + Sync + 'static, @@ -188,7 +200,7 @@ where { type Field = F; type FieldExtension = F; - type PublicInputs = (); + type PublicInputs = ReadOnlyPublicInputs; const STEP_SIZE: usize = 1; @@ -208,23 +220,23 @@ where proof_options: proof_options.clone(), trace_columns: 5, transition_offsets: vec![0, 1], - transition_exemptions: vec![1], num_transition_constraints: transition_constraints.len(), }; Self { context, trace_length, + pub_inputs: pub_inputs.clone(), transition_constraints, } } fn build_auxiliary_trace( &self, - main_trace: &TraceTable, + trace: &mut TraceTable, challenges: &[FieldElement], - ) -> TraceTable { - let main_segment_cols = main_trace.columns(); + ) { + let main_segment_cols = trace.columns_main(); let a = &main_segment_cols[0]; let v = &main_segment_cols[1]; let a_perm = &main_segment_cols[2]; @@ -232,7 +244,7 @@ where let z = &challenges[0]; let alpha = &challenges[1]; - let trace_len = main_trace.n_rows(); + let trace_len = trace.num_rows(); let mut aux_col = Vec::new(); let num = z - (&a[0] + alpha * &v[0]); @@ -245,7 +257,9 @@ where aux_col.push(num / den); } - TraceTable::from_columns(vec![aux_col], 0, 1) + for (i, aux_elem) in aux_col.iter().enumerate().take(trace.num_rows()) { + trace.set_aux(i, 0, aux_elem.clone()) + } } fn build_rap_challenges( @@ -264,17 +278,32 @@ where fn boundary_constraints( &self, - _rap_challenges: &[FieldElement], + rap_challenges: &[FieldElement], ) -> BoundaryConstraints { + let a0 = &self.pub_inputs.a0; + let v0 = &self.pub_inputs.v0; + let a_perm0 = &self.pub_inputs.a_perm0; + let v_perm0 = &self.pub_inputs.v_perm0; + let alpha = &rap_challenges[0]; + let z = &rap_challenges[1]; + // Main boundary constraints + let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); 
+ let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); + let c3 = BoundaryConstraint::new_main(2, 0, a_perm0.clone()); + let c4 = BoundaryConstraint::new_main(3, 0, v_perm0.clone()); + // Auxiliary boundary constraints - let a0_aux = BoundaryConstraint::new_aux( + let num = z - (a0 + alpha * v0); + let den = z - (a_perm0 + alpha * v_perm0); + let p0_value = (num / den); + let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); + let c_aux2 = BoundaryConstraint::new_aux( 0, self.trace_length - 1, FieldElement::::one(), ); - BoundaryConstraints::from_constraints(vec![a0_aux]) - // BoundaryConstraints::from_constraints(vec![a0, a1]) + BoundaryConstraints::from_constraints(vec![c1, c2, c3, c4, c_aux1, c_aux2]) } fn transition_constraints( @@ -296,7 +325,7 @@ where } fn pub_inputs(&self) -> &Self::PublicInputs { - &() + &self.pub_inputs } fn compute_transition_verifier( From f9118377864b3d96c168253e6257bc1d8eefbdd2 Mon Sep 17 00:00:00 2001 From: jotabulacios Date: Thu, 14 Nov 2024 17:56:35 -0300 Subject: [PATCH 08/29] add sort function for the trace --- .../stark/src/examples/read_only_memory.rs | 146 ++++++++---------- 1 file changed, 62 insertions(+), 84 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 505b99749..9d41d6d98 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -12,6 +12,7 @@ use crate::{ traits::AIR, }; use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; +use lambdaworks_math::field::traits::IsPrimeField; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, helpers::resize_to_next_power_of_two, @@ -338,30 +339,23 @@ where } } -pub fn fibonacci_rap_trace( - initial_values: [FieldElement; 2], - trace_length: usize, -) -> TraceTable { - let mut fib_seq: Vec> = vec![]; - - fib_seq.push(initial_values[0].clone()); - fib_seq.push(initial_values[1].clone()); - - for i in 
2..(trace_length) { - fib_seq.push(fib_seq[i - 1].clone() + fib_seq[i - 2].clone()); - } - - let last_value = fib_seq[trace_length - 1].clone(); - let mut fib_permuted = fib_seq.clone(); - fib_permuted[0] = last_value; - fib_permuted[trace_length - 1] = initial_values[0].clone(); - - fib_seq.push(FieldElement::::zero()); - fib_permuted.push(FieldElement::::zero()); - let mut trace_cols = vec![fib_seq, fib_permuted]; - resize_to_next_power_of_two(&mut trace_cols); - - TraceTable::from_columns(trace_cols, 2, 1) +pub fn sort_rap_trace( + address: Vec>, + value: Vec>, +) -> TraceTable { + let mut address_value_pairs: Vec<_> = address.iter().zip(value.iter()).collect(); + + address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); + + let (sorted_address, sorted_value): (Vec>, Vec>) = + address_value_pairs + .into_iter() + .map(|(addr, val)| (addr.clone(), val.clone())) + .unzip(); + let main_columns = vec![address.clone(), value.clone(), sorted_address, sorted_value]; + // create a vector with zeros of the same length as the main columns + let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; + TraceTable::from_columns(main_columns, vec![zero_vec], 1) } #[cfg(test)] @@ -370,68 +364,52 @@ mod test { use lambdaworks_math::field::fields::u64_prime_field::FE17; #[test] - fn test_build_fibonacci_rap_trace() { - // The fibonacci RAP trace should have two columns: - // * The usual fibonacci sequence column - // * The permuted fibonacci sequence column. The first and last elements are permuted. - // Also, a 0 is appended at the end of both columns. 
The reason for this can be read in - // https://hackmd.io/@aztec-network/plonk-arithmetiization-air#RAPs---PAIRs-with-interjected-verifier-randomness - - let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 8); - let mut expected_trace = vec![ - vec![ - FE17::one(), - FE17::one(), - FE17::from(2), - FE17::from(3), - FE17::from(5), - FE17::from(8), - FE17::from(13), - FE17::from(21), - FE17::zero(), - ], - vec![ - FE17::from(21), - FE17::one(), - FE17::from(2), - FE17::from(3), - FE17::from(5), - FE17::from(8), - FE17::from(13), - FE17::one(), - FE17::zero(), - ], + fn test_sort_rap_trace() { + let address_col = vec![ + FE17::from(5), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(1), + FE17::from(6), + FE17::from(7), + FE17::from(8), + ]; + let value_col = vec![ + FE17::from(50), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(10), + FE17::from(60), + FE17::from(70), + FE17::from(80), ]; - resize_to_next_power_of_two(&mut expected_trace); - - assert_eq!(trace.columns(), expected_trace); - } - - #[test] - fn aux_col() { - let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 64); - let trace_cols = trace.columns(); - - let not_perm = trace_cols[0].clone(); - let perm = trace_cols[1].clone(); - let gamma = FE17::from(10); - - assert_eq!(perm.len(), not_perm.len()); - let trace_len = not_perm.len(); - let mut aux_col = Vec::new(); - for i in 0..trace_len { - if i == 0 { - aux_col.push(FE17::one()); - } else { - let z_i = aux_col[i - 1]; - let n_p_term = not_perm[i - 1] + gamma; - let p_term = perm[i - 1] + gamma; - - aux_col.push(z_i * n_p_term.div(p_term)); - } - } + let sorted_trace = sort_rap_trace(address_col.clone(), value_col.clone()); + + let expected_sorted_addresses = vec![ + FE17::from(1), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(5), + FE17::from(6), + FE17::from(7), + FE17::from(8), + ]; + let expected_sorted_values = vec![ + FE17::from(10), + FE17::from(20), + FE17::from(30), + 
FE17::from(40), + FE17::from(50), + FE17::from(60), + FE17::from(70), + FE17::from(80), + ]; - assert_eq!(aux_col.last().unwrap(), &FE17::one()); + assert_eq!(sorted_trace.columns_main()[2], expected_sorted_addresses); + assert_eq!(sorted_trace.columns_main()[3], expected_sorted_values); } } From e25efde96ec35a474b29231125b158eae27d65ef Mon Sep 17 00:00:00 2001 From: Nicole Date: Thu, 14 Nov 2024 18:32:10 -0300 Subject: [PATCH 09/29] add integration test --- .../stark/src/examples/read_only_memory.rs | 1 + provers/stark/src/tests/integration_tests.rs | 47 +++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 9d41d6d98..1dca83f37 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -214,6 +214,7 @@ where Box>, > = vec![ Box::new(ContinuityConstraint::new()), + Box::new(SingleValueConstraint::new()), Box::new(PermutationConstraint::new()), ]; diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index c7f2f6a4c..08604bfc2 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -10,6 +10,7 @@ use crate::{ fibonacci_2_columns::{self, Fibonacci2ColsAIR}, fibonacci_rap::{fibonacci_rap_trace, FibonacciRAP, FibonacciRAPPublicInputs}, quadratic_air::{self, QuadraticAIR, QuadraticPublicInputs}, + read_only_memory::{sort_rap_trace, ReadOnlyPublicInputs, ReadOnlyRAP}, simple_fibonacci::{self, FibonacciAIR, FibonacciPublicInputs}, simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, // simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, }, @@ -247,3 +248,49 @@ fn test_prove_bit_flags() { StoneProverTranscript::new(&[]), )); } + +#[test_log::test] +fn test_prove_read_only_memory() { + let address_col = vec![ + FieldElement::::from(4), + 
FieldElement::::from(5), + FieldElement::::from(2), + FieldElement::::from(6), + FieldElement::::from(8), + FieldElement::::from(7), + FieldElement::::from(1), + FieldElement::::from(3), + ]; + let value_col = vec![ + FieldElement::::from(1), + FieldElement::::from(2), + FieldElement::::from(3), + FieldElement::::from(4), + FieldElement::::from(5), + FieldElement::::from(6), + FieldElement::::from(7), + FieldElement::::from(8), + ]; + + let pub_inputs = ReadOnlyPublicInputs { + a0: FieldElement::::from(4), + v0: FieldElement::::from(1), + a_perm0: FieldElement::::from(1), + v_perm0: FieldElement::::from(7), + }; + let mut trace = sort_rap_trace(address_col, value_col); + let proof_options = ProofOptions::default_test_options(); + let proof = Prover::>::prove( + &mut trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); + assert!(Verifier::>::verify( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); +} From b975d36093b4a8049fd038e7308f91146d69d7fe Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 15 Nov 2024 11:36:25 -0300 Subject: [PATCH 10/29] fix clippy --- provers/stark/src/examples/read_only_memory.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 1dca83f37..5f76559ae 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -1,4 +1,4 @@ -use std::{marker::PhantomData, ops::Div}; +use std::marker::PhantomData; use crate::{ constraints::{ @@ -15,7 +15,6 @@ use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; use lambdaworks_math::field::traits::IsPrimeField; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, - helpers::resize_to_next_power_of_two, traits::ByteConversion, }; @@ -297,7 +296,7 @@ where // Auxiliary boundary constraints let num = z - (a0 + alpha * v0); 
let den = z - (a_perm0 + alpha * v_perm0); - let p0_value = (num / den); + let p0_value = num / den; let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); let c_aux2 = BoundaryConstraint::new_aux( 0, From ca1fe3d46db49575745f6487b0a2fed1ae4bd26f Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Fri, 15 Nov 2024 12:21:21 -0300 Subject: [PATCH 11/29] fix constraints --- provers/stark/src/examples/read_only_memory.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 5f76559ae..ac6e84447 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -159,8 +159,8 @@ where // Auxiliary constraints let p0 = first_step.get_aux_evaluation_element(0, 0); let p1 = second_step.get_aux_evaluation_element(0, 0); - let alpha = &rap_challenges[0]; - let z = &rap_challenges[1]; + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; let a1 = second_step.get_main_evaluation_element(0, 0); let v1 = second_step.get_main_evaluation_element(0, 1); let a_perm_1 = second_step.get_main_evaluation_element(0, 2); @@ -285,8 +285,9 @@ where let v0 = &self.pub_inputs.v0; let a_perm0 = &self.pub_inputs.a_perm0; let v_perm0 = &self.pub_inputs.v_perm0; - let alpha = &rap_challenges[0]; - let z = &rap_challenges[1]; + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; + // Main boundary constraints let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); @@ -297,6 +298,7 @@ where let num = z - (a0 + alpha * v0); let den = z - (a_perm0 + alpha * v_perm0); let p0_value = num / den; + let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); let c_aux2 = BoundaryConstraint::new_aux( 0, From cceb719d572a7c90842f41784a57c11c3e8ab512 Mon Sep 17 00:00:00 2001 From: Nicole Date: Fri, 15 Nov 2024 18:02:18 -0300 Subject: [PATCH 12/29] add 
documentation --- .../stark/src/examples/read_only_memory.rs | 63 +++++++++++-------- provers/stark/src/tests/integration_tests.rs | 40 ++++++------ 2 files changed, 56 insertions(+), 47 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index ac6e84447..edc5a1d6f 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -18,6 +18,8 @@ use lambdaworks_math::{ traits::ByteConversion, }; +/// This condition ensures the continuity in a read-only memory structure, preserving strict ordering. +/// Equation based on Cairo Whitepaper section 9.7.2 #[derive(Clone)] struct ContinuityConstraint { phantom: PhantomData, @@ -44,7 +46,7 @@ where } fn end_exemptions(&self) -> usize { - // NOTE: We are assuming that hte trace has as length a power of 2. + // NOTE: We are assuming that the trace has as length a power of 2. 1 } @@ -58,15 +60,18 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a_perm0 = first_step.get_main_evaluation_element(0, 2); - let a_perm1 = second_step.get_main_evaluation_element(0, 2); - let res = (a_perm1 - a_perm0) * (a_perm1 - a_perm0 - FieldElement::::one()); + let a_sorted0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted1 = second_step.get_main_evaluation_element(0, 2); + // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address + let res = (a_sorted1 - a_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); transition_evaluations[self.constraint_idx()] = res; } } #[derive(Clone)] +/// Transition constraint that ensures that same addresses have same values, making the memory read-only. +/// Equation based on Cairo Whitepaper section 9.7.2 struct SingleValueConstraint { phantom: PhantomData, } @@ -92,7 +97,7 @@ where } fn end_exemptions(&self) -> usize { - // NOTE: We are assuming that hte trace has as length a power of 2. 
+ // NOTE: We are assuming that the trace has as length a power of 2. 1 } @@ -106,16 +111,18 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a_perm0 = first_step.get_main_evaluation_element(0, 2); - let a_perm1 = second_step.get_main_evaluation_element(0, 2); - let v_perm0 = first_step.get_main_evaluation_element(0, 3); - let v_perm1 = second_step.get_main_evaluation_element(0, 3); - - let res = (v_perm1 - v_perm0) * (a_perm1 - a_perm0 - FieldElement::::one()); + let a_sorted0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted0 = first_step.get_main_evaluation_element(0, 3); + let v_sorted1 = second_step.get_main_evaluation_element(0, 3); + // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 + let res = (v_sorted1 - v_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); transition_evaluations[self.constraint_idx()] = res; } } +/// Permutation constraint ensures that the values are permuted in the memory. 
+/// Equation based on Cairo Whitepaper section 9.7.2 #[derive(Clone)] struct PermutationConstraint { @@ -163,10 +170,10 @@ where let alpha = &rap_challenges[1]; let a1 = second_step.get_main_evaluation_element(0, 0); let v1 = second_step.get_main_evaluation_element(0, 1); - let a_perm_1 = second_step.get_main_evaluation_element(0, 2); - let v_perm_1 = second_step.get_main_evaluation_element(0, 3); - - let res = (z - (a_perm_1 + alpha * v_perm_1)) * p1 - (z - (a1 + alpha * v1)) * p0; + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + // (z - (a'_{i+1} + α * v'_{i+1})) * p_{i+1} = (z - (a_{i+1} + α * v_{i+1})) * p_i + let res = (z - (a_sorted_1 + alpha * v_sorted_1)) * p1 - (z - (a1 + alpha * v1)) * p0; transition_evaluations[self.constraint_idx()] = res; } @@ -189,8 +196,8 @@ where { pub a0: FieldElement, pub v0: FieldElement, - pub a_perm0: FieldElement, - pub v_perm0: FieldElement, + pub a_sorted0: FieldElement, + pub v_sorted0: FieldElement, } impl AIR for ReadOnlyRAP @@ -240,8 +247,8 @@ where let main_segment_cols = trace.columns_main(); let a = &main_segment_cols[0]; let v = &main_segment_cols[1]; - let a_perm = &main_segment_cols[2]; - let v_perm = &main_segment_cols[3]; + let a_sorted = &main_segment_cols[2]; + let v_sorted = &main_segment_cols[3]; let z = &challenges[0]; let alpha = &challenges[1]; @@ -249,12 +256,12 @@ where let mut aux_col = Vec::new(); let num = z - (&a[0] + alpha * &v[0]); - let den = z - (&a_perm[0] + alpha * &v_perm[0]); + let den = z - (&a_sorted[0] + alpha * &v_sorted[0]); aux_col.push(num / den); - + // Apply the same equation given in the permutation case to the rest of the trace for i in 0..trace_len - 1 { let num = (z - (&a[i + 1] + alpha * &v[i + 1])) * &aux_col[i]; - let den = z - (&a_perm[i + 1] + alpha * &v_perm[i + 1]); + let den = z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1]); aux_col.push(num / den); } @@ -283,20 +290,20 @@ where ) -> 
BoundaryConstraints { let a0 = &self.pub_inputs.a0; let v0 = &self.pub_inputs.v0; - let a_perm0 = &self.pub_inputs.a_perm0; - let v_perm0 = &self.pub_inputs.v_perm0; + let a_sorted0 = &self.pub_inputs.a_sorted0; + let v_sorted0 = &self.pub_inputs.v_sorted0; let z = &rap_challenges[0]; let alpha = &rap_challenges[1]; // Main boundary constraints let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); - let c3 = BoundaryConstraint::new_main(2, 0, a_perm0.clone()); - let c4 = BoundaryConstraint::new_main(3, 0, v_perm0.clone()); + let c3 = BoundaryConstraint::new_main(2, 0, a_sorted0.clone()); + let c4 = BoundaryConstraint::new_main(3, 0, v_sorted0.clone()); // Auxiliary boundary constraints let num = z - (a0 + alpha * v0); - let den = z - (a_perm0 + alpha * v_perm0); + let den = z - (a_sorted0 + alpha * v_sorted0); let p0_value = num / den; let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); @@ -341,6 +348,8 @@ where } } +/// Given the adress and value columns, it returns the trace table with 5 columns, which are: +/// Addres, Value, Adress Sorted, Value Sorted and a Column of Zeroes (where we'll insert the auxiliary colunn). 
pub fn sort_rap_trace( address: Vec>, value: Vec>, diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 08604bfc2..7513caad0 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -252,31 +252,31 @@ fn test_prove_bit_flags() { #[test_log::test] fn test_prove_read_only_memory() { let address_col = vec![ - FieldElement::::from(4), - FieldElement::::from(5), - FieldElement::::from(2), - FieldElement::::from(6), - FieldElement::::from(8), - FieldElement::::from(7), - FieldElement::::from(1), - FieldElement::::from(3), + FieldElement::::from(3), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(2), // a2 + FieldElement::::from(3), // a3 + FieldElement::::from(4), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(1), // a6 + FieldElement::::from(3), // a7 ]; let value_col = vec![ - FieldElement::::from(1), - FieldElement::::from(2), - FieldElement::::from(3), - FieldElement::::from(4), - FieldElement::::from(5), - FieldElement::::from(6), - FieldElement::::from(7), - FieldElement::::from(8), + FieldElement::::from(10), // v0 + FieldElement::::from(5), // v1 + FieldElement::::from(5), // v2 + FieldElement::::from(10), // v3 + FieldElement::::from(25), // v4 + FieldElement::::from(25), // v5 + FieldElement::::from(7), // v6 + FieldElement::::from(10), // v7 ]; let pub_inputs = ReadOnlyPublicInputs { - a0: FieldElement::::from(4), - v0: FieldElement::::from(1), - a_perm0: FieldElement::::from(1), - v_perm0: FieldElement::::from(7), + a0: FieldElement::::from(3), + v0: FieldElement::::from(10), + a_sorted0: FieldElement::::from(1), // a6 + v_sorted0: FieldElement::::from(7), // v6 }; let mut trace = sort_rap_trace(address_col, value_col); let proof_options = ProofOptions::default_test_options(); From 57241311981792918a26ba935532aad2a94e337a Mon Sep 17 00:00:00 2001 From: Joaquin Carletti Date: Thu, 21 Nov 2024 10:57:44 -0300 Subject: 
[PATCH 13/29] handle possible panic --- provers/stark/src/examples/read_only_memory.rs | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index edc5a1d6f..1c93802b0 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -65,7 +65,10 @@ where // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address let res = (a_sorted1 - a_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); - transition_evaluations[self.constraint_idx()] = res; + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } } } @@ -118,7 +121,10 @@ where // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 let res = (v_sorted1 - v_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); - transition_evaluations[self.constraint_idx()] = res; + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } } } /// Permutation constraint ensures that the values are permuted in the memory. @@ -175,7 +181,10 @@ where // (z - (a'_{i+1} + α * v'_{i+1})) * p_{i+1} = (z - (a_{i+1} + α * v_{i+1})) * p_i let res = (z - (a_sorted_1 + alpha * v_sorted_1)) * p1 - (z - (a1 + alpha * v1)) * p0; - transition_evaluations[self.constraint_idx()] = res; + // The eval always exists, except if the constraint idx were incorrectly defined. 
+ if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } } } From 0fc7284c1bb4566b7557ffc2f88a329662bb3e6f Mon Sep 17 00:00:00 2001 From: jotabulacios Date: Thu, 21 Nov 2024 11:09:39 -0300 Subject: [PATCH 14/29] rename variables --- provers/stark/src/examples/read_only_memory.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 1c93802b0..95526c9c3 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -60,10 +60,10 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a_sorted0 = first_step.get_main_evaluation_element(0, 2); - let a_sorted1 = second_step.get_main_evaluation_element(0, 2); + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address - let res = (a_sorted1 - a_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); + let res = (a_sorted_1 - a_sorted_0) * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); // The eval always exists, except if the constraint idx were incorrectly defined. 
if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { From 59c8b2681bf85c79d3bd5c484569d4ceb3a63612 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 25 Nov 2024 14:59:02 -0300 Subject: [PATCH 15/29] fix doc --- provers/stark/src/examples/read_only_memory.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory.rs b/provers/stark/src/examples/read_only_memory.rs index 95526c9c3..8b5b01b07 100644 --- a/provers/stark/src/examples/read_only_memory.rs +++ b/provers/stark/src/examples/read_only_memory.rs @@ -71,10 +71,9 @@ where } } } - -#[derive(Clone)] /// Transition constraint that ensures that same addresses have same values, making the memory read-only. /// Equation based on Cairo Whitepaper section 9.7.2 +#[derive(Clone)] struct SingleValueConstraint { phantom: PhantomData, } @@ -129,7 +128,6 @@ where } /// Permutation constraint ensures that the values are permuted in the memory. /// Equation based on Cairo Whitepaper section 9.7.2 - #[derive(Clone)] struct PermutationConstraint { phantom: PhantomData, From 02283aaa3eed96e6ee3be6750133f65e265c0102 Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 27 Nov 2024 12:11:48 -0300 Subject: [PATCH 16/29] FRI verification fail --- provers/stark/src/examples/mod.rs | 1 + .../src/examples/read_only_memory_logup.rs | 462 ++++++++++++++++++ provers/stark/src/tests/integration_tests.rs | 47 ++ 3 files changed, 510 insertions(+) create mode 100644 provers/stark/src/examples/read_only_memory_logup.rs diff --git a/provers/stark/src/examples/mod.rs b/provers/stark/src/examples/mod.rs index ba4f6586e..f21b62568 100644 --- a/provers/stark/src/examples/mod.rs +++ b/provers/stark/src/examples/mod.rs @@ -5,5 +5,6 @@ pub mod fibonacci_2_columns; pub mod fibonacci_rap; pub mod quadratic_air; pub mod read_only_memory; +pub mod read_only_memory_logup; pub mod simple_fibonacci; pub mod simple_periodic_cols; diff --git 
a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs new file mode 100644 index 000000000..bf21d7a8e --- /dev/null +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -0,0 +1,462 @@ +use std::marker::PhantomData; + +use crate::{ + constraints::{ + boundary::{BoundaryConstraint, BoundaryConstraints}, + transition::TransitionConstraint, + }, + context::AirContext, + frame::Frame, + proof::options::ProofOptions, + trace::TraceTable, + traits::AIR, +}; +use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; +use lambdaworks_math::field::traits::IsPrimeField; +use lambdaworks_math::{ + field::{element::FieldElement, traits::IsFFTField}, + traits::ByteConversion, +}; + +/// This condition ensures the continuity in a read-only memory structure, preserving strict ordering. +/// Equation based on Cairo Whitepaper section 9.7.2 +#[derive(Clone)] +struct ContinuityConstraint { + phantom: PhantomData, +} + +impl ContinuityConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for ContinuityConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 2 + } + + fn constraint_idx(&self) -> usize { + 0 + } + + fn end_exemptions(&self) -> usize { + // NOTE: We are assuming that the trace has as length a power of 2. 
+ 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + _rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address + let res = (a_sorted_1 - a_sorted_0) * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } +} +/// Transition constraint that ensures that same addresses have same values, making the memory read-only. +/// Equation based on Cairo Whitepaper section 9.7.2 +#[derive(Clone)] +struct SingleValueConstraint { + phantom: PhantomData, +} + +impl SingleValueConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for SingleValueConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 2 + } + + fn constraint_idx(&self) -> usize { + 1 + } + + fn end_exemptions(&self) -> usize { + // NOTE: We are assuming that the trace has as length a power of 2. 
+ 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + _rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted0 = first_step.get_main_evaluation_element(0, 3); + let v_sorted1 = second_step.get_main_evaluation_element(0, 3); + // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 + let res = (v_sorted1 - v_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } +} +/// Permutation constraint ensures that the values are permuted in the memory. +/// Equation based on Cairo Whitepaper section 9.7.2 +#[derive(Clone)] +struct PermutationConstraint { + phantom: PhantomData, +} + +impl PermutationConstraint { + pub fn new() -> Self { + Self { + phantom: PhantomData, + } + } +} + +impl TransitionConstraint for PermutationConstraint +where + F: IsFFTField + Send + Sync, +{ + fn degree(&self) -> usize { + 3 + } + + fn constraint_idx(&self) -> usize { + 2 + } + + fn end_exemptions(&self) -> usize { + 1 + } + + fn evaluate( + &self, + frame: &Frame, + transition_evaluations: &mut [FieldElement], + _periodic_values: &[FieldElement], + rap_challenges: &[FieldElement], + ) { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + // Auxiliary constraints + let p0 = first_step.get_aux_evaluation_element(0, 0); + let p1 = second_step.get_aux_evaluation_element(0, 0); + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; + let a1 = second_step.get_main_evaluation_element(0, 0); + let v1 = second_step.get_main_evaluation_element(0, 1); + 
let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + + // // plookup version: (z - (a'_{i+1} + α * v'_{i+1})) * p_{i+1} = (z - (a_{i+1} + α * v_{i+1})) * p_i + // let res = (z - (a_sorted_1 + alpha * v_sorted_1)) * p1 - (z - (a1 + alpha * v1)) * p0; + + // Logup: s_{i+1} = s_i + (1 / (z - (a_{i+1} + α * v_{i+1}))) - (1 / (z - (a'_{i+1} + α * v'_{i+1}))). + // Then, s_{i+1} + (1 / (z - (a'_{i+1} + α * v'_{i+1}))) = s_i + (1 / (z - (a_{i+1} + α * v_{i+1}))). + + // let unsorted_term = (z - (a1 + alpha * v1)).inv().unwrap(); + // let sorted_term = (z - (a_sorted_1 + alpha * v_sorted_1)).inv().unwrap(); + // let res = p0 + unsorted_term - sorted_term - p1; + + let unsorted_term = z - (a1 + alpha * v1); + let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1); + let res = p0 * &unsorted_term * &sorted_term + &sorted_term + - &unsorted_term + - p1 * unsorted_term * sorted_term; + + // The eval always exists, except if the constraint idx were incorrectly defined. 
+ if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } +} + +pub struct LogReadOnlyRAP +where + F: IsFFTField, +{ + context: AirContext, + trace_length: usize, + pub_inputs: LogReadOnlyPublicInputs, + transition_constraints: Vec>>, +} + +#[derive(Clone, Debug)] +pub struct LogReadOnlyPublicInputs +where + F: IsFFTField, +{ + pub a0: FieldElement, + pub v0: FieldElement, + pub a_sorted0: FieldElement, + pub v_sorted0: FieldElement, +} + +impl AIR for LogReadOnlyRAP +where + F: IsFFTField + Send + Sync + 'static, + FieldElement: ByteConversion, +{ + type Field = F; + type FieldExtension = F; + type PublicInputs = LogReadOnlyPublicInputs; + + const STEP_SIZE: usize = 1; + + fn new( + trace_length: usize, + pub_inputs: &Self::PublicInputs, + proof_options: &ProofOptions, + ) -> Self { + let transition_constraints: Vec< + Box>, + > = vec![ + Box::new(ContinuityConstraint::new()), + Box::new(SingleValueConstraint::new()), + Box::new(PermutationConstraint::new()), + ]; + + let context = AirContext { + proof_options: proof_options.clone(), + trace_columns: 5, + transition_offsets: vec![0, 1], + num_transition_constraints: transition_constraints.len(), + }; + + Self { + context, + trace_length, + pub_inputs: pub_inputs.clone(), + transition_constraints, + } + } + + fn build_auxiliary_trace( + &self, + trace: &mut TraceTable, + challenges: &[FieldElement], + ) { + let main_segment_cols = trace.columns_main(); + let a = &main_segment_cols[0]; + let v = &main_segment_cols[1]; + let a_sorted = &main_segment_cols[2]; + let v_sorted = &main_segment_cols[3]; + let z = &challenges[0]; + let alpha = &challenges[1]; + + let trace_len = trace.num_rows(); + let mut aux_col = Vec::new(); + + let unsorted_term = (z - (&a[0] + alpha * &v[0])).inv().unwrap(); + let sorted_term = (z - (&a_sorted[0] + alpha * &v_sorted[0])).inv().unwrap(); + aux_col.push(unsorted_term - sorted_term); + + // Apply the same equation given in the permutation case to 
the rest of the trace + for i in 0..trace_len - 1 { + let unsorted_term = (z - (&a[i + 1] + alpha * &v[i + 1])).inv().unwrap(); + let sorted_term = (z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1])) + .inv() + .unwrap(); + aux_col.push(&aux_col[i] + unsorted_term - sorted_term); + + // // plookup version: + // let num = (z - (&a[i + 1] + alpha * &v[i + 1])) * &aux_col[i]; + // let den = z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1]); + // aux_col.push(num / den); + } + + for (i, aux_elem) in aux_col.iter().enumerate().take(trace.num_rows()) { + trace.set_aux(i, 0, aux_elem.clone()) + } + } + + fn build_rap_challenges( + &self, + transcript: &mut impl IsTranscript, + ) -> Vec> { + vec![ + transcript.sample_field_element(), + transcript.sample_field_element(), + ] + } + + fn trace_layout(&self) -> (usize, usize) { + (4, 1) + } + + fn boundary_constraints( + &self, + rap_challenges: &[FieldElement], + ) -> BoundaryConstraints { + let a0 = &self.pub_inputs.a0; + let v0 = &self.pub_inputs.v0; + let a_sorted0 = &self.pub_inputs.a_sorted0; + let v_sorted0 = &self.pub_inputs.v_sorted0; + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; + + // Main boundary constraints + let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); + let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); + let c3 = BoundaryConstraint::new_main(2, 0, a_sorted0.clone()); + let c4 = BoundaryConstraint::new_main(3, 0, v_sorted0.clone()); + + // Auxiliary boundary constraints + + // // plookup version: + // let num = z - (a0 + alpha * v0); + // let den = z - (a_sorted0 + alpha * v_sorted0); + // let p0_value = num / den; + + let unsorted_term = (z - (a0 + alpha * v0)).inv().unwrap(); + let sorted_term = (z - (a_sorted0 + alpha * v_sorted0)).inv().unwrap(); + let p0_value = unsorted_term - sorted_term; + + let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); + let c_aux2 = BoundaryConstraint::new_aux( + 0, + self.trace_length - 1, + //logup version: + FieldElement::::zero(), + 
); + + BoundaryConstraints::from_constraints(vec![c1, c2, c3, c4, c_aux1, c_aux2]) + } + + fn transition_constraints( + &self, + ) -> &Vec>> { + &self.transition_constraints + } + + fn context(&self) -> &AirContext { + &self.context + } + + fn composition_poly_degree_bound(&self) -> usize { + self.trace_length() + } + + fn trace_length(&self) -> usize { + self.trace_length + } + + fn pub_inputs(&self) -> &Self::PublicInputs { + &self.pub_inputs + } + + fn compute_transition_verifier( + &self, + frame: &Frame, + periodic_values: &[FieldElement], + rap_challenges: &[FieldElement], + ) -> Vec> { + self.compute_transition_prover(frame, periodic_values, rap_challenges) + } +} + +/// Given the adress and value columns, it returns the trace table with 5 columns, which are: +/// Addres, Value, Adress Sorted, Value Sorted and a Column of Zeroes (where we'll insert the auxiliary column). +pub fn sort_rap_trace( + address: Vec>, + value: Vec>, +) -> TraceTable { + let mut address_value_pairs: Vec<_> = address.iter().zip(value.iter()).collect(); + + address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); + + let (sorted_address, sorted_value): (Vec>, Vec>) = + address_value_pairs + .into_iter() + .map(|(addr, val)| (addr.clone(), val.clone())) + .unzip(); + let main_columns = vec![address.clone(), value.clone(), sorted_address, sorted_value]; + // create a vector with zeros of the same length as the main columns + let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; + TraceTable::from_columns(main_columns, vec![zero_vec], 1) +} + +#[cfg(test)] +mod test { + use super::*; + use lambdaworks_math::field::fields::u64_prime_field::FE17; + + #[test] + fn test_sort_rap_trace() { + let address_col = vec![ + FE17::from(5), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(1), + FE17::from(6), + FE17::from(7), + FE17::from(8), + ]; + let value_col = vec![ + FE17::from(50), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(10), + 
FE17::from(60), + FE17::from(70), + FE17::from(80), + ]; + + let sorted_trace = sort_rap_trace(address_col.clone(), value_col.clone()); + + let expected_sorted_addresses = vec![ + FE17::from(1), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(5), + FE17::from(6), + FE17::from(7), + FE17::from(8), + ]; + let expected_sorted_values = vec![ + FE17::from(10), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(50), + FE17::from(60), + FE17::from(70), + FE17::from(80), + ]; + + assert_eq!(sorted_trace.columns_main()[2], expected_sorted_addresses); + assert_eq!(sorted_trace.columns_main()[3], expected_sorted_values); + } +} diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 7513caad0..198e693f6 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -11,6 +11,7 @@ use crate::{ fibonacci_rap::{fibonacci_rap_trace, FibonacciRAP, FibonacciRAPPublicInputs}, quadratic_air::{self, QuadraticAIR, QuadraticPublicInputs}, read_only_memory::{sort_rap_trace, ReadOnlyPublicInputs, ReadOnlyRAP}, + read_only_memory_logup::{LogReadOnlyPublicInputs, LogReadOnlyRAP}, simple_fibonacci::{self, FibonacciAIR, FibonacciPublicInputs}, simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, // simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, }, @@ -294,3 +295,49 @@ fn test_prove_read_only_memory() { StoneProverTranscript::new(&[]) )); } + +#[test_log::test] +fn test_prove_log_read_only_memory() { + let address_col = vec![ + FieldElement::::from(3), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(2), // a2 + FieldElement::::from(3), // a3 + FieldElement::::from(4), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(1), // a6 + FieldElement::::from(3), // a7 + ]; + let value_col = vec![ + FieldElement::::from(10), // v0 + FieldElement::::from(5), // v1 + FieldElement::::from(5), 
// v2 + FieldElement::::from(10), // v3 + FieldElement::::from(25), // v4 + FieldElement::::from(25), // v5 + FieldElement::::from(7), // v6 + FieldElement::::from(10), // v7 + ]; + + let pub_inputs = LogReadOnlyPublicInputs { + a0: FieldElement::::from(3), + v0: FieldElement::::from(10), + a_sorted0: FieldElement::::from(1), // a6 + v_sorted0: FieldElement::::from(7), // v6 + }; + let mut trace = sort_rap_trace(address_col, value_col); + let proof_options = ProofOptions::default_test_options(); + let proof = Prover::>::prove( + &mut trace, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]), + ) + .unwrap(); + assert!(Verifier::>::verify( + &proof, + &pub_inputs, + &proof_options, + StoneProverTranscript::new(&[]) + )); +} From 1167f54c93fee879fa9dca2e16588b4f9be48956 Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 27 Nov 2024 16:46:01 -0300 Subject: [PATCH 17/29] different way of doing logup rap --- .../src/examples/read_only_memory_logup.rs | 145 +++++++++++++----- provers/stark/src/tests/integration_tests.rs | 33 ++-- 2 files changed, 128 insertions(+), 50 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index bf21d7a8e..075fe1b4d 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -11,6 +11,7 @@ use crate::{ trace::TraceTable, traits::AIR, }; +use itertools::Itertools; use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; use lambdaworks_math::field::traits::IsPrimeField; use lambdaworks_math::{ @@ -47,7 +48,7 @@ where fn end_exemptions(&self) -> usize { // NOTE: We are assuming that the trace has as length a power of 2. - 1 + 4 } fn evaluate( @@ -100,7 +101,7 @@ where fn end_exemptions(&self) -> usize { // NOTE: We are assuming that the trace has as length a power of 2. 
- 1 + 4 } fn evaluate( @@ -154,7 +155,7 @@ where } fn end_exemptions(&self) -> usize { - 1 + 4 } fn evaluate( @@ -168,30 +169,21 @@ where let second_step = frame.get_evaluation_step(1); // Auxiliary constraints - let p0 = first_step.get_aux_evaluation_element(0, 0); - let p1 = second_step.get_aux_evaluation_element(0, 0); + let s0 = first_step.get_aux_evaluation_element(0, 0); + let s1 = second_step.get_aux_evaluation_element(0, 0); let z = &rap_challenges[0]; let alpha = &rap_challenges[1]; let a1 = second_step.get_main_evaluation_element(0, 0); let v1 = second_step.get_main_evaluation_element(0, 1); let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); - - // // plookup version: (z - (a'_{i+1} + α * v'_{i+1})) * p_{i+1} = (z - (a_{i+1} + α * v_{i+1})) * p_i - // let res = (z - (a_sorted_1 + alpha * v_sorted_1)) * p1 - (z - (a1 + alpha * v1)) * p0; - - // Logup: s_{i+1} = s_i + (1 / (z - (a_{i+1} + α * v_{i+1}))) - (1 / (z - (a'_{i+1} + α * v'_{i+1}))). - // Then, s_{i+1} + (1 / (z - (a'_{i+1} + α * v'_{i+1}))) = s_i + (1 / (z - (a_{i+1} + α * v_{i+1}))). - - // let unsorted_term = (z - (a1 + alpha * v1)).inv().unwrap(); - // let sorted_term = (z - (a_sorted_1 + alpha * v_sorted_1)).inv().unwrap(); - // let res = p0 + unsorted_term - sorted_term - p1; + let m = second_step.get_main_evaluation_element(0, 4); let unsorted_term = z - (a1 + alpha * v1); let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1); - let res = p0 * &unsorted_term * &sorted_term + &sorted_term - - &unsorted_term - - p1 * unsorted_term * sorted_term; + let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term + - &sorted_term + - s1 * unsorted_term * sorted_term; // The eval always exists, except if the constraint idx were incorrectly defined. 
if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { @@ -219,6 +211,7 @@ where pub v0: FieldElement, pub a_sorted0: FieldElement, pub v_sorted0: FieldElement, + pub m0: FieldElement, } impl AIR for LogReadOnlyRAP @@ -247,7 +240,7 @@ where let context = AirContext { proof_options: proof_options.clone(), - trace_columns: 5, + trace_columns: 6, transition_offsets: vec![0, 1], num_transition_constraints: transition_constraints.len(), }; @@ -270,6 +263,7 @@ where let v = &main_segment_cols[1]; let a_sorted = &main_segment_cols[2]; let v_sorted = &main_segment_cols[3]; + let m = &main_segment_cols[4]; let z = &challenges[0]; let alpha = &challenges[1]; @@ -278,7 +272,7 @@ where let unsorted_term = (z - (&a[0] + alpha * &v[0])).inv().unwrap(); let sorted_term = (z - (&a_sorted[0] + alpha * &v_sorted[0])).inv().unwrap(); - aux_col.push(unsorted_term - sorted_term); + aux_col.push(&m[0] * sorted_term - unsorted_term); // Apply the same equation given in the permutation case to the rest of the trace for i in 0..trace_len - 1 { @@ -286,12 +280,7 @@ where let sorted_term = (z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1])) .inv() .unwrap(); - aux_col.push(&aux_col[i] + unsorted_term - sorted_term); - - // // plookup version: - // let num = (z - (&a[i + 1] + alpha * &v[i + 1])) * &aux_col[i]; - // let den = z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1]); - // aux_col.push(num / den); + aux_col.push(&aux_col[i] + &m[i + 1] * sorted_term - unsorted_term); } for (i, aux_elem) in aux_col.iter().enumerate().take(trace.num_rows()) { @@ -310,7 +299,7 @@ where } fn trace_layout(&self) -> (usize, usize) { - (4, 1) + (5, 1) } fn boundary_constraints( @@ -321,6 +310,7 @@ where let v0 = &self.pub_inputs.v0; let a_sorted0 = &self.pub_inputs.a_sorted0; let v_sorted0 = &self.pub_inputs.v_sorted0; + let m0 = &self.pub_inputs.m0; let z = &rap_challenges[0]; let alpha = &rap_challenges[1]; @@ -331,21 +321,14 @@ where let c4 = BoundaryConstraint::new_main(3, 0, 
v_sorted0.clone()); // Auxiliary boundary constraints - - // // plookup version: - // let num = z - (a0 + alpha * v0); - // let den = z - (a_sorted0 + alpha * v_sorted0); - // let p0_value = num / den; - let unsorted_term = (z - (a0 + alpha * v0)).inv().unwrap(); let sorted_term = (z - (a_sorted0 + alpha * v_sorted0)).inv().unwrap(); - let p0_value = unsorted_term - sorted_term; + let p0_value = m0 * sorted_term - unsorted_term; let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); let c_aux2 = BoundaryConstraint::new_aux( 0, self.trace_length - 1, - //logup version: FieldElement::::zero(), ); @@ -405,6 +388,38 @@ pub fn sort_rap_trace( TraceTable::from_columns(main_columns, vec![zero_vec], 1) } +pub fn read_only_logup_trace( + addresses: Vec>, + values: Vec>, +) -> TraceTable { + let mut address_value_pairs: Vec<_> = addresses.iter().zip(values.iter()).collect(); + address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); + let mut multiplicities = Vec::new(); + let mut sorted_addresses = Vec::new(); + let mut sorted_values = Vec::new(); + for (key, group) in &address_value_pairs.into_iter().group_by(|&(a, v)| (a, v)) { + let group_vec: Vec<_> = group.collect(); + multiplicities.push(FieldElement::::from(group_vec.len() as u64)); + sorted_addresses.push(key.0.clone()); + sorted_values.push(key.1.clone()); + } + sorted_addresses.resize(addresses.len(), FieldElement::::zero()); + sorted_values.resize(addresses.len(), FieldElement::::zero()); + multiplicities.resize(addresses.len(), FieldElement::::zero()); + + let main_columns = vec![ + addresses.clone(), + values.clone(), + sorted_addresses, + sorted_values, + multiplicities, + ]; + + // create a vector with zeros of the same length as the main columns + let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; + TraceTable::from_columns(main_columns, vec![zero_vec], 1) +} + #[cfg(test)] mod test { use super::*; @@ -459,4 +474,64 @@ mod test { assert_eq!(sorted_trace.columns_main()[2], 
expected_sorted_addresses); assert_eq!(sorted_trace.columns_main()[3], expected_sorted_values); } + + #[test] + fn test_logup_trace() { + let address_col = vec![ + FE17::from(5), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(1), + FE17::from(5), + FE17::from(6), + FE17::from(5), + ]; + let value_col = vec![ + FE17::from(50), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(10), + FE17::from(50), + FE17::from(60), + FE17::from(50), + ]; + + let logup_trace = read_only_logup_trace(address_col, value_col); + + let expected_sorted_addresses = vec![ + FE17::from(1), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(5), + FE17::from(6), + FE17::zero(), + FE17::zero(), + ]; + let expected_sorted_values = vec![ + FE17::from(10), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(50), + FE17::from(60), + FE17::zero(), + FE17::zero(), + ]; + let expected_multiplicities = vec![ + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + FE17::from(3), + FE17::one(), + FE17::zero(), + FE17::zero(), + ]; + // assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses); + // assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); + assert_eq!(logup_trace.columns_main()[4], expected_multiplicities); + } } diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 198e693f6..bd5e946c9 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -11,7 +11,7 @@ use crate::{ fibonacci_rap::{fibonacci_rap_trace, FibonacciRAP, FibonacciRAPPublicInputs}, quadratic_air::{self, QuadraticAIR, QuadraticPublicInputs}, read_only_memory::{sort_rap_trace, ReadOnlyPublicInputs, ReadOnlyRAP}, - read_only_memory_logup::{LogReadOnlyPublicInputs, LogReadOnlyRAP}, + read_only_memory_logup::{read_only_logup_trace, LogReadOnlyPublicInputs, LogReadOnlyRAP}, simple_fibonacci::{self, FibonacciAIR, 
FibonacciPublicInputs}, simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, // simple_periodic_cols::{self, SimplePeriodicAIR, SimplePeriodicPublicInputs}, }, @@ -309,23 +309,24 @@ fn test_prove_log_read_only_memory() { FieldElement::::from(3), // a7 ]; let value_col = vec![ - FieldElement::::from(10), // v0 - FieldElement::::from(5), // v1 - FieldElement::::from(5), // v2 - FieldElement::::from(10), // v3 - FieldElement::::from(25), // v4 - FieldElement::::from(25), // v5 - FieldElement::::from(7), // v6 - FieldElement::::from(10), // v7 + FieldElement::::from(30), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(20), // v2 + FieldElement::::from(30), // v3 + FieldElement::::from(40), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(10), // v6 + FieldElement::::from(30), // v7 ]; let pub_inputs = LogReadOnlyPublicInputs { a0: FieldElement::::from(3), - v0: FieldElement::::from(10), + v0: FieldElement::::from(30), a_sorted0: FieldElement::::from(1), // a6 - v_sorted0: FieldElement::::from(7), // v6 + v_sorted0: FieldElement::::from(10), // v6 + m0: FieldElement::::one(), }; - let mut trace = sort_rap_trace(address_col, value_col); + let mut trace = read_only_logup_trace(address_col, value_col); let proof_options = ProofOptions::default_test_options(); let proof = Prover::>::prove( &mut trace, @@ -334,10 +335,12 @@ fn test_prove_log_read_only_memory() { StoneProverTranscript::new(&[]), ) .unwrap(); - assert!(Verifier::>::verify( + let result = Verifier::>::verify( &proof, &pub_inputs, &proof_options, - StoneProverTranscript::new(&[]) - )); + StoneProverTranscript::new(&[]), + ); + println!("RESULT: {:?}", result); + // assert_eq!(result, true); } From ff4fc4323cdb673ea84bf1f8fadcaf1a9735f2fe Mon Sep 17 00:00:00 2001 From: Nicole Date: Wed, 27 Nov 2024 17:37:19 -0300 Subject: [PATCH 18/29] add m0 public input boundary constraint --- provers/stark/src/examples/read_only_memory_logup.rs | 3 ++- 
provers/stark/src/tests/integration_tests.rs | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 075fe1b4d..05f7c3dc7 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -319,6 +319,7 @@ where let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); let c3 = BoundaryConstraint::new_main(2, 0, a_sorted0.clone()); let c4 = BoundaryConstraint::new_main(3, 0, v_sorted0.clone()); + let c5 = BoundaryConstraint::new_main(4, 0, m0.clone()); // Auxiliary boundary constraints let unsorted_term = (z - (a0 + alpha * v0)).inv().unwrap(); @@ -332,7 +333,7 @@ where FieldElement::::zero(), ); - BoundaryConstraints::from_constraints(vec![c1, c2, c3, c4, c_aux1, c_aux2]) + BoundaryConstraints::from_constraints(vec![c1, c2, c3, c4, c5, c_aux1, c_aux2]) } fn transition_constraints( diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index bd5e946c9..b48ba0ee0 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -341,6 +341,6 @@ fn test_prove_log_read_only_memory() { &proof_options, StoneProverTranscript::new(&[]), ); - println!("RESULT: {:?}", result); + //println!("RESULT: {:?}", result); // assert_eq!(result, true); } From 0243f882d1dc965a8dcbf9cd9c68fea2d8e14083 Mon Sep 17 00:00:00 2001 From: Nicole Date: Thu, 28 Nov 2024 11:47:01 -0300 Subject: [PATCH 19/29] change end exemptions to 1 --- .../src/examples/read_only_memory_logup.rs | 82 ++++++++++++++++--- provers/stark/src/tests/integration_tests.rs | 15 ++-- 2 files changed, 78 insertions(+), 19 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 05f7c3dc7..82c9fb98c 100--- 
a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -48,7 +48,7 @@ where fn end_exemptions(&self) -> usize { // NOTE: We are assuming that the trace has as length a power of 2. - 4 + 1 } fn evaluate( @@ -101,7 +101,7 @@ where fn end_exemptions(&self) -> usize { // NOTE: We are assuming that the trace has as length a power of 2. - 4 + 1 } fn evaluate( @@ -155,7 +155,7 @@ where } fn end_exemptions(&self) -> usize { - 4 + 1 } fn evaluate( @@ -404,8 +404,8 @@ pub fn read_only_logup_trace( sorted_addresses.push(key.0.clone()); sorted_values.push(key.1.clone()); } - sorted_addresses.resize(addresses.len(), FieldElement::::zero()); - sorted_values.resize(addresses.len(), FieldElement::::zero()); + sorted_addresses.resize(addresses.len(), sorted_addresses.last().unwrap().clone()); + sorted_values.resize(addresses.len(), sorted_values.last().unwrap().clone()); multiplicities.resize(addresses.len(), FieldElement::::zero()); let main_columns = vec![ @@ -508,8 +508,8 @@ mod test { FE17::from(4), FE17::from(5), FE17::from(6), - FE17::zero(), - FE17::zero(), + FE17::from(6), + FE17::from(6), ]; let expected_sorted_values = vec![ FE17::from(10), @@ -518,8 +518,8 @@ mod test { FE17::from(40), FE17::from(50), FE17::from(60), - FE17::zero(), - FE17::zero(), + FE17::from(60), + FE17::from(60), ]; let expected_multiplicities = vec![ FE17::one(), @@ -531,8 +531,68 @@ mod test { FE17::zero(), FE17::zero(), ]; - // assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses); - // assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); + assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses); + assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); + assert_eq!(logup_trace.columns_main()[4], expected_multiplicities); + } + + #[test] + fn tes_logup_trace_2() { + let address_col = vec![ + FE17::from(3), + FE17::from(7), + FE17::from(2), + FE17::from(8), + FE17::from(4), + 
FE17::from(5), + FE17::from(1), + FE17::from(6), + ]; + let value_col = vec![ + FE17::from(30), + FE17::from(70), + FE17::from(20), + FE17::from(80), + FE17::from(40), + FE17::from(50), + FE17::from(10), + FE17::from(60), + ]; + + let logup_trace = read_only_logup_trace(address_col, value_col); + + let expected_sorted_addresses = vec![ + FE17::from(1), + FE17::from(2), + FE17::from(3), + FE17::from(4), + FE17::from(5), + FE17::from(6), + FE17::from(7), + FE17::from(8), + ]; + let expected_sorted_values = vec![ + FE17::from(10), + FE17::from(20), + FE17::from(30), + FE17::from(40), + FE17::from(50), + FE17::from(60), + FE17::from(70), + FE17::from(80), + ]; + let expected_multiplicities = vec![ + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + FE17::one(), + ]; + assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses); + assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); assert_eq!(logup_trace.columns_main()[4], expected_multiplicities); } } diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index b48ba0ee0..63bdee973 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -300,23 +300,23 @@ fn test_prove_read_only_memory() { fn test_prove_log_read_only_memory() { let address_col = vec![ FieldElement::::from(3), // a0 - FieldElement::::from(2), // a1 + FieldElement::::from(7), // a1 FieldElement::::from(2), // a2 - FieldElement::::from(3), // a3 + FieldElement::::from(8), // a3 FieldElement::::from(4), // a4 FieldElement::::from(5), // a5 FieldElement::::from(1), // a6 - FieldElement::::from(3), // a7 + FieldElement::::from(6), // a7 ]; let value_col = vec![ FieldElement::::from(30), // v0 - FieldElement::::from(20), // v1 + FieldElement::::from(70), // v1 FieldElement::::from(20), // v2 - FieldElement::::from(30), // v3 + FieldElement::::from(80), // v3 
FieldElement::::from(40), // v4 FieldElement::::from(50), // v5 FieldElement::::from(10), // v6 - FieldElement::::from(30), // v7 + FieldElement::::from(60), // v7 ]; let pub_inputs = LogReadOnlyPublicInputs { @@ -341,6 +341,5 @@ fn test_prove_log_read_only_memory() { &proof_options, StoneProverTranscript::new(&[]), ); - //println!("RESULT: {:?}", result); - // assert_eq!(result, true); + println!("RESULT: {:?}", result); } From dba0fc0d39b086cc266e5347d5c08aa302dcad56 Mon Sep 17 00:00:00 2001 From: Nicole Date: Fri, 29 Nov 2024 14:21:42 -0300 Subject: [PATCH 20/29] change cp degree bound --- .../src/examples/read_only_memory_logup.rs | 254 +++++++----------- provers/stark/src/tests/integration_tests.rs | 23 +- 2 files changed, 112 insertions(+), 165 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 82c9fb98c..2bbf2c134 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -1,4 +1,4 @@ -use std::marker::PhantomData; +use std::{iter::once, marker::PhantomData}; use crate::{ constraints::{ @@ -19,8 +19,7 @@ use lambdaworks_math::{ traits::ByteConversion, }; -/// This condition ensures the continuity in a read-only memory structure, preserving strict ordering. -/// Equation based on Cairo Whitepaper section 9.7.2 +/// Transition Constraint that ensures the continuity of the sorted address column of a memory. #[derive(Clone)] struct ContinuityConstraint { phantom: PhantomData, @@ -72,8 +71,7 @@ where } } } -/// Transition constraint that ensures that same addresses have same values, making the memory read-only. -/// Equation based on Cairo Whitepaper section 9.7.2 +/// Transition constraint that ensures that same addresses have same values, making the sorted memory read-only. 
#[derive(Clone)] struct SingleValueConstraint { phantom: PhantomData, @@ -114,12 +112,12 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - let a_sorted0 = first_step.get_main_evaluation_element(0, 2); - let a_sorted1 = second_step.get_main_evaluation_element(0, 2); - let v_sorted0 = first_step.get_main_evaluation_element(0, 3); - let v_sorted1 = second_step.get_main_evaluation_element(0, 3); + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_0 = first_step.get_main_evaluation_element(0, 3); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 - let res = (v_sorted1 - v_sorted0) * (a_sorted1 - a_sorted0 - FieldElement::::one()); + let res = (v_sorted_1 - v_sorted_0) * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); // The eval always exists, except if the constraint idx were incorrectly defined. if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { @@ -127,8 +125,9 @@ where } } } -/// Permutation constraint ensures that the values are permuted in the memory. -/// Equation based on Cairo Whitepaper section 9.7.2 +/// Transition constraint that ensures that the sorted columns are a permutation of the original ones. +/// We are using the LogUp construction described in: +/// #[derive(Clone)] struct PermutationConstraint { phantom: PhantomData, @@ -181,6 +180,10 @@ where let unsorted_term = z - (a1 + alpha * v1); let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1); + + // We are using the following LogUp equation: + // s1 = s0 + m / sorted_term - 1/unsorted_term. 
+ // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term: let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term - &sorted_term - s1 * unsorted_term * sorted_term; @@ -192,6 +195,7 @@ where } } +/// AIR for a continuous read-only memory. pub struct LogReadOnlyRAP where F: IsFFTField, @@ -209,8 +213,9 @@ where { pub a0: FieldElement, pub v0: FieldElement, - pub a_sorted0: FieldElement, - pub v_sorted0: FieldElement, + pub a_sorted_0: FieldElement, + pub v_sorted_0: FieldElement, + // The multiplicity of (a_sorted_0, v_sorted_0) pub m0: FieldElement, } @@ -308,8 +313,8 @@ where ) -> BoundaryConstraints { let a0 = &self.pub_inputs.a0; let v0 = &self.pub_inputs.v0; - let a_sorted0 = &self.pub_inputs.a_sorted0; - let v_sorted0 = &self.pub_inputs.v_sorted0; + let a_sorted_0 = &self.pub_inputs.a_sorted_0; + let v_sorted_0 = &self.pub_inputs.v_sorted_0; let m0 = &self.pub_inputs.m0; let z = &rap_challenges[0]; let alpha = &rap_challenges[1]; @@ -317,13 +322,13 @@ where // Main boundary constraints let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); - let c3 = BoundaryConstraint::new_main(2, 0, a_sorted0.clone()); - let c4 = BoundaryConstraint::new_main(3, 0, v_sorted0.clone()); + let c3 = BoundaryConstraint::new_main(2, 0, a_sorted_0.clone()); + let c4 = BoundaryConstraint::new_main(3, 0, v_sorted_0.clone()); let c5 = BoundaryConstraint::new_main(4, 0, m0.clone()); // Auxiliary boundary constraints let unsorted_term = (z - (a0 + alpha * v0)).inv().unwrap(); - let sorted_term = (z - (a_sorted0 + alpha * v_sorted0)).inv().unwrap(); + let sorted_term = (z - (a_sorted_0 + alpha * v_sorted_0)).inv().unwrap(); let p0_value = m0 * sorted_term - unsorted_term; let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); @@ -346,8 +351,11 @@ where &self.context } + // The prover use this function to define the number of parts of the composition polynomial. 
+ // The number of parts will be: composition_poly_degree_bound() / trace_length(). + // Since we have a transition constraint of degree 3, we need the bound to be two times the trace length. fn composition_poly_degree_bound(&self) -> usize { - self.trace_length() + self.trace_length() * 2 } fn trace_length(&self) -> usize { @@ -368,42 +376,29 @@ where } } -/// Given the adress and value columns, it returns the trace table with 5 columns, which are: -/// Addres, Value, Adress Sorted, Value Sorted and a Column of Zeroes (where we'll insert the auxiliary column). -pub fn sort_rap_trace( - address: Vec>, - value: Vec>, -) -> TraceTable { - let mut address_value_pairs: Vec<_> = address.iter().zip(value.iter()).collect(); - - address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); - - let (sorted_address, sorted_value): (Vec>, Vec>) = - address_value_pairs - .into_iter() - .map(|(addr, val)| (addr.clone(), val.clone())) - .unzip(); - let main_columns = vec![address.clone(), value.clone(), sorted_address, sorted_value]; - // create a vector with zeros of the same length as the main columns - let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; - TraceTable::from_columns(main_columns, vec![zero_vec], 1) -} - +/// Return a trace table with an auxiliary column full of zeros (that will be completed by the air) and +/// the following five main columns: The original addresses and values, the sorted addresses and values without +/// repetition and the multiplicities that tell pub fn read_only_logup_trace( addresses: Vec>, values: Vec>, ) -> TraceTable { let mut address_value_pairs: Vec<_> = addresses.iter().zip(values.iter()).collect(); address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); + let mut multiplicities = Vec::new(); let mut sorted_addresses = Vec::new(); let mut sorted_values = Vec::new(); + for (key, group) in &address_value_pairs.into_iter().group_by(|&(a, v)| (a, v)) { let group_vec: Vec<_> = group.collect(); 
multiplicities.push(FieldElement::::from(group_vec.len() as u64)); sorted_addresses.push(key.0.clone()); sorted_values.push(key.1.clone()); } + + // We resize the sorted addresses and values with the last value of each one so they have the + // same number of rows as the original addresses and values. However, their multiplicity should be zero. sorted_addresses.resize(addresses.len(), sorted_addresses.last().unwrap().clone()); sorted_values.resize(addresses.len(), sorted_values.last().unwrap().clone()); multiplicities.resize(addresses.len(), FieldElement::::zero()); @@ -424,120 +419,12 @@ pub fn read_only_logup_trace( #[cfg(test)] mod test { use super::*; - use lambdaworks_math::field::fields::u64_prime_field::FE17; + use lambdaworks_math::field::fields::{ + fft_friendly::stark_252_prime_field::Stark252PrimeField, u64_prime_field::FE17, + }; #[test] - fn test_sort_rap_trace() { - let address_col = vec![ - FE17::from(5), - FE17::from(2), - FE17::from(3), - FE17::from(4), - FE17::from(1), - FE17::from(6), - FE17::from(7), - FE17::from(8), - ]; - let value_col = vec![ - FE17::from(50), - FE17::from(20), - FE17::from(30), - FE17::from(40), - FE17::from(10), - FE17::from(60), - FE17::from(70), - FE17::from(80), - ]; - - let sorted_trace = sort_rap_trace(address_col.clone(), value_col.clone()); - - let expected_sorted_addresses = vec![ - FE17::from(1), - FE17::from(2), - FE17::from(3), - FE17::from(4), - FE17::from(5), - FE17::from(6), - FE17::from(7), - FE17::from(8), - ]; - let expected_sorted_values = vec![ - FE17::from(10), - FE17::from(20), - FE17::from(30), - FE17::from(40), - FE17::from(50), - FE17::from(60), - FE17::from(70), - FE17::from(80), - ]; - - assert_eq!(sorted_trace.columns_main()[2], expected_sorted_addresses); - assert_eq!(sorted_trace.columns_main()[3], expected_sorted_values); - } - - #[test] - fn test_logup_trace() { - let address_col = vec![ - FE17::from(5), - FE17::from(2), - FE17::from(3), - FE17::from(4), - FE17::from(1), - FE17::from(5), - 
FE17::from(6), - FE17::from(5), - ]; - let value_col = vec![ - FE17::from(50), - FE17::from(20), - FE17::from(30), - FE17::from(40), - FE17::from(10), - FE17::from(50), - FE17::from(60), - FE17::from(50), - ]; - - let logup_trace = read_only_logup_trace(address_col, value_col); - - let expected_sorted_addresses = vec![ - FE17::from(1), - FE17::from(2), - FE17::from(3), - FE17::from(4), - FE17::from(5), - FE17::from(6), - FE17::from(6), - FE17::from(6), - ]; - let expected_sorted_values = vec![ - FE17::from(10), - FE17::from(20), - FE17::from(30), - FE17::from(40), - FE17::from(50), - FE17::from(60), - FE17::from(60), - FE17::from(60), - ]; - let expected_multiplicities = vec![ - FE17::one(), - FE17::one(), - FE17::one(), - FE17::one(), - FE17::from(3), - FE17::one(), - FE17::zero(), - FE17::zero(), - ]; - assert_eq!(logup_trace.columns_main()[2], expected_sorted_addresses); - assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); - assert_eq!(logup_trace.columns_main()[4], expected_multiplicities); - } - - #[test] - fn tes_logup_trace_2() { + fn tes_logup_trace_construction() { let address_col = vec![ FE17::from(3), FE17::from(7), @@ -595,4 +482,65 @@ mod test { assert_eq!(logup_trace.columns_main()[3], expected_sorted_values); assert_eq!(logup_trace.columns_main()[4], expected_multiplicities); } + + #[test] + fn test_logup_trace_construction_2() { + let address_col = vec![ + FieldElement::::from(3), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(2), // a2 + FieldElement::::from(3), // a3 + FieldElement::::from(4), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(1), // a6 + FieldElement::::from(3), // a7 + ]; + let value_col = vec![ + FieldElement::::from(30), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(20), // v2 + FieldElement::::from(30), // v3 + FieldElement::::from(40), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(10), // v6 + FieldElement::::from(30), // v7 + ]; + + let 
sorted_address_col = vec![ + FieldElement::::from(1), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(3), // a2 + FieldElement::::from(4), // a3 + FieldElement::::from(5), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(5), // a6 + FieldElement::::from(5), // a7 + ]; + let sorted_value_col = vec![ + FieldElement::::from(10), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(30), // v2 + FieldElement::::from(40), // v3 + FieldElement::::from(50), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(50), // v6 + FieldElement::::from(50), // v7 + ]; + + let multiplicity_col = vec![ + FieldElement::::from(1), // v0 + FieldElement::::from(2), // v1 + FieldElement::::from(3), // v2 + FieldElement::::from(1), // v3 + FieldElement::::from(1), // v4 + FieldElement::::from(0), // v5 + FieldElement::::from(0), // v6 + FieldElement::::from(0), // v7 + ]; + let logup_trace = read_only_logup_trace(address_col, value_col); + + assert_eq!(logup_trace.columns_main()[2], sorted_address_col); + assert_eq!(logup_trace.columns_main()[3], sorted_value_col); + assert_eq!(logup_trace.columns_main()[4], multiplicity_col); + } } diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 63bdee973..8abad0b86 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -300,31 +300,31 @@ fn test_prove_read_only_memory() { fn test_prove_log_read_only_memory() { let address_col = vec![ FieldElement::::from(3), // a0 - FieldElement::::from(7), // a1 + FieldElement::::from(2), // a1 FieldElement::::from(2), // a2 - FieldElement::::from(8), // a3 + FieldElement::::from(3), // a3 FieldElement::::from(4), // a4 FieldElement::::from(5), // a5 FieldElement::::from(1), // a6 - FieldElement::::from(6), // a7 + FieldElement::::from(3), // a7 ]; let value_col = vec![ FieldElement::::from(30), // v0 - FieldElement::::from(70), // v1 + 
FieldElement::::from(20), // v1 FieldElement::::from(20), // v2 - FieldElement::::from(80), // v3 + FieldElement::::from(30), // v3 FieldElement::::from(40), // v4 FieldElement::::from(50), // v5 FieldElement::::from(10), // v6 - FieldElement::::from(60), // v7 + FieldElement::::from(30), // v7 ]; let pub_inputs = LogReadOnlyPublicInputs { a0: FieldElement::::from(3), v0: FieldElement::::from(30), - a_sorted0: FieldElement::::from(1), // a6 - v_sorted0: FieldElement::::from(10), // v6 - m0: FieldElement::::one(), + a_sorted_0: FieldElement::::from(1), + v_sorted_0: FieldElement::::from(10), + m0: FieldElement::::from(1), }; let mut trace = read_only_logup_trace(address_col, value_col); let proof_options = ProofOptions::default_test_options(); @@ -335,11 +335,10 @@ fn test_prove_log_read_only_memory() { StoneProverTranscript::new(&[]), ) .unwrap(); - let result = Verifier::>::verify( + assert!(Verifier::>::verify( &proof, &pub_inputs, &proof_options, StoneProverTranscript::new(&[]), - ); - println!("RESULT: {:?}", result); + )); } From 09a06158a52efad1da01453edb481ed69fc0d0cc Mon Sep 17 00:00:00 2001 From: Nicole Date: Fri, 29 Nov 2024 16:50:54 -0300 Subject: [PATCH 21/29] fix clippy --- provers/stark/src/examples/read_only_memory_logup.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 2bbf2c134..406ce2c1d 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -1,4 +1,4 @@ -use std::{iter::once, marker::PhantomData}; +use std::marker::PhantomData; use crate::{ constraints::{ From 04ac811d5f85a16d22d53d1047782dfa3393c237 Mon Sep 17 00:00:00 2001 From: Nicole Date: Thu, 5 Dec 2024 16:47:42 -0300 Subject: [PATCH 22/29] fix comment --- provers/stark/src/examples/read_only_memory_logup.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git 
a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 406ce2c1d..4d911ea9d 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -167,11 +167,15 @@ where let first_step = frame.get_evaluation_step(0); let second_step = frame.get_evaluation_step(1); - // Auxiliary constraints + // Auxiliary frame elements let s0 = first_step.get_aux_evaluation_element(0, 0); let s1 = second_step.get_aux_evaluation_element(0, 0); + + // Challenges let z = &rap_challenges[0]; let alpha = &rap_challenges[1]; + + // Main frame elements let a1 = second_step.get_main_evaluation_element(0, 0); let v1 = second_step.get_main_evaluation_element(0, 1); let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); From 9e1cee78502274e379b30abcddb76cba00cae086 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 10:30:39 -0300 Subject: [PATCH 23/29] fix read_mem and tests for small fields. 
And add byteConversion for degree4BabyBear --- .../fields/fft_friendly/quartic_babybear.rs | 44 ++ .../src/examples/read_only_memory_logup.rs | 380 ++++++++++++------ provers/stark/src/tests/integration_tests.rs | 86 ++-- 3 files changed, 357 insertions(+), 153 deletions(-) diff --git a/math/src/field/fields/fft_friendly/quartic_babybear.rs b/math/src/field/fields/fft_friendly/quartic_babybear.rs index 361de0e0b..457ed9526 100644 --- a/math/src/field/fields/fft_friendly/quartic_babybear.rs +++ b/math/src/field/fields/fft_friendly/quartic_babybear.rs @@ -262,6 +262,50 @@ impl ByteConversion for [FieldElement; 4] { } } +impl ByteConversion for FieldElement { + fn to_bytes_be(&self) -> alloc::vec::Vec { + let mut byte_slice = ByteConversion::to_bytes_be(&self.value()[0]); + byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[1])); + byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[2])); + byte_slice.extend(ByteConversion::to_bytes_be(&self.value()[3])); + byte_slice + } + + fn to_bytes_le(&self) -> alloc::vec::Vec { + let mut byte_slice = ByteConversion::to_bytes_le(&self.value()[0]); + byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[1])); + byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[2])); + byte_slice.extend(ByteConversion::to_bytes_le(&self.value()[3])); + byte_slice + } + + fn from_bytes_be(bytes: &[u8]) -> Result + where + Self: Sized, + { + const BYTES_PER_FIELD: usize = 8; + let x0 = FieldElement::from_bytes_be(&bytes[0..BYTES_PER_FIELD])?; + let x1 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD..BYTES_PER_FIELD * 2])?; + let x2 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD * 2..BYTES_PER_FIELD * 3])?; + let x3 = FieldElement::from_bytes_be(&bytes[BYTES_PER_FIELD * 3..BYTES_PER_FIELD * 4])?; + + Ok(Self::new([x0, x1, x2, x3])) + } + + fn from_bytes_le(bytes: &[u8]) -> Result + where + Self: Sized, + { + const BYTES_PER_FIELD: usize = 8; + let x0 = 
FieldElement::from_bytes_le(&bytes[0..BYTES_PER_FIELD])?; + let x1 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD..BYTES_PER_FIELD * 2])?; + let x2 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD * 2..BYTES_PER_FIELD * 3])?; + let x3 = FieldElement::from_bytes_le(&bytes[BYTES_PER_FIELD * 3..BYTES_PER_FIELD * 4])?; + + Ok(Self::new([x0, x1, x2, x3])) + } +} + impl IsFFTField for Degree4BabyBearExtensionField { const TWO_ADICITY: u64 = 29; const TWO_ADIC_PRIMITVE_ROOT_OF_UNITY: Self::BaseType = [ diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 4d911ea9d..70539a07d 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -9,11 +9,11 @@ use crate::{ frame::Frame, proof::options::ProofOptions, trace::TraceTable, - traits::AIR, + traits::{TransitionEvaluationContext, AIR}, }; use itertools::Itertools; use lambdaworks_crypto::fiat_shamir::is_transcript::IsTranscript; -use lambdaworks_math::field::traits::IsPrimeField; +use lambdaworks_math::field::traits::{IsField, IsPrimeField, IsSubFieldOf}; use lambdaworks_math::{ field::{element::FieldElement, traits::IsFFTField}, traits::ByteConversion, @@ -21,21 +21,29 @@ use lambdaworks_math::{ /// Transition Constraint that ensures the continuity of the sorted address column of a memory. 
#[derive(Clone)] -struct ContinuityConstraint { - phantom: PhantomData, +struct ContinuityConstraint + IsFFTField + Send + Sync, E: IsField + Send + Sync> +{ + phantom_f: PhantomData, + phantom_e: PhantomData, } -impl ContinuityConstraint { +impl ContinuityConstraint +where + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, +{ pub fn new() -> Self { Self { - phantom: PhantomData, + phantom_f: PhantomData::, + phantom_e: PhantomData::, } } } -impl TransitionConstraint for ContinuityConstraint +impl TransitionConstraint for ContinuityConstraint where - F: IsFFTField + Send + Sync, + F: IsFFTField + IsSubFieldOf + Send + Sync, + E: IsField + Send + Sync, { fn degree(&self) -> usize { 2 @@ -52,42 +60,82 @@ where fn evaluate( &self, - frame: &Frame, - transition_evaluations: &mut [FieldElement], - _periodic_values: &[FieldElement], - _rap_challenges: &[FieldElement], + evaluation_context: &TransitionEvaluationContext, + transition_evaluations: &mut [FieldElement], ) { - let first_step = frame.get_evaluation_step(0); - let second_step = frame.get_evaluation_step(1); - - let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); - let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); - // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address - let res = (a_sorted_1 - a_sorted_0) * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); - - // The eval always exists, except if the constraint idx were incorrectly defined. - if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { - *eval = res; + // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. 
+ // The only difference is that the Prover's Frame has base fiel and field extension elemnts, + // while the Verfier's Frame has only field extension elements + match evaluation_context { + TransitionEvaluationContext::Prover { + frame, + periodic_values: _periodic_values, + rap_challenges: _rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address + let res = (a_sorted_1 - a_sorted_0) + * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res.to_extension(); + } + } + + TransitionEvaluationContext::Verifier { + frame, + periodic_values: _periodic_values, + rap_challenges: _rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + // (a'_{i+1} - a'_i)(a'_{i+1} - a'_i - 1) = 0 where a' is the sorted address + let res = (a_sorted_1 - a_sorted_0) + * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } } } } /// Transition constraint that ensures that same addresses have same values, making the sorted memory read-only. 
#[derive(Clone)] -struct SingleValueConstraint { - phantom: PhantomData, +struct SingleValueConstraint< + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, +> { + phantom_f: PhantomData, + phantom_e: PhantomData, } -impl SingleValueConstraint { +impl SingleValueConstraint +where + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, +{ pub fn new() -> Self { Self { - phantom: PhantomData, + phantom_f: PhantomData::, + phantom_e: PhantomData::, } } } -impl TransitionConstraint for SingleValueConstraint +impl TransitionConstraint for SingleValueConstraint where - F: IsFFTField + Send + Sync, + F: IsFFTField + IsSubFieldOf + Send + Sync, + E: IsField + Send + Sync, { fn degree(&self) -> usize { 2 @@ -104,24 +152,56 @@ where fn evaluate( &self, - frame: &Frame, - transition_evaluations: &mut [FieldElement], - _periodic_values: &[FieldElement], - _rap_challenges: &[FieldElement], + evaluation_context: &TransitionEvaluationContext, + transition_evaluations: &mut [FieldElement], ) { - let first_step = frame.get_evaluation_step(0); - let second_step = frame.get_evaluation_step(1); - - let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); - let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); - let v_sorted_0 = first_step.get_main_evaluation_element(0, 3); - let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); - // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 - let res = (v_sorted_1 - v_sorted_0) * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); - - // The eval always exists, except if the constraint idx were incorrectly defined. - if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { - *eval = res; + // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. 
+ // The only difference is that the Prover's Frame has base fiel and field extension elemnts, + // while the Verfier's Frame has only field extension elements + match evaluation_context { + TransitionEvaluationContext::Prover { + frame, + periodic_values: _periodic_values, + rap_challenges: _rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_0 = first_step.get_main_evaluation_element(0, 3); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 + let res = (v_sorted_1 - v_sorted_0) + * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res.to_extension(); + } + } + + TransitionEvaluationContext::Verifier { + frame, + periodic_values: _periodic_values, + rap_challenges: _rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + let a_sorted_0 = first_step.get_main_evaluation_element(0, 2); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_0 = first_step.get_main_evaluation_element(0, 3); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + // (v'_{i+1} - v'_i) * (a'_{i+1} - a'_i - 1) = 0 + let res = (v_sorted_1 - v_sorted_0) + * (a_sorted_1 - a_sorted_0 - FieldElement::::one()); + + // The eval always exists, except if the constraint idx were incorrectly defined. 
+ if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } } } } @@ -129,21 +209,31 @@ where /// We are using the LogUp construction described in: /// #[derive(Clone)] -struct PermutationConstraint { - phantom: PhantomData, +struct PermutationConstraint< + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, +> { + phantom_f: PhantomData, + phantom_e: PhantomData, } -impl PermutationConstraint { +impl PermutationConstraint +where + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, +{ pub fn new() -> Self { Self { - phantom: PhantomData, + phantom_f: PhantomData::, + phantom_e: PhantomData::, } } } -impl TransitionConstraint for PermutationConstraint +impl TransitionConstraint for PermutationConstraint where - F: IsFFTField + Send + Sync, + F: IsSubFieldOf + IsFFTField + Send + Sync, + E: IsField + Send + Sync, { fn degree(&self) -> usize { 3 @@ -159,61 +249,110 @@ where fn evaluate( &self, - frame: &Frame, - transition_evaluations: &mut [FieldElement], - _periodic_values: &[FieldElement], - rap_challenges: &[FieldElement], + evaluation_context: &TransitionEvaluationContext, + transition_evaluations: &mut [FieldElement], ) { - let first_step = frame.get_evaluation_step(0); - let second_step = frame.get_evaluation_step(1); - - // Auxiliary frame elements - let s0 = first_step.get_aux_evaluation_element(0, 0); - let s1 = second_step.get_aux_evaluation_element(0, 0); - - // Challenges - let z = &rap_challenges[0]; - let alpha = &rap_challenges[1]; - - // Main frame elements - let a1 = second_step.get_main_evaluation_element(0, 0); - let v1 = second_step.get_main_evaluation_element(0, 1); - let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); - let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); - let m = second_step.get_main_evaluation_element(0, 4); - - let unsorted_term = z - (a1 + alpha * v1); - let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1); - - // We 
are using the following LogUp equation: - // s1 = s0 + m / sorted_term - 1/unsorted_term. - // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term: - let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term - - &sorted_term - - s1 * unsorted_term * sorted_term; - - // The eval always exists, except if the constraint idx were incorrectly defined. - if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { - *eval = res; + // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. + // The only difference is that the Prover's Frame has base fiel and field extension elemnts, + // while the Verfier's Frame has only field extension elements + match evaluation_context { + TransitionEvaluationContext::Prover { + frame, + periodic_values: _periodic_values, + rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + // Auxiliary frame elements + let s0 = first_step.get_aux_evaluation_element(0, 0); + let s1 = second_step.get_aux_evaluation_element(0, 0); + + // Challenges + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; + + // Main frame elements + let a1 = second_step.get_main_evaluation_element(0, 0); + let v1 = second_step.get_main_evaluation_element(0, 1); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + let m = second_step.get_main_evaluation_element(0, 4); + + let unsorted_term = -(a1 + v1 * alpha) + z; + let sorted_term = (a_sorted_1 + v_sorted_1 * alpha) + z; + + // We are using the following LogUp equation: + // s1 = s0 + m / sorted_term - 1/unsorted_term. 
+ // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term: + let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term + - &sorted_term + - s1 * unsorted_term * sorted_term; + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } + + TransitionEvaluationContext::Verifier { + frame, + periodic_values: _periodic_values, + rap_challenges, + } => { + let first_step = frame.get_evaluation_step(0); + let second_step = frame.get_evaluation_step(1); + + // Auxiliary frame elements + let s0 = first_step.get_aux_evaluation_element(0, 0); + let s1 = second_step.get_aux_evaluation_element(0, 0); + + // Challenges + let z = &rap_challenges[0]; + let alpha = &rap_challenges[1]; + + // Main frame elements + let a1 = second_step.get_main_evaluation_element(0, 0); + let v1 = second_step.get_main_evaluation_element(0, 1); + let a_sorted_1 = second_step.get_main_evaluation_element(0, 2); + let v_sorted_1 = second_step.get_main_evaluation_element(0, 3); + let m = second_step.get_main_evaluation_element(0, 4); + + let unsorted_term = z - (a1 + alpha * v1); + let sorted_term = z - (a_sorted_1 + alpha * v_sorted_1); + + // We are using the following LogUp equation: + // s1 = s0 + m / sorted_term - 1/unsorted_term. + // Since constraints must be expressed without division, we multiply each term by sorted_term * unsorted_term: + let res = s0 * &unsorted_term * &sorted_term + m * &unsorted_term + - &sorted_term + - s1 * unsorted_term * sorted_term; + + // The eval always exists, except if the constraint idx were incorrectly defined. + if let Some(eval) = transition_evaluations.get_mut(self.constraint_idx()) { + *eval = res; + } + } } } } /// AIR for a continuous read-only memory. 
-pub struct LogReadOnlyRAP +pub struct LogReadOnlyRAP where - F: IsFFTField, + F: IsFFTField + IsSubFieldOf + Send + Sync, + E: IsField + Send + Sync, { context: AirContext, trace_length: usize, pub_inputs: LogReadOnlyPublicInputs, - transition_constraints: Vec>>, + transition_constraints: Vec>>, } #[derive(Clone, Debug)] pub struct LogReadOnlyPublicInputs where - F: IsFFTField, + F: IsFFTField + Send + Sync, { pub a0: FieldElement, pub v0: FieldElement, @@ -223,13 +362,14 @@ where pub m0: FieldElement, } -impl AIR for LogReadOnlyRAP +impl AIR for LogReadOnlyRAP where - F: IsFFTField + Send + Sync + 'static, + F: IsFFTField + IsSubFieldOf + Send + Sync + 'static, + E: IsField + Send + Sync + 'static, FieldElement: ByteConversion, { type Field = F; - type FieldExtension = F; + type FieldExtension = E; type PublicInputs = LogReadOnlyPublicInputs; const STEP_SIZE: usize = 1; @@ -265,7 +405,7 @@ where fn build_auxiliary_trace( &self, trace: &mut TraceTable, - challenges: &[FieldElement], + challenges: &[FieldElement], ) { let main_segment_cols = trace.columns_main(); let a = &main_segment_cols[0]; @@ -279,14 +419,14 @@ where let trace_len = trace.num_rows(); let mut aux_col = Vec::new(); - let unsorted_term = (z - (&a[0] + alpha * &v[0])).inv().unwrap(); - let sorted_term = (z - (&a_sorted[0] + alpha * &v_sorted[0])).inv().unwrap(); + let unsorted_term = (-(&a[0] + &v[0] * alpha) + z).inv().unwrap(); + let sorted_term = (-(&a_sorted[0] + &v_sorted[0] * alpha) + z).inv().unwrap(); aux_col.push(&m[0] * sorted_term - unsorted_term); // Apply the same equation given in the permutation case to the rest of the trace for i in 0..trace_len - 1 { - let unsorted_term = (z - (&a[i + 1] + alpha * &v[i + 1])).inv().unwrap(); - let sorted_term = (z - (&a_sorted[i + 1] + alpha * &v_sorted[i + 1])) + let unsorted_term = (-(&a[i + 1] + &v[i + 1] * alpha) + z).inv().unwrap(); + let sorted_term = (-(&a_sorted[i + 1] + &v_sorted[i + 1] * alpha) + z) .inv() .unwrap(); 
aux_col.push(&aux_col[i] + &m[i + 1] * sorted_term - unsorted_term); @@ -299,7 +439,7 @@ where fn build_rap_challenges( &self, - transcript: &mut impl IsTranscript, + transcript: &mut impl IsTranscript, ) -> Vec> { vec![ transcript.sample_field_element(), @@ -324,15 +464,15 @@ where let alpha = &rap_challenges[1]; // Main boundary constraints - let c1 = BoundaryConstraint::new_main(0, 0, a0.clone()); - let c2 = BoundaryConstraint::new_main(1, 0, v0.clone()); - let c3 = BoundaryConstraint::new_main(2, 0, a_sorted_0.clone()); - let c4 = BoundaryConstraint::new_main(3, 0, v_sorted_0.clone()); - let c5 = BoundaryConstraint::new_main(4, 0, m0.clone()); + let c1 = BoundaryConstraint::new_main(0, 0, a0.clone().to_extension()); + let c2 = BoundaryConstraint::new_main(1, 0, v0.clone().to_extension()); + let c3 = BoundaryConstraint::new_main(2, 0, a_sorted_0.clone().to_extension()); + let c4 = BoundaryConstraint::new_main(3, 0, v_sorted_0.clone().to_extension()); + let c5 = BoundaryConstraint::new_main(4, 0, m0.clone().to_extension()); // Auxiliary boundary constraints - let unsorted_term = (z - (a0 + alpha * v0)).inv().unwrap(); - let sorted_term = (z - (a_sorted_0 + alpha * v_sorted_0)).inv().unwrap(); + let unsorted_term = (-(a0 + v0 * alpha) + z).inv().unwrap(); + let sorted_term = (-(a_sorted_0 + v_sorted_0 * alpha) + z).inv().unwrap(); let p0_value = m0 * sorted_term - unsorted_term; let c_aux1 = BoundaryConstraint::new_aux(0, 0, p0_value); @@ -369,24 +509,18 @@ where fn pub_inputs(&self) -> &Self::PublicInputs { &self.pub_inputs } - - fn compute_transition_verifier( - &self, - frame: &Frame, - periodic_values: &[FieldElement], - rap_challenges: &[FieldElement], - ) -> Vec> { - self.compute_transition_prover(frame, periodic_values, rap_challenges) - } } /// Return a trace table with an auxiliary column full of zeros (that will be completed by the air) and /// the following five main columns: The original addresses and values, the sorted addresses and values without /// 
repetition and the multiplicities that tell -pub fn read_only_logup_trace( +pub fn read_only_logup_trace< + F: IsPrimeField + IsFFTField + IsSubFieldOf + Send + Sync, + E: IsField + Send + Sync, +>( addresses: Vec>, values: Vec>, -) -> TraceTable { +) -> TraceTable { let mut address_value_pairs: Vec<_> = addresses.iter().zip(values.iter()).collect(); address_value_pairs.sort_by_key(|(addr, _)| addr.representative()); @@ -416,7 +550,7 @@ pub fn read_only_logup_trace( ]; // create a vector with zeros of the same length as the main columns - let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; + let zero_vec = vec![FieldElement::::zero(); main_columns[0].len()]; TraceTable::from_columns(main_columns, vec![zero_vec], 1) } @@ -424,7 +558,8 @@ pub fn read_only_logup_trace( mod test { use super::*; use lambdaworks_math::field::fields::{ - fft_friendly::stark_252_prime_field::Stark252PrimeField, u64_prime_field::FE17, + fft_friendly::stark_252_prime_field::Stark252PrimeField, + u64_prime_field::{F17, FE17}, }; #[test] @@ -450,7 +585,7 @@ mod test { FE17::from(60), ]; - let logup_trace = read_only_logup_trace(address_col, value_col); + let logup_trace: TraceTable = read_only_logup_trace(address_col, value_col); let expected_sorted_addresses = vec![ FE17::from(1), @@ -541,7 +676,8 @@ mod test { FieldElement::::from(0), // v6 FieldElement::::from(0), // v7 ]; - let logup_trace = read_only_logup_trace(address_col, value_col); + let logup_trace: TraceTable = + read_only_logup_trace(address_col, value_col); assert_eq!(logup_trace.columns_main()[2], sorted_address_col); assert_eq!(logup_trace.columns_main()[3], sorted_value_col); diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 8abad0b86..4c501b06b 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -1,5 +1,10 @@ +use lambdaworks_crypto::fiat_shamir::default_transcript::DefaultTranscript; use 
lambdaworks_math::field::{ - element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, + element::FieldElement, + fields::fft_friendly::{ + babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField, + stark_252_prime_field::Stark252PrimeField, + }, }; use crate::{ @@ -17,6 +22,7 @@ use crate::{ }, proof::options::ProofOptions, prover::{IsStarkProver, Prover}, + trace::TraceTable, transcript::StoneProverTranscript, verifier::{IsStarkVerifier, Verifier}, Felt252, @@ -299,46 +305,64 @@ fn test_prove_read_only_memory() { #[test_log::test] fn test_prove_log_read_only_memory() { let address_col = vec![ - FieldElement::::from(3), // a0 - FieldElement::::from(2), // a1 - FieldElement::::from(2), // a2 - FieldElement::::from(3), // a3 - FieldElement::::from(4), // a4 - FieldElement::::from(5), // a5 - FieldElement::::from(1), // a6 - FieldElement::::from(3), // a7 + FieldElement::::from(3), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(2), // a2 + FieldElement::::from(3), // a3 + FieldElement::::from(4), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(1), // a6 + FieldElement::::from(3), // a7 ]; let value_col = vec![ - FieldElement::::from(30), // v0 - FieldElement::::from(20), // v1 - FieldElement::::from(20), // v2 - FieldElement::::from(30), // v3 - FieldElement::::from(40), // v4 - FieldElement::::from(50), // v5 - FieldElement::::from(10), // v6 - FieldElement::::from(30), // v7 + FieldElement::::from(30), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(20), // v2 + FieldElement::::from(30), // v3 + FieldElement::::from(40), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(10), // v6 + FieldElement::::from(30), // v7 ]; let pub_inputs = LogReadOnlyPublicInputs { - a0: FieldElement::::from(3), - v0: FieldElement::::from(30), - a_sorted_0: FieldElement::::from(1), - v_sorted_0: FieldElement::::from(10), - m0: FieldElement::::from(1), + a0: 
FieldElement::::from(3), + v0: FieldElement::::from(30), + a_sorted_0: FieldElement::::from(1), + v_sorted_0: FieldElement::::from(10), + m0: FieldElement::::from(1), }; + // let transcript = DefaultTranscript::::new(&[]); + // let mut trace: TraceTable = + // read_only_logup_trace(address_col, value_col); + // let proof_options = ProofOptions::default_test_options(); + // let proof = + // Prover::>::prove(&mut trace, &pub_inputs, &proof_options, transcript) + // .unwrap(); + + // assert!(Verifier:: + // &proof, + // &pub_inputs, + // &proof_options, + // DefaultTranscript::::new(&[]) + // ); + let mut trace = read_only_logup_trace(address_col, value_col); let proof_options = ProofOptions::default_test_options(); - let proof = Prover::>::prove( - &mut trace, - &pub_inputs, - &proof_options, - StoneProverTranscript::new(&[]), - ) - .unwrap(); - assert!(Verifier::>::verify( + let proof = + Prover::>::prove( + &mut trace, + &pub_inputs, + &proof_options, + DefaultTranscript::::new(&[]), + ) + .unwrap(); + assert!(Verifier::< + LogReadOnlyRAP, + >::verify( &proof, &pub_inputs, &proof_options, - StoneProverTranscript::new(&[]), + DefaultTranscript::::new(&[]), )); } From 023dd566fc3ed36aca1d5b72b0136e9b33ded6fc Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 12:09:05 -0300 Subject: [PATCH 24/29] Add AsBytes for FE --- .../fields/fft_friendly/quartic_babybear.rs | 11 +- .../src/examples/read_only_memory_logup.rs | 118 +++++++++--------- provers/stark/src/tests/integration_tests.rs | 16 --- 3 files changed, 70 insertions(+), 75 deletions(-) diff --git a/math/src/field/fields/fft_friendly/quartic_babybear.rs b/math/src/field/fields/fft_friendly/quartic_babybear.rs index 457ed9526..662187cee 100644 --- a/math/src/field/fields/fft_friendly/quartic_babybear.rs +++ b/math/src/field/fields/fft_friendly/quartic_babybear.rs @@ -1,9 +1,9 @@ -use crate::field::{ +use crate::{field::{ element::FieldElement, errors::FieldError, 
fields::fft_friendly::babybear::Babybear31PrimeField, traits::{IsFFTField, IsField, IsSubFieldOf}, -}; +}, traits::AsBytes}; #[cfg(feature = "lambdaworks-serde-binary")] use crate::traits::ByteConversion; @@ -306,6 +306,13 @@ impl ByteConversion for FieldElement { } } +#[cfg(feature = "alloc")] +impl AsBytes for FieldElement { + fn as_bytes(&self) -> alloc::vec::Vec { + self.value().to_bytes_be() + } +} + impl IsFFTField for Degree4BabyBearExtensionField { const TWO_ADICITY: u64 = 29; const TWO_ADIC_PRIMITVE_ROOT_OF_UNITY: Self::BaseType = [ diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 70539a07d..f734be811 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -6,7 +6,6 @@ use crate::{ transition::TransitionConstraint, }, context::AirContext, - frame::Frame, proof::options::ProofOptions, trace::TraceTable, traits::{TransitionEvaluationContext, AIR}, @@ -64,8 +63,8 @@ where transition_evaluations: &mut [FieldElement], ) { // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. - // The only difference is that the Prover's Frame has base fiel and field extension elemnts, - // while the Verfier's Frame has only field extension elements + // The only difference is that the Prover's Frame has base field and field extension elements, + // while the Verfier's Frame has only field extension elements. match evaluation_context { TransitionEvaluationContext::Prover { frame, @@ -156,8 +155,8 @@ where transition_evaluations: &mut [FieldElement], ) { // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. 
- // The only difference is that the Prover's Frame has base fiel and field extension elemnts, - // while the Verfier's Frame has only field extension elements + // The only difference is that the Prover's Frame has base field and field extension elements, + // while the Verfier's Frame has only field extension elements. match evaluation_context { TransitionEvaluationContext::Prover { frame, @@ -207,7 +206,8 @@ where } /// Transition constraint that ensures that the sorted columns are a permutation of the original ones. /// We are using the LogUp construction described in: -/// +/// . +/// See also our post of LogUp argument in blog.lambdaclass.com. #[derive(Clone)] struct PermutationConstraint< F: IsSubFieldOf + IsFFTField + Send + Sync, @@ -253,8 +253,8 @@ where transition_evaluations: &mut [FieldElement], ) { // In both evaluation contexts, Prover and Verfier will evaluate the transition polynomial in the same way. - // The only difference is that the Prover's Frame has base fiel and field extension elemnts, - // while the Verfier's Frame has only field extension elements + // The only difference is that the Prover's Frame has base field and field extension elements, + // while the Verfier's Frame has only field extension elements. match evaluation_context { TransitionEvaluationContext::Prover { frame, @@ -280,7 +280,7 @@ where let m = second_step.get_main_evaluation_element(0, 4); let unsorted_term = -(a1 + v1 * alpha) + z; - let sorted_term = (a_sorted_1 + v_sorted_1 * alpha) + z; + let sorted_term = -(a_sorted_1 + v_sorted_1 * alpha) + z; // We are using the following LogUp equation: // s1 = s0 + m / sorted_term - 1/unsorted_term. @@ -337,7 +337,8 @@ where } } -/// AIR for a continuous read-only memory. +/// AIR for a continuous read-only memory using the LogUp Lookup Argument. +/// To accompany the understanding of this code you can see corresponding post in blog.lambdaclass.com. 
pub struct LogReadOnlyRAP where F: IsFFTField + IsSubFieldOf + Send + Sync, @@ -406,7 +407,9 @@ where &self, trace: &mut TraceTable, challenges: &[FieldElement], - ) { + ) where + Self::FieldExtension: IsFFTField, + { let main_segment_cols = trace.columns_main(); let a = &main_segment_cols[0]; let v = &main_segment_cols[1]; @@ -423,7 +426,7 @@ where let sorted_term = (-(&a_sorted[0] + &v_sorted[0] * alpha) + z).inv().unwrap(); aux_col.push(&m[0] * sorted_term - unsorted_term); - // Apply the same equation given in the permutation case to the rest of the trace + // Apply the same equation given in the permutation transition contraint to the rest of the trace. for i in 0..trace_len - 1 { let unsorted_term = (-(&a[i + 1] + &v[i + 1] * alpha) + z).inv().unwrap(); let sorted_term = (-(&a_sorted[i + 1] + &v_sorted[i + 1] * alpha) + z) @@ -511,9 +514,10 @@ where } } -/// Return a trace table with an auxiliary column full of zeros (that will be completed by the air) and -/// the following five main columns: The original addresses and values, the sorted addresses and values without -/// repetition and the multiplicities that tell +/// Return a trace table with an auxiliary column full of zeros (that will be then replaced with the correct values by the air) and +/// and the following five main columns: +/// The original addresses and values, the sorted addresses and values without duplicates, and +/// the multiplicities of each sorted address and value in the original ones (i.e. how many times they appear in the original address an value columns). 
pub fn read_only_logup_trace< F: IsPrimeField + IsFFTField + IsSubFieldOf + Send + Sync, E: IsField + Send + Sync, @@ -558,7 +562,7 @@ pub fn read_only_logup_trace< mod test { use super::*; use lambdaworks_math::field::fields::{ - fft_friendly::stark_252_prime_field::Stark252PrimeField, + fft_friendly::{babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField}, u64_prime_field::{F17, FE17}, }; @@ -625,58 +629,58 @@ mod test { #[test] fn test_logup_trace_construction_2() { let address_col = vec![ - FieldElement::::from(3), // a0 - FieldElement::::from(2), // a1 - FieldElement::::from(2), // a2 - FieldElement::::from(3), // a3 - FieldElement::::from(4), // a4 - FieldElement::::from(5), // a5 - FieldElement::::from(1), // a6 - FieldElement::::from(3), // a7 + FieldElement::::from(3), // a0 + FieldElement::::from(2), // a1 + FieldElement::::from(2), // a2 + FieldElement::::from(3), // a3 + FieldElement::::from(4), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(1), // a6 + FieldElement::::from(3), // a7 ]; let value_col = vec![ - FieldElement::::from(30), // v0 - FieldElement::::from(20), // v1 - FieldElement::::from(20), // v2 - FieldElement::::from(30), // v3 - FieldElement::::from(40), // v4 - FieldElement::::from(50), // v5 - FieldElement::::from(10), // v6 - FieldElement::::from(30), // v7 + FieldElement::::from(30), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(20), // v2 + FieldElement::::from(30), // v3 + FieldElement::::from(40), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(10), // v6 + FieldElement::::from(30), // v7 ]; let sorted_address_col = vec![ - FieldElement::::from(1), // a0 - FieldElement::::from(2), // a1 - FieldElement::::from(3), // a2 - FieldElement::::from(4), // a3 - FieldElement::::from(5), // a4 - FieldElement::::from(5), // a5 - FieldElement::::from(5), // a6 - FieldElement::::from(5), // a7 + FieldElement::::from(1), // a0 + FieldElement::::from(2), // a1 + 
FieldElement::::from(3), // a2 + FieldElement::::from(4), // a3 + FieldElement::::from(5), // a4 + FieldElement::::from(5), // a5 + FieldElement::::from(5), // a6 + FieldElement::::from(5), // a7 ]; let sorted_value_col = vec![ - FieldElement::::from(10), // v0 - FieldElement::::from(20), // v1 - FieldElement::::from(30), // v2 - FieldElement::::from(40), // v3 - FieldElement::::from(50), // v4 - FieldElement::::from(50), // v5 - FieldElement::::from(50), // v6 - FieldElement::::from(50), // v7 + FieldElement::::from(10), // v0 + FieldElement::::from(20), // v1 + FieldElement::::from(30), // v2 + FieldElement::::from(40), // v3 + FieldElement::::from(50), // v4 + FieldElement::::from(50), // v5 + FieldElement::::from(50), // v6 + FieldElement::::from(50), // v7 ]; let multiplicity_col = vec![ - FieldElement::::from(1), // v0 - FieldElement::::from(2), // v1 - FieldElement::::from(3), // v2 - FieldElement::::from(1), // v3 - FieldElement::::from(1), // v4 - FieldElement::::from(0), // v5 - FieldElement::::from(0), // v6 - FieldElement::::from(0), // v7 + FieldElement::::from(1), // v0 + FieldElement::::from(2), // v1 + FieldElement::::from(3), // v2 + FieldElement::::from(1), // v3 + FieldElement::::from(1), // v4 + FieldElement::::from(0), // v5 + FieldElement::::from(0), // v6 + FieldElement::::from(0), // v7 ]; - let logup_trace: TraceTable = + let logup_trace: TraceTable = read_only_logup_trace(address_col, value_col); assert_eq!(logup_trace.columns_main()[2], sorted_address_col); diff --git a/provers/stark/src/tests/integration_tests.rs b/provers/stark/src/tests/integration_tests.rs index 4c501b06b..b47bb6b32 100644 --- a/provers/stark/src/tests/integration_tests.rs +++ b/provers/stark/src/tests/integration_tests.rs @@ -22,7 +22,6 @@ use crate::{ }, proof::options::ProofOptions, prover::{IsStarkProver, Prover}, - trace::TraceTable, transcript::StoneProverTranscript, verifier::{IsStarkVerifier, Verifier}, Felt252, @@ -332,21 +331,6 @@ fn 
test_prove_log_read_only_memory() { v_sorted_0: FieldElement::::from(10), m0: FieldElement::::from(1), }; - // let transcript = DefaultTranscript::::new(&[]); - // let mut trace: TraceTable = - // read_only_logup_trace(address_col, value_col); - // let proof_options = ProofOptions::default_test_options(); - // let proof = - // Prover::>::prove(&mut trace, &pub_inputs, &proof_options, transcript) - // .unwrap(); - - // assert!(Verifier:: - // &proof, - // &pub_inputs, - // &proof_options, - // DefaultTranscript::::new(&[]) - // ); - let mut trace = read_only_logup_trace(address_col, value_col); let proof_options = ProofOptions::default_test_options(); let proof = From 7f04be05f7da13b32bf50ab2f9b8ba629b90e3f3 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 12:44:15 -0300 Subject: [PATCH 25/29] fix cargo check with no-std --- .../src/field/fields/fft_friendly/quartic_babybear.rs | 11 ++++++++--- provers/stark/src/examples/read_only_memory_logup.rs | 5 +++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/math/src/field/fields/fft_friendly/quartic_babybear.rs b/math/src/field/fields/fft_friendly/quartic_babybear.rs index 662187cee..398b99177 100644 --- a/math/src/field/fields/fft_friendly/quartic_babybear.rs +++ b/math/src/field/fields/fft_friendly/quartic_babybear.rs @@ -1,13 +1,16 @@ -use crate::{field::{ +use crate::field::{ element::FieldElement, errors::FieldError, fields::fft_friendly::babybear::Babybear31PrimeField, traits::{IsFFTField, IsField, IsSubFieldOf}, -}, traits::AsBytes}; +}; #[cfg(feature = "lambdaworks-serde-binary")] use crate::traits::ByteConversion; +#[cfg(feature = "alloc")] +use crate::traits::AsBytes; + /// We are implementig the extension of Baby Bear of degree 4 using the irreducible polynomial x^4 + 11. /// BETA = 11 and -BETA = -11 is the non-residue. 
pub const BETA: FieldElement = @@ -262,6 +265,7 @@ impl ByteConversion for [FieldElement; 4] { } } +#[cfg(feature = "lambdaworks-serde-binary")] impl ByteConversion for FieldElement { fn to_bytes_be(&self) -> alloc::vec::Vec { let mut byte_slice = ByteConversion::to_bytes_be(&self.value()[0]); @@ -306,10 +310,11 @@ impl ByteConversion for FieldElement { } } +#[cfg(feature = "lambdaworks-serde-binary")] #[cfg(feature = "alloc")] impl AsBytes for FieldElement { fn as_bytes(&self) -> alloc::vec::Vec { - self.value().to_bytes_be() + self.to_bytes_be() } } diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index f734be811..c870b4d4c 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -410,23 +410,28 @@ where ) where Self::FieldExtension: IsFFTField, { + // Main table let main_segment_cols = trace.columns_main(); let a = &main_segment_cols[0]; let v = &main_segment_cols[1]; let a_sorted = &main_segment_cols[2]; let v_sorted = &main_segment_cols[3]; let m = &main_segment_cols[4]; + + // Challenges let z = &challenges[0]; let alpha = &challenges[1]; let trace_len = trace.num_rows(); let mut aux_col = Vec::new(); + // s_0 = m_0/(z - (a'_0 + α * v'_0) - 1/(z - (a_0 + α * v_0) let unsorted_term = (-(&a[0] + &v[0] * alpha) + z).inv().unwrap(); let sorted_term = (-(&a_sorted[0] + &v_sorted[0] * alpha) + z).inv().unwrap(); aux_col.push(&m[0] * sorted_term - unsorted_term); // Apply the same equation given in the permutation transition contraint to the rest of the trace. 
+ // s_{i+1} = s_i + m_{i+1}/(z - (a'_{i+1} + α * v'_{i+1}) - 1/(z - (a_{i+1} + α * v_{i+1}) for i in 0..trace_len - 1 { let unsorted_term = (-(&a[i + 1] + &v[i + 1] * alpha) + z).inv().unwrap(); let sorted_term = (-(&a_sorted[i + 1] + &v_sorted[i + 1] * alpha) + z) From 2fd42b18d424be35c7e5eeeebdb9b22c5e68fd85 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 12:51:02 -0300 Subject: [PATCH 26/29] fix cargo fmt --- provers/stark/src/examples/read_only_memory_logup.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index c870b4d4c..a6bb424ca 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -519,10 +519,11 @@ where } } -/// Return a trace table with an auxiliary column full of zeros (that will be then replaced with the correct values by the air) and -/// and the following five main columns: +/// Return a trace table with an auxiliary column full of zeros (that will be then replaced +/// with the correct values by the air) and the following five main columns: /// The original addresses and values, the sorted addresses and values without duplicates, and -/// the multiplicities of each sorted address and value in the original ones (i.e. how many times they appear in the original address an value columns). +/// the multiplicities of each sorted address and value in the original ones (i.e. how many times +/// they appear in the original address an value columns). 
pub fn read_only_logup_trace< F: IsPrimeField + IsFFTField + IsSubFieldOf + Send + Sync, E: IsField + Send + Sync, @@ -567,7 +568,9 @@ pub fn read_only_logup_trace< mod test { use super::*; use lambdaworks_math::field::fields::{ - fft_friendly::{babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField}, + fft_friendly::{ + babybear::Babybear31PrimeField,quartic_babybear::Degree4BabyBearExtensionField + }, u64_prime_field::{F17, FE17}, }; From 5de9b62b48326624560bd3c469428fb763e3b270 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 12:52:52 -0300 Subject: [PATCH 27/29] fix cargo fmt --- provers/stark/src/examples/read_only_memory_logup.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index a6bb424ca..7f2f97fd5 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -569,7 +569,7 @@ mod test { use super::*; use lambdaworks_math::field::fields::{ fft_friendly::{ - babybear::Babybear31PrimeField,quartic_babybear::Degree4BabyBearExtensionField + babybear::Babybear31PrimeField, quartic_babybear::Degree4BabyBearExtensionField, }, u64_prime_field::{F17, FE17}, }; From bf9b5cd1c3b40babc11dbb6ab77f756eadcbb5a4 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 12:56:34 -0300 Subject: [PATCH 28/29] fix cargo fmt --- provers/stark/src/examples/read_only_memory_logup.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/provers/stark/src/examples/read_only_memory_logup.rs b/provers/stark/src/examples/read_only_memory_logup.rs index 7f2f97fd5..6a3f1abdd 100644 --- a/provers/stark/src/examples/read_only_memory_logup.rs +++ b/provers/stark/src/examples/read_only_memory_logup.rs @@ -519,10 +519,10 @@ where } } -/// Return a trace table with an auxiliary column full of zeros (that will be then replaced -/// with the 
correct values by the air) and the following five main columns: +/// Return a trace table with an auxiliary column full of zeros (that will be then replaced +/// with the correct values by the air) and the following five main columns: /// The original addresses and values, the sorted addresses and values without duplicates, and -/// the multiplicities of each sorted address and value in the original ones (i.e. how many times +/// the multiplicities of each sorted address and value in the original ones (i.e. how many times /// they appear in the original address an value columns). pub fn read_only_logup_trace< F: IsPrimeField + IsFFTField + IsSubFieldOf + Send + Sync, From 21bcea7b0a7af1d02084ba343d98ce01f43499d5 Mon Sep 17 00:00:00 2001 From: Nicole Date: Mon, 23 Dec 2024 13:05:31 -0300 Subject: [PATCH 29/29] fix clippy no-std --- math/src/field/fields/fft_friendly/quartic_babybear.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/math/src/field/fields/fft_friendly/quartic_babybear.rs b/math/src/field/fields/fft_friendly/quartic_babybear.rs index 398b99177..1c2888788 100644 --- a/math/src/field/fields/fft_friendly/quartic_babybear.rs +++ b/math/src/field/fields/fft_friendly/quartic_babybear.rs @@ -8,6 +8,7 @@ use crate::field::{ #[cfg(feature = "lambdaworks-serde-binary")] use crate::traits::ByteConversion; +#[cfg(feature = "lambdaworks-serde-binary")] #[cfg(feature = "alloc")] use crate::traits::AsBytes;