From bf63afe4c780e4e65ec2a487fc760d2ff6b5bca9 Mon Sep 17 00:00:00 2001
From: Sergio Chouhy
Date: Tue, 19 Sep 2023 17:24:38 -0300
Subject: [PATCH 1/5] remove degree adjustment

---
 provers/stark/src/constraints/evaluator.rs | 51 ++++------------------
 provers/stark/src/verifier.rs | 25 ++---------
 2 files changed, 11 insertions(+), 65 deletions(-)

diff --git a/provers/stark/src/constraints/evaluator.rs b/provers/stark/src/constraints/evaluator.rs
index fbf55a3e6..7ad1035e4 100644
--- a/provers/stark/src/constraints/evaluator.rs
+++ b/provers/stark/src/constraints/evaluator.rs
@@ -72,14 +72,6 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
             .collect::<Vec<Vec<FieldElement<F>>>>();

         let trace_length = self.air.trace_length();
-        let composition_poly_degree_bound = self.air.composition_poly_degree_bound();
-        let boundary_term_degree_adjustment = composition_poly_degree_bound - trace_length;
-        // Maybe we can do this more efficiently by taking the offset's power and then using successors for roots of unity
-        let d_adjustment_power = domain
-            .lde_roots_of_unity_coset
-            .iter()
-            .map(|d| d.pow(boundary_term_degree_adjustment))
-            .collect::<Vec<FieldElement<F>>>();

         #[cfg(all(debug_assertions, not(feature = "parallel")))]
         let boundary_polys: Vec<Polynomial<FieldElement<F>>> = Vec::new();
@@ -108,13 +100,12 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
         let boundary_eval_iter = 0..domain.lde_roots_of_unity_coset.len();

         let boundary_evaluation = boundary_eval_iter
-            .zip(&d_adjustment_power)
-            .map(|(i, d)| {
+            .map(|i| {
                 (0..number_of_b_constraints)
                     .zip(alpha_and_beta_boundary_coefficients)
-                    .fold(FieldElement::zero(), |acc, (index, (alpha, beta))| {
+                    .fold(FieldElement::zero(), |acc, (index, (_, beta))| {
                         acc + &boundary_zerofiers_inverse_evaluations[index][i]
-                            * (alpha * d + beta)
+                            * beta
                             * &boundary_polys_evaluations[index][i]
                     })
             })
@@ -136,28 +127,6 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
         let transition_exemptions_evaluations =
             evaluate_transition_exemptions(transition_exemptions, domain);
         let num_exemptions = self.air.context().num_transition_exemptions;
-        let context = self.air.context();
-        let max_transition_degree = *context.transition_degrees.iter().max().unwrap();
-
-        #[cfg(feature = "parallel")]
-        let degree_adjustments_iter = (1..=max_transition_degree).into_par_iter();
-
-        #[cfg(not(feature = "parallel"))]
-        let degree_adjustments_iter = 1..=max_transition_degree;
-
-        let degree_adjustments: Vec<Vec<FieldElement<F>>> = degree_adjustments_iter
-            .map(|transition_degree| {
-                domain
-                    .lde_roots_of_unity_coset
-                    .iter()
-                    .map(|d| {
-                        let degree_adjustment = composition_poly_degree_bound
-                            - (trace_length * (transition_degree - 1));
-                        d.pow(degree_adjustment)
-                    })
-                    .collect()
-            })
-            .collect();

         let blowup_factor_order = u64::from(blowup_factor.trailing_zeros());

@@ -215,19 +184,17 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
                     .zip(alpha_and_beta_transition_coefficients)
                     .fold(
                         FieldElement::zero(),
-                        |acc, (((eval, exemption), degree), (alpha, beta))| {
+                        |acc, (((eval, exemption), _), (_, beta))| {
                             #[cfg(feature = "parallel")]
                             let zerofier = zerofier.clone();

                             if *exemption == 0 {
-                                acc + zerofier
-                                    * (alpha * &degree_adjustments[degree - 1][i] + beta)
-                                    * eval
+                                acc + zerofier * beta * eval
                             } else {
                                 //TODO: change how exemptions are indexed!
                                 if num_exemptions == 1 {
                                     acc + zerofier
-                                        * (alpha * &degree_adjustments[degree - 1][i] + beta)
+                                        * beta
                                         * eval
                                         * &transition_exemptions_evaluations[0][i]
                                 } else {
@@ -247,7 +214,7 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
                                         .expect("is there");

                                     acc + zerofier
-                                        * (alpha * &degree_adjustments[degree - 1][i] + beta)
+                                        * beta
                                         * eval
                                         * &transition_exemptions_evaluations[index][i]
                                 }
@@ -295,9 +262,7 @@ impl<F: IsFFTField, A: AIR<Field = F>> ConstraintEvaluator<F, A> {
             .zip(constraint_coeffs)
             .fold(
                 FieldElement::<F>::zero(),
-                |acc, (((ev, degree), inv), (alpha, beta))| {
-                    acc + ev * (alpha * degree + beta) * inv
-                },
+                |acc, (((ev, _), inv), (_, beta))| acc + ev * beta * inv,
             )
     }
 }
diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs
index b1d3dcab4..df75511f6 100644
--- a/provers/stark/src/verifier.rs
+++ b/provers/stark/src/verifier.rs
@@ -221,8 +221,6 @@ fn step_2_verify_claimed_composition_polynomial
     //let n_trace_cols = air.context().trace_columns;
     // special cases.
     let trace_length = air.trace_length();
-    let composition_poly_degree_bound = air.composition_poly_degree_bound();
-    let boundary_term_degree_adjustment = composition_poly_degree_bound - trace_length;
     let number_of_b_constraints = boundary_constraints.constraints.len();

     // Following naming conventions from https://www.notamonadtutorial.com/diving-deep-fri/
@@ -251,12 +249,11 @@ fn step_2_verify_claimed_composition_polynomial
     FieldElement::inplace_batch_inverse(&mut boundary_c_i_evaluations_den).unwrap();

-    let boundary_degree_z = challenges.z.pow(boundary_term_degree_adjustment);
     let boundary_quotient_ood_evaluation: FieldElement<F> = boundary_c_i_evaluations_num
         .iter()
         .zip(&boundary_c_i_evaluations_den)
         .zip(&challenges.boundary_coeffs)
-        .map(|((num, den), (alpha, beta))| num * den * (alpha * &boundary_degree_z + beta))
+        .map(|((num, den), (_, beta))| num * den * beta)
         .fold(FieldElement::<F>::zero(), |acc, x| acc + x);

     let transition_ood_frame_evaluations = air.compute_transition(
@@ -276,19 +273,6 @@ fn step_2_verify_claimed_composition_polynomial
         .map(|poly| poly.evaluate(&challenges.z))
         .collect::<Vec<FieldElement<F>>>();

-    let max_degree = air
-        .context()
-        .transition_degrees()
-        .iter()
-        .max()
-        .expect("has maximum degree");
-    let degree_adjustments = (1..=*max_degree)
-        .map(|transition_degree| {
-            let degree_adjustment =
-                composition_poly_degree_bound - (trace_length * (transition_degree - 1));
-            challenges.z.pow(degree_adjustment)
-        })
-        .collect::<Vec<FieldElement<F>>>();
     let unity = &FieldElement::one();
     let transition_c_i_evaluations_sum = transition_ood_frame_evaluations
         .iter()
         .zip(&air.context().transition_degrees)
         .zip(&air.context().transition_exemptions)
         .zip(&challenges.transition_coeffs)
         .fold(
             FieldElement::zero(),
-            |acc, (((eval, degree), except), (alpha, beta))| {
+            |acc, (((eval, _), except), (_, beta))| {
                 let except = except
                     .checked_sub(1)
                     .map(|i| &exemption[i])
                     .unwrap_or(unity);
-                acc + &denominator
-                    * eval
-                    * (alpha * &degree_adjustments[degree - 1] + beta)
-                    * except
+                acc + &denominator * eval * beta * except
             },
         );

From ebc748b709766fc23d525ba05d9ae5d676dc6507 Mon Sep 17 00:00:00 2001
From: Sergio Chouhy
Date: Tue, 19 Sep 2023 17:40:34 -0300
Subject: [PATCH 2/5] remove unnecessary challenges

---
 provers/stark/src/constraints/evaluator.rs | 8 ++--
 provers/stark/src/prover.rs | 27 ++-----------
 provers/stark/src/verifier.rs | 44 ++++++----------------
 3 files changed, 20 insertions(+), 59 deletions(-)

diff --git a/provers/stark/src/constraints/evaluator.rs b/provers/stark/src/constraints/evaluator.rs
index 7ad1035e4..058c1fd3a 100644
---
a/provers/stark/src/constraints/evaluator.rs +++ b/provers/stark/src/constraints/evaluator.rs @@ -39,8 +39,8 @@ impl> ConstraintEvaluator { &self, lde_trace: &TraceTable, domain: &Domain, - alpha_and_beta_transition_coefficients: &[(FieldElement, FieldElement)], - alpha_and_beta_boundary_coefficients: &[(FieldElement, FieldElement)], + alpha_and_beta_transition_coefficients: &[FieldElement], + alpha_and_beta_boundary_coefficients: &[FieldElement], rap_challenges: &A::RAPChallenges, ) -> ConstraintEvaluationTable where @@ -103,7 +103,7 @@ impl> ConstraintEvaluator { .map(|i| { (0..number_of_b_constraints) .zip(alpha_and_beta_boundary_coefficients) - .fold(FieldElement::zero(), |acc, (index, (_, beta))| { + .fold(FieldElement::zero(), |acc, (index, beta)| { acc + &boundary_zerofiers_inverse_evaluations[index][i] * beta * &boundary_polys_evaluations[index][i] @@ -184,7 +184,7 @@ impl> ConstraintEvaluator { .zip(alpha_and_beta_transition_coefficients) .fold( FieldElement::zero(), - |acc, (((eval, exemption), _), (_, beta))| { + |acc, (((eval, exemption), _), beta)| { #[cfg(feature = "parallel")] let zerofier = zerofier.clone(); diff --git a/provers/stark/src/prover.rs b/provers/stark/src/prover.rs index ff9102e8e..27d617880 100644 --- a/provers/stark/src/prover.rs +++ b/provers/stark/src/prover.rs @@ -226,8 +226,8 @@ fn round_2_compute_composition_polynomial( air: &A, domain: &Domain, round_1_result: &Round1, - transition_coeffs: &[(FieldElement, FieldElement)], - boundary_coeffs: &[(FieldElement, FieldElement)], + transition_coeffs: &[FieldElement], + boundary_coeffs: &[FieldElement], ) -> Round2 where F: IsFFTField, @@ -641,36 +641,17 @@ where #[cfg(feature = "instruments")] let timer2 = Instant::now(); - // <<<< Receive challenges: 𝛼_j^B - let boundary_coeffs_alphas = batch_sample_challenges( - air.boundary_constraints(&round_1_result.rap_challenges) - .constraints - .len(), - &mut transcript, - ); // <<<< Receive challenges: 𝛽_j^B - let boundary_coeffs_betas = batch_sample_challenges( + let boundary_coeffs = batch_sample_challenges( air.boundary_constraints(&round_1_result.rap_challenges) .constraints .len(), &mut transcript, ); - // <<<< Receive challenges: 𝛼_j^T - let transition_coeffs_alphas = - batch_sample_challenges(air.context().num_transition_constraints, &mut transcript); // <<<< Receive challenges: 𝛽_j^T - let transition_coeffs_betas = + let transition_coeffs = batch_sample_challenges(air.context().num_transition_constraints, &mut transcript); - let boundary_coeffs: Vec<_> = boundary_coeffs_alphas - .into_iter() - .zip(boundary_coeffs_betas) - .collect(); - let transition_coeffs: Vec<_> = transition_coeffs_alphas - .into_iter() - .zip(transition_coeffs_betas) - .collect(); - let round_2_result = round_2_compute_composition_polynomial( &air, &domain, diff --git a/provers/stark/src/verifier.rs b/provers/stark/src/verifier.rs index df75511f6..028241a30 100644 --- a/provers/stark/src/verifier.rs +++ b/provers/stark/src/verifier.rs @@ -45,8 +45,8 @@ where A: AIR, { z: FieldElement, - boundary_coeffs: Vec<(FieldElement, FieldElement)>, - transition_coeffs: Vec<(FieldElement, FieldElement)>, + boundary_coeffs: Vec>, + transition_coeffs: Vec>, trace_term_coeffs: Vec>>, gamma_even: FieldElement, gamma_odd: FieldElement, @@ -88,31 +88,14 @@ where // =================================== // These are the challenges alpha^B_j and beta^B_j - // >>>> Send challenges: 𝛼_j^B - let boundary_coeffs_alphas = batch_sample_challenges( - air.boundary_constraints(&rap_challenges).constraints.len(), - 
transcript, - ); // >>>> Send challenges: 𝛽_j^B - let boundary_coeffs_betas = batch_sample_challenges( + let boundary_coeffs = batch_sample_challenges( air.boundary_constraints(&rap_challenges).constraints.len(), transcript, ); - // >>>> Send challenges: 𝛼_j^T - let transition_coeffs_alphas = - batch_sample_challenges(air.context().num_transition_constraints, transcript); // >>>> Send challenges: 𝛽_j^T - let transition_coeffs_betas = + let transition_coeffs = batch_sample_challenges(air.context().num_transition_constraints, transcript); - let boundary_coeffs: Vec<_> = boundary_coeffs_alphas - .into_iter() - .zip(boundary_coeffs_betas) - .collect(); - - let transition_coeffs: Vec<_> = transition_coeffs_alphas - .into_iter() - .zip(transition_coeffs_betas) - .collect(); // <<<< Receive commitments: [H₁], [H₂] transcript.append(&proof.composition_poly_root); @@ -253,7 +236,7 @@ fn step_2_verify_claimed_composition_polynomial .iter() .zip(&boundary_c_i_evaluations_den) .zip(&challenges.boundary_coeffs) - .map(|((num, den), (_, beta))| num * den * beta) + .map(|((num, den), beta)| num * den * beta) .fold(FieldElement::::zero(), |acc, x| acc + x); let transition_ood_frame_evaluations = air.compute_transition( @@ -279,16 +262,13 @@ fn step_2_verify_claimed_composition_polynomial .zip(&air.context().transition_degrees) .zip(&air.context().transition_exemptions) .zip(&challenges.transition_coeffs) - .fold( - FieldElement::zero(), - |acc, (((eval, _), except), (_, beta))| { - let except = except - .checked_sub(1) - .map(|i| &exemption[i]) - .unwrap_or(unity); - acc + &denominator * eval * beta * except - }, - ); + .fold(FieldElement::zero(), |acc, (((eval, _), except), beta)| { + let except = except + .checked_sub(1) + .map(|i| &exemption[i]) + .unwrap_or(unity); + acc + &denominator * eval * beta * except + }); let composition_poly_ood_evaluation = &boundary_quotient_ood_evaluation + transition_c_i_evaluations_sum; From 857535b6d1aa6e5379be9b88d101083de536dea5 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 19 Sep 2023 17:53:20 -0300 Subject: [PATCH 3/5] rename coefficients variables --- provers/stark/src/constraints/evaluator.rs | 8 ++++---- provers/stark/src/prover.rs | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/provers/stark/src/constraints/evaluator.rs b/provers/stark/src/constraints/evaluator.rs index 058c1fd3a..18c0fc6f4 100644 --- a/provers/stark/src/constraints/evaluator.rs +++ b/provers/stark/src/constraints/evaluator.rs @@ -39,8 +39,8 @@ impl> ConstraintEvaluator { &self, lde_trace: &TraceTable, domain: &Domain, - alpha_and_beta_transition_coefficients: &[FieldElement], - alpha_and_beta_boundary_coefficients: &[FieldElement], + transition_coefficients: &[FieldElement], + boundary_coefficients: &[FieldElement], rap_challenges: &A::RAPChallenges, ) -> ConstraintEvaluationTable where @@ -102,7 +102,7 @@ impl> ConstraintEvaluator { let boundary_evaluation = boundary_eval_iter .map(|i| { (0..number_of_b_constraints) - .zip(alpha_and_beta_boundary_coefficients) + .zip(boundary_coefficients) .fold(FieldElement::zero(), |acc, (index, beta)| { acc + &boundary_zerofiers_inverse_evaluations[index][i] * beta @@ -181,7 +181,7 @@ impl> ConstraintEvaluator { .iter() .zip(&self.air.context().transition_exemptions) .zip(&self.air.context().transition_degrees) - .zip(alpha_and_beta_transition_coefficients) + .zip(transition_coefficients) .fold( FieldElement::zero(), |acc, (((eval, exemption), _), beta)| { diff --git a/provers/stark/src/prover.rs 
b/provers/stark/src/prover.rs index 27d617880..b687f5084 100644 --- a/provers/stark/src/prover.rs +++ b/provers/stark/src/prover.rs @@ -226,8 +226,8 @@ fn round_2_compute_composition_polynomial( air: &A, domain: &Domain, round_1_result: &Round1, - transition_coeffs: &[FieldElement], - boundary_coeffs: &[FieldElement], + transition_coefficients: &[FieldElement], + boundary_coefficients: &[FieldElement], ) -> Round2 where F: IsFFTField, @@ -241,8 +241,8 @@ where let constraint_evaluations = evaluator.evaluate( &round_1_result.lde_trace, domain, - transition_coeffs, - boundary_coeffs, + transition_coefficients, + boundary_coefficients, &round_1_result.rap_challenges, ); @@ -642,22 +642,22 @@ where let timer2 = Instant::now(); // <<<< Receive challenges: 𝛽_j^B - let boundary_coeffs = batch_sample_challenges( + let boundary_coefficients = batch_sample_challenges( air.boundary_constraints(&round_1_result.rap_challenges) .constraints .len(), &mut transcript, ); // <<<< Receive challenges: 𝛽_j^T - let transition_coeffs = + let transition_coefficients = batch_sample_challenges(air.context().num_transition_constraints, &mut transcript); let round_2_result = round_2_compute_composition_polynomial( &air, &domain, &round_1_result, - &transition_coeffs, - &boundary_coeffs, + &transition_coefficients, + &boundary_coefficients, ); // >>>> Send commitments: [H₁], [H₂] From 77cc4eec02475ba4354d6383004d7cb0c15b2b0e Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 21 Sep 2023 12:22:44 -0300 Subject: [PATCH 4/5] remove degree adjustment from docs --- docs/src/starks/protocol.md | 5 ++--- docs/src/starks/recap.md | 11 +++++------ docs/src/starks/under_the_hood.md | 6 +++--- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/docs/src/starks/protocol.md b/docs/src/starks/protocol.md index bae47ebf8..4aa91760d 100644 --- a/docs/src/starks/protocol.md +++ b/docs/src/starks/protocol.md @@ -52,7 +52,6 @@ Both prover and verifier compute the following. - The interpolation domain: the vector $D_S=(1, g, \dots, g^{2^n-1})$. - The Low Degree Extension $D_{\text{LDE}} =(h, h\omega, h\omega^2,\dots, h\omega^{2^{n+l} - 1})$. Recall $2^l$ is the blowup factor. -- Let $d_k^T := 2^n (\deg(P_k^T) - 1)$ and let $d^B := 2^n$. Let $d := 2^{n + 1}$. Notice that $d^B \leq d$ and $d_k^T \leq d$ for all $k$. This holds because we assume all transition constraint polynomials are at most cubic. ### Notation of important operations #### Vector commitment scheme @@ -98,7 +97,7 @@ In our cases the sets $A$ will be of the form $A=(f(a), f(ab), f(ab^2), \dots, f - Compute $B_j := \frac{t_j - P^B_j}{Z_j^B}$. - Compute $C_k := \frac{P^T_k(t_1, \dots, t_m, t_1(gX), \dots, t_m(gX))}{Z_k^T}$. - Compute the _composition polynomial_ - $$H := \sum_{k} (\alpha_k^T X^{d - d_k^T} + \beta_k^T)C_k + \sum_j (\alpha_j^BX^{d - d^B}+\beta_j^B)B_j$$ + $$H := \sum_{k} \beta_k^TC_k + \sum_j \beta_j^BB_j$$ - Decompose $H$ as $$H = H_1(X^2) + XH_2(X^2)$$ - Compute commitments $[H_1]$ and $[H_2]$. @@ -227,7 +226,7 @@ Check that $\text{Keccak256}(x || y)$ has $c$ leading zeroes. 
- Compute $b_j := \frac{\tau_j^z - P^B_j(z)}{Z_j^B(z)}$ - Compute $c_k := \frac{P^T_k(\tau_1^z, \dots, \tau_m^z, \tau_1^{gz}, \dots, \tau_m^{gz})}{Z_k^T(z)}$ - Verify - $$h = \sum_{k} (\alpha_k^T z^{d - d_k^T} + \beta_k^T)c_k + \sum_j (\alpha_j^B z^{d - d^B}+\beta_j^B)b_j$$ + $$h = \sum_{k} \beta_k^Tc_k + \sum_j \beta_j^Bb_j$$ #### Step 3: Verify FRI diff --git a/docs/src/starks/recap.md b/docs/src/starks/recap.md index 194e0b640..8bbe3832f 100644 --- a/docs/src/starks/recap.md +++ b/docs/src/starks/recap.md @@ -142,15 +142,14 @@ $$ How does \\(C\\) encode the transition constraints? We mentioned above that these are satisfied if the polynomial in the numerator vanishes in the elements \\(\{g^0, g^1, g^2, g^3, g^4, g^5\}\\). As with \\(B\\), this is the same as showing that \\(C(x)\\) is a polynomial instead of a rational function. ### Constructing \\(H\\) -With the boundary and transition constraint polynomials in hand, we build the `composition polynomial` \\(H\\) as follows: The verifier will sample four numbers \\(\alpha_1, \alpha_2, \beta_1, \beta_2\\) and \\(H\\) will be +With the boundary and transition constraint polynomials in hand, we build the `composition polynomial` \\(H\\) as follows: The verifier will sample four numbers \\(\beta_1, \beta_2\\) and \\(H\\) will be $$ -H(x) = B(x) (\alpha_1 x^{D - deg(B)} + \beta_1) + C(x) (\alpha_2 x^{D - deg(C)} + \beta_2) +H(x) = \beta_1 B(x) + \beta_2 C(x) $$ -where \\(D\\) is the smallest power of two greater than the degrees of both \\(B\\) and \\(C\\), so for example if \\(deg(B) = 3\\) and \\(deg(C) = 6\\), then \\(D = 8\\). -Why not just take \\(H(x) = B(x) + C(x)\\)? The reason for the alphas and betas is to make the resulting \\(H\\) be always different and unpredictable for the prover, so they can't precompute stuff beforehand. The \\(x^{D - deg(...)}\\) term is there to adjust the degree of the constraints. This ensures the soundness of the protocol according to the [ethSTARK documentation](https://eprint.iacr.org/2021/582.pdf). +Why not just take \\(H(x) = B(x) + C(x)\\)? The reason for the betas is to make the resulting \\(H\\) be always different and unpredictable for the prover, so they can't precompute stuff beforehand. With what we discussed above, showing that the constraints are satisfied is equivalent to saying that `H` is a polynomial and not a rational function (we are simplifying things a bit here, but it works for our purposes). @@ -167,7 +166,7 @@ After commiting to `H`, the prover needs to show that `H` was constructed correc Because the boundary and transition constraints are a public part of the protocol, the verifier knows them, and thus the only thing it needs to compute the evaluation \\((z)\\) by itself are the three trace evaluations mentioned above. Because it asked the prover for them, it can check both sides of the equation: $$ -H(z) = B(z) (\alpha_1 z^{D - deg(B)} + \beta_1) + C(z) (\alpha_2 z^{D - deg(C)} + \beta_2) +H(z) = \beta_1 B(z)+ \beta_2 C(z) $$ and be convinced that \\(H\\) was constructed correctly. @@ -231,7 +230,7 @@ We summarize below the steps required in a STARK proof for both prover and verif - Take the evaluations \\(H(z)\\), \\(H(x_0)\\), \\(t(z)\\), \\(t(zg)\\), \\(t(zg^2)\\) and \\(t(x_0)\\) the prover provided. - Reconstruct the evaluations \\(B(z)\\) and \\(C(z)\\) from the trace evaluations we were given. 
Check that the claimed evaluation \\(H(z)\\) the prover gave us actually satisfies $$ - H(z) = B(z) (\alpha_1 z^{D - deg(B)} + \beta_1) + C(z) (\alpha_2 z^{D - deg(C)} + \beta_2) + H(z) = \beta_1 B(z) + \beta_2 C(z) $$ - Check that the claimed evaluation \\(Deep(x_0)\\) the prover gave us actually satisfies $$ diff --git a/docs/src/starks/under_the_hood.md b/docs/src/starks/under_the_hood.md index 78ef489fc..7640d399c 100644 --- a/docs/src/starks/under_the_hood.md +++ b/docs/src/starks/under_the_hood.md @@ -21,7 +21,7 @@ We will once again use the fibonacci example as an ilustration. Recall from the - Take the evaluation $H(z)$ along with the trace evaluations the prover provided. - Reconstruct the evaluations $B(z)$ and $C(z)$ from the trace evaluations. Check that the claimed evaluation $H(z)$ the prover gave us actually satisfies $$ - H(z) = B(z) (\alpha_1 z^{D - deg(B)} + \beta_1) + C(z) (\alpha_2 z^{D - deg(C)} + \beta_2) + H(z) = \beta_1 B(z) + \beta_2 C(z) $$ - Take the evaluations $H(x_0)$ and $t(x_0)$. - Check that the claimed evaluation $Deep(x_0)$ the prover gave us actually satisfies @@ -94,13 +94,13 @@ let constraint_evaluations = evaluator.evaluate( This function call will return the evaluations of the boundary terms $$ -B_i(x) (\alpha_i x^{D - deg(B)} + \beta_i) +\beta_i^B B_i(x) $$ and constraint terms $$ -C_i(x) (\alpha_i x^{D - deg(C)} + \beta_i) +\beta_i^T C_i(x) $$ for every $i$. The `constraint_evaluations` value returned is a `ConstraintEvaluationTable` struct, which is nothing more than a big list of evaluations of each polynomial required to construct `H`. From a24750eb75dd4f136a995419bf3a4b7b466cbe59 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 21 Sep 2023 12:31:38 -0300 Subject: [PATCH 5/5] remove whitespaces --- docs/src/starks/recap.md | 4 ++-- docs/src/starks/under_the_hood.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/starks/recap.md b/docs/src/starks/recap.md index 8bbe3832f..c88554492 100644 --- a/docs/src/starks/recap.md +++ b/docs/src/starks/recap.md @@ -145,7 +145,7 @@ How does \\(C\\) encode the transition constraints? We mentioned above that thes With the boundary and transition constraint polynomials in hand, we build the `composition polynomial` \\(H\\) as follows: The verifier will sample four numbers \\(\beta_1, \beta_2\\) and \\(H\\) will be $$ -H(x) = \beta_1 B(x) + \beta_2 C(x) +H(x) = \beta_1 B(x) + \beta_2 C(x) $$ @@ -166,7 +166,7 @@ After commiting to `H`, the prover needs to show that `H` was constructed correc Because the boundary and transition constraints are a public part of the protocol, the verifier knows them, and thus the only thing it needs to compute the evaluation \\((z)\\) by itself are the three trace evaluations mentioned above. Because it asked the prover for them, it can check both sides of the equation: $$ -H(z) = \beta_1 B(z)+ \beta_2 C(z) +H(z) = \beta_1 B(z) + \beta_2 C(z) $$ and be convinced that \\(H\\) was constructed correctly. diff --git a/docs/src/starks/under_the_hood.md b/docs/src/starks/under_the_hood.md index 7640d399c..b4e5c6afa 100644 --- a/docs/src/starks/under_the_hood.md +++ b/docs/src/starks/under_the_hood.md @@ -21,7 +21,7 @@ We will once again use the fibonacci example as an ilustration. Recall from the - Take the evaluation $H(z)$ along with the trace evaluations the prover provided. - Reconstruct the evaluations $B(z)$ and $C(z)$ from the trace evaluations. 
Check that the claimed evaluation $H(z)$ the prover gave us actually satisfies $$ - H(z) = \beta_1 B(z) + \beta_2 C(z) + H(z) = \beta_1 B(z) + \beta_2 C(z) $$ - Take the evaluations $H(x_0)$ and $t(x_0)$. - Check that the claimed evaluation $Deep(x_0)$ the prover gave us actually satisfies
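
The five patches above all converge on the same shape: on the prover side the composition polynomial becomes a plain random linear combination, H = Σ_k β_k^T C_k + Σ_j β_j^B B_j, and the verifier checks the matching sum h = Σ_k β_k^T c_k + Σ_j β_j^B b_j at the out-of-domain point. The sketch below illustrates that shape only. It is a minimal, self-contained toy (a small prime field and made-up helper names, not the lambdaworks API), showing the prover-side fold over the LDE domain and the verifier-side fold over the claimed out-of-domain values.

```rust
// Toy modulus standing in for a STARK-friendly field. All names here are
// hypothetical and are not part of the lambdaworks API.
const P: u128 = 2147483647; // 2^31 - 1

fn add(a: u128, b: u128) -> u128 {
    (a + b) % P
}

fn mul(a: u128, b: u128) -> u128 {
    (a * b) % P
}

/// Prover side: combine per-constraint quotient evaluations over the LDE domain
/// into evaluations of H, using a single challenge β per constraint and no
/// degree-adjustment factor α · x^k.
fn combine_quotients(quotient_evals: &[Vec<u128>], betas: &[u128]) -> Vec<u128> {
    let domain_len = quotient_evals[0].len();
    (0..domain_len)
        .map(|i| {
            quotient_evals
                .iter()
                .zip(betas)
                .fold(0, |acc, (evals, beta)| add(acc, mul(*beta, evals[i])))
        })
        .collect()
}

/// Verifier side: the same linear combination, applied to the claimed
/// out-of-domain quotient values, must reproduce the claimed H(z).
fn combine_ood_evaluations(quotient_ood_evals: &[u128], betas: &[u128]) -> u128 {
    quotient_ood_evals
        .iter()
        .zip(betas)
        .fold(0, |acc, (eval, beta)| add(acc, mul(*beta, *eval)))
}

fn main() {
    // Two toy constraint quotients evaluated on a domain of four points.
    let quotient_evals = vec![vec![3, 5, 7, 11], vec![2, 4, 6, 8]];
    let betas: [u128; 2] = [10, 20];

    let h_evals = combine_quotients(&quotient_evals, &betas);
    assert_eq!(h_evals[0], 3 * 10 + 2 * 20);

    // The verifier recomputes the same combination from the claimed OOD values.
    let h_at_z = combine_ood_evaluations(&[9, 13], &betas);
    assert_eq!(h_at_z, 9 * 10 + 13 * 20);
    println!("beta-only composition combination matches on both sides");
}
```

Both folds apply the same β coefficients to the same quotient values, which is exactly the form the closures in evaluator.rs, prover.rs and verifier.rs take once the degree-adjustment factor is removed.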