Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Zkvm v0.1 #67

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
367 changes: 203 additions & 164 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion rust-toolchain
Original file line number Diff line number Diff line change
@@ -1 +1 @@
nightly-2024-02-08
nightly-2024-10-30
27 changes: 13 additions & 14 deletions snark-verifier-sdk/src/halo2/aggregation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ use halo2_base::{
use itertools::Itertools;
use rand::{rngs::StdRng, SeedableRng};
use serde::{Deserialize, Serialize};
#[cfg(debug_assertions)]
use snark_verifier::util::arithmetic::fe_to_limbs;
// #[cfg(debug_assertions)]
// use snark_verifier::util::arithmetic::fe_to_limbs;
use snark_verifier::{
loader::{
self,
Expand Down Expand Up @@ -90,7 +90,7 @@ impl VerifierUniversality {

#[allow(clippy::type_complexity)]
/// Core function used in `synthesize` to aggregate multiple `snarks`.
///
///
/// Returns the assigned instances of previous snarks and the new final pair that needs to be verified in a pairing check.
/// For each previous snark, we concatenate all instances into a single vector. We return a vector of vectors,
/// one vector per snark, for convenience.
Expand Down Expand Up @@ -360,8 +360,7 @@ pub enum AssignedTranscriptObject {
///
/// ## Notes
/// - This function does _not_ expose any public instances.
/// - `svk` is the generator of the KZG trusted setup, usually gotten via `params.get_g()[0]`
/// (avoids having to pass `params` into function just to get generator)
/// - `svk` is the generator of the KZG trusted setup, usually gotten via `params.get_g()[0]` (avoids having to pass `params` into function just to get generator)
///
/// ## Universality
/// - If `universality` is not `None`, then the verifying key of each snark in `snarks` is loaded as a witness in the circuit.
Expand Down Expand Up @@ -461,15 +460,15 @@ where
})
.collect();

#[cfg(debug_assertions)]
{
let KzgAccumulator { lhs, rhs } = _accumulator;
let instances =
[lhs.x, lhs.y, rhs.x, rhs.y].map(fe_to_limbs::<_, Fr, LIMBS, BITS>).concat();
for (lhs, rhs) in instances.iter().zip(accumulator.iter()) {
assert_eq!(lhs, rhs.value());
}
}
// #[cfg(debug_assertions)]
// {
// let KzgAccumulator { lhs, rhs } = _accumulator;
// let instances =
// [lhs.x, lhs.y, rhs.x, rhs.y].map(fe_to_limbs::<_, Fr, LIMBS, BITS>).concat();
// for (lhs, rhs) in instances.iter().zip(accumulator.iter()) {
// assert_eq!(lhs, rhs.value());
// }
// }
// put back `pool` into `builder`
*pool = loader.take_ctx();
SnarkAggregationOutput { previous_instances, accumulator, preprocessed, proof_transcripts }
Expand Down
2 changes: 1 addition & 1 deletion snark-verifier-sdk/src/halo2/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ pub struct AggregationDependencyIntentOwned {
pub agg_vk_hash_data: Option<((usize, usize), Fr)>,
}

impl<'a> AggregationDependencyIntent<'a> {
impl AggregationDependencyIntent<'_> {
/// Converts `self` into `PlonkProtocol`
pub fn compile(self, params: &ParamsKZG<Bn256>) -> PlonkProtocol<G1Affine> {
compile(
Expand Down
25 changes: 0 additions & 25 deletions snark-verifier-sdk/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -169,28 +169,3 @@ pub fn write_instances(instances: &[&[Fr]], path: impl AsRef<Path>) {
let f = BufWriter::new(File::create(path).unwrap());
bincode::serialize_into(f, &instances).unwrap();
}

#[cfg(feature = "zkevm")]
mod zkevm {
use super::CircuitExt;
use eth_types::Field;
use zkevm_circuits::{evm_circuit::EvmCircuit, state_circuit::StateCircuit};

impl<F: Field> CircuitExt<F> for EvmCircuit<F> {
fn instances(&self) -> Vec<Vec<F>> {
vec![]
}
fn num_instance(&self) -> Vec<usize> {
vec![]
}
}

impl<F: Field> CircuitExt<F> for StateCircuit<F> {
fn instances(&self) -> Vec<Vec<F>> {
vec![]
}
fn num_instance(&self) -> Vec<usize> {
vec![]
}
}
}
8 changes: 5 additions & 3 deletions snark-verifier/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,10 @@ rayon = { version = "1.8", optional = true }

# loader_evm
sha3 = { version = "=0.10", optional = true }
ruint = { version = "=1.12.1", optional = true }
ruint = { version = "=1.12", optional = true }

# revm only used for testing smart contract execution
revm = { version = "=3.5.0", optional = true }
revm = { version = "=18.0.0", optional = true }

[dev-dependencies]
ark-std = { version = "0.3.0", features = ["print-trace"] }
Expand All @@ -46,7 +46,9 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# loader_evm
crossterm = { version = "0.25" }
ratatui = { version = "0.24", default-features = false, features = ["crossterm"] }
ratatui = { version = "0.24", default-features = false, features = [
"crossterm",
] }

[features]
default = ["loader_evm", "loader_halo2", "halo2-axiom", "display"]
Expand Down
1 change: 1 addition & 0 deletions snark-verifier/examples/evm-verifier-with-accumulator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -307,6 +307,7 @@ mod aggregation {
#[derive(Clone, Debug)]
pub struct AggregationCircuit {
pub inner: BaseCircuitBuilder<Fr>,
#[allow(dead_code)]
pub as_proof: Vec<u8>,
}

Expand Down
3 changes: 2 additions & 1 deletion snark-verifier/src/loader/evm/loader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -430,7 +430,8 @@ impl EvmLoader {
// unimplemented
}

pub fn print_gas_metering(self: &Rc<Self>, _: Vec<u64>) {
#[allow(dead_code)]
fn print_gas_metering(self: &Rc<Self>, _: Vec<u64>) {
// unimplemented
}
}
Expand Down
17 changes: 8 additions & 9 deletions snark-verifier/src/loader/evm/util/executor.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,18 @@
use revm::primitives::TxKind;
use revm::{
primitives::{CreateScheme, ExecutionResult, Output, TransactTo, TxEnv},
InMemoryDB, EVM,
primitives::{ExecutionResult, Output, TransactTo, TxEnv},
Context, Evm, Handler, InMemoryDB,
};

/// Deploys the contract and then calls it with the given calldata.
/// Returns the gas used by the call to the deployed contract if both transactions succeed.
pub fn deploy_and_call(deployment_code: Vec<u8>, calldata: Vec<u8>) -> Result<u64, String> {
let mut evm = EVM {
env: Default::default(),
db: Some(InMemoryDB::default()),
};
let mut evm =
Evm::new(Context::new_with_db(InMemoryDB::default()), Handler::new(Default::default()));

evm.env.tx = TxEnv {
*evm.tx_mut() = TxEnv {
gas_limit: u64::MAX,
transact_to: TransactTo::Create(CreateScheme::Create),
transact_to: TxKind::Create,
data: deployment_code.into(),
..Default::default()
};
Expand All @@ -37,7 +36,7 @@ pub fn deploy_and_call(deployment_code: Vec<u8>, calldata: Vec<u8>) -> Result<u6
_ => unreachable!(),
};

evm.env.tx = TxEnv {
*evm.tx_mut() = TxEnv {
gas_limit: u64::MAX,
transact_to: TransactTo::Call(contract),
data: calldata.into(),
Expand Down
2 changes: 1 addition & 1 deletion snark-verifier/src/loader/halo2/shim.rs
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ mod halo2_lib {
}
}

impl<'chip, C: CurveAffineExt> EccInstructions<C> for BaseFieldEccChip<'chip, C>
impl<C: CurveAffineExt> EccInstructions<C> for BaseFieldEccChip<'_, C>
where
C::ScalarExt: BigPrimeField,
C::Base: BigPrimeField,
Expand Down
98 changes: 49 additions & 49 deletions snark-verifier/src/pcs/ipa.rs
Original file line number Diff line number Diff line change
Expand Up @@ -394,52 +394,52 @@ fn h_coeffs<F: Field>(xi: &[F], scalar: F) -> Vec<F> {
coeffs
}

#[cfg(all(test, feature = "system_halo2"))]
mod test {
use crate::{
pcs::{
ipa::{self, IpaProvingKey},
AccumulationDecider,
},
util::{arithmetic::Field, msm::Msm, poly::Polynomial},
};
use halo2_curves::pasta::pallas;
use halo2_proofs::transcript::{
Blake2bRead, Blake2bWrite, TranscriptReadBuffer, TranscriptWriterBuffer,
};
use rand::rngs::OsRng;

#[test]
fn test_ipa() {
type Ipa = ipa::Ipa<pallas::Affine>;
type IpaAs = ipa::IpaAs<pallas::Affine, ()>;

let k = 10;
let mut rng = OsRng;

for zk in [false, true] {
let pk = IpaProvingKey::<pallas::Affine>::rand(k, zk, &mut rng);
let (c, z, v, proof) = {
let p = Polynomial::<pallas::Scalar>::rand(pk.domain.n, &mut rng);
let omega = pk.zk().then(|| pallas::Scalar::random(&mut rng));
let c = pk.commit(&p, omega);
let z = pallas::Scalar::random(&mut rng);
let v = p.evaluate(z);
let mut transcript = Blake2bWrite::init(Vec::new());
Ipa::create_proof(&pk, &p[..], &z, omega.as_ref(), &mut transcript, &mut rng)
.unwrap();
(c, z, v, transcript.finalize())
};

let svk = pk.svk();
let accumulator = {
let mut transcript = Blake2bRead::init(proof.as_slice());
let proof = Ipa::read_proof(&svk, &mut transcript).unwrap();
Ipa::succinct_verify(&svk, &Msm::base(&c), &z, &v, &proof).unwrap()
};

let dk = pk.dk();
assert!(IpaAs::decide(&dk, accumulator).is_ok());
}
}
}
// #[cfg(test)]
// mod test {
// use crate::{
// pcs::{
// ipa::{self, IpaProvingKey},
// AccumulationDecider,
// },
// util::{arithmetic::Field, msm::Msm, poly::Polynomial},
// };
// use halo2_curves::pasta::pallas;
// use halo2_proofs::transcript::{
// Blake2bRead, Blake2bWrite, TranscriptReadBuffer, TranscriptWriterBuffer,
// };
// use rand::rngs::OsRng;

// #[test]
// fn test_ipa() {
// type Ipa = ipa::Ipa<pallas::Affine>;
// type IpaAs = ipa::IpaAs<pallas::Affine, ()>;

// let k = 10;
// let mut rng = OsRng;

// for zk in [false, true] {
// let pk = IpaProvingKey::<pallas::Affine>::rand(k, zk, &mut rng);
// let (c, z, v, proof) = {
// let p = Polynomial::<pallas::Scalar>::rand(pk.domain.n, &mut rng);
// let omega = pk.zk().then(|| pallas::Scalar::random(&mut rng));
// let c = pk.commit(&p, omega);
// let z = pallas::Scalar::random(&mut rng);
// let v = p.evaluate(z);
// let mut transcript = Blake2bWrite::init(Vec::new());
// Ipa::create_proof(&pk, &p[..], &z, omega.as_ref(), &mut transcript, &mut rng)
// .unwrap();
// (c, z, v, transcript.finalize())
// };

// let svk = pk.svk();
// let accumulator = {
// let mut transcript = Blake2bRead::init(proof.as_slice());
// let proof = Ipa::read_proof(&svk, &mut transcript).unwrap();
// Ipa::succinct_verify(&svk, &Msm::base(&c), &z, &v, &proof).unwrap()
// };

// let dk = pk.dk();
// assert!(IpaAs::decide(&dk, accumulator).is_ok());
// }
// }
// }
2 changes: 1 addition & 1 deletion snark-verifier/src/pcs/ipa/accumulation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ where

let (u, h) = instances
.iter()
.map(|IpaAccumulator { u, xi }| (*u, h_coeffs(xi, C::Scalar::ONE)))
.map(|IpaAccumulator { u, xi }| (*u, h_coeffs(&xi[..], C::Scalar::ONE)))
.chain(a_b_u.map(|(a, b, u)| {
(
u,
Expand Down
4 changes: 2 additions & 2 deletions snark-verifier/src/pcs/kzg/accumulator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -205,8 +205,8 @@ mod halo2 {
use halo2_base::utils::CurveAffineExt;
use halo2_ecc::ecc::BaseFieldEccChip;

impl<'chip, C, const LIMBS: usize, const BITS: usize>
LimbsEncodingInstructions<C, LIMBS, BITS> for BaseFieldEccChip<'chip, C>
impl<C, const LIMBS: usize, const BITS: usize>
LimbsEncodingInstructions<C, LIMBS, BITS> for BaseFieldEccChip<'_, C>
where
C: CurveAffineExt,
C::ScalarExt: BigPrimeField,
Expand Down
2 changes: 1 addition & 1 deletion snark-verifier/src/system/halo2/transcript/halo2.rs
Original file line number Diff line number Diff line change
Expand Up @@ -464,7 +464,7 @@ mod halo2_lib {
use halo2_base::utils::{BigPrimeField, CurveAffineExt};
use halo2_ecc::ecc::BaseFieldEccChip;

impl<'chip, C: CurveAffineExt> NativeEncoding<C> for BaseFieldEccChip<'chip, C>
impl<C: CurveAffineExt> NativeEncoding<C> for BaseFieldEccChip<'_, C>
where
C::Scalar: BigPrimeField,
C::Base: BigPrimeField,
Expand Down
6 changes: 3 additions & 3 deletions snark-verifier/src/util/msm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ pub struct Msm<'a, C: CurveAffine, L: Loader<C>> {
bases: Vec<&'a L::LoadedEcPoint>,
}

impl<'a, C, L> Default for Msm<'a, C, L>
impl<C, L> Default for Msm<'_, C, L>
where
C: CurveAffine,
L: Loader<C>,
Expand Down Expand Up @@ -169,7 +169,7 @@ where
}
}

impl<'a, C, L> MulAssign<&L::LoadedScalar> for Msm<'a, C, L>
impl<C, L> MulAssign<&L::LoadedScalar> for Msm<'_, C, L>
where
C: CurveAffine,
L: Loader<C>,
Expand All @@ -194,7 +194,7 @@ where
}
}

impl<'a, C, L> Sum for Msm<'a, C, L>
impl<C, L> Sum for Msm<'_, C, L>
where
C: CurveAffine,
L: Loader<C>,
Expand Down
13 changes: 5 additions & 8 deletions snark-verifier/src/verifier/plonk/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,7 @@ use crate::{
},
Error,
};
use std::{
collections::{BTreeMap, HashMap},
iter,
};
use std::{collections::BTreeMap, iter};

/// Proof of PLONK with [`PolynomialCommitmentScheme`] that has
/// [`AccumulationScheme`].
Expand Down Expand Up @@ -164,7 +161,7 @@ where
pub(super) fn queries(
&self,
protocol: &PlonkProtocol<C, L>,
mut evaluations: HashMap<Query, L::LoadedScalar>,
mut evaluations: BTreeMap<Query, L::LoadedScalar>,
) -> Vec<pcs::Query<Rotation, L::LoadedScalar>> {
if protocol.queries.is_empty() {
return vec![];
Expand Down Expand Up @@ -199,8 +196,8 @@ where
&'a self,
protocol: &'a PlonkProtocol<C, L>,
common_poly_eval: &CommonPolynomialEvaluation<C, L>,
evaluations: &mut HashMap<Query, L::LoadedScalar>,
) -> Result<Vec<Msm<C, L>>, Error> {
evaluations: &mut BTreeMap<Query, L::LoadedScalar>,
) -> Result<Vec<Msm<'a, C, L>>, Error> {
let loader = common_poly_eval.zn().loader();
let mut commitments = iter::empty()
.chain(protocol.preprocessed.iter().map(Msm::base))
Expand Down Expand Up @@ -304,7 +301,7 @@ where
protocol: &PlonkProtocol<C, L>,
instances: &[Vec<L::LoadedScalar>],
common_poly_eval: &CommonPolynomialEvaluation<C, L>,
) -> Result<HashMap<Query, L::LoadedScalar>, Error> {
) -> Result<BTreeMap<Query, L::LoadedScalar>, Error> {
let loader = common_poly_eval.zn().loader();
let instance_evals = protocol.instance_committing_key.is_none().then(|| {
let offset = protocol.preprocessed.len();
Expand Down
Loading