diff --git a/Cargo.lock b/Cargo.lock index 0a60568bf..e478c7318 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -849,7 +849,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", "bytes", "futures-util", "http 1.1.0", @@ -858,7 +858,41 @@ dependencies = [ "hyper 1.5.1", "hyper-util", "itoa", - "matchit", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core 0.5.0", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.1", + "hyper-util", + "itoa", + "matchit 0.8.4", "memchr", "mime", "percent-encoding", @@ -897,14 +931,34 @@ dependencies = [ "tracing", ] +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "axum-extra" version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c794b30c904f0a1c2fb7740f7df7f7972dfaa14ef6f57cb6178dc63e5dca2f04" dependencies = [ - "axum", - "axum-core", + "axum 0.7.9", + "axum-core 0.4.5", "bytes", "fastrand", "futures-util", @@ -2740,6 +2794,7 @@ name = 
"ethrex-l2" version = "0.1.0" dependencies = [ "bytes", + "directories", "envy", "ethereum-types 0.15.1", "ethrex-blockchain", @@ -2798,7 +2853,7 @@ dependencies = [ name = "ethrex-metrics" version = "0.1.0" dependencies = [ - "axum", + "axum 0.8.1", "ethrex-core", "prometheus", "serde", @@ -2878,7 +2933,7 @@ dependencies = [ name = "ethrex-rpc" version = "0.1.0" dependencies = [ - "axum", + "axum 0.7.9", "axum-extra", "bytes", "ethrex-blockchain", @@ -4708,6 +4763,12 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "matrixmultiply" version = "0.3.9" @@ -8678,14 +8739,14 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.2", "tokio", "tower-layer", "tower-service", @@ -8811,7 +8872,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27dfcc06b8d9262bc2d4b8d1847c56af9971a52dd8a0076876de9db763227d0d" dependencies = [ "async-trait", - "axum", + "axum 0.7.9", "futures", "http 1.1.0", "http-body-util", diff --git a/cmd/ethrex_l2/src/commands/prove.rs b/cmd/ethrex_l2/src/commands/prove.rs index 12fa233f7..26249c6cf 100644 --- a/cmd/ethrex_l2/src/commands/prove.rs +++ b/cmd/ethrex_l2/src/commands/prove.rs @@ -1,5 +1,8 @@ use clap::Args; -use ethrex_l2::utils::test_data_io::{generate_program_input, read_chain_file, read_genesis_file}; +use 
ethrex_l2::utils::{ + prover::proving_systems::ProverType, + test_data_io::{generate_program_input, read_chain_file, read_genesis_file}, +}; use ethrex_prover_lib::prover::create_prover; #[derive(Args)] @@ -30,7 +33,7 @@ impl Command { let chain = read_chain_file(&self.chain); let program_input = generate_program_input(genesis, chain, self.block_number)?; - let mut prover = create_prover(ethrex_l2::proposer::prover_server::ProverType::RISC0); + let mut prover = create_prover(ProverType::RISC0); prover.prove(program_input).expect("proving failed"); println!( "Total gas consumption: {}", diff --git a/crates/blockchain/metrics/Cargo.toml b/crates/blockchain/metrics/Cargo.toml index df0267e30..959595f31 100644 --- a/crates/blockchain/metrics/Cargo.toml +++ b/crates/blockchain/metrics/Cargo.toml @@ -6,18 +6,16 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -tokio.workspace = true -tracing.workspace = true +tokio = { workspace = true, optional = true } +tracing = { workspace = true, optional = true } thiserror.workspace = true serde_json.workspace = true serde.workspace = true ethrex-core = { path = "../../common", default-features = false } -prometheus = "0.13.4" - -# TODO: remove? 
-axum = "0.7.9" +prometheus = { version = "0.13.4", optional = true } +axum = { version = "0.8.1", optional = true } [lib] @@ -25,4 +23,4 @@ path = "./mod.rs" [features] default = ["api"] -api = [] +api = ["dep:axum", "dep:prometheus", "dep:tokio", "dep:tracing"] diff --git a/crates/l2/Cargo.toml b/crates/l2/Cargo.toml index a73a8a808..bbbe0c865 100644 --- a/crates/l2/Cargo.toml +++ b/crates/l2/Cargo.toml @@ -27,7 +27,10 @@ secp256k1.workspace = true keccak-hash.workspace = true envy = "0.4.2" thiserror.workspace = true +directories = "5.0.1" + zkvm_interface = { path = "./prover/zkvm/interface/", default-features = false } + # risc0 risc0-zkvm = { version = "1.2.0" } # sp1 diff --git a/crates/l2/Makefile b/crates/l2/Makefile index cf1df37a3..89f38c1b5 100644 --- a/crates/l2/Makefile +++ b/crates/l2/Makefile @@ -1,11 +1,10 @@ +.PHONY: help init down clean restart cli update-cli-contracts init-local-l1 init-l1 down-local-l1 restart-local-l1 rm-db-l1 clean-contract-deps restart-contract-deps deploy-l1 init-l2 down-l2 restart-l2 init-prover rm-db-l2 purge_prover_state ci_test test .DEFAULT_GOAL := help L2_GENESIS_FILE_PATH=../../test_data/genesis-l2.json L1_GENESIS_FILE_PATH=../../test_data/genesis-l1.json # Basic -.PHONY: help init down clean restart - help: ## ๐Ÿ“š Show help for each of the Makefile recipes @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' @@ -18,8 +17,6 @@ clean: clean-contract-deps ## ๐Ÿงน Cleans the localnet restart: restart-local-l1 deploy-l1 restart-l2 ## ๐Ÿ”„ Restarts the localnet # CLI -.PHONY: cli update-cli-contracts - cli: ## ๐Ÿ› ๏ธ Installs the L2 Lambda ethrex CLI cargo install --path ${ethrex_PATH}/cmd/ethrex_l2/ --force @@ -60,8 +57,6 @@ L2_AUTH_PORT=8552 L2_PROMETHEUS_METRICS_PORT = 3702 # Local L1 -.PHONY: init-local-l1 init-l1 down-local-l1 restart-local-l1 rm-db-l1 - init-local-l1: ## ๐Ÿš€ Initializes an L1 Lambda ethrex Client with Docker 
(Used with make init) docker compose -f ${ethrex_DEV_DOCKER_COMPOSE_PATH} -f ${ethrex_METRICS_OVERRIDES_L1_DOCKER_COMPOSE_PATH} up -d @@ -94,8 +89,6 @@ rm-db-l1: ## ๐Ÿ›‘ Removes the DB used by the L1 cargo run --release --manifest-path ../../Cargo.toml --bin ethrex -- removedb --datadir ${ethrex_L1_DEV_LIBMDBX} # Contracts -PHONY: clean-contract-deps restart-contract-deps deploy-l1 - clean-contract-deps: ## ๐Ÿงน Cleans the dependencies for the L1 contracts. rm -rf contracts/solc_out rm -rf contracts/lib @@ -106,8 +99,6 @@ deploy-l1: ## ๐Ÿ“œ Deploys the L1 contracts DEPLOYER_CONTRACTS_PATH=contracts cargo run --release --bin ethrex_l2_l1_deployer --manifest-path ${ethrex_L2_CONTRACTS_PATH}/Cargo.toml # L2 -PHONY: init-l2 down-l2 restart-l2 init-prover rm-db-l2 - init-l2: init-metrics ## ๐Ÿš€ Initializes an L2 Lambda ethrex Client cargo run --release --manifest-path ../../Cargo.toml --bin ethrex --features "l2,metrics" -- \ --network ${L2_GENESIS_FILE_PATH} \ @@ -148,8 +139,6 @@ rm-db-l2: ## ๐Ÿ›‘ Removes the DB used by the L2 cargo run --release --manifest-path ../../Cargo.toml --bin ethrex -- removedb --datadir ${ethrex_L2_DEV_LIBMDBX} # Testing -PHONY: ci_test test - ci_test: ## ๐Ÿšง Runs the L2's integration test, used by the github's CI docker compose -f ${ethrex_L2_DOCKER_COMPOSE_PATH} down docker compose -f ${ethrex_L2_DOCKER_COMPOSE_PATH} up -d --build @@ -158,3 +147,26 @@ ci_test: ## ๐Ÿšง Runs the L2's integration test, used by the github's CI test: ## ๐Ÿšง Runs the L2's integration test, run `make init` and in a new terminal make test BRIDGE_ADDRESS=$$(grep 'L1_WATCHER_BRIDGE_ADDRESS' .env | cut -d= -f2) ON_CHAIN_PROPOSER_ADDRESS=$$(grep 'COMMITTER_ON_CHAIN_PROPOSER_ADDRESS' .env | cut -d= -f2) cargo test --release testito -- --nocapture + + +# Purge L2's state +UNAME_S:=$(shell uname -s) +# This directory is set by crates/l2/utils/prover/save_state.rs -> const DEFAULT_DATADIR +PROJECT_NAME:=ethrex_l2_state + +ifeq ($(UNAME_S),Linux) + PROJECT_PATH := 
$(HOME)/.local/share/${PROJECT_NAME} +else ifeq ($(UNAME_S),Darwin) + PROJECT_PATH := $(HOME)/Library/Application\ Support/${PROJECT_NAME} +else + $(error Unsupported platform: $(UNAME_S)) +endif + +purge_prover_state: ## 🧹 Removes the L2 state, only use to start fresh. + @echo "Are you sure you want to delete the directory: $(PROJECT_PATH) ? [y/n]" + @read answer; \ + if [ "$$answer" != "y" ]; then \ + echo "Operation canceled."; exit 1; \ + fi; \ + rm -rf $(PROJECT_PATH); \ + echo "Directory deleted." diff --git a/crates/l2/contracts/deployer.rs b/crates/l2/contracts/deployer.rs index 6695d5806..36465888f 100644 --- a/crates/l2/contracts/deployer.rs +++ b/crates/l2/contracts/deployer.rs @@ -81,10 +81,8 @@ async fn main() -> Result<(), DeployError> { ) .await?; - let sp1_contract_verifier_address = match sp1_verifier_address { - Some(address) => address, - None => setup_result.sp1_contract_verifier_address, - }; + let sp1_contract_verifier_address = + sp1_verifier_address.unwrap_or(setup_result.sp1_contract_verifier_address); initialize_contracts( setup_result.deployer_address, diff --git a/crates/l2/proposer/errors.rs b/crates/l2/proposer/errors.rs index 0513ace6c..911029778 100644 --- a/crates/l2/proposer/errors.rs +++ b/crates/l2/proposer/errors.rs @@ -1,6 +1,7 @@ use std::sync::mpsc::SendError; use crate::utils::config::errors::ConfigError; +use crate::utils::prover::errors::SaveStateError; use ethereum_types::FromStrRadixErr; use ethrex_core::types::{BlobsBundleError, FakeExponentialError}; use ethrex_dev::utils::engine_client::errors::EngineClientError; @@ -22,6 +23,8 @@ pub enum L1WatcherError { FailedToRetrieveChainConfig(String), #[error("L1Watcher failed to get config: {0}")] FailedToGetConfig(#[from] ConfigError), + #[error("{0}")] + Custom(String), } #[derive(Debug, thiserror::Error)] @@ -32,9 +35,9 @@ pub enum ProverServerError { EthClientError(#[from] EthClientError), #[error("ProverServer failed to send transaction: {0}")] 
FailedToVerifyProofOnChain(String), - #[error("ProverServer failed retrieve block from storage: {0}")] - FailedToRetrieveBlockFromStorage(#[from] StoreError), - #[error("ProverServer failed retrieve block from storaga, data is None.")] + #[error("ProverServer failed to access Store: {0}")] + FailedAccessingStore(#[from] StoreError), + #[error("ProverServer failed to retrieve block from storage, data is None.")] StorageDataIsNone, #[error("ProverServer failed to create ProverInputs: {0}")] FailedToCreateProverInputs(#[from] EvmError), @@ -44,6 +47,12 @@ pub enum ProverServerError { JoinError(#[from] JoinError), #[error("ProverServer failed: {0}")] Custom(String), + #[error("ProverServer failed to write to TcpStream: {0}")] + WriteError(String), + #[error("ProverServer failed to get data from Store: {0}")] + ItemNotFoundInStore(String), + #[error("ProverServer encountered a SaveStateError: {0}")] + SaveStateError(#[from] SaveStateError), #[error("Failed to encode calldata: {0}")] CalldataEncodeError(#[from] CalldataEncodeError), } diff --git a/crates/l2/proposer/l1_watcher.rs b/crates/l2/proposer/l1_watcher.rs index 7b8fb5d1c..80ac162c5 100644 --- a/crates/l2/proposer/l1_watcher.rs +++ b/crates/l2/proposer/l1_watcher.rs @@ -227,11 +227,9 @@ impl L1Watcher { let gas_price = self.l2_client.get_gas_price().await?; // Avoid panicking when using as_u64() - let gas_price = if gas_price > u64::MAX.into() { - u64::MAX - } else { - gas_price.as_u64() - }; + let gas_price: u64 = gas_price + .try_into() + .map_err(|_| L1WatcherError::Custom("Failed at gas_price.try_into()".to_owned()))?; let mut mint_transaction = self .eth_client diff --git a/crates/l2/proposer/prover_server.rs b/crates/l2/proposer/prover_server.rs index 08a7055e1..ebb447350 100644 --- a/crates/l2/proposer/prover_server.rs +++ b/crates/l2/proposer/prover_server.rs @@ -1,7 +1,14 @@ -use super::errors::{ProverServerError, SigIntError}; -use crate::utils::config::{ -    committer::CommitterConfig, 
errors::ConfigError, eth::EthConfig, - prover_server::ProverServerConfig, +use crate::proposer::errors::{ProverServerError, SigIntError}; +use crate::utils::{ + config::{ + committer::CommitterConfig, errors::ConfigError, eth::EthConfig, + prover_server::ProverServerConfig, + }, + prover::{ + errors::SaveStateError, + proving_systems::{ProverType, ProvingOutput}, + save_state::{StateFileType, StateType, *}, + }, }; use ethrex_core::{ types::{Block, BlockHeader}, @@ -16,6 +23,7 @@ use ethrex_vm::{execution_db::ExecutionDB, EvmError}; use secp256k1::SecretKey; use serde::{Deserialize, Serialize}; use std::{ + fmt::Debug, io::{BufReader, BufWriter, Write}, net::{IpAddr, Shutdown, TcpListener, TcpStream}, sync::mpsc::{self, Receiver}, @@ -28,9 +36,6 @@ use tokio::{ }; use tracing::{debug, error, info, warn}; -use risc0_zkvm::sha::Digestible; -use sp1_sdk::HashableKey; - const VERIFY_FUNCTION_SIGNATURE: &str = "verify(uint256,bytes,bytes32,bytes32,bytes32,bytes,bytes)"; #[derive(Debug, Serialize, Deserialize, Default)] @@ -51,144 +56,6 @@ struct ProverServer { verifier_private_key: SecretKey, } -#[derive(Debug, Clone, Copy)] -/// Enum used to identify the different proving systems. -pub enum ProverType { - RISC0, - SP1, -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct Risc0Proof { - pub receipt: Box, - pub prover_id: Vec, -} - -pub struct Risc0ContractData { - pub block_proof: Vec, - pub image_id: Vec, - pub journal_digest: Vec, -} - -impl Risc0Proof { - pub fn new(receipt: risc0_zkvm::Receipt, prover_id: Vec) -> Self { - Risc0Proof { - receipt: Box::new(receipt), - prover_id, - } - } - - pub fn contract_data(&self) -> Result { - // If we run the prover_client with RISC0_DEV_MODE=0 we will have a groth16 proof - // Else, we will have a fake proof. 
- // - // The RISC0_DEV_MODE=1 should only be used with DEPLOYER_CONTRACT_VERIFIER=0xAA - let block_proof = match self.receipt.inner.groth16() { - Ok(inner) => { - // The SELECTOR is used to perform an extra check inside the groth16 verifier contract. - let mut selector = - hex::encode(inner.verifier_parameters.as_bytes().get(..4).ok_or( - ProverServerError::Custom( - "Failed to get verify_proof_selector in send_proof()".to_owned(), - ), - )?); - let seal = hex::encode(inner.clone().seal); - selector.push_str(&seal); - hex::decode(selector).map_err(|e| { - ProverServerError::Custom(format!("Failed to hex::decode(selector): {e}")) - })? - } - Err(_) => vec![32; 0], - }; - - let mut image_id: [u32; 8] = [0; 8]; - for (i, b) in image_id.iter_mut().enumerate() { - *b = *self.prover_id.get(i).ok_or(ProverServerError::Custom( - "Failed to get image_id in handle_proof_submission()".to_owned(), - ))?; - } - - let image_id: risc0_zkvm::sha::Digest = image_id.into(); - let image_id = image_id.as_bytes().to_vec(); - - let journal_digest = Digestible::digest(&self.receipt.journal) - .as_bytes() - .to_vec(); - - Ok(Risc0ContractData { - block_proof, - image_id, - journal_digest, - }) - } - - pub fn contract_data_empty() -> Risc0ContractData { - Risc0ContractData { - block_proof: vec![0; 32], - image_id: vec![0; 32], - journal_digest: vec![0; 32], - } - } -} - -#[derive(Serialize, Deserialize, Clone)] -pub struct Sp1Proof { - pub proof: Box, - pub vk: sp1_sdk::SP1VerifyingKey, -} - -pub struct Sp1ContractData { - pub public_values: Vec, - pub vk: Vec, - pub proof_bytes: Vec, -} - -impl Sp1Proof { - pub fn new( - proof: sp1_sdk::SP1ProofWithPublicValues, - verifying_key: sp1_sdk::SP1VerifyingKey, - ) -> Self { - Sp1Proof { - proof: Box::new(proof), - vk: verifying_key, - } - } - - pub fn contract_data(&self) -> Result { - let vk = self - .vk - .bytes32() - .strip_prefix("0x") - .ok_or(ProverServerError::Custom( - "Failed to strip_prefix of sp1 vk".to_owned(), - ))? 
- .to_string(); - let vk_bytes = hex::decode(&vk) - .map_err(|_| ProverServerError::Custom("Failed hex::decode(&vk)".to_owned()))?; - - Ok(Sp1ContractData { - public_values: self.proof.public_values.to_vec(), - vk: vk_bytes, - proof_bytes: self.proof.bytes(), - }) - } - - // TODO: better way of giving empty information - pub fn contract_data_empty() -> Sp1ContractData { - Sp1ContractData { - public_values: vec![0; 32], - vk: vec![0; 32], - proof_bytes: vec![0; 32], - } - } -} - -#[derive(Serialize, Deserialize, Clone)] -pub enum ProvingOutput { - RISC0(Risc0Proof), - SP1(Sp1Proof), -} - /// Enum for the ProverServer <--> ProverClient Communication Protocol. #[derive(Serialize, Deserialize)] pub enum ProofData { @@ -384,10 +251,39 @@ impl ProverServer { last_verified_block }; + let block_to_verify = last_verified_block + 1; + + let mut tx_submitted = false; + + // If we have all the proofs send a transaction to verify them on chain + + let send_tx = match block_number_has_all_proofs(block_to_verify) { + Ok(has_all_proofs) => has_all_proofs, + Err(e) => { + if let SaveStateError::IOError(ref error) = e { + if error.kind() != std::io::ErrorKind::NotFound { + return Err(e.into()); + } + } else { + return Err(e.into()); + } + false + } + }; + if send_tx { + self.handle_proof_submission(block_to_verify).await?; + // Remove the Proofs for that block_number + prune_state(block_to_verify)?; + tx_submitted = true; + } + let data: Result = serde_json::de::from_reader(buf_reader); match data { Ok(ProofData::Request) => { - if let Err(e) = self.handle_request(&stream, last_verified_block + 1).await { + if let Err(e) = self + .handle_request(&stream, block_to_verify, tx_submitted) + .await + { warn!("Failed to handle request: {e}"); } } @@ -397,12 +293,46 @@ impl ProverServer { }) => { self.handle_submit(&mut stream, block_number)?; - if block_number != (last_verified_block + 1) { - return Err(ProverServerError::Custom(format!("Prover Client submitted an invalid block_number: 
{block_number}. The last_proved_block is: {}", last_verified_block))); + // Avoid storing a proof of a future block_number + // CHECK: maybe we would like to store all the proofs given the case in which + // the provers generate them fast enough. In this way, we will avoid unneeded reexecution. + if block_number != block_to_verify { + return Err(ProverServerError::Custom(format!("Prover Client submitted an invalid block_number: {block_number}. The last_proved_block is: {last_verified_block}"))); + } + + // If the transaction was submitted for the block_to_verify + // avoid storing already used proofs. + if tx_submitted { + return Ok(()); + } + + // Check if we have an entry for the proof in that block_number + // Get the ProverType, implicitly set by the ProvingOutput + let prover_type = match proving_output { + ProvingOutput::RISC0(_) => ProverType::RISC0, + ProvingOutput::SP1(_) => ProverType::SP1, + }; + + // Check if we have the proof for that ProverType + // If we don't have it, insert it. + let has_proof = match block_number_has_state_file( + StateFileType::Proof(prover_type), + block_number, + ) { + Ok(has_proof) => has_proof, + Err(e) => { + let error = format!("{e}"); + if !error.contains("No such file or directory") { + return Err(e.into()); + } + false + } + }; + if !has_proof { + write_state(block_number, &StateType::Proof(proving_output))?; } - self.handle_proof_submission(block_number, proving_output) - .await?; + // Then if we have all the proofs, we send the transaction in the next `handle_connection` call. 
} Err(e) => { warn!("Failed to parse request: {e}"); @@ -420,6 +350,7 @@ impl ProverServer { &self, stream: &TcpStream, block_number: u64, + tx_submitted: bool, ) -> Result<(), ProverServerError> { debug!("Request received"); @@ -427,7 +358,11 @@ impl ProverServer { let response = if block_number > latest_block_number { let response = ProofData::response(None, None); - warn!("Didn't send response"); + debug!("Didn't send response"); + response + } else if tx_submitted { + let response = ProofData::response(None, None); + debug!("Block: {block_number} has been submitted."); response } else { let input = self.create_prover_input(block_number)?; @@ -489,21 +424,26 @@ impl ProverServer { pub async fn handle_proof_submission( &self, block_number: u64, - proving_output: ProvingOutput, ) -> Result { - // TODO: - // Ideally we should wait to have both proofs - // We will have to send them in the same transaction. - let (sp1_contract_data, risc0_contract_data) = match proving_output { - ProvingOutput::RISC0(risc0_proof) => { - let risc0_contract_data = risc0_proof.contract_data()?; - let sp1_contract_data = Sp1Proof::contract_data_empty(); - (sp1_contract_data, risc0_contract_data) + // TODO change error + let risc0_proving_output = + read_proof(block_number, StateFileType::Proof(ProverType::RISC0))?; + let risc0_contract_data = match risc0_proving_output { + ProvingOutput::RISC0(risc0_proof) => risc0_proof.contract_data()?, + _ => { + return Err(ProverServerError::Custom( + "RISC0 Proof isn't present".to_string(), + )) } - ProvingOutput::SP1(sp1_proof) => { - let risc0_contract_data = Risc0Proof::contract_data_empty(); - let sp1_contract_data = sp1_proof.contract_data()?; - (sp1_contract_data, risc0_contract_data) + }; + + let sp1_proving_output = read_proof(block_number, StateFileType::Proof(ProverType::SP1))?; + let sp1_contract_data = match sp1_proving_output { + ProvingOutput::SP1(sp1_proof) => sp1_proof.contract_data()?, + _ => { + return 
Err(ProverServerError::Custom( + "SP1 Proof isn't present".to_string(), + )) } }; @@ -563,13 +503,8 @@ impl ProverServer { ) .await?; - if last_committed_block == u64::MAX { - debug!("No blocks commited yet"); - continue; - } - if last_committed_block == last_verified_block { - debug!("No new blocks to prove"); + warn!("No new blocks to prove"); continue; } diff --git a/crates/l2/prover/src/lib.rs b/crates/l2/prover/src/lib.rs index 4c99e12fe..64469fe67 100644 --- a/crates/l2/prover/src/lib.rs +++ b/crates/l2/prover/src/lib.rs @@ -2,8 +2,8 @@ pub mod errors; pub mod prover; pub mod prover_client; -use ethrex_l2::{ - proposer::prover_server::ProverType, utils::config::prover_client::ProverClientConfig, +use ethrex_l2::utils::{ + config::prover_client::ProverClientConfig, prover::proving_systems::ProverType, }; use tracing::warn; diff --git a/crates/l2/prover/src/main.rs b/crates/l2/prover/src/main.rs index a442c5877..caa9d8b97 100644 --- a/crates/l2/prover/src/main.rs +++ b/crates/l2/prover/src/main.rs @@ -1,6 +1,6 @@ -use ethrex_l2::{ - proposer::prover_server::ProverType, - utils::config::{prover_client::ProverClientConfig, read_env_file}, +use ethrex_l2::utils::{ + config::{prover_client::ProverClientConfig, read_env_file}, + prover::proving_systems::ProverType, }; use ethrex_prover_lib::init_client; use std::env; diff --git a/crates/l2/prover/src/prover.rs b/crates/l2/prover/src/prover.rs index 4c408df39..f56f54ed1 100644 --- a/crates/l2/prover/src/prover.rs +++ b/crates/l2/prover/src/prover.rs @@ -1,5 +1,5 @@ use crate::errors::ProverError; -use ethrex_l2::proposer::prover_server::{ProverType, ProvingOutput, Risc0Proof, Sp1Proof}; +use ethrex_l2::utils::prover::proving_systems::{ProverType, ProvingOutput, Risc0Proof, Sp1Proof}; use tracing::info; // risc0 diff --git a/crates/l2/prover/src/prover_client.rs b/crates/l2/prover/src/prover_client.rs index dcfe4782a..48ff26ea8 100644 --- a/crates/l2/prover/src/prover_client.rs +++ 
b/crates/l2/prover/src/prover_client.rs @@ -1,7 +1,10 @@ use crate::prover::create_prover; use ethrex_l2::{ - proposer::prover_server::{ProofData, ProverType, ProvingOutput}, - utils::config::prover_client::ProverClientConfig, + proposer::prover_server::ProofData, + utils::{ + config::prover_client::ProverClientConfig, + prover::proving_systems::{ProverType, ProvingOutput}, + }, }; use std::{ io::{BufReader, BufWriter}, @@ -61,6 +64,7 @@ impl ProverClient { warn!("Failed to request new data: {e}"); } } + sleep(Duration::from_millis(self.interval_ms)).await; } } diff --git a/crates/l2/prover/zkvm/interface/risc0/src/main.rs b/crates/l2/prover/zkvm/interface/risc0/src/main.rs index c4c9b6251..85ecec50a 100644 --- a/crates/l2/prover/zkvm/interface/risc0/src/main.rs +++ b/crates/l2/prover/zkvm/interface/risc0/src/main.rs @@ -31,10 +31,10 @@ fn main() { let receipts = execute_block(&block, &mut state).expect("failed to execute block"); validate_gas_used(&receipts, &block.header).expect("invalid gas used"); - let cumulative_gas_used = match receipts.last() { - Some(last_receipt) => last_receipt.cumulative_gas_used, - None => 0_u64, - }; + let cumulative_gas_used = receipts + .last() + .map(|last_receipt| last_receipt.cumulative_gas_used) + .unwrap_or_default(); env::write(&cumulative_gas_used); diff --git a/crates/l2/prover/zkvm/interface/sp1/elf/riscv32im-succinct-zkvm-elf b/crates/l2/prover/zkvm/interface/sp1/elf/riscv32im-succinct-zkvm-elf index a6fe4d635..01f51eafe 100755 Binary files a/crates/l2/prover/zkvm/interface/sp1/elf/riscv32im-succinct-zkvm-elf and b/crates/l2/prover/zkvm/interface/sp1/elf/riscv32im-succinct-zkvm-elf differ diff --git a/crates/l2/prover/zkvm/interface/sp1/src/main.rs b/crates/l2/prover/zkvm/interface/sp1/src/main.rs index 2ee7abe18..6ea6f9e3c 100644 --- a/crates/l2/prover/zkvm/interface/sp1/src/main.rs +++ b/crates/l2/prover/zkvm/interface/sp1/src/main.rs @@ -34,10 +34,10 @@ pub fn main() { let receipts = execute_block(&block, &mut 
state).expect("failed to execute block"); validate_gas_used(&receipts, &block.header).expect("invalid gas used"); - let cumulative_gas_used = match receipts.last() { - Some(last_receipt) => last_receipt.cumulative_gas_used, - None => 0_u64, - }; + let cumulative_gas_used = receipts + .last() + .map(|last_receipt| last_receipt.cumulative_gas_used) + .unwrap_or_default(); sp1_zkvm::io::commit(&cumulative_gas_used); diff --git a/crates/l2/sdk/src/eth_client/mod.rs b/crates/l2/sdk/src/eth_client/mod.rs index f2a1c68ec..feac537f8 100644 --- a/crates/l2/sdk/src/eth_client/mod.rs +++ b/crates/l2/sdk/src/eth_client/mod.rs @@ -734,8 +734,9 @@ impl EthClient { chain_id: if let Some(chain_id) = overrides.chain_id { chain_id } else { - // Should never panic, the chain_id should be smaller than u64::MAX - self.get_chain_id().await?.as_u64() + self.get_chain_id().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at get_chain_id().try_into()".to_owned()) + })? }, nonce: self .get_nonce_from_overrides_or_rpc(&overrides, from) @@ -744,12 +745,10 @@ impl EthClient { get_gas_price = gas_price; gas_price } else { - let gas_price = self.get_gas_price().await?; - get_gas_price = if gas_price > u64::MAX.into() { - u64::MAX - } else { - gas_price.as_u64() - }; + let gas_price: u64 = self.get_gas_price().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at gas_price.try_into()".to_owned()) + })?; + get_gas_price = gas_price; get_gas_price }, max_fee_per_gas: if let Some(gas_price) = overrides.gas_price { @@ -823,8 +822,9 @@ impl EthClient { chain_id: if let Some(chain_id) = overrides.chain_id { chain_id } else { - // Should never panic, the chain_id should be smaller than u64::MAX - self.get_chain_id().await?.as_u64() + self.get_chain_id().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at get_chain_id().try_into()".to_owned()) + })? 
}, nonce: self .get_nonce_from_overrides_or_rpc(&overrides, from) @@ -833,12 +833,10 @@ impl EthClient { get_gas_price = gas_price; gas_price } else { - let gas_price = self.get_gas_price().await?; - get_gas_price = if gas_price > u64::MAX.into() { - u64::MAX - } else { - gas_price.as_u64() - }; + let gas_price: u64 = self.get_gas_price().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at gas_price.try_into()".to_owned()) + })?; + get_gas_price = gas_price; get_gas_price }, max_fee_per_gas: if let Some(gas_price) = overrides.gas_price { @@ -914,8 +912,9 @@ impl EthClient { chain_id: if let Some(chain_id) = overrides.chain_id { chain_id } else { - // Should never panic, the chain_id should be smaller than u64::MAX - self.get_chain_id().await?.as_u64() + self.get_chain_id().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at get_chain_id().try_into()".to_owned()) + })? }, nonce: self .get_nonce_from_overrides_or_rpc(&overrides, from) @@ -924,12 +923,10 @@ impl EthClient { get_gas_price = gas_price; gas_price } else { - let gas_price = self.get_gas_price().await?; - get_gas_price = if gas_price > u64::MAX.into() { - u64::MAX - } else { - gas_price.as_u64() - }; + let gas_price: u64 = self.get_gas_price().await?.try_into().map_err(|_| { + EthClientError::Custom("Failed at gas_price.try_into()".to_owned()) + })?; + get_gas_price = gas_price; get_gas_price }, max_fee_per_gas: if let Some(gas_price) = overrides.gas_price { diff --git a/crates/l2/utils/mod.rs b/crates/l2/utils/mod.rs index 98184243b..339c978a3 100644 --- a/crates/l2/utils/mod.rs +++ b/crates/l2/utils/mod.rs @@ -1,3 +1,4 @@ pub mod config; pub mod error; +pub mod prover; pub mod test_data_io; diff --git a/crates/l2/utils/prover/errors.rs b/crates/l2/utils/prover/errors.rs new file mode 100644 index 000000000..2cf4b81bc --- /dev/null +++ b/crates/l2/utils/prover/errors.rs @@ -0,0 +1,13 @@ +#[derive(Debug, thiserror::Error)] +pub enum SaveStateError { + #[error("Failed to 
create data dir")] + FailedToCrateDataDir, + #[error("Failed to interact with IO: {0}")] + IOError(#[from] std::io::Error), + #[error("Failed to de/serialize: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("Failed to parse block_number_from_path: {0}")] + ParseIntError(#[from] std::num::ParseIntError), + #[error("{0}")] + Custom(String), +} diff --git a/crates/l2/utils/prover/mod.rs b/crates/l2/utils/prover/mod.rs new file mode 100644 index 000000000..79f4125c2 --- /dev/null +++ b/crates/l2/utils/prover/mod.rs @@ -0,0 +1,3 @@ +pub mod errors; +pub mod proving_systems; +pub mod save_state; diff --git a/crates/l2/utils/prover/proving_systems.rs b/crates/l2/utils/prover/proving_systems.rs new file mode 100644 index 000000000..56ee56a90 --- /dev/null +++ b/crates/l2/utils/prover/proving_systems.rs @@ -0,0 +1,150 @@ +use crate::proposer::errors::ProverServerError; +use serde::{Deserialize, Serialize}; +use std::fmt::Debug; + +use risc0_zkvm::sha::Digestible; +use sp1_sdk::HashableKey; + +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)] +/// Enum used to identify the different proving systems. 
+pub enum ProverType { + RISC0, + SP1, +} + +/// Used to iterate through all the possible proving systems +impl ProverType { + pub fn all() -> &'static [ProverType] { + &[ProverType::RISC0, ProverType::SP1] + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct Risc0Proof { + pub receipt: Box, + pub prover_id: Vec, +} + +pub struct Risc0ContractData { + pub block_proof: Vec, + pub image_id: Vec, + pub journal_digest: Vec, +} + +impl Risc0Proof { + // 8 times u32 + const IMAGE_ID_SIZE: usize = 8; + // 4 times u8 + const SELECTOR_SIZE: usize = 4; + pub fn new(receipt: risc0_zkvm::Receipt, prover_id: Vec) -> Self { + Risc0Proof { + receipt: Box::new(receipt), + prover_id, + } + } + + pub fn contract_data(&self) -> Result { + // If we run the prover_client with RISC0_DEV_MODE=0 we will have a groth16 proof + // Else, we will have a fake proof. + // + // The RISC0_DEV_MODE=1 should only be used with DEPLOYER_CONTRACT_VERIFIER=0xAA + let block_proof = match self.receipt.inner.groth16() { + Ok(inner) => { + // The SELECTOR is used to perform an extra check inside the groth16 verifier contract. + let mut selector = hex::encode( + inner + .verifier_parameters + .as_bytes() + .get(..Self::SELECTOR_SIZE) + .ok_or(ProverServerError::Custom( + "Failed to get verify_proof_selector in send_proof()".to_owned(), + ))?, + ); + let seal = hex::encode(inner.clone().seal); + selector.push_str(&seal); + hex::decode(selector).map_err(|e| { + ProverServerError::Custom(format!("Failed to hex::decode(selector): {e}")) + })? 
+ } + Err(_) => vec![0u8; 4], + }; + + let mut image_id = [0_u32; Self::IMAGE_ID_SIZE]; + for (i, b) in image_id.iter_mut().enumerate() { + *b = *self.prover_id.get(i).ok_or(ProverServerError::Custom( + "Failed to get image_id in handle_proof_submission()".to_owned(), + ))?; + } + + let image_id: risc0_zkvm::sha::Digest = image_id.into(); + let image_id = image_id.as_bytes().to_vec(); + + let journal_digest = Digestible::digest(&self.receipt.journal) + .as_bytes() + .to_vec(); + + Ok(Risc0ContractData { + block_proof, + image_id, + journal_digest, + }) + } +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct Sp1Proof { + pub proof: Box, + pub vk: sp1_sdk::SP1VerifyingKey, +} + +impl Debug for Sp1Proof { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Sp1Proof") + .field("proof", &self.proof) + .field("vk", &self.vk.bytes32()) + .finish() + } +} + +pub struct Sp1ContractData { + pub public_values: Vec, + pub vk: Vec, + pub proof_bytes: Vec, +} + +impl Sp1Proof { + pub fn new( + proof: sp1_sdk::SP1ProofWithPublicValues, + verifying_key: sp1_sdk::SP1VerifyingKey, + ) -> Self { + Sp1Proof { + proof: Box::new(proof), + vk: verifying_key, + } + } + + pub fn contract_data(&self) -> Result { + let vk = self + .vk + .bytes32() + .strip_prefix("0x") + .ok_or(ProverServerError::Custom( + "Failed to strip_prefix of sp1 vk".to_owned(), + ))? 
+ .to_string(); + let vk_bytes = hex::decode(&vk) + .map_err(|_| ProverServerError::Custom("Failed hex::decode(&vk)".to_owned()))?; + + Ok(Sp1ContractData { + public_values: self.proof.public_values.to_vec(), + vk: vk_bytes, + proof_bytes: self.proof.bytes(), + }) + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub enum ProvingOutput { + RISC0(Risc0Proof), + SP1(Sp1Proof), +} diff --git a/crates/l2/utils/prover/save_state.rs b/crates/l2/utils/prover/save_state.rs new file mode 100644 index 000000000..5046fcfec --- /dev/null +++ b/crates/l2/utils/prover/save_state.rs @@ -0,0 +1,584 @@ +use crate::utils::prover::errors::SaveStateError; +use crate::utils::prover::proving_systems::{ProverType, ProvingOutput}; +use directories::ProjectDirs; +use ethrex_storage::AccountUpdate; +use serde::{Deserialize, Serialize}; +use std::ffi::OsString; +use std::fs::{create_dir, read_dir, File}; +use std::io::{BufReader, Read}; +use std::path::{Path, PathBuf}; +use std::{ + fs::create_dir_all, + io::{BufWriter, Write}, +}; + +#[cfg(not(test))] +/// The default directory for data storage when not running tests. +/// This constant is used to define the default path for data files. +const DEFAULT_DATADIR: &str = "ethrex_l2_state"; + +#[cfg(not(test))] +#[inline(always)] +fn default_datadir() -> Result { + create_datadir(DEFAULT_DATADIR) +} + +#[cfg(test)] +#[inline(always)] +fn default_datadir() -> Result { + create_datadir("test_datadir") +} + +#[inline(always)] +fn create_datadir(dir_name: &str) -> Result { + let path_buf_data_dir = ProjectDirs::from("", "", dir_name) + .ok_or_else(|| SaveStateError::FailedToCrateDataDir)? 
+ .data_local_dir() + .to_path_buf(); + Ok(path_buf_data_dir) +} + +/// Proposed structure +/// 1/ +/// account_updates_1.json +/// proof_risc0_1.json +/// proof_sp1_1.json +/// 2/ +/// account_updates_2.json +/// proof_risc0_2.json +/// proof_sp1_2.json +/// All the files are saved at the path defined by [ProjectDirs::data_local_dir] +/// and the [DEFAULT_DATADIR] when calling [create_datadir] + +/// Enum used to differentiate between the possible types of data we can store per block. +#[derive(Serialize, Deserialize, Debug)] +pub enum StateType { + Proof(ProvingOutput), + AccountUpdates(Vec), +} + +/// Enum used to differentiate between the possible types of files we can have per block. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub enum StateFileType { + Proof(ProverType), + AccountUpdates, +} + +impl From<&StateType> for StateFileType { + fn from(state_type: &StateType) -> Self { + match state_type { + StateType::Proof(p) => match p { + ProvingOutput::RISC0(_) => StateFileType::Proof(ProverType::RISC0), + ProvingOutput::SP1(_) => StateFileType::Proof(ProverType::SP1), + }, + StateType::AccountUpdates(_) => StateFileType::AccountUpdates, + } + } +} + +impl From<&ProverType> for StateFileType { + fn from(prover_type: &ProverType) -> Self { + match prover_type { + ProverType::RISC0 => StateFileType::Proof(ProverType::RISC0), + ProverType::SP1 => StateFileType::Proof(ProverType::SP1), + } + } +} + +#[inline(always)] +fn get_proof_file_name_from_prover_type(prover_type: &ProverType, block_number: u64) -> String { + match prover_type { + ProverType::RISC0 => format!("proof_risc0_{block_number}.json"), + ProverType::SP1 => format!("proof_sp1_{block_number}.json").to_owned(), + } +} + +#[inline(always)] +fn get_block_number_from_path(path_buf: &Path) -> Result { + let block_number = path_buf + .file_name() + .ok_or_else(|| SaveStateError::Custom("Error: No file_name()".to_string()))? 
+ .to_string_lossy(); + + let block_number = block_number.parse::()?; + Ok(block_number) +} + +#[inline(always)] +fn get_state_dir_for_block(block_number: u64) -> Result { + let mut path_buf = default_datadir()?; + path_buf.push(block_number.to_string()); + + Ok(path_buf) +} + +#[inline(always)] +fn get_state_file_name(block_number: u64, state_file_type: &StateFileType) -> String { + match state_file_type { + StateFileType::AccountUpdates => format!("account_updates_{block_number}.json"), + // If we have more proving systems we have to match them an create a file name with the following structure: + // proof__.json + StateFileType::Proof(prover_type) => { + get_proof_file_name_from_prover_type(prover_type, block_number) + } + } +} + +#[inline(always)] +fn get_state_file_path( + path_buf: &Path, + block_number: u64, + state_file_type: &StateFileType, +) -> PathBuf { + let file_name = get_state_file_name(block_number, state_file_type); + path_buf.join(file_name) +} + +/// CREATE the state_file given the block_number +/// This function will create the following file_path: ../../..//state_file_type +fn create_state_file_for_block_number( + block_number: u64, + state_file_type: StateFileType, +) -> Result { + let path_buf = get_state_dir_for_block(block_number)?; + if let Some(parent) = path_buf.parent() { + if let Err(e) = create_dir_all(parent) { + if e.kind() != std::io::ErrorKind::AlreadyExists { + return Err(e.into()); + } + } + } + + let block_number = get_block_number_from_path(&path_buf)?; + + let file_path: PathBuf = get_state_file_path(&path_buf, block_number, &state_file_type); + + if let Err(e) = create_dir(&path_buf) { + if e.kind() != std::io::ErrorKind::AlreadyExists { + return Err(e.into()); + } + } + + File::create(file_path).map_err(Into::into) +} + +/// WRITE to the state_file given the block number and the state_type +/// It also creates the file, if it already exists it will overwrite the file +/// This function will create and write to the 
following file_path: ../../..//state_file_type +pub fn write_state(block_number: u64, state_type: &StateType) -> Result<(), SaveStateError> { + let inner = create_state_file_for_block_number(block_number, state_type.into())?; + + match state_type { + StateType::Proof(value) => { + let mut writer = BufWriter::new(inner); + serde_json::to_writer(&mut writer, value)?; + writer.flush()?; + } + StateType::AccountUpdates(value) => { + let mut writer = BufWriter::new(inner); + serde_json::to_writer(&mut writer, value)?; + writer.flush()?; + } + } + + Ok(()) +} + +fn get_latest_block_number_and_path() -> Result<(u64, PathBuf), SaveStateError> { + let data_dir = default_datadir()?; + let latest_block_number = read_dir(&data_dir)? + .filter_map(|entry| { + let entry = entry.ok()?; + let path = entry.path(); + if path.is_dir() { + path.file_name()?.to_str()?.parse::().ok() + } else { + None + } + }) + .max(); + + match latest_block_number { + Some(block_number) => { + let latest_path = data_dir.join(block_number.to_string()); + Ok((block_number, latest_path)) + } + None => Err(SaveStateError::Custom( + "No valid block directories found".to_owned(), + )), + } +} + +fn get_block_state_path(block_number: u64) -> Result { + let data_dir = default_datadir()?; + let block_state_path = data_dir.join(block_number.to_string()); + Ok(block_state_path) +} + +/// GET the latest block_number given the proposed structure +pub fn get_latest_block_number() -> Result { + let (block_number, _) = get_latest_block_number_and_path()?; + Ok(block_number) +} + +/// READ the state given the block_number and the [StateFileType] +pub fn read_state( + block_number: u64, + state_file_type: StateFileType, +) -> Result { + // TODO handle path not found + let block_state_path = get_block_state_path(block_number)?; + let file_path: PathBuf = get_state_file_path(&block_state_path, block_number, &state_file_type); + + let inner = File::open(file_path)?; + let mut reader = BufReader::new(inner); + let mut buf 
= String::new(); + + reader.read_to_string(&mut buf)?; + + let state = match state_file_type { + StateFileType::Proof(_) => { + let state: ProvingOutput = serde_json::from_str(&buf)?; + StateType::Proof(state) + } + StateFileType::AccountUpdates => { + let state: Vec = serde_json::from_str(&buf)?; + StateType::AccountUpdates(state) + } + }; + + Ok(state) +} + +/// READ the proof given the block_number and the [StateFileType::Proof] +pub fn read_proof( + block_number: u64, + state_file_type: StateFileType, +) -> Result { + match read_state(block_number, state_file_type)? { + StateType::Proof(p) => Ok(p), + StateType::AccountUpdates(_) => Err(SaveStateError::Custom( + "Failed in read_proof(), make sure that the state_file_type is a Proof".to_owned(), + )), + } +} + +/// READ the latest state given the [StateFileType]. +/// latest means the state for the highest block_number available. +pub fn read_latest_state(state_file_type: StateFileType) -> Result { + let (latest_block_state_number, _) = get_latest_block_number_and_path()?; + let state = read_state(latest_block_state_number, state_file_type)?; + Ok(state) +} + +/// DELETE the [StateFileType] for the given block_number +pub fn delete_state_file( + block_number: u64, + state_file_type: StateFileType, +) -> Result<(), SaveStateError> { + let block_state_path = get_block_state_path(block_number)?; + let file_path: PathBuf = get_state_file_path(&block_state_path, block_number, &state_file_type); + std::fs::remove_file(file_path)?; + + Ok(()) +} + +/// DELETE the [StateFileType] +/// latest means the state for the highest block_number available. 
+pub fn delete_latest_state_file(state_file_type: StateFileType) -> Result<(), SaveStateError> { + let (latest_block_state_number, _) = get_latest_block_number_and_path()?; + let latest_block_state_path = get_block_state_path(latest_block_state_number)?; + let file_path: PathBuf = get_state_file_path( + &latest_block_state_path, + latest_block_state_number, + &state_file_type, + ); + std::fs::remove_file(file_path)?; + + Ok(()) +} + +/// PRUNE all the files for the given block_number +pub fn prune_state(block_number: u64) -> Result<(), SaveStateError> { + let block_state_path = get_block_state_path(block_number)?; + std::fs::remove_dir_all(block_state_path)?; + Ok(()) +} + +/// PRUNE all the files +/// latest means the state for the highest block_number available. +pub fn prune_latest_state() -> Result<(), SaveStateError> { + let (latest_block_state_number, _) = get_latest_block_number_and_path()?; + let latest_block_state_path = get_block_state_path(latest_block_state_number)?; + std::fs::remove_dir_all(latest_block_state_path)?; + Ok(()) +} + +/// CHECK if the given path has the given [StateFileType] +/// This function will check if the path: ../../..// contains the state_file_type +pub fn path_has_state_file( + state_file_type: StateFileType, + path_buf: &Path, +) -> Result { + // Get the block_number from the path + let block_number = get_block_number_from_path(path_buf)?; + let file_name_to_seek: OsString = get_state_file_name(block_number, &state_file_type).into(); + + for entry in std::fs::read_dir(path_buf)? 
{ + let entry = entry?; + let file_name_stored = entry.file_name(); + + if file_name_stored == file_name_to_seek { + return Ok(true); + } + } + + Ok(false) +} + +/// CHECK if the given block_number has the given [StateFileType] +/// This function will check if the path: ../../..// contains the state_file_type +pub fn block_number_has_state_file( + state_file_type: StateFileType, + block_number: u64, +) -> Result { + let block_state_path = get_block_state_path(block_number)?; + let file_name_to_seek: OsString = get_state_file_name(block_number, &state_file_type).into(); + + for entry in std::fs::read_dir(block_state_path)? { + let entry = entry?; + let file_name_stored = entry.file_name(); + + if file_name_stored == file_name_to_seek { + return Ok(true); + } + } + + Ok(false) +} + +/// CHECK if the given block_number has all the proofs needed +/// This function will check if the path: ../../..// contains the proofs +/// Make sure to add all new proving_systems in the [ProverType::all] function +pub fn block_number_has_all_proofs(block_number: u64) -> Result { + let block_state_path = get_block_state_path(block_number)?; + + let mut has_all_proofs = true; + for prover_type in ProverType::all() { + let file_name_to_seek: OsString = + get_state_file_name(block_number, &StateFileType::from(prover_type)).into(); + + // Check if the proof exists + let proof_exists = std::fs::read_dir(&block_state_path)? 
+ .filter_map(Result::ok) // Filter out errors + .any(|entry| entry.file_name() == file_name_to_seek); + + // If the proof is missing return false + if !proof_exists { + has_all_proofs = false; + break; + } + } + + Ok(has_all_proofs) +} + +#[cfg(test)] +#[allow(clippy::expect_used)] +mod tests { + use ethrex_blockchain::add_block; + use ethrex_storage::{EngineType, Store}; + use ethrex_vm::execution_db::ExecutionDB; + use risc0_zkvm::sha::Digest; + use sp1_sdk::{HashableKey, PlonkBn254Proof, ProverClient, SP1Proof, SP1PublicValues}; + + use super::*; + use crate::utils::{ + prover::proving_systems::{Risc0Proof, Sp1Proof}, + test_data_io, + }; + use std::fs::{self}; + + #[test] + fn test_state_file_integration() -> Result<(), Box> { + if let Err(e) = fs::remove_dir_all(default_datadir()?) { + if e.kind() != std::io::ErrorKind::NotFound { + eprintln!("Directory NotFound: {:?}", default_datadir()?); + } + } + + let path = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/../../test_data")); + + let chain_file_path = path.join("l2-loadtest.rlp"); + let genesis_file_path = path.join("genesis-l2-old.json"); + + // Create an InMemory Store to later perform an execute_block so we can have the Vec. 
+ let store = Store::new("memory", EngineType::InMemory).expect("Failed to create Store"); + + let genesis = test_data_io::read_genesis_file(genesis_file_path.to_str().unwrap()); + store.add_initial_state(genesis.clone()).unwrap(); + + let blocks = test_data_io::read_chain_file(chain_file_path.to_str().unwrap()); + for block in &blocks { + add_block(block, &store).unwrap(); + } + + let mut account_updates_vec: Vec> = Vec::new(); + + // Generic RISC0 Receipt + let risc0_proof = Risc0Proof { + receipt: Box::new(risc0_zkvm::Receipt::new( + risc0_zkvm::InnerReceipt::Fake(risc0_zkvm::FakeReceipt::new( + risc0_zkvm::ReceiptClaim { + pre: risc0_zkvm::MaybePruned::Pruned(Digest::default()), + post: risc0_zkvm::MaybePruned::Pruned(Digest::default()), + exit_code: risc0_zkvm::ExitCode::Halted(37 * 2), + input: risc0_zkvm::MaybePruned::Value(None), + output: risc0_zkvm::MaybePruned::Value(None), + }, + )), + vec![37u8; 32], + )), + prover_id: vec![5u32; 8], + }; + + // The following is a dummy elf to get an SP1VerifyingKey + // It's not the best way, but didn't found an easier one. + // Else, an elf file has to be saved for this test. 
+ let magic_bytes1: &[u8] = &[ + 0x7f, 0x45, 0x4c, 0x46, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, + ]; + let magic_bytes2: &[u8] = &[ + 0x02, 0x00, 0xF3, 0x00, 0x01, 0x00, 0x00, 0x00, 0xD4, 0x8E, 0x21, 0x00, 0x34, 0x00, + 0x00, 0x00, + ]; + let magic_bytes3: &[u8] = &[ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x34, 0x00, 0x20, 0x00, 0x07, 0x00, + 0x28, 0x00, + ]; + + let prover = ProverClient::mock(); + let (_pk, vk) = + prover.setup(&[magic_bytes1, magic_bytes2, magic_bytes3, &[0; 256]].concat()); + + let sp1_proof = Sp1Proof { + proof: Box::new(sp1_sdk::SP1ProofWithPublicValues { + proof: SP1Proof::Plonk(PlonkBn254Proof { + public_inputs: ["1".to_owned(), "2".to_owned()], + encoded_proof: "d".repeat(4), + raw_proof: "d".repeat(4), + plonk_vkey_hash: [1; 32], + }), + stdin: sp1_sdk::SP1Stdin::new(), + public_values: SP1PublicValues::new(), + sp1_version: "dummy".to_owned(), + }), + vk, + }; + + // Write all the account_updates and proofs for each block + for block in &blocks { + let account_updates = + ExecutionDB::get_account_updates(blocks.last().unwrap(), &store).unwrap(); + + account_updates_vec.push(account_updates.clone()); + + write_state( + block.header.number, + &StateType::AccountUpdates(account_updates), + )?; + + let risc0_data = ProvingOutput::RISC0(risc0_proof.clone()); + write_state(block.header.number, &StateType::Proof(risc0_data))?; + + let sp1_data = ProvingOutput::SP1(sp1_proof.clone()); + write_state(block.header.number, &StateType::Proof(sp1_data))?; + } + + // Check if the latest block_number saved matches the latest block in the chain.rlp + let (latest_block_state_number, _) = get_latest_block_number_and_path()?; + + assert_eq!( + latest_block_state_number, + blocks.last().unwrap().header.number + ); + + // Delete account_updates file + let (_, latest_path) = get_latest_block_number_and_path()?; + + assert!(path_has_state_file( + StateFileType::AccountUpdates, + &latest_path + )?); + + 
assert!(block_number_has_state_file( + StateFileType::AccountUpdates, + latest_block_state_number + )?); + + delete_latest_state_file(StateFileType::AccountUpdates)?; + + assert!(!path_has_state_file( + StateFileType::AccountUpdates, + &latest_path + )?); + + assert!(!block_number_has_state_file( + StateFileType::AccountUpdates, + latest_block_state_number + )?); + + // Delete latest path + prune_latest_state()?; + let (latest_block_state_number, _) = get_latest_block_number_and_path()?; + assert_eq!( + latest_block_state_number, + blocks.last().unwrap().header.number - 1 + ); + + // Read account_updates back + let read_account_updates_blk2 = match read_state(2, StateFileType::AccountUpdates)? { + StateType::Proof(_) => unimplemented!(), + StateType::AccountUpdates(a) => a, + }; + + let og_account_updates_blk2 = account_updates_vec.get(2).unwrap(); + + for og_au in og_account_updates_blk2 { + // The read_account_updates aren't sorted in the same way as the og_account_updates. + let r_au = read_account_updates_blk2 + .iter() + .find(|au| au.address == og_au.address) + .unwrap(); + + assert_eq!(og_au.added_storage, r_au.added_storage); + assert_eq!(og_au.address, r_au.address); + assert_eq!(og_au.info, r_au.info); + assert_eq!(og_au.code, r_au.code); + } + + // Read RISC0 Proof back + let read_proof_updates_blk2 = read_proof(2, StateFileType::Proof(ProverType::RISC0))?; + + if let ProvingOutput::RISC0(read_risc0_proof) = read_proof_updates_blk2 { + assert_eq!( + risc0_proof.receipt.journal.bytes, + read_risc0_proof.receipt.journal.bytes + ); + assert_eq!(read_risc0_proof.prover_id, risc0_proof.prover_id); + } + + // Read SP1 Proof back + let read_proof_updates_blk2 = read_proof(2, StateFileType::Proof(ProverType::SP1))?; + + if let ProvingOutput::SP1(read_sp1_proof) = read_proof_updates_blk2 { + assert_eq!(read_sp1_proof.proof.bytes(), sp1_proof.proof.bytes()); + assert_eq!(read_sp1_proof.vk.bytes32(), sp1_proof.vk.bytes32()); + } + + 
fs::remove_dir_all(default_datadir()?)?; + + Ok(()) + } +} diff --git a/crates/vm/execution_db.rs b/crates/vm/execution_db.rs index 6eba56378..5a43fce1b 100644 --- a/crates/vm/execution_db.rs +++ b/crates/vm/execution_db.rs @@ -6,7 +6,7 @@ use ethrex_core::{ H256, }; use ethrex_rlp::encode::RLPEncode; -use ethrex_storage::{hash_address, hash_key, Store}; +use ethrex_storage::{hash_address, hash_key, AccountUpdate, Store}; use ethrex_trie::{NodeRLP, Trie}; use revm::{ primitives::{ @@ -44,15 +44,23 @@ pub struct ExecutionDB { } impl ExecutionDB { - /// Creates a database by executing a block, without performing any validation. + /// Creates a database and returns the ExecutionDB by executing a block, + /// without performing any validation. pub fn from_exec(block: &Block, store: &Store) -> Result { // TODO: perform validation to exit early + let account_updates = Self::get_account_updates(block, store)?; + Self::from_account_updates(account_updates, block, store) + } - // Execute and obtain account updates - let mut state = evm_state(store.clone(), block.header.parent_hash); + /// Creates a database and returns the ExecutionDB from a Vec<[AccountUpdate]>, + /// without performing any validation. + pub fn from_account_updates( + account_updates: Vec, + block: &Block, + store: &Store, + ) -> Result { + // TODO: perform validation to exit early let chain_config = store.get_chain_config()?; - execute_block(block, &mut state).map_err(Box::new)?; - let account_updates = get_state_transitions(&mut state); // Store data touched by updates and get all touched storage keys for each account let mut accounts = HashMap::new(); @@ -131,6 +139,21 @@ impl ExecutionDB { }) } + /// Gets the Vec<[AccountUpdate]>/StateTransitions obtained after executing a block. 
+ pub fn get_account_updates( + block: &Block, + store: &Store, + ) -> Result, ExecutionDBError> { + // TODO: perform validation to exit early + + let mut state = evm_state(store.clone(), block.header.parent_hash); + + execute_block(block, &mut state).map_err(Box::new)?; + + let account_updates = get_state_transitions(&mut state); + Ok(account_updates) + } + pub fn get_chain_config(&self) -> ChainConfig { self.chain_config }