diff --git a/Cargo.toml b/Cargo.toml
index 4a0eff8..80a1d90 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,13 +14,13 @@ tokio = "1.38.0"
 zk_rust_io = { path = "./zk_rust_io" }
 
 # Sp1
-sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git", tag = "v1.1.1" }
+sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git", tag = "v1.0.1" }
 
 # Risc 0
 risc0-zkvm = { git = "https://github.com/risc0/risc0.git", tag = "v1.0.1" }
 
 # Aligned SDK
-aligned-sdk = { git = "https://github.com/yetanotherco/aligned_layer", tag = "v0.7.2" }
+aligned-sdk = { git = "https://github.com/yetanotherco/aligned_layer", tag = "v0.7.3" }
 ethers = { tag = "v2.0.15-fix-reconnections", features = [
     "ws",
     "rustls",
@@ -31,8 +31,3 @@ bincode = "1.3.3"
 rpassword = "7.3.1"
 env_logger = "0.11.3"
 log = "0.4.22"
-
-[patch.crates-io]
-ark-ff = { git = "https://github.com/a16z/arkworks-algebra", branch = "optimize/field-from-u64" }
-ark-ec = { git = "https://github.com/a16z/arkworks-algebra", branch = "optimize/field-from-u64" }
-ark-serialize = { git = "https://github.com/a16z/arkworks-algebra", branch = "optimize/field-from-u64" }
diff --git a/Makefile b/Makefile
index 35037f3..b40c19c 100644
--- a/Makefile
+++ b/Makefile
@@ -1,19 +1,3 @@
-install: install_sp1 install_risc0
-
-install_risc0:
-	@curl -L https://risczero.com/install | bash
-	@rzup
-	@cargo risczero --version
-
-install_sp1:
-	@curl -L https://sp1.succinct.xyz | bash
-	@sp1up
-	@cargo prove --version
-	@echo "Sp1 Toolchain Installed"
-
-# Default target
-all: install
-
 __EXAMPLES__:
 
 # RISC0
diff --git a/README.md b/README.md
index 28246eb..e24e3aa 100644
--- a/README.md
+++ b/README.md
@@ -6,10 +6,12 @@ zkRust seeks to simplify the development experience of developing using zkVM's a
 
 ## Installation:
 
-First make sure [Rust](https://www.rust-lang.org/tools/install) is installed on your machine. Then install the zkVM toolchains from [risc0](https://github.com/risc0/risc0) and [sp1](https://github.com/succinctlabs/sp1) by running:
+First make sure [Rust](https://www.rust-lang.org/tools/install) is installed on your machine. Then install the zkVM toolchain from [sp1](https://github.com/succinctlabs/sp1) by running:
 
 ```sh
-make install
+curl -L https://sp1.succinct.xyz | bash
+sp1up
+cargo prove --version
 ```
 
 zkRust can then be installed directly by downloading the latest release binaries.
@@ -33,103 +35,11 @@ You can test zkRust for any of the examples in the `examples` folder. This inclu
 - Computing and reading the results of computing Fibonacci numbers.
 - Performing RSA key verification.
 - Performing ECDSA program.
-- a blockchain state diff program verification.
+- Verification of a blockchain state diff.
 - Computing the Sha256 hash of a value.
 - Verifying a tendermint block.
 - Interacting with a user to answer a quiz.
 
-Run one of the following commands to test zkRust.
You can choose either Risc0 or SP1: - -**Fibonacci**: - -```bash -make prove_risc0_fibonacci -``` - -```bash -make prove_sp1_fibonacci -``` - -**RSA**: - -```bash -make prove_risc0_rsa -``` - -```bash -make prove_sp1_rsa -``` - -**ECDSA**: - -```bash -make prove_risc0_ecdsa -``` - -```bash -make prove_sp1_ecdsa -``` - -**Blockchain state diff**: - -```bash -make prove_risc0_json -``` - -```bash -make prove_sp1_json -``` - -**Blockchain state diff**: - -```bash -make prove_risc0_json -``` - -```bash -make prove_sp1_json -``` - -**Regex**: - -```bash -make prove_risc0_regex -``` - -```bash -make prove_sp1_regex -``` - -**Sha**: - -```bash -make prove_risc0_sha -``` - -```bash -make prove_sp1_sha -``` - -**Tendermint**: - -```bash -make prove_risc0_tendermint -``` - -```bash -make prove_sp1_tendermint -``` - -**Zk Quiz**: - -```bash -make prove_risc0_zkquiz -``` - -```bash -make prove_sp1_zkquiz -``` - ## Usage: To use zkRust, To use zkRust users specify a `fn main()` whose execution is proven within the zkVM. This function must be defined within a `main.rs` file in a directory with the following structure: @@ -257,11 +167,13 @@ cargo run --release -- prove-sp1 --submit-to-aligned-with-keystore anyhow::Result<()> { - let keystore_password = rpassword::prompt_password("Enter keystore password: ") - .expect("Failed to read keystore password"); + let Ok(keystore_password) = rpassword::prompt_password("Enter keystore password: ") else { + error!("Failed to read keystore password"); + return Ok(()); + }; - let wallet = LocalWallet::decrypt_keystore(keystore_path, keystore_password) - .expect("Failed to decrypt keystore") - .with_chain_id(17000u64); + let Ok(local_wallet) = LocalWallet::decrypt_keystore(keystore_path, keystore_password) else { + error!("Failed to decrypt keystore"); + return Ok(()); + }; + let wallet = local_wallet.with_chain_id(17000u64); - let proof = std::fs::read(proof_path).expect("failed to read proof"); - let elf_data = std::fs::read(elf_path).expect("failed to read ELF"); - let pub_input = pub_input_path - .map(|pub_input_path| std::fs::read(pub_input_path).expect("failed to read public input")); + let Ok(proof) = std::fs::read(proof_path) else { + error!("Failed to Read Proof"); + return Ok(()); + }; + let Ok(elf_data) = std::fs::read(elf_path) else { + error!("Failed to Read ELF"); + return Ok(()); + }; + let pub_input = match pub_input_path { + Some(path) => Some(std::fs::read(path).expect("Failed to Read Public Inputs")), + None => None, + }; - let provider = Provider::::try_from(rpc_url).expect("Failed to connect to provider"); + let Ok(provider) = Provider::::try_from(rpc_url) else { + error!("Failed to connect to provider"); + return Ok(()); + }; let signer = Arc::new(SignerMiddleware::new(provider.clone(), wallet.clone())); - pay_batcher(wallet.address(), signer.clone()) - .await - .expect("Failed to pay for proof submission"); + pay_batcher(wallet.address(), signer.clone()).await?; let max_fee = U256::from(*max_fee); @@ -100,9 +113,11 @@ pub async fn submit_proof_to_aligned( pub_input, }; - let nonce = get_next_nonce(rpc_url, wallet.address(), BATCHER_PAYMENTS_ADDRESS) - .await - .expect("could not get nonce"); + let Ok(nonce) = get_next_nonce(rpc_url, wallet.address(), BATCHER_PAYMENTS_ADDRESS).await + else { + error!("could not get nonce"); + return Ok(()); + }; let chain = match chain_id { 17000 => Chain::Holesky, diff --git a/src/main.rs b/src/main.rs index 4ab21ad..880da5e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ use 
aligned_sdk::core::types::ProvingSystemId; use clap::{Args, Parser, Subcommand}; use env_logger::Env; +use log::error; use log::info; use std::fs::OpenOptions; use std::io::Write; @@ -53,87 +54,96 @@ async fn main() -> anyhow::Result<()> { info!("Proving with SP1, program in: {}", args.guest_path); // Perform sanitation checks on directory - match utils::validate_directory_structure(&args.guest_path) { - Ok(_) => { - utils::prepare_workspace( - &args.guest_path, - sp1::SP1_SRC_DIR, - sp1::SP1_GUEST_CARGO_TOML, - "./workspaces/sp1/script", - "./workspaces/sp1/script/Cargo.toml", - sp1::SP1_BASE_HOST_CARGO_TOML, - sp1::SP1_BASE_GUEST_CARGO_TOML, - )?; - - let imports = utils::get_imports(sp1::SP1_GUEST_MAIN).unwrap(); - let function_bodies = utils::extract_function_bodies( - sp1::SP1_GUEST_MAIN, - vec![ - "fn main()".to_string(), - "fn input()".to_string(), - "fn output()".to_string(), - ], - ) - .unwrap(); - /* - Adds header to the guest & replace I/O imports - risc0: - - #![no_main] - sp1_zkvm::entrypoint!(main); - */ - utils::prepare_guest( - &imports, - &function_bodies[0], - sp1::SP1_GUEST_PROGRAM_HEADER, - sp1::SP1_IO_READ, - sp1::SP1_IO_COMMIT, - sp1::SP1_GUEST_MAIN, - )?; - sp1::prepare_host(&function_bodies[1], &function_bodies[2], &imports)?; - - if args.precompiles { - let mut toml_file = OpenOptions::new() - .append(true) // Open the file in append mode - .open(sp1::SP1_GUEST_CARGO_TOML)?; + if utils::validate_directory_structure(&args.guest_path) { + utils::prepare_workspace( + &args.guest_path, + sp1::SP1_SRC_DIR, + sp1::SP1_GUEST_CARGO_TOML, + "./workspaces/sp1/script", + "./workspaces/sp1/script/Cargo.toml", + sp1::SP1_BASE_HOST_CARGO_TOML, + sp1::SP1_BASE_GUEST_CARGO_TOML, + )?; + + let Ok(imports) = utils::get_imports(sp1::SP1_GUEST_MAIN) else { + error!("Failed to Extract Imports"); + return Ok(()); + }; + let Ok(function_bodies) = utils::extract_function_bodies( + sp1::SP1_GUEST_MAIN, + vec![ + "fn main()".to_string(), + "fn input()".to_string(), + "fn output()".to_string(), + ], + ) else { + error!("Failed to Extract Function Bodies"); + return Ok(()); + }; + /* + Adds header to the guest & replace I/O imports + risc0: + + #![no_main] + sp1_zkvm::entrypoint!(main); + */ + utils::prepare_guest( + &imports, + &function_bodies[0], + sp1::SP1_GUEST_PROGRAM_HEADER, + sp1::SP1_IO_READ, + sp1::SP1_IO_COMMIT, + sp1::SP1_GUEST_MAIN, + )?; + sp1::prepare_host(&function_bodies[1], &function_bodies[2], &imports)?; + + if args.precompiles { + let mut toml_file = OpenOptions::new() + .append(true) // Open the file in append mode + .open(sp1::SP1_GUEST_CARGO_TOML)?; + + writeln!(toml_file, "{}", sp1::SP1_ACCELERATION_IMPORT)?; + } - writeln!(toml_file, "{}", sp1::SP1_ACCELERATION_IMPORT)?; + if sp1::generate_sp1_proof()?.success() { + info!("SP1 proof and ELF generated"); + + utils::replace(sp1::SP1_GUEST_CARGO_TOML, sp1::SP1_ACCELERATION_IMPORT, "")?; + + // Submit to aligned + if args.submit_to_aligned { + submit_proof_to_aligned( + args.keystore_path.as_ref().unwrap(), + sp1::SP1_PROOF_PATH, + sp1::SP1_ELF_PATH, + None, + &args.rpc_url, + &args.chain_id, + &args.max_fee, + ProvingSystemId::SP1, + ) + .await + .map_err(|e| { + error!("Failed to submit to Aligned"); + return e; + })?; + info!("SP1 proof submitted and verified on Aligned"); } - if sp1::generate_sp1_proof()?.success() { - info!("SP1 proof and ELF generated"); - - utils::replace(sp1::SP1_GUEST_CARGO_TOML, sp1::SP1_ACCELERATION_IMPORT, "") - .unwrap(); - - // Submit to aligned - if args.submit_to_aligned { - 
submit_proof_to_aligned( - &args.keystore_path.as_ref().unwrap(), - sp1::SP1_PROOF_PATH, - sp1::SP1_ELF_PATH, - None, - &args.rpc_url, - &args.chain_id, - &args.max_fee, - ProvingSystemId::SP1, - ) - .await - .expect("Failed to submit to Aligned"); - info!("SP1 proof submitted and verified on aligned"); - } - - // Clear host & guest - std::fs::copy(sp1::SP1_BASE_HOST_FILE, sp1::SP1_HOST_MAIN).unwrap(); - - return Ok(()); - } - info!("SP1 proof generation failed"); - // Clear host - std::fs::copy(sp1::SP1_BASE_HOST_FILE, sp1::SP1_HOST_MAIN)?; + std::fs::copy(sp1::SP1_BASE_HOST_FILE, sp1::SP1_HOST_MAIN).map_err(|e| { + error!("Failed to clear SP1 Host File"); + return e; + })?; + return Ok(()); } - Err(e) => return Err(e), + info!("SP1 proof generation failed"); + // Clear host + std::fs::copy(sp1::SP1_BASE_HOST_FILE, sp1::SP1_HOST_MAIN)?; + return Ok(()); + } else { + error!("zkRust Directory structure incorrect please consult the README",); + return Ok(()); } } @@ -141,87 +151,100 @@ async fn main() -> anyhow::Result<()> { info!("Proving with Risc0, program in: {}", args.guest_path); // Perform sanitation checks on directory - match utils::validate_directory_structure(&args.guest_path) { - Ok(_) => { - utils::prepare_workspace( - &args.guest_path, - risc0::RISC0_SRC_DIR, - risc0::RISC0_GUEST_CARGO_TOML, - "./workspaces/risc0/host", - "./workspaces/risc0/host/Cargo.toml", - risc0::RISC0_BASE_HOST_CARGO_TOML, - risc0::RISC0_BASE_GUEST_CARGO_TOML, - )?; - - let imports = utils::get_imports(risc0::RISC0_GUEST_MAIN).unwrap(); - let function_bodies = utils::extract_function_bodies( - risc0::RISC0_GUEST_MAIN, - vec![ - "fn main()".to_string(), - "fn input()".to_string(), - "fn output()".to_string(), - ], - ) - .unwrap(); - - /* - Adds header to the guest & replace I/O imports - risc0: - - #![no_main] - risc0_zkvm::guest::entry!(main); - */ - utils::prepare_guest( - &imports, - &function_bodies[0], - risc0::RISC0_GUEST_PROGRAM_HEADER, - risc0::RISC0_IO_READ, - risc0::RISC0_IO_COMMIT, - risc0::RISC0_GUEST_MAIN, - )?; - risc0::prepare_host(&function_bodies[1], &function_bodies[2], &imports)?; - - if args.precompiles { - let mut toml_file = OpenOptions::new() - .append(true) - .open(risc0::RISC0_GUEST_CARGO_TOML)?; - - writeln!(toml_file, "{}", risc0::RISC0_ACCELERATION_IMPORT)?; - } + if utils::validate_directory_structure(&args.guest_path) { + utils::prepare_workspace( + &args.guest_path, + risc0::RISC0_SRC_DIR, + risc0::RISC0_GUEST_CARGO_TOML, + "./workspaces/risc0/host", + "./workspaces/risc0/host/Cargo.toml", + risc0::RISC0_BASE_HOST_CARGO_TOML, + risc0::RISC0_BASE_GUEST_CARGO_TOML, + )?; + + let Ok(imports) = utils::get_imports(risc0::RISC0_GUEST_MAIN) else { + error!("Failed to Extract Imports"); + return Ok(()); + }; + let Ok(function_bodies) = utils::extract_function_bodies( + risc0::RISC0_GUEST_MAIN, + vec![ + "fn main()".to_string(), + "fn input()".to_string(), + "fn output()".to_string(), + ], + ) else { + error!("Failed to Extract Function Bodies"); + return Ok(()); + }; + + /* + Adds header to the guest & replace I/O imports + risc0: + + #![no_main] + risc0_zkvm::guest::entry!(main); + */ + utils::prepare_guest( + &imports, + &function_bodies[0], + risc0::RISC0_GUEST_PROGRAM_HEADER, + risc0::RISC0_IO_READ, + risc0::RISC0_IO_COMMIT, + risc0::RISC0_GUEST_MAIN, + )?; + risc0::prepare_host(&function_bodies[1], &function_bodies[2], &imports)?; + + if args.precompiles { + let mut toml_file = OpenOptions::new() + .append(true) + .open(risc0::RISC0_GUEST_CARGO_TOML)?; + + writeln!(toml_file, 
"{}", risc0::RISC0_ACCELERATION_IMPORT)?; + } - if risc0::generate_risc0_proof()?.success() { - info!("Risc0 proof and Image ID generated"); - - // Submit to aligned - if args.submit_to_aligned { - submit_proof_to_aligned( - &args.keystore_path.as_ref().unwrap(), - risc0::PROOF_FILE_PATH, - risc0::IMAGE_ID_FILE_PATH, - Some(risc0::PUBLIC_INPUT_FILE_PATH), - &args.rpc_url, - &args.chain_id, - &args.max_fee, - ProvingSystemId::Risc0, - ) - .await - .expect("Failed to submit to Aligned"); - - info!("Risc0 proof submitted and verified on aligned"); - } - - // Clear Host file - std::fs::copy(risc0::RISC0_BASE_HOST_FILE, risc0::RISC0_HOST_MAIN).unwrap(); - - return Ok(()); + if risc0::generate_risc0_proof()?.success() { + info!("Risc0 proof and Image ID generated"); + + // Submit to aligned + if args.submit_to_aligned { + submit_proof_to_aligned( + args.keystore_path.as_ref().unwrap(), + risc0::PROOF_FILE_PATH, + risc0::IMAGE_ID_FILE_PATH, + Some(risc0::PUBLIC_INPUT_FILE_PATH), + &args.rpc_url, + &args.chain_id, + &args.max_fee, + ProvingSystemId::Risc0, + ) + .await + .map_err(|e| { + error!("Failed to submit to Aligned"); + return e; + })?; + + info!("Risc0 proof submitted and verified on Aligned"); } - info!("Risc0 proof generation failed"); // Clear Host file - std::fs::copy(risc0::RISC0_BASE_HOST_FILE, risc0::RISC0_HOST_MAIN).unwrap(); + std::fs::copy(risc0::RISC0_BASE_HOST_FILE, risc0::RISC0_HOST_MAIN).map_err( + |e| { + error!("Failed to Clear Risc0 Host File"); + return e; + }, + )?; + return Ok(()); } - Err(e) => return Err(e), + info!("Risc0 proof generation failed"); + + // Clear Host file + std::fs::copy(risc0::RISC0_BASE_HOST_FILE, risc0::RISC0_HOST_MAIN)?; + return Ok(()); + } else { + error!("zkRust Directory structure incorrect please consult the README",); + return Ok(()); } } } diff --git a/src/risc0.rs b/src/risc0.rs index 8f2d5c2..472fed2 100644 --- a/src/risc0.rs +++ b/src/risc0.rs @@ -46,9 +46,9 @@ pub fn prepare_host(input: &str, output: &str, imports: &str) -> io::Result<()> host_program.push_str(RISC0_BASE_HOST); // Insert input body - let host_program = host_program.replace(utils::HOST_INPUT, &input); + let host_program = host_program.replace(utils::HOST_INPUT, input); // Insert output body - let host_program = host_program.replace(utils::HOST_OUTPUT, &output); + let host_program = host_program.replace(utils::HOST_OUTPUT, output); // Extract Variable names from host and add them to the ExecutorEnv::builder() let values = utils::extract_regex( @@ -75,8 +75,6 @@ pub fn prepare_host(input: &str, output: &str, imports: &str) -> io::Result<()> let mut file = fs::File::create(RISC0_HOST_MAIN)?; file.write_all(host_program.as_bytes())?; - //TODO: remove this - //Delete lines that contain zkRust::write(; -> Delete things from within zk_rust_io -> ); utils::remove_lines(RISC0_HOST_MAIN, "zk_rust_io::write(")?; Ok(()) } diff --git a/src/utils.rs b/src/utils.rs index 249724c..3ff368d 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,4 +1,5 @@ use anyhow::anyhow; +use log::error; use regex::Regex; use std::{ fs::{self, File, OpenOptions}, @@ -131,44 +132,36 @@ pub fn extract_function_bodies(file_path: &str, functions: Vec) -> io::R Ok(extracted_codes) } -// Function that handles the stack and status when parsing the file to extract_function_bodies +// Function that handles the stack and status when parsing the file to extract_function_bodies fn handle_stack(ch: char, stack: &mut Vec<&str>) -> bool { match stack.last() { - Some(&"{") => { - return handle_char(ch, stack) - } - 
Some(&"/") => { - match ch { - '/' => { - stack.pop(); - stack.push("//comment"); - } - '*' => { - stack.pop(); - stack.push("/*comment*\\"); - } - _ => { - stack.pop(); - handle_char(ch, stack); - } + Some(&"{") => return handle_char(ch, stack), + Some(&"/") => match ch { + '/' => { + stack.pop(); + stack.push("//comment"); } - } - Some(&"//comment") => { - match ch { - '\n' => { - stack.pop(); - } - _ => {} + '*' => { + stack.pop(); + stack.push("/*comment*\\"); } - } - Some(&"/*comment*\\") => { - match ch { - '*' => { - stack.push("*"); - } - _ => {} + _ => { + stack.pop(); + handle_char(ch, stack); } - } + }, + Some(&"//comment") => match ch { + '\n' => { + stack.pop(); + } + _ => {} + }, + Some(&"/*comment*\\") => match ch { + '*' => { + stack.push("*"); + } + _ => {} + }, Some(&"*") => { match ch { '/' => { @@ -180,35 +173,29 @@ fn handle_stack(ch: char, stack: &mut Vec<&str>) -> bool { } } } - Some(&"\"string\"") => { - match ch { - '\"' => { - stack.pop(); - } - _ => {} + Some(&"\"string\"") => match ch { + '\"' => { + stack.pop(); } - } - Some(&"\'c\'") => { - match ch { - '\'' => { - stack.pop(); - } - _ => {} + _ => {} + }, + Some(&"\'c\'") => match ch { + '\'' => { + stack.pop(); } - } + _ => {} + }, _ => {} } - return false; + false } // Function to handle characters when in normal status of the stack -fn handle_char (ch: char, stack: &mut Vec<&str>) -> bool { +fn handle_char(ch: char, stack: &mut Vec<&str>) -> bool { match ch { '/' => { stack.push("/"); } - '{' => { - stack.push("{") - } + '{' => stack.push("{"), '}' => { stack.pop(); if stack.is_empty() { @@ -223,28 +210,33 @@ fn handle_char (ch: char, stack: &mut Vec<&str>) -> bool { } _ => {} } - return false; + false } -fn copy_dependencies(toml_path: &str, guest_toml_path: &str) { - let mut toml = std::fs::File::open(toml_path).unwrap(); +fn copy_dependencies(toml_path: &str, guest_toml_path: &str) -> io::Result<()> { + let mut toml = std::fs::File::open(toml_path)?; let mut content = String::new(); - toml.read_to_string(&mut content).unwrap(); - - if let Some(start_index) = content.find("[dependencies]") { - // Get all text after the search string - let dependencies = &content[start_index + "[dependencies]".len()..]; - // Open the output file in append mode - let mut guest_toml = OpenOptions::new() - .create(true) - .append(true) - .open(guest_toml_path) - .unwrap(); - - // Write the text after the search string to the output file - guest_toml.write_all(dependencies.as_bytes()).unwrap(); - } else { - println!("Failed to copy dependencies in Guest Cargo.toml file, plese check"); + toml.read_to_string(&mut content)?; + + match content.find("[dependencies]") { + Some(start_index) => { + // Get all text after the search string + let dependencies = &content[start_index + "[dependencies]".len()..]; + // Open the output file in append mode + let mut guest_toml = OpenOptions::new() + .create(true) + .append(true) + .open(guest_toml_path) + .unwrap(); + + // Write the text after the search string to the output file + guest_toml.write_all(dependencies.as_bytes()) + } + None => { + return Err(io::Error::other( + "Failed to find `[dependencies]` in project Cargo.toml", + )) + } } } @@ -258,7 +250,7 @@ pub fn prepare_workspace( base_guest_toml_dir: &str, ) -> io::Result<()> { // Create proof_data directory - std::fs::create_dir_all("./proof_data").expect("Failed to create proof_data/"); + std::fs::create_dir_all("./proof_data").unwrap_or(error!("Failed to create proof_data/")); let workspace_guest_src_dir = format!("{}/src/", 
workspace_guest_dir);
     let workspace_host_src_dir = format!("{}/src/", workspace_host_dir);
     if let Err(e) = fs::remove_dir_all(&workspace_guest_src_dir) {
@@ -292,8 +284,8 @@ pub fn prepare_workspace(
 
     // Select dependencies from the
     let toml_path = format!("{}/Cargo.toml", guest_path);
-    copy_dependencies(&toml_path, program_toml_dir);
-    copy_dependencies(&toml_path, host_toml_dir);
+    copy_dependencies(&toml_path, program_toml_dir)?;
+    copy_dependencies(&toml_path, host_toml_dir)?;
 
     Ok(())
 }
@@ -320,7 +312,7 @@ pub fn get_imports(filename: &str) -> io::Result<String> {
         // if not continue reading till one is found this covers the case where import statements cover multiple lines
         if !line.contains(';') {
             // Iterate and continue adding lines to the import while line does not contain a ';' break if it does
-            while let Some(line) = lines.next() {
+            for line in lines.by_ref() {
                 let mut line = line?;
                 line.push('\n');
                 imports.push_str(&line.clone());
@@ -335,13 +327,12 @@ pub fn get_imports(filename: &str) -> io::Result<String> {
     Ok(imports)
 }
 
-// TODO: Abstract Regex
 pub fn extract_regex(file_path: &str, regex: &str) -> io::Result<Vec<String>> {
     let file = fs::File::open(file_path)?;
     let reader = io::BufReader::new(file);
     let mut values = Vec::new();
 
-    let regex = Regex::new(&regex).unwrap();
+    let regex = Regex::new(regex).unwrap();
 
     for line in reader.lines() {
         let line = line?;
@@ -377,28 +368,31 @@ pub fn remove_lines(file_path: &str, target: &str) -> io::Result<()> {
     Ok(())
 }
 
-pub fn validate_directory_structure(root: &str) -> anyhow::Result<()> {
+pub fn validate_directory_structure(root: &str) -> bool {
     let root = Path::new(root);
 
     // Check if Cargo.toml exists in the root directory
     let cargo_toml = root.join("Cargo.toml");
     if !cargo_toml.exists() {
-        return Err(anyhow!("Cargo.toml not found."));
+        error!("Cargo.toml not found.");
+        return false;
     }
 
     // Check if src/ and lib/ directories exist
     let src_dir = root.join("src");
     if !src_dir.exists() {
-        return Err(anyhow!("src/ directory not found in root"));
+        error!("src/ directory not found in root");
+        return false;
     }
 
     // Check if src/ contains main.rs file
     let main_rs = src_dir.join("main.rs");
     if !main_rs.exists() {
-        return Err(anyhow!("main.rs not found in src/ directory in root"));
+        error!("main.rs not found in src/ directory in root");
+        return false;
    }
 
-    Ok(())
+    true
 }
 
 pub fn prepare_guest(
diff --git a/workspaces/base_files/risc0/cargo_host b/workspaces/base_files/risc0/cargo_host
index 049e702..1d838da 100644
--- a/workspaces/base_files/risc0/cargo_host
+++ b/workspaces/base_files/risc0/cargo_host
@@ -5,6 +5,8 @@ edition = "2021"
 
 [dependencies]
 methods = { path = "../methods" }
-risc0-zkvm = { git = "https://github.com/risc0/risc0", tag = "v1.0.1" }
+risc0-zkvm = { git = "https://github.com/risc0/risc0", tag = "v1.0.1", default-features = false, features = [
+    "prove",
+] }
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 bincode = "1.3.3"
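Note on the error-handling pattern this diff introduces: `.expect()`/`.unwrap()` calls on the CLI paths are replaced either by `let ... else` blocks that log through `log::error!` and return early, or by `map_err` closures that log before propagating with `?`. The sketch below is a minimal, self-contained illustration of those two patterns, not code from this repository; `read_config` and `zkrust.toml` are made-up stand-ins for the proof, ELF, and keystore reads in `src/aligned.rs` and `src/main.rs`. It also flags a caveat about the `unwrap_or(error!(..))` call added in `src/utils.rs`.

```rust
use log::{error, info};

// Hypothetical stand-in for one of the fallible setup steps (the real code
// reads proofs, ELF files and keystores rather than a config file).
fn read_config(path: &str) -> std::io::Result<String> {
    std::fs::read_to_string(path)
}

fn main() -> anyhow::Result<()> {
    env_logger::init();

    // Pattern 1: `let ... else` logs the failure and returns early with Ok(()),
    // so the CLI exits cleanly instead of panicking as `.expect()` did.
    let Ok(config) = read_config("zkrust.toml") else {
        error!("Failed to read config");
        return Ok(());
    };
    info!("loaded {} bytes of config", config.len());

    // Pattern 2: `map_err` logs, then propagates the original error with `?`.
    std::fs::create_dir_all("./proof_data").map_err(|e| {
        error!("Failed to create proof_data/");
        e
    })?;

    // Caveat for the `unwrap_or(error!(..))` variant in src/utils.rs:
    // `unwrap_or` evaluates (and therefore logs) its argument eagerly, even on
    // success; `unwrap_or_else` runs the closure only on the error path:
    // std::fs::create_dir_all("./proof_data")
    //     .unwrap_or_else(|_| error!("Failed to create proof_data/"));

    Ok(())
}
```

Because `unwrap_or` takes a value rather than a closure, the `error!` message in `prepare_workspace` is emitted even when `create_dir_all` succeeds; switching to `unwrap_or_else` keeps the logging on the failure path only.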