Skip to content

Commit

Permalink
Merge pull request #2911 from o1-labs/martin/gen-state-json
Browse files Browse the repository at this point in the history
gen-state-json
  • Loading branch information
martyall authored Jan 2, 2025
2 parents 23f91ca + 84c1103 commit c6dc6de
Show file tree
Hide file tree
Showing 12 changed files with 218 additions and 97 deletions.
51 changes: 51 additions & 0 deletions .github/workflows/mips-build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# Manually-triggered workflow: assembles and links the OpenMips test-suite
# programs, checks the o1vm ELF parser against the produced binaries, and
# uploads them as a downloadable artifact.
name: MIPS Build and Package

on:
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust_toolchain_version: ["1.74"]

    steps:
      - uses: actions/checkout@v4
        with:
          # The MIPS test programs live in a submodule.
          submodules: 'recursive'

      # Cache the downloaded .deb packages so re-runs skip the apt download.
      # The cache key is tied to this workflow file, so editing the workflow
      # invalidates the cache.
      - name: Cache apt packages
        id: apt-cache
        uses: actions/cache@v4
        with:
          path: |
            /var/cache/apt/archives/*.deb
          key: ${{ runner.os }}-apt-${{ hashFiles('.github/workflows/mips-build.yml') }}

      - name: Install MIPS tools
        run: |
          sudo apt-get update
          sudo apt-get install -y binutils-mips-linux-gnu

      - name: Build MIPS programs
        run: make build-mips-programs

      - name: Use shared Rust toolchain setting up steps
        uses: ./.github/actions/toolchain-shared
        with:
          rust_toolchain_version: ${{ matrix.rust_toolchain_version }}

      # Sanity-check that the o1vm ELF loader can parse every built binary.
      - name: Test elf_loader against mips programs
        run: ./o1vm/test-gen-state-json.sh

      - name: Create tar archive
        run: |
          cd o1vm/resources/programs/mips
          tar -czf mips-binaries.tar.gz bin/

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: mips-binaries
          path: o1vm/resources/programs/mips/mips-binaries.tar.gz
7 changes: 6 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,13 @@ o1vm/op-program-db*
o1vm/state.json
meta.json
state.json
o1vm/meta_test.json

# Directory for the RISC-V 32bits toolchain
_riscv32-gnu-toolchain
o1vm/resources/programs/riscv32im/bin/*.o
o1vm/resources/programs/mips/bin/*.o

# modified assembly files generated from cannon's open mips tests
o1vm/resources/programs/mips/src
# mips binary files for open mips tests
o1vm/resources/programs/mips/bin
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,8 @@ You can visualize the documentation by opening the file `target/doc/index.html`.
This workflow runs benchmarks when a pull request is labeled with "benchmark." It sets up the Rust and OCaml environments, installs necessary tools, and executes cargo criterion benchmarks on the kimchi crate. The benchmark results are then posted as a comment on the pull request for review.
- [Deploy Specifications & Docs to GitHub Pages](.github/workflows/gh-page.yml).
When CI passes on master, the documentation built from the rust code will be available by this [link](https://o1-labs.github.io/proof-systems/rustdoc) and the book will be available by this [link](https://o1-labs.github.io/proof-systems).
- [MIPS Build and Package](.github/workflows/mips-build.yml)
This workflow runs the assembler and linker on the programs from the OpenMips test suite and provides a link for downloading the resulting artifacts (recommended if you do not have, or cannot install, the required MIPS tooling). It also runs the o1vm ELF parser on the artifacts to check that our parsing works. The workflow is currently triggered manually only — you can start it from the [GitHub actions tab](https://github.com/o1-labs/proof-systems/actions/workflows/mips-build.yml), and the link to the artifacts appears in the logs of the `Upload Artifacts` stage.

## Nix for Dependencies (WIP)

Expand Down
2 changes: 1 addition & 1 deletion o1vm/src/cannon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ pub struct HostProgram {
pub struct VmConfiguration {
pub input_state_file: String,
pub output_state_file: String,
pub metadata_file: String,
pub metadata_file: Option<String>,
pub proof_at: StepFrequency,
pub stop_at: StepFrequency,
pub snapshot_state_at: StepFrequency,
Expand Down
25 changes: 18 additions & 7 deletions o1vm/src/cli/cannon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,8 @@ pub struct MipsVmConfigurationArgs {
)]
output: String,

#[arg(
long,
value_name = "FILE",
default_value = "meta.json",
help = "metadata file"
)]
meta: String,
#[arg(long, value_name = "FILE", help = "metadata file")]
meta: Option<String>,

#[arg(
long = "proof-at",
Expand Down Expand Up @@ -104,9 +99,25 @@ pub struct RunArgs {
pub vm_cfg: MipsVmConfigurationArgs,
}

// Arguments for the `gen-state-json` subcommand, which converts an ELF
// binary into a Cannon-compatible `state.json` file.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derived items would be picked up as CLI help/about text.
#[derive(Parser, Debug, Clone)]
pub struct GenStateJsonArgs {
    // Path of the ELF binary to parse.
    #[arg(short = 'i', long, value_name = "FILE", help = "input ELF file")]
    pub input: String,

    // Destination for the generated JSON state; defaults to `state.json`.
    #[arg(
        short = 'o',
        long,
        value_name = "FILE",
        default_value = "state.json",
        help = "output state.json file"
    )]
    pub output: String,
}

// Top-level subcommands of the Cannon (MIPS) CLI.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on
// clap-derived variants would change the generated help output.
#[derive(Subcommand, Clone, Debug)]
pub enum Cannon {
    // Execute a program in the MIPS VM.
    Run(RunArgs),
    // Exercise the optimism preimage-read path.
    #[command(name = "test-optimism-preimage-read")]
    TestPreimageRead(RunArgs),
    // Produce a Cannon-compatible `state.json` from an ELF binary.
    #[command(name = "gen-state-json")]
    GenStateJson(GenStateJsonArgs),
}
66 changes: 44 additions & 22 deletions o1vm/src/elf_loader.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,19 @@
use crate::cannon::{Page, State, PAGE_SIZE};
use elf::{endian::LittleEndian, section::SectionHeader, ElfBytes};
use elf::{
endian::{BigEndian, EndianParse, LittleEndian},
section::SectionHeader,
ElfBytes,
};
use log::debug;
use std::{collections::HashMap, path::Path};

/// Parse an ELF file and return the parsed data as a structure that is expected
/// by the o1vm RISC-V 32 bits edition.
// FIXME: parametrize by an architecture. We should return a state depending on the
// architecture. In the meantime, we can have parse_riscv32i and parse_mips.
// FIXME: for now, we return a State structure, either for RISC-V 32i or MIPS.
// We should return a structure specifically built for the o1vm, and not tight
// to Cannon. It will be done in a future PR to avoid breaking the current code
// and have a huge diff.
pub fn parse_riscv32(path: &Path) -> Result<State, String> {
debug!("Start parsing the ELF file to load a RISC-V 32i compatible state");
let file_data = std::fs::read(path).expect("Could not read file.");
let slice = file_data.as_slice();
let file = ElfBytes::<LittleEndian>::minimal_parse(slice).expect("Open ELF file failed.");
/// Instruction-set architectures whose ELF binaries the loader understands.
pub enum Architecture {
    /// 32-bit MIPS (parsed as big-endian ELF).
    Mips,
    /// 32-bit RISC-V (parsed as little-endian ELF).
    RiscV32,
}

pub fn make_state<T: EndianParse>(file: ElfBytes<T>) -> Result<State, String> {
// Checking it is RISC-V
assert_eq!(file.ehdr.e_machine, 243);

let (shdrs_opt, strtab_opt) = file
.section_headers_with_strtab()
Expand Down Expand Up @@ -51,9 +46,11 @@ pub fn parse_riscv32(path: &Path) -> Result<State, String> {
.section_data(text_section)
.expect("Failed to read data from .text section");

// address of starting instruction in code section
let code_section_starting_address = text_section.sh_addr as usize;
let code_section_size = text_section.sh_size as usize;
let code_section_end_address = code_section_starting_address + code_section_size;
// address of last instruction in code section
let code_section_end_address = code_section_starting_address + code_section_size - 1;
debug!(
"The executable code starts at address {}, has size {} bytes, and ends at address {}.",
code_section_starting_address, code_section_size, code_section_end_address
Expand All @@ -64,17 +61,22 @@ pub fn parse_riscv32(path: &Path) -> Result<State, String> {
let page_size_usize: usize = PAGE_SIZE.try_into().unwrap();
// Padding to get the right number of pages. We suppose that the memory
// index starts at 0.

// the address that the first page starts on
let start_page_address: usize =
(code_section_starting_address / page_size_usize) * page_size_usize;
let end_page_address =
(code_section_end_address / (page_size_usize - 1)) * page_size_usize + page_size_usize;

// the address that the last page starts on
let end_page_address = (code_section_end_address / page_size_usize) * page_size_usize;

let first_page_index = start_page_address / page_size_usize;
let last_page_index = (end_page_address - 1) / page_size_usize;

let last_page_index = end_page_address / page_size_usize;

let mut data_offset = 0;
(first_page_index..=last_page_index).for_each(|page_index| {
let mut data = vec![0; page_size_usize];
// Special case of only one page
// Special case where all code fits in one page
if first_page_index == last_page_index {
let data_length = code_section_end_address - code_section_starting_address;
let page_offset = code_section_starting_address - start_page_address;
Expand All @@ -83,7 +85,7 @@ pub fn parse_riscv32(path: &Path) -> Result<State, String> {
data_offset += data_length;
} else {
let data_length = if page_index == last_page_index {
code_section_end_address - (page_index * page_size_usize)
code_section_end_address - end_page_address
} else {
page_size_usize
};
Expand All @@ -92,8 +94,9 @@ pub fn parse_riscv32(path: &Path) -> Result<State, String> {
} else {
0
};
data[page_offset..]
data[page_offset..page_offset + data_length]
.copy_from_slice(&text_section_data[data_offset..data_offset + data_length]);

data_offset += data_length;
}
let page = Page {
Expand Down Expand Up @@ -143,3 +146,22 @@ pub fn parse_riscv32(path: &Path) -> Result<State, String> {

Ok(state)
}

/// Parse the ELF file at `path` for the given `arch` and build the
/// corresponding o1vm `State`.
///
/// # Errors
/// Returns `Err` when the file cannot be read, is not a well-formed ELF, or
/// its `e_machine` does not match the requested architecture. (The function
/// already returns `Result<_, String>`, so failures are reported as `Err`
/// instead of panicking.)
pub fn parse_elf(arch: Architecture, path: &Path) -> Result<State, String> {
    debug!("Start parsing the ELF file to load a compatible state");
    let file_data =
        std::fs::read(path).map_err(|e| format!("Could not read file {}: {e}", path.display()))?;
    let slice = file_data.as_slice();
    match arch {
        Architecture::Mips => {
            // MIPS binaries produced for Cannon are big-endian; e_machine 8 is EM_MIPS.
            let file = ElfBytes::<BigEndian>::minimal_parse(slice)
                .map_err(|e| format!("Open ELF file failed: {e}"))?;
            if file.ehdr.e_machine != 8 {
                return Err(format!(
                    "Expected a MIPS ELF file (e_machine = 8), found e_machine = {}",
                    file.ehdr.e_machine
                ));
            }
            make_state(file)
        }
        Architecture::RiscV32 => {
            // RISC-V binaries are little-endian; e_machine 243 is EM_RISCV.
            let file = ElfBytes::<LittleEndian>::minimal_parse(slice)
                .map_err(|e| format!("Open ELF file failed: {e}"))?;
            if file.ehdr.e_machine != 243 {
                return Err(format!(
                    "Expected a RISC-V 32 ELF file (e_machine = 243), found e_machine = {}",
                    file.ehdr.e_machine
                ));
            }
            make_state(file)
        }
    }
}
9 changes: 5 additions & 4 deletions o1vm/src/interpreters/mips/witness.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1143,7 +1143,7 @@ impl<Fp: Field, PreImageOracle: PreImageOracleT> Env<Fp, PreImageOracle> {
pub fn step(
&mut self,
config: &VmConfiguration,
metadata: &Meta,
metadata: &Option<Meta>,
start: &Start,
) -> Instruction {
self.reset_scratch_state();
Expand Down Expand Up @@ -1267,7 +1267,7 @@ impl<Fp: Field, PreImageOracle: PreImageOracleT> Env<Fp, PreImageOracle> {
}
}

fn pp_info(&mut self, at: &StepFrequency, meta: &Meta, start: &Start) {
fn pp_info(&mut self, at: &StepFrequency, meta: &Option<Meta>, start: &Start) {
if self.should_trigger_at(at) {
let elapsed = start.time.elapsed();
// Compute the step number removing the MAX_ACC factor
Expand All @@ -1286,8 +1286,9 @@ impl<Fp: Field, PreImageOracle: PreImageOracleT> Env<Fp, PreImageOracle> {

let mem = self.memory_usage();
let name = meta
.find_address_symbol(pc)
.unwrap_or_else(|| "n/a".to_string());
.as_ref()
.and_then(|m| m.find_address_symbol(pc))
.unwrap_or("n/a".to_string());

info!(
"processing step={} pc={:010x} insn={:010x} ips={:.2} pages={} mem={} name={}",
Expand Down
38 changes: 22 additions & 16 deletions o1vm/src/legacy/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ use kimchi::o1_utils;
use kimchi_msm::{proof::ProofInputs, prover::prove, verifier::verify, witness::Witness};
use log::debug;
use o1vm::{
cannon::{self, Meta, Start, State},
cli,
cannon::{self, Start, State},
cli, elf_loader,
interpreters::{
keccak::{
column::{Steps, N_ZKVM_KECCAK_COLS, N_ZKVM_KECCAK_REL_COLS, N_ZKVM_KECCAK_SEL_COLS},
Expand All @@ -33,7 +33,9 @@ use o1vm::{
test_preimage_read,
};
use poly_commitment::SRS as _;
use std::{cmp::Ordering, collections::HashMap, fs::File, io::BufReader, process::ExitCode};
use std::{
cmp::Ordering, collections::HashMap, fs::File, io::BufReader, path::Path, process::ExitCode,
};
use strum::IntoEnumIterator;

/// Domain size shared by the Keccak evaluations, MIPS evaluation and main
Expand All @@ -50,18 +52,11 @@ pub fn cannon_main(args: cli::cannon::RunArgs) {
// Read the JSON contents of the file as an instance of `State`.
let state: State = serde_json::from_reader(reader).expect("Error reading input state file");

let meta_file = File::open(&configuration.metadata_file).unwrap_or_else(|_| {
panic!(
"Could not open metadata file {}",
&configuration.metadata_file
)
});

let meta: Meta = serde_json::from_reader(BufReader::new(meta_file)).unwrap_or_else(|_| {
panic!(
"Error deserializing metadata file {}",
&configuration.metadata_file
)
let meta = &configuration.metadata_file.as_ref().map(|f| {
let meta_file =
File::open(f).unwrap_or_else(|_| panic!("Could not open metadata file {}", f));
serde_json::from_reader(BufReader::new(meta_file))
.unwrap_or_else(|_| panic!("Error deserializing metadata file {}", f))
});

let mut po = PreImageOracle::create(&configuration.host);
Expand Down Expand Up @@ -124,7 +119,7 @@ pub fn cannon_main(args: cli::cannon::RunArgs) {
}

while !mips_wit_env.halt {
let instr = mips_wit_env.step(&configuration, &meta, &start);
let instr = mips_wit_env.step(&configuration, meta, &start);

if let Some(ref mut keccak_env) = mips_wit_env.keccak_env {
// Run all steps of hash
Expand Down Expand Up @@ -327,6 +322,14 @@ pub fn cannon_main(args: cli::cannon::RunArgs) {
// TODO: Logic
}

/// Parse the MIPS ELF binary at `arg.input` and write the resulting Cannon
/// `State` as pretty-printed JSON to `arg.output`.
///
/// # Errors
/// Returns `Err` when ELF parsing fails or the output file cannot be created
/// or written. (The function returns `Result<(), String>`, so I/O failures
/// are propagated as `Err` rather than panicking here; the caller decides
/// how to report them.)
fn gen_state_json(arg: cli::cannon::GenStateJsonArgs) -> Result<(), String> {
    let path = Path::new(&arg.input);
    // This subcommand currently only targets MIPS binaries.
    let state = elf_loader::parse_elf(elf_loader::Architecture::Mips, path)?;
    let file = File::create(&arg.output)
        .map_err(|e| format!("Error creating output state file {}: {e}", arg.output))?;
    serde_json::to_writer_pretty(file, &state)
        .map_err(|e| format!("Error writing output state file {}: {e}", arg.output))?;
    Ok(())
}

pub fn main() -> ExitCode {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
let args = cli::Commands::parse();
Expand All @@ -338,6 +341,9 @@ pub fn main() -> ExitCode {
cli::cannon::Cannon::TestPreimageRead(args) => {
test_preimage_read::main(args);
}
cli::cannon::Cannon::GenStateJson(args) => {
gen_state_json(args).expect("Error generating state.json");
}
},
}
ExitCode::SUCCESS
Expand Down
Loading

0 comments on commit c6dc6de

Please sign in to comment.