Merge branch 'mariari/nifing' into next
mariari committed Aug 28, 2023
2 parents fc9717a + 85fa6ed commit cba8398
Showing 10 changed files with 244 additions and 1 deletion.
7 changes: 6 additions & 1 deletion taiga_halo2/Cargo.toml
@@ -4,10 +4,11 @@ version = "0.1.0"
edition = "2021"

[dependencies]
rustler = {version = "0.29.1", optional = true}
rand = "0.8"
lazy_static = "1.4"
blake2b_simd = "1.0"
pasta_curves = {git = "https://github.com/heliaxdev/pasta_curves", branch = "taiga"}
pasta_curves = {git = "https://github.com/heliaxdev/pasta_curves", branch = "taiga", features = ["repr-erlang"]}
ff = "0.13"
group = "0.13"
halo2_gadgets = {git = "https://github.com/heliaxdev/halo2", branch = "taiga", features = ["test-dependencies"]}
@@ -26,6 +27,10 @@ num-bigint = "0.4"
criterion = "0.5"
proptest = "1.2"

[features]
default = []
nif = ["rustler", "pasta_curves/repr-erlang"]

[[bench]]
name = "action_proof"
harness = false
4 changes: 4 additions & 0 deletions taiga_halo2/src/action.rs
@@ -10,10 +10,14 @@ use ff::PrimeField;
use halo2_proofs::arithmetic::Field;
use pasta_curves::pallas;
use rand::RngCore;
#[cfg(feature = "nif")]
use rustler::NifStruct;
use std::io;

/// The action result used in a transaction.
#[derive(Copy, Debug, Clone)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.Action.Instance")]
pub struct ActionInstance {
/// The root of the note commitment Merkle tree.
pub anchor: pallas::Base,
62 changes: 62 additions & 0 deletions taiga_halo2/src/circuit/vp_circuit.rs
@@ -55,6 +55,11 @@ use vamp_ir::halo2::synth::{make_constant, Halo2Module, PrimeFieldOps};
use vamp_ir::transform::compile;
use vamp_ir::util::{read_inputs_from_file, Config};

#[cfg(feature = "nif")]
use rustler::types::atom;
#[cfg(feature = "nif")]
use rustler::{Decoder, Encoder, Env, NifResult, Term};

pub type ValidityPredicate = dyn ValidityPredicateVerifyingInfo;

#[derive(Debug, Clone)]
@@ -64,9 +69,66 @@ pub struct VPVerifyingInfo {
pub public_inputs: ValidityPredicatePublicInputs,
}

#[cfg(feature = "nif")]
rustler::atoms! {verifying_info}

#[cfg(feature = "nif")]
impl Encoder for VPVerifyingInfo {
fn encode<'a>(&self, env: Env<'a>) -> Term<'a> {
(
verifying_info().encode(env),
self.vk.to_bytes().encode(env),
self.proof.encode(env),
self.public_inputs.encode(env),
)
.encode(env)
}
}

#[cfg(feature = "nif")]
impl<'a> Decoder<'a> for VPVerifyingInfo {
fn decode(term: Term<'a>) -> NifResult<Self> {
let (term, vk, proof, public_inputs): (
atom::Atom,
Vec<u8>,
Proof,
ValidityPredicatePublicInputs,
) = term.decode()?;
if term == verifying_info() {
use crate::circuit::vp_examples::TrivialValidityPredicateCircuit;
let params = SETUP_PARAMS_MAP.get(&VP_CIRCUIT_PARAMS_SIZE).unwrap();
let vk = VerifyingKey::from_bytes::<TrivialValidityPredicateCircuit>(&vk, params)
.map_err(|_e| rustler::Error::Atom("failure to decode"))?;
Ok(VPVerifyingInfo {
vk,
proof,
public_inputs,
})
} else {
Err(rustler::Error::BadArg)
}
}
}

#[derive(Clone, Debug)]
pub struct ValidityPredicatePublicInputs([pallas::Base; VP_CIRCUIT_PUBLIC_INPUT_NUM]);

#[cfg(feature = "nif")]
impl Encoder for ValidityPredicatePublicInputs {
fn encode<'a>(&self, env: Env<'a>) -> Term<'a> {
self.0.to_vec().encode(env)
}
}

#[cfg(feature = "nif")]
impl<'a> Decoder<'a> for ValidityPredicatePublicInputs {
fn decode(term: Term<'a>) -> NifResult<Self> {
let val: Vec<pallas::Base> = Decoder::decode(term)?;
val.try_into()
.map_err(|_e| rustler::Error::Atom("failure to decode"))
}
}

impl VPVerifyingInfo {
pub fn verify(&self) -> Result<(), Error> {
let params = SETUP_PARAMS_MAP.get(&VP_CIRCUIT_PARAMS_SIZE).unwrap();
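
The Encoder and Decoder implementations above serialize a VPVerifyingInfo as an Erlang tuple tagged with the verifying_info atom, i.e. {:verifying_info, vk_bytes, proof, public_inputs} on the Elixir side; note that decoding rebuilds the VerifyingKey against TrivialValidityPredicateCircuit, so it is currently tied to that example circuit. A minimal sketch of the same tagged-tuple pattern on a hypothetical Point type (not part of this commit), assuming only rustler's built-in integer and tuple codecs:

#[cfg(feature = "nif")]
mod tagged_tuple_sketch {
    use rustler::types::atom;
    use rustler::{Decoder, Encoder, Env, NifResult, Term};

    // Hypothetical atom used to tag the tuple, mirroring `verifying_info` above.
    rustler::atoms! {point}

    pub struct Point {
        pub x: u64,
        pub y: u64,
    }

    impl Encoder for Point {
        fn encode<'a>(&self, env: Env<'a>) -> Term<'a> {
            // Encoded as the Erlang term {:point, x, y}.
            (point().encode(env), self.x.encode(env), self.y.encode(env)).encode(env)
        }
    }

    impl<'a> Decoder<'a> for Point {
        fn decode(term: Term<'a>) -> NifResult<Self> {
            let (tag, x, y): (atom::Atom, u64, u64) = term.decode()?;
            if tag == point() {
                Ok(Point { x, y })
            } else {
                Err(rustler::Error::BadArg)
            }
        }
    }
}
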
47 changes: 47 additions & 0 deletions taiga_halo2/src/circuit/vp_examples.rs
@@ -16,6 +16,8 @@ use halo2_proofs::{
use lazy_static::lazy_static;
use pasta_curves::pallas;
use rand::{rngs::OsRng, RngCore};
#[cfg(feature = "nif")]
use rustler::{Decoder, Encoder, Env, NifResult, NifStruct, Term};

pub mod cascade_intent;
mod field_addition;
@@ -39,6 +41,16 @@ pub struct TrivialValidityPredicateCircuit {
pub output_notes: [Note; NUM_NOTE],
}

// This proxy exists only to allow trivial derivation of NifStruct.
#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.VP.Trivial")]
struct TrivialValidityPredicateCircuitProxy {
owned_note_pub_id: pallas::Base,
input_notes: Vec<Note>,
output_notes: Vec<Note>,
}

impl TrivialValidityPredicateCircuit {
pub fn new(
owned_note_pub_id: pallas::Base,
@@ -51,6 +63,41 @@ impl TrivialValidityPredicateCircuit {
output_notes,
}
}

fn to_proxy(&self) -> TrivialValidityPredicateCircuitProxy {
TrivialValidityPredicateCircuitProxy {
owned_note_pub_id: self.owned_note_pub_id,
input_notes: self.input_notes.to_vec(),
output_notes: self.output_notes.to_vec(),
}
}
}

impl TrivialValidityPredicateCircuitProxy {
fn to_concrete(&self) -> Option<TrivialValidityPredicateCircuit> {
let input_notes = self.input_notes.clone().try_into().ok()?;
let output_notes = self.output_notes.clone().try_into().ok()?;
let owned_note_pub_id = self.owned_note_pub_id;
Some(TrivialValidityPredicateCircuit {
owned_note_pub_id,
input_notes,
output_notes,
})
}
}
#[cfg(feature = "nif")]
impl Encoder for TrivialValidityPredicateCircuit {
fn encode<'a>(&self, env: Env<'a>) -> Term<'a> {
self.to_proxy().encode(env)
}
}
#[cfg(feature = "nif")]
impl<'a> Decoder<'a> for TrivialValidityPredicateCircuit {
fn decode(term: Term<'a>) -> NifResult<Self> {
let val: TrivialValidityPredicateCircuitProxy = Decoder::decode(term)?;
val.to_concrete()
.ok_or(rustler::Error::RaiseAtom("Could not decode proxy"))
}
}

impl ValidityPredicateCircuit for TrivialValidityPredicateCircuit {
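
TrivialValidityPredicateCircuit stores its notes in fixed-length arrays, while the derived NifStruct proxy stores them as Vec; to_proxy and to_concrete convert between the two, and to_concrete fails when a decoded Vec does not hold exactly NUM_NOTE elements. The same proxy pattern reappears below for ShieldedPartialTransaction. A standalone sketch of the round trip on hypothetical types (not from this commit):

// Hypothetical types illustrating the array <-> Vec proxy round trip.
#[derive(Clone, Debug, PartialEq)]
struct Concrete {
    values: [u8; 2],
}

#[derive(Clone, Debug)]
struct ConcreteProxy {
    values: Vec<u8>,
}

impl Concrete {
    fn to_proxy(&self) -> ConcreteProxy {
        ConcreteProxy {
            values: self.values.to_vec(),
        }
    }
}

impl ConcreteProxy {
    fn to_concrete(&self) -> Option<Concrete> {
        // try_into fails (mapped to None) unless the Vec has exactly two elements.
        let values: [u8; 2] = self.values.clone().try_into().ok()?;
        Some(Concrete { values })
    }
}

fn main() {
    let original = Concrete { values: [1, 2] };
    let round_trip = original.to_proxy().to_concrete().unwrap();
    assert_eq!(original, round_trip);
}
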
7 changes: 7 additions & 0 deletions taiga_halo2/src/note.rs
@@ -24,13 +24,16 @@ use pasta_curves::{
pallas,
};
use rand::RngCore;
#[cfg(feature = "nif")]
use rustler::{NifStruct, NifTuple};
use std::{
hash::{Hash, Hasher},
io,
};

/// A commitment to a note.
#[derive(Copy, Debug, Clone)]
#[cfg_attr(feature = "nif", derive(NifTuple))]
pub struct NoteCommitment(pallas::Point);

impl NoteCommitment {
@@ -55,6 +58,8 @@ impl Default for NoteCommitment {

/// A note
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.Note")]
pub struct Note {
pub note_type: NoteType,
/// app_data_dynamic is the data defined in the application VP and will NOT be used to derive the note type
@@ -76,6 +81,8 @@ pub struct Note {

/// The parameters in the NoteType are used to derive note type.
#[derive(Debug, Clone, Copy, Default, Eq)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.NoteType")]
pub struct NoteType {
/// app_vk is the compressed verifying key of VP
pub app_vk: pallas::Base,
4 changes: 4 additions & 0 deletions taiga_halo2/src/nullifier.rs
@@ -11,14 +11,18 @@ use pasta_curves::group::cofactor::CofactorCurveAffine;
use pasta_curves::group::ff::PrimeField;
use pasta_curves::pallas;
use rand::RngCore;
#[cfg(feature = "nif")]
use rustler::{NifTaggedEnum, NifTuple};
use subtle::CtOption;

/// The unique nullifier.
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "nif", derive(NifTuple))]
pub struct Nullifier(pallas::Base);

/// The NullifierKeyContainer contains the nullifier_key or the nullifier_key commitment
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "nif", derive(NifTaggedEnum))]
pub enum NullifierKeyContainer {
// The NullifierKeyContainer::Commitment is the commitment of NullifierKeyContainer::Key: `nk_com = Commitment(nk, 0)`
Commitment(pallas::Base),
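
NifTaggedEnum gives NullifierKeyContainer a tagged representation on the Erlang side: each variant is expected to encode as a snake_case atom plus its payload, i.e. {:commitment, base} or {:key, base}. A minimal sketch of the derive on a hypothetical enum (not from this commit):

// Hypothetical enum mirroring the shape of NullifierKeyContainer above.
#[cfg(feature = "nif")]
use rustler::NifTaggedEnum;

#[derive(Clone, Debug)]
#[cfg_attr(feature = "nif", derive(NifTaggedEnum))]
enum Container {
    Commitment(u64),
    Key(u64),
}
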
3 changes: 3 additions & 0 deletions taiga_halo2/src/proof.rs
@@ -6,8 +6,11 @@ use halo2_proofs::{
};
use pasta_curves::{pallas, vesta};
use rand::RngCore;
#[cfg(feature = "nif")]
use rustler::NifTuple;

#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
#[cfg_attr(feature = "nif", derive(NifTuple))]
pub struct Proof(Vec<u8>);

impl Proof {
55 changes: 55 additions & 0 deletions taiga_halo2/src/shielded_ptx.rs
@@ -14,6 +14,8 @@ use borsh::{BorshDeserialize, BorshSerialize};
use halo2_proofs::plonk::Error;
use pasta_curves::pallas;
use rand::RngCore;
#[cfg(feature = "nif")]
use rustler::{Decoder, Encoder, Env, NifResult, NifStruct, Term};

#[derive(Debug, Clone)]
pub struct ShieldedPartialTransaction {
@@ -23,19 +25,33 @@ pub struct ShieldedPartialTransaction {
}

#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.Action.VerifyingInfo")]
pub struct ActionVerifyingInfo {
action_proof: Proof,
action_instance: ActionInstance,
}

#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.Note.VerifyingInfo")]
pub struct NoteVPVerifyingInfoSet {
app_vp_verifying_info: VPVerifyingInfo,
app_dynamic_vp_verifying_info: Vec<VPVerifyingInfo>,
// TODO: add the verifier proof and corresponding public inputs.
// When the verifier proof is added, we may need to reconsider the structure of `VPVerifyingInfo`
}

// A proxy type that is easier to derive traits for.
#[derive(Debug, Clone)]
#[cfg_attr(feature = "nif", derive(NifStruct))]
#[cfg_attr(feature = "nif", module = "Taiga.Shielded.PTX")]
struct ShieldedPartialTransactionProxy {
actions: Vec<ActionVerifyingInfo>,
inputs: Vec<NoteVPVerifyingInfoSet>,
outputs: Vec<NoteVPVerifyingInfoSet>,
}

impl ShieldedPartialTransaction {
pub fn build<R: RngCore>(
input_info: [InputNoteProvingInfo; NUM_NOTE],
@@ -163,6 +179,28 @@ impl ShieldedPartialTransaction {
}
Ok(())
}

// Convert to the variable-length proxy.
fn to_proxy(&self) -> ShieldedPartialTransactionProxy {
ShieldedPartialTransactionProxy {
actions: self.actions.to_vec(),
inputs: self.inputs.to_vec(),
outputs: self.outputs.to_vec(),
}
}
}

impl ShieldedPartialTransactionProxy {
fn to_concrete(&self) -> Option<ShieldedPartialTransaction> {
let actions = self.actions.clone().try_into().ok()?;
let inputs = self.inputs.clone().try_into().ok()?;
let outputs = self.outputs.clone().try_into().ok()?;
Some(ShieldedPartialTransaction {
actions,
inputs,
outputs,
})
}
}

impl Executable for ShieldedPartialTransaction {
@@ -238,6 +276,23 @@ impl BorshDeserialize for ShieldedPartialTransaction {
})
}
}

#[cfg(feature = "nif")]
impl Encoder for ShieldedPartialTransaction {
fn encode<'a>(&self, env: Env<'a>) -> Term<'a> {
self.to_proxy().encode(env)
}
}

#[cfg(feature = "nif")]
impl<'a> Decoder<'a> for ShieldedPartialTransaction {
fn decode(term: Term<'a>) -> NifResult<Self> {
let val: ShieldedPartialTransactionProxy = Decoder::decode(term)?;
val.to_concrete()
.ok_or(rustler::Error::RaiseAtom("Could not decode proxy"))
}
}

impl ActionVerifyingInfo {
pub fn create<R: RngCore>(action_info: ActionInfo, mut rng: R) -> Result<Self, Error> {
let (action_instance, circuit) = action_info.build();