diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ee516b30..20dd3e72 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,9 +1,13 @@ name: "Aries-Askar" env: - RUST_VERSION: "1.64.0" + RUST_VERSION: "1.65.0" CROSS_VERSION: "0.2.4" +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + on: push: branches: [main] @@ -105,9 +109,6 @@ jobs: - name: Test askar-crypto no default features run: cargo test --manifest-path ./askar-crypto/Cargo.toml --no-default-features - - name: Test askar-bbs no default features - run: cargo test --manifest-path ./askar-bbs/Cargo.toml --no-default-features - build-release: name: Build library needs: [checks] diff --git a/Cargo.toml b/Cargo.toml index 5e9160ad..a84435b1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["askar-bbs", "askar-crypto", "askar-storage"] +members = ["askar-crypto", "askar-storage"] [package] name = "aries-askar" @@ -12,7 +12,7 @@ readme = "README.md" repository = "https://github.com/hyperledger/aries-askar/" categories = ["cryptography", "database"] keywords = ["hyperledger", "aries", "ssi", "verifiable", "credentials"] -rust-version = "1.60" +rust-version = "1.65" [lib] name = "aries_askar" diff --git a/askar-bbs/Cargo.toml b/askar-bbs/Cargo.toml deleted file mode 100644 index 99f9fff5..00000000 --- a/askar-bbs/Cargo.toml +++ /dev/null @@ -1,57 +0,0 @@ -[package] -name = "askar-bbs" -version = "0.1.0" -authors = ["Hyperledger Aries Contributors "] -edition = "2018" -description = "Hyperledger Aries Askar BBS+ Signatures" -license = "MIT OR Apache-2.0" -readme = "README.md" -repository = "https://github.com/hyperledger/aries-askar/" -categories = ["cryptography", "no-std"] -keywords = ["hyperledger", "aries", "credentials", "bbs", "signatures"] - -[package.metadata.docs.rs] -features = ["argon2", "std"] -rustdoc-args = ["--cfg", "docsrs"] - -[features] -default = ["alloc", "getrandom"] -alloc 
= ["askar-crypto/alloc"] -std = ["alloc", "askar-crypto/std", "getrandom", "rand/std", "sha3/std"] -getrandom = ["askar-crypto/getrandom", "rand/getrandom"] - -[dependencies] -askar-crypto = { version = "0.2.1", default-features = false, features = ["bls"], path = "../askar-crypto" } -bls12_381 = { version = "0.6", default-features = false, features = ["experimental", "groups", "pairings"] } -group = { version = "0.11", default-features = false } -heapless = "0.7" -rand = { version = "0.8", default-features = false } -sha3 = { version = "0.9", default-features = false } -subtle = "2.4" - -[dev-dependencies] -criterion = "0.3" -# override transitive dependency from criterion to support rust versions older than 1.60 -csv = "=1.1" -hex-literal = "0.4" -serde-json-core = { version = "0.4", default-features = false, features = ["std"] } - - -[[bench]] -name = "blind_sign" -harness = false -required-features = ["getrandom"] - -[[bench]] -name = "generators" -harness = false - -[[bench]] -name = "proof" -harness = false -required-features = ["getrandom"] - -[[bench]] -name = "signature" -harness = false -required-features = ["getrandom"] diff --git a/askar-bbs/README.md b/askar-bbs/README.md deleted file mode 100644 index fd35f9ab..00000000 --- a/askar-bbs/README.md +++ /dev/null @@ -1,82 +0,0 @@ -# askar-bbs - -[![Rust Crate](https://img.shields.io/crates/v/askar-bbs.svg)](https://crates.io/crates/askar-bbs) -[![Rust Documentation](https://docs.rs/askar-bbs/badge.svg)](https://docs.rs/askar-bbs) - -The `askar-bbs` crate provides support for BBS+ signature generation and verification used by [`aries-askar`](https://github.com/hyperledger/aries-askar). - -The implementation will be targeting the 2022 standard which is in progress. Please **DO NOT** use this crate and expect it to be compatible with the released version just yet. - -## no-std - -This crate supports the optional `alloc` feature, gating types and operations that depend on a global allocator. 
The `std` feature depends on `alloc`, and adds support for `std::error::Error`. - -## Quick Start - -### Keypairs - -Signing and verification keys are managed as `askar-crypto` BLS keypairs. Keys may be generated randomly or from a seed value, or loaded from an binary encoded key or JWK. - -```rust -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, repr::KeyGen, -}; - -let keypair = BlsKeyPair::::random().unwrap(); -``` - -### Signing - -```rust -use askar_bbs::{ - io::FixedLengthBytes, DynGenerators, Message, Signature, SignatureBuilder, -}; - -let messages = [Message::hash("message 1"), Message::hash("message 2")]; -let generators = DynGenerators::new(&keypair, messages.len()); -let signature = SignatureBuilder::sign(&generators, &keypair, messages.iter().copied()).unwrap(); -let signature_bytes = signature.to_bytes(); -``` - -### Verifying a Signature - -```rust -let messages = [Message::hash("message 1"), Message::hash("message 2")]; -let generators = DynGenerators::new(&keypair, messages.len()); -let signature = Signature::from_bytes(&signature_bytes).unwrap(); -signature.verify(&generators, messages.iter().copied()).unwrap(); -``` - -### Generating a Signature Proof of Knowledge - -This zero-knowledge proof protocol is used by a prover to perform a selective reveal of the signed messages to a verifier. 
- -```rust -let nonce = Nonce::random(); // provided by the verifier -let messages = [Message::hash("message 1"), Message::hash("message 2")]; -let generators = DynGenerators::new(&keypair, messages.len()); -let signature = Signature::from_bytes(&signature_bytes).unwrap(); -let mut prover = signature.prover(&generators); -prover.push_hidden_message(messages[0]).unwrap(); -prover.push_message(messages[1]).unwrap(); -let (challenge, proof) = prover.complete(nonce).unwrap(); -``` - -### Verifying a Signature Proof of Knowledge - -```rust -let mut verifier = proof.verifier(&generators, challenge).unwrap(); -verifier.push_hidden_count(1).unwrap(); -verifier.push_revealed(messages[1]).unwrap(); -let challenge_v = verifier.complete(nonce).unwrap(); -verifier.verify(challenge_v).unwrap(); -``` - -## License - -Licensed under either of - -- Apache License, Version 2.0 ([LICENSE-APACHE](https://github.com/hyperledger/aries-askar/blob/main/LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) -- MIT license ([LICENSE-MIT](https://github.com/hyperledger/aries-askar/blob/main/LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. 
diff --git a/askar-bbs/benches/blind_sign.rs b/askar-bbs/benches/blind_sign.rs deleted file mode 100644 index 26a1c12d..00000000 --- a/askar-bbs/benches/blind_sign.rs +++ /dev/null @@ -1,80 +0,0 @@ -#[macro_use] -extern crate criterion; - -use askar_bbs::{CommitmentBuilder, DynGenerators, Message, Nonce, SignatureBuilder}; -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeyGen, -}; -use rand::{rngs::OsRng, RngCore}; - -use criterion::Criterion; - -fn criterion_benchmark(c: &mut Criterion) { - let keypair = BlsKeyPair::::generate(OsRng).unwrap(); - - for message_count in vec![5, 25, 125] { - let gens = DynGenerators::new(&keypair, message_count) - .to_vec() - .unwrap(); - let commit_msg = Message::from(OsRng.next_u64()); - let nonce = Nonce::random(); - - if message_count == 5 { - c.bench_function("create commitment", |b| { - b.iter(|| { - let mut committer = CommitmentBuilder::new(&gens); - committer.add_message(0, commit_msg).unwrap(); - let (_challenge, _blind, _commit, _proof) = committer.complete(nonce).unwrap(); - }); - }); - } - - let mut committer = CommitmentBuilder::new(&gens); - committer.add_message(0, commit_msg).unwrap(); - let (challenge, blinding, commitment, proof) = committer.complete(nonce).unwrap(); - - if message_count == 5 { - c.bench_function(&format!("verify commitment"), |b| { - b.iter(|| { - proof - .verify(&gens, commitment, [0].iter().copied(), challenge, nonce) - .unwrap() - }); - }); - } - - let messages: Vec = (1..message_count) - .map(|_| Message::from(OsRng.next_u64())) - .collect(); - c.bench_function(&format!("blind sign for {} messages", message_count), |b| { - b.iter(|| { - let mut signer = SignatureBuilder::from_commitment(&gens, &keypair, commitment); - signer.push_committed_count(1).unwrap(); - signer.append_messages(messages.iter().copied()).unwrap(); - signer.to_signature().unwrap() - }); - }); - - let mut signer = SignatureBuilder::from_commitment(&gens, &keypair, commitment); - 
signer.push_committed_count(1).unwrap(); - signer.append_messages(messages.iter().copied()).unwrap(); - let sig = signer.to_signature().unwrap(); - - c.bench_function( - &format!("unblind and verify for {} messages", message_count), - |b| { - b.iter(|| { - let sig = sig.unblind(blinding); - let mut verifier = sig.verifier(&gens); - verifier.push_message(commit_msg).unwrap(); - verifier.append_messages(messages.iter().copied()).unwrap(); - verifier.verify().unwrap() - }); - }, - ); - } -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/askar-bbs/benches/generators.rs b/askar-bbs/benches/generators.rs deleted file mode 100644 index d9dfc6db..00000000 --- a/askar-bbs/benches/generators.rs +++ /dev/null @@ -1,31 +0,0 @@ -#[macro_use] -extern crate criterion; - -use askar_bbs::{DynGenerators, Generators}; -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeySecretBytes, -}; -use hex_literal::hex; - -use criterion::{black_box, Criterion}; - -fn criterion_benchmark(c: &mut Criterion) { - let keypair = BlsKeyPair::::from_secret_bytes(&hex!( - "0011223344556677889900112233445566778899001122334455667788990011" - )) - .unwrap(); - - for message_count in vec![5, 25, 125] { - c.bench_function(&format!("keygen for {} messages", message_count), |b| { - b.iter(|| { - for gen in DynGenerators::new(&keypair, message_count).iter() { - black_box(gen); - } - }); - }); - } -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/askar-bbs/benches/proof.rs b/askar-bbs/benches/proof.rs deleted file mode 100644 index f64c37e4..00000000 --- a/askar-bbs/benches/proof.rs +++ /dev/null @@ -1,82 +0,0 @@ -#[macro_use] -extern crate criterion; - -use askar_bbs::{CreateChallenge, DynGenerators, Message, Nonce, SignatureBuilder}; -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeyGen, -}; -use rand::{rngs::OsRng, RngCore}; - -use criterion::Criterion; - -fn criterion_benchmark(c: &mut Criterion) { - 
let keypair = BlsKeyPair::::generate(OsRng).unwrap(); - - for message_count in vec![5, 25, 125] { - let gens = DynGenerators::new(&keypair, message_count) - .to_vec() - .unwrap(); - - let messages: Vec = (0..message_count) - .map(|_| Message::from(OsRng.next_u64())) - .collect(); - - let mut signer = SignatureBuilder::new(&gens, &keypair); - signer.append_messages(messages.iter().copied()).unwrap(); - let sig = signer.to_signature().unwrap(); - let nonce = Nonce::random(); - - c.bench_function( - &format!("create signature pok for {} messages", message_count), - |b| { - b.iter(|| { - let mut prover = sig.prover(&gens); - let hidden_count = message_count / 2; - for (index, msg) in messages.iter().enumerate() { - if index < hidden_count { - prover.push_hidden_message(*msg).unwrap(); - } else { - prover.push_message(*msg).unwrap(); - } - } - let ctx = prover.prepare().unwrap(); - let challenge = ctx.create_challenge(nonce, None).unwrap(); - let _proof = ctx.complete(challenge).unwrap(); - }); - }, - ); - - let mut prover = sig.prover(&gens); - let hidden_count = message_count / 2; - for (index, msg) in messages.iter().enumerate() { - if index < hidden_count { - prover.push_hidden_message(*msg).unwrap(); - } else { - prover.push_message(*msg).unwrap(); - } - } - let ctx = prover.prepare().unwrap(); - let challenge = ctx.create_challenge(nonce, None).unwrap(); - let proof = ctx.complete(challenge).unwrap(); - c.bench_function( - &format!("verify signature pok for {} messages", message_count), - |b| { - b.iter(|| { - let mut verifier = proof.verifier(&gens, challenge).unwrap(); - verifier.push_hidden_count(hidden_count).unwrap(); - for index in hidden_count..messages.len() { - verifier.push_revealed(messages[index]).unwrap(); - } - let challenge_v = verifier.create_challenge(nonce, None).unwrap(); - verifier - .verify(challenge_v) - .expect("Error verifying signature PoK") - }); - }, - ); - } -} - -criterion_group!(benches, criterion_benchmark); 
-criterion_main!(benches); diff --git a/askar-bbs/benches/signature.rs b/askar-bbs/benches/signature.rs deleted file mode 100644 index ea7d2ffc..00000000 --- a/askar-bbs/benches/signature.rs +++ /dev/null @@ -1,42 +0,0 @@ -#[macro_use] -extern crate criterion; - -use askar_bbs::{DynGenerators, Message, SignatureBuilder}; -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeyGen, -}; -use rand::{rngs::OsRng, RngCore}; - -use criterion::Criterion; - -fn criterion_benchmark(c: &mut Criterion) { - let keypair = BlsKeyPair::::generate(OsRng).unwrap(); - - for message_count in vec![5, 25, 125] { - let gens = DynGenerators::new(&keypair, message_count) - .to_vec() - .unwrap(); - let messages: Vec = (0..message_count) - .map(|_| Message::from(OsRng.next_u64())) - .collect(); - - c.bench_function(&format!("sign for {} messages", message_count), |b| { - b.iter(|| { - let mut signer = SignatureBuilder::new(&gens, &keypair); - signer.append_messages(messages.iter().copied()).unwrap(); - signer.to_signature().unwrap(); - }); - }); - - let mut signer = SignatureBuilder::new(&gens, &keypair); - signer.append_messages(messages.iter().copied()).unwrap(); - let sig = signer.to_signature().unwrap(); - c.bench_function(&format!("verify for {} messages", message_count), |b| { - b.iter(|| sig.verify(&gens, messages.iter().copied()).unwrap()); - }); - } -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/askar-bbs/src/challenge.rs b/askar-bbs/src/challenge.rs deleted file mode 100644 index 27b2435d..00000000 --- a/askar-bbs/src/challenge.rs +++ /dev/null @@ -1,35 +0,0 @@ -use askar_crypto::buffer::WriteBuffer; - -use crate::{hash::HashScalar, util::Nonce, Error}; - -impl_scalar_type!(ProofChallenge, "Fiat-Shamir proof challenge value"); - -impl ProofChallenge { - /// Create a new proof challenge value from a set of prepared proofs - pub fn create( - proofs: &[&dyn CreateChallenge], - nonce: Nonce, - dst: Option<&[u8]>, - ) -> Result { - 
let mut c_hash = HashScalar::new(dst); - for proof in proofs { - proof.write_challenge_bytes(&mut c_hash)?; - } - c_hash.update(&nonce.0.to_bytes()); - Ok(ProofChallenge(c_hash.finalize().next())) - } -} - -/// Support for outputting bytes for use in proof challenge generation -pub trait CreateChallenge { - /// Create a new independent proof challenge - fn create_challenge(&self, nonce: Nonce, dst: Option<&[u8]>) -> Result { - let mut c_hash = HashScalar::new(dst); - self.write_challenge_bytes(&mut c_hash)?; - c_hash.update(&nonce.0.to_bytes()); - Ok(ProofChallenge(c_hash.finalize().next())) - } - - /// Write the challenge bytes to a target - fn write_challenge_bytes(&self, writer: &mut dyn WriteBuffer) -> Result<(), Error>; -} diff --git a/askar-bbs/src/collect.rs b/askar-bbs/src/collect.rs deleted file mode 100644 index b403e12b..00000000 --- a/askar-bbs/src/collect.rs +++ /dev/null @@ -1,254 +0,0 @@ -//! Support for heapless and heap-allocated sequences - -use core::{ - fmt::{self, Debug, Formatter}, - ops::{Deref, DerefMut}, - slice::Iter, -}; - -use crate::Error; - -// NOTE: in future, it should be possible to simplify this with GATs - -#[cfg(feature = "alloc")] -/// The default generic sequence type -pub type DefaultSeq = Heap; -#[cfg(not(feature = "alloc"))] -/// The default generic sequence type -pub type DefaultSeq = Stack; - -/// A wrapper type for a generic backing sequence -pub struct Vec -where - B: Seq, -{ - inner: B::Vec, -} - -impl Vec -where - B: Seq, -{ - #[inline] - /// Create a new, empty sequence - pub fn new() -> Self { - Self { inner: B::new() } - } - - #[inline] - /// Create a new sequence with a minimum capacity - pub fn with_capacity(cap: usize) -> Self { - Self { - inner: B::with_capacity(cap), - } - } - - #[inline] - /// Push a new value at the end of the sequence, failing if the - /// maximum length has been exceeded - pub fn push(&mut self, item: Item) -> Result<(), Error> { - B::push(&mut self.inner, item) - } - - #[inline] - /// Get 
the current length of the sequence - pub fn len(&self) -> usize { - B::len(&self.inner) - } - - /// Get an iterator over the sequence values - pub fn iter(&self) -> Iter<'_, Item> { - B::as_slice(&self.inner).into_iter() - } - - /// Create a new sequence from an iterator of values - pub fn from_iter(iter: impl IntoIterator) -> Result { - let iter = iter.into_iter(); - let mut slf = Self::with_capacity(iter.size_hint().0); - for item in iter { - slf.push(item)?; - } - Ok(slf) - } -} - -impl Clone for Vec -where - B: Seq, - Item: Clone, -{ - fn clone(&self) -> Self { - Self { - inner: B::clone(&self.inner), - } - } -} - -impl Debug for Vec -where - B: Seq, - Item: Debug, -{ - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_list().entries(B::as_slice(&self.inner)).finish() - } -} - -impl Deref for Vec -where - B: Seq, -{ - type Target = [Item]; - - fn deref(&self) -> &Self::Target { - B::as_slice(&self.inner) - } -} - -impl DerefMut for Vec -where - B: Seq, -{ - fn deref_mut(&mut self) -> &mut Self::Target { - B::as_slice_mut(&mut self.inner) - } -} - -impl PartialEq for Vec -where - B: Seq, - Item: PartialEq, -{ - fn eq(&self, other: &Self) -> bool { - &*self == &*other - } -} - -impl Eq for Vec -where - B: Seq, - Item: Eq, -{ -} - -/// A generic trait for a backing sequence type -pub trait Seq: Debug { - /// The backing type - type Vec; - - /// Create a new instance of the backing type - fn new() -> Self::Vec; - - #[inline] - /// Create a new instance of the backing type with a minimum capacity - fn with_capacity(_cap: usize) -> Self::Vec { - Self::new() - } - - /// Push a new value onto the sequence - fn push(vec: &mut Self::Vec, item: Item) -> Result<(), Error>; - - /// Access the contained values as a slice - fn as_slice(vec: &Self::Vec) -> &[Item]; - - /// Access the contained values as a mutable slice - fn as_slice_mut(vec: &mut Self::Vec) -> &mut [Item]; - - /// Get the current length of the sequence - fn len(vec: &Self::Vec) -> usize; - - /// 
Clone the backing type - fn clone(vec: &Self::Vec) -> Self::Vec - where - Item: Clone; -} - -#[cfg(feature = "alloc")] -#[derive(Debug)] -/// A heap-based (std::vec::Vec) sequence type -pub struct Heap; - -#[cfg(feature = "alloc")] -impl Seq for Heap { - type Vec = alloc::vec::Vec; - - #[inline] - fn new() -> Self::Vec { - alloc::vec::Vec::new() - } - - #[inline] - fn with_capacity(cap: usize) -> Self::Vec { - alloc::vec::Vec::with_capacity(cap) - } - - #[inline] - fn push(vec: &mut Self::Vec, item: Item) -> Result<(), Error> { - vec.push(item); - Ok(()) - } - - #[inline] - fn as_slice(vec: &Self::Vec) -> &[Item] { - vec.as_ref() - } - - #[inline] - fn as_slice_mut(vec: &mut Self::Vec) -> &mut [Item] { - &mut vec[..] - } - - #[inline] - fn len(vec: &Self::Vec) -> usize { - vec.len() - } - - #[inline] - fn clone(vec: &Self::Vec) -> Self::Vec - where - Item: Clone, - { - vec.clone() - } -} - -#[derive(Debug)] -/// A stack-based (heapless) sequence type -pub struct Stack; - -impl Seq for Stack { - type Vec = heapless::Vec; - - #[inline] - fn new() -> Self::Vec { - heapless::Vec::new() - } - - fn push(vec: &mut Self::Vec, item: Item) -> Result<(), Error> { - vec.push(item) - .map_err(|_| err_msg!(Usage, "Exceeded storage capacity")) - } - - #[inline] - fn as_slice(vec: &Self::Vec) -> &[Item] { - vec.as_ref() - } - - #[inline] - fn as_slice_mut(vec: &mut Self::Vec) -> &mut [Item] { - &mut vec[..] 
- } - - #[inline] - fn len(vec: &Self::Vec) -> usize { - vec.len() - } - - #[inline] - fn clone(vec: &Self::Vec) -> Self::Vec - where - Item: Clone, - { - vec.clone() - } -} diff --git a/askar-bbs/src/commitment.rs b/askar-bbs/src/commitment.rs deleted file mode 100644 index b3cbff0c..00000000 --- a/askar-bbs/src/commitment.rs +++ /dev/null @@ -1,409 +0,0 @@ -#[cfg(feature = "alloc")] -use alloc::vec::Vec as StdVec; - -use askar_crypto::buffer::WriteBuffer; -use bls12_381::{G1Affine, G1Projective, Scalar}; -use group::Curve; -use rand::{CryptoRng, Rng}; - -#[cfg(feature = "getrandom")] -use askar_crypto::random::default_rng; - -use crate::{ - challenge::{CreateChallenge, ProofChallenge}, - collect::{DefaultSeq, Seq, Vec}, - generators::Generators, - io::{Cursor, FixedLengthBytes}, - signature::Message, - util::{random_scalar, AccumG1, Nonce}, - Error, -}; - -/// A standard domain-specific input for use in blinded message commitment proofs -pub const COMMITMENT_PROOF_DST_G1: &[u8] = b"BLS12381G1_BBS+_SIGNATURES_COMMITMENT_POK:1_0_0"; - -const G1_COMPRESSED_SIZE: usize = 48; - -/// A nonce value used as a blinding -pub type Blinding = Nonce; - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -/// A commitment to a set of blinded messages for signing -pub struct Commitment(pub(crate) G1Affine); - -impl FixedLengthBytes for Commitment { - const LENGTH: usize = G1_COMPRESSED_SIZE; - - type Buffer = [u8; G1_COMPRESSED_SIZE]; - - fn from_bytes(buf: &Self::Buffer) -> Result { - if let Some(pt) = G1Affine::from_compressed(buf).into() { - Ok(Self(pt)) - } else { - Err(err_msg!(Invalid)) - } - } - - fn with_bytes(&self, f: impl FnOnce(&Self::Buffer) -> R) -> R { - f(&self.0.to_compressed()) - } -} - -impl From for Commitment { - fn from(pt: G1Affine) -> Self { - Self(pt) - } -} - -#[derive(Clone, Debug)] -/// A builder used to generate and prove a commitment to a set of messages -pub struct CommitmentBuilder<'g, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)>, -{ - 
accum_commitment: AccumG1, - accum_c1: AccumG1, - messages: Vec<(Message, Blinding), S>, - generators: &'g G, -} - -impl<'g, G> CommitmentBuilder<'g, G, DefaultSeq<32>> -where - G: Generators, -{ - /// Create a new commitment builder - pub fn new(generators: &'g G) -> Self { - Self::new_sized(generators) - } -} - -impl<'g, G, S> CommitmentBuilder<'g, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)>, -{ - /// Create a new commitment builder with a specific backing sequence type - pub fn new_sized(generators: &'g G) -> Self { - Self { - accum_commitment: AccumG1::zero(), - accum_c1: AccumG1::zero(), - messages: Vec::with_capacity(16), - generators, - } - } -} - -impl CommitmentBuilder<'_, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)>, -{ - #[cfg(feature = "getrandom")] - /// Add a hidden message with a random blinding value - pub fn add_message(&mut self, index: usize, message: Message) -> Result<(), Error> { - self.add_message_with(index, message, Blinding::random()) - } - - /// Add a hidden message with a pre-selected blinding value - pub fn add_message_with( - &mut self, - index: usize, - message: Message, - blinding: Blinding, - ) -> Result<(), Error> { - if index > self.generators.message_count() { - Err(err_msg!(Usage, "Message index exceeds generator count")) - } else { - self.messages.push((message, blinding))?; - let base = self.generators.message(index); - self.accum_commitment.push(base, message.0); - self.accum_c1.push(base, blinding.0); - Ok(()) - } - } -} - -impl CommitmentBuilder<'_, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)> + Seq, -{ - #[cfg(feature = "getrandom")] - /// Prepare the commitment proof context - pub fn prepare(self) -> Result, Error> { - self.prepare_with_rng(default_rng()) - } - - /// Prepare the commitment proof context with a specific RNG - pub fn prepare_with_rng( - mut self, - mut rng: impl CryptoRng + Rng, - ) -> Result, Error> { - if self.messages.is_empty() { - return Err(err_msg!(Usage, 
"No messages provided for commitment")); - } - - let h0 = self.generators.blinding(); - let s_prime = random_scalar(&mut rng); // s' - let s_blind = random_scalar(&mut rng); // s~ - self.accum_commitment.push(h0, s_prime); - self.accum_c1.push(h0, s_blind); - - let mut affine = [G1Affine::identity(); 2]; - G1Projective::batch_normalize( - &[self.accum_commitment.sum(), self.accum_c1.sum()], - &mut affine[..], - ); - Ok(CommitmentProofContext { - commitment: affine[0].into(), - c1: affine[1], - messages: self.messages, - s_prime, - s_blind, - }) - } - - #[cfg(feature = "getrandom")] - /// Complete an independent commitment proof of knowledge - pub fn complete( - self, - nonce: Nonce, - ) -> Result<(ProofChallenge, Blinding, Commitment, CommitmentProof), Error> { - self.complete_with_rng(default_rng(), nonce) - } - - /// Complete an independent commitment proof with a specific RNG - pub fn complete_with_rng( - self, - rng: impl CryptoRng + Rng, - nonce: Nonce, - ) -> Result<(ProofChallenge, Blinding, Commitment, CommitmentProof), Error> { - let context = self.prepare_with_rng(rng)?; - let challenge = context.create_challenge(nonce, Some(COMMITMENT_PROOF_DST_G1))?; - let (blinding, commitment, proof) = context.complete(challenge)?; - Ok((challenge, blinding, commitment, proof)) - } -} - -#[derive(Clone, Debug)] -/// A prepared context for generating a commitment proof of knowledge -pub struct CommitmentProofContext -where - S: Seq<(Message, Blinding)>, -{ - commitment: Commitment, - c1: G1Affine, - messages: Vec<(Message, Blinding), S>, - s_prime: Scalar, - s_blind: Scalar, -} - -impl CommitmentProofContext -where - S: Seq<(Message, Blinding)>, - S: Seq, -{ - /// Complete the commitment proof of knowledge given a Fiat-Shamir challenge value - pub fn complete( - &self, - challenge: ProofChallenge, - ) -> Result<(Blinding, Commitment, CommitmentProof), Error> { - let c = challenge.0; - let s_resp = self.s_blind + c * self.s_prime; - let mut m_resp = 
Vec::with_capacity(self.messages.len()); - for (msg, m_rand) in self.messages.iter().copied() { - m_resp.push(m_rand.0 + c * msg.0)?; - } - Ok(( - self.s_prime.into(), - self.commitment, - CommitmentProof { s_resp, m_resp }, - )) - } -} - -impl CreateChallenge for CommitmentProofContext -where - S: Seq<(Message, Blinding)>, -{ - fn write_challenge_bytes(&self, writer: &mut dyn WriteBuffer) -> Result<(), Error> { - writer.buffer_write(&self.commitment.0.to_uncompressed())?; - writer.buffer_write(&self.c1.to_uncompressed())?; - Ok(()) - } -} - -#[derive(Clone, Debug)] -/// A proof of a commitment to hidden messages for signing -pub struct CommitmentProof -where - S: Seq, -{ - pub(crate) s_resp: Scalar, - pub(crate) m_resp: Vec, -} - -impl CommitmentProof> { - /// Convert a signature proof of knowledge from a byte slice - pub fn from_bytes(buf: &[u8]) -> Result { - Self::from_bytes_sized(buf) - } -} - -impl CommitmentProof -where - S: Seq, -{ - /// Verify an independent commitment proof - pub fn verify( - &self, - generators: &G, - commitment: Commitment, - committed_indices: I, - challenge: ProofChallenge, - nonce: Nonce, - ) -> Result<(), Error> - where - G: Generators, - I: IntoIterator, - { - let verifier = self.verifier(generators, commitment, committed_indices, challenge)?; - let challenge_v = verifier.create_challenge(nonce, Some(COMMITMENT_PROOF_DST_G1))?; - verifier.verify(challenge_v) - } - - /// Create a verifier for the commitment proof - pub fn verifier( - &self, - generators: &G, - commitment: Commitment, - committed_indices: I, - challenge: ProofChallenge, - ) -> Result - where - G: Generators, - I: IntoIterator, - { - CommitmentProofVerifier::new( - generators, - commitment, - self, - committed_indices.into_iter(), - challenge, - ) - } - - /// Write the commitment proof of knowledge to an output buffer - pub fn write_bytes(&self, buf: &mut dyn WriteBuffer) -> Result<(), Error> { - buf.buffer_write(&((self.m_resp.len() + 1) as u32).to_be_bytes())?; - 
self.s_resp.write_bytes(&mut *buf)?; - for resp in self.m_resp.iter() { - resp.write_bytes(&mut *buf)?; - } - Ok(()) - } - - #[cfg(feature = "alloc")] - /// Output the signature proof of knowledge as a byte vec - pub fn to_bytes(&self) -> Result, Error> { - let mut out = StdVec::with_capacity(4 + (1 + self.m_resp.len()) * 32); - self.write_bytes(&mut out)?; - Ok(out) - } - - /// Convert a signature proof of knowledge from a byte slice - pub fn from_bytes_sized(buf: &[u8]) -> Result { - let mut cur = Cursor::new(buf); - let mut m_len = u32::from_be_bytes(*cur.read_fixed()?) as usize; - if m_len < 2 { - return Err(err_msg!(Invalid, "Invalid proof response count")); - } - let s_resp = Scalar::read_bytes(&mut cur)?; - m_len -= 1; - let mut m_resp = Vec::with_capacity(m_len); - for _ in 0..m_len { - m_resp.push(Scalar::read_bytes(&mut cur)?)?; - } - Ok(Self { s_resp, m_resp }) - } - - /// Get the response value from the post-challenge phase of the sigma protocol - /// for a given message index - pub fn get_response(&self, index: usize) -> Result { - self.m_resp - .get(index) - .map(Blinding::from) - .ok_or_else(|| err_msg!(Usage, "Invalid index for committed message")) - } -} - -impl PartialEq> for CommitmentProof -where - S: Seq, - T: Seq, -{ - fn eq(&self, other: &CommitmentProof) -> bool { - self.s_resp == other.s_resp && &*self.m_resp == &*other.m_resp - } -} -impl Eq for CommitmentProof where S: Seq {} - -#[derive(Clone, Debug)] -/// A verifier for a commitment proof of knowledge -pub struct CommitmentProofVerifier { - challenge: Scalar, - commitment: G1Affine, - c1: G1Affine, -} - -impl CommitmentProofVerifier { - pub(crate) fn new( - generators: &G, - commitment: Commitment, - proof: &CommitmentProof, - committed_indices: I, - challenge: ProofChallenge, - ) -> Result - where - G: Generators, - S: Seq, - I: Iterator, - { - let mut accum_c1 = AccumG1::from( - &[ - (commitment.0.into(), -challenge.0), - (generators.blinding(), proof.s_resp), - ][..], - ); - for 
(index, resp) in committed_indices.zip(proof.m_resp.iter().copied()) { - if index >= generators.message_count() { - return Err(err_msg!(Invalid, "Message index exceeds generator count")); - } - accum_c1.push(generators.message(index), resp); - } - - Ok(Self { - challenge: challenge.0, - commitment: commitment.0, - c1: accum_c1.sum().to_affine(), - }) - } - - /// Verify the commitment proof of knowledge - pub fn verify(&self, challenge_v: ProofChallenge) -> Result<(), Error> { - if challenge_v.0 != self.challenge { - Err(err_msg!(Invalid, "Commitment proof challenge mismatch")) - } else { - Ok(()) - } - } -} - -impl CreateChallenge for CommitmentProofVerifier { - fn write_challenge_bytes(&self, writer: &mut dyn WriteBuffer) -> Result<(), Error> { - writer.buffer_write(&self.commitment.to_uncompressed())?; - writer.buffer_write(&self.c1.to_uncompressed())?; - Ok(()) - } -} diff --git a/askar-bbs/src/generators.rs b/askar-bbs/src/generators.rs deleted file mode 100644 index 41c4dcd6..00000000 --- a/askar-bbs/src/generators.rs +++ /dev/null @@ -1,183 +0,0 @@ -use core::fmt::Debug; - -use askar_crypto::alg::bls::{BlsKeyPair, G2}; -use bls12_381::{ - hash_to_curve::{ExpandMsgXof, HashToCurve}, - G1Projective, G2Affine, -}; - -use crate::{ - collect::{DefaultSeq, Seq, Vec}, - Error, -}; - -/// A standard domain-specific input for use in signature message generators -pub const GENERATORS_DST_G1: &'static [u8] = - b"BLS12381G1_XOF:SHAKE256_SSWU_RO_BBS+_SIGNATURES:1_0_0"; - -const G2_UNCOMPRESSED_SIZE: usize = 192; - -/// Message generators used in signature building and verification -pub trait Generators: Clone + Debug { - /// Get the blinding message generator (h_0) - #[inline] - fn blinding(&self) -> G1Projective { - self.generator(0) - } - - /// Get the message generator for a given message index (h_i) - #[inline] - fn message(&self, index: usize) -> G1Projective { - self.generator(index + 1) - } - - /// The number of message generators, not including the blinding - fn 
message_count(&self) -> usize; - - /// The public key associated with the generators - fn public_key(&self) -> G2Affine; - - /// Fetch a zero-based message generator - fn generator(&self, index: usize) -> G1Projective; - - /// Create an iterator over the message generators - fn iter(&self) -> GeneratorsRefIter<'_, Self> { - GeneratorsRefIter { - index: 0, - count: self.message_count() + 1, - gens: self, - } - } -} - -#[derive(Clone, Debug)] -pub struct GeneratorsRefIter<'g, G: Generators> { - index: usize, - count: usize, - gens: &'g G, -} - -impl Iterator for GeneratorsRefIter<'_, G> { - type Item = G1Projective; - - fn size_hint(&self) -> (usize, Option) { - let len = self.count - self.index; - (len, Some(len)) - } - - fn next(&mut self) -> Option { - let idx = self.index; - if idx < self.count { - self.index += 1; - Some(self.gens.generator(idx)) - } else { - None - } - } -} - -impl ExactSizeIterator for GeneratorsRefIter<'_, G> {} - -/// The default pre-computed message generators -pub type VecGenerators = GeneratorsSeq>; - -#[derive(Debug)] -/// A pre-computed sequence of message generators -pub struct GeneratorsSeq -where - S: Seq, -{ - h: Vec, - pk: G2Affine, -} - -impl Clone for GeneratorsSeq -where - S: Seq, - Vec: Clone, -{ - fn clone(&self) -> Self { - Self { - h: self.h.clone(), - pk: self.pk.clone(), - } - } -} - -impl Generators for GeneratorsSeq -where - S: Seq, -{ - fn generator(&self, index: usize) -> G1Projective { - self.h[index] - } - - fn message_count(&self) -> usize { - self.h.len() - 1 - } - - fn public_key(&self) -> G2Affine { - self.pk - } -} - -impl GeneratorsSeq -where - S: Seq, -{ - /// Populate the message generators from another source - pub fn copy_from(gens: &G) -> Result { - Ok(Self { - h: Vec::from_iter(gens.iter())?, - pk: gens.public_key(), - }) - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -/// A dynamic (not pre-computed) message generator source -pub struct DynGenerators { - count: usize, - pk: G2Affine, -} - -impl 
DynGenerators { - /// Create a new instance of the message generators - pub fn new(pk: &BlsKeyPair, message_count: usize) -> Self { - Self { - count: message_count, - pk: *pk.bls_public_key(), - } - } - - /// Compute all the generators - pub fn to_vec(&self) -> Result { - VecGenerators::copy_from(self) - } -} - -impl Generators for DynGenerators { - fn generator(&self, index: usize) -> G1Projective { - const HASH_BUF_SIZE: usize = 10 + G2_UNCOMPRESSED_SIZE; - - let mut hash_buf = [0u8; HASH_BUF_SIZE]; - hash_buf[..G2_UNCOMPRESSED_SIZE].copy_from_slice(&self.pk.to_uncompressed()[..]); - hash_buf[(G2_UNCOMPRESSED_SIZE + 1)..(G2_UNCOMPRESSED_SIZE + 5)] - .copy_from_slice(&(index as u32).to_be_bytes()[..]); - hash_buf[(G2_UNCOMPRESSED_SIZE + 6)..(G2_UNCOMPRESSED_SIZE + 10)] - .copy_from_slice(&(self.count as u32).to_be_bytes()[..]); - - >>::hash_to_curve( - &hash_buf[..], - GENERATORS_DST_G1, - ) - } - - fn message_count(&self) -> usize { - self.count - } - - fn public_key(&self) -> G2Affine { - self.pk - } -} diff --git a/askar-bbs/src/hash.rs b/askar-bbs/src/hash.rs deleted file mode 100644 index 0fb6f748..00000000 --- a/askar-bbs/src/hash.rs +++ /dev/null @@ -1,92 +0,0 @@ -//! 
Support for hashing inputs into scalar values - -use core::fmt::{self, Debug, Formatter}; - -use askar_crypto::buffer::WriteBuffer; -use bls12_381::Scalar; -use sha3::{ - digest::{ExtendableOutput, Update, XofReader}, - Sha3XofReader, Shake256, -}; -use subtle::ConstantTimeEq; - -use crate::Error; - -#[derive(Clone, Debug)] -/// Derive Scalar values by hashing an arbitrary length input using Shake256 -pub struct HashScalar<'d> { - hasher: Shake256, - dst: Option<&'d [u8]>, -} - -impl<'d> HashScalar<'d> { - /// Create a new HashScalar instance - pub fn new(dst: Option<&'d [u8]>) -> Self { - Self { - hasher: Shake256::default(), - dst, - } - } - - /// Create a new HashScalar instance with initial input to the hasher - pub fn new_with_input(input: &[u8], dst: Option<&'d [u8]>) -> Self { - let mut slf = Self::new(dst); - slf.update(input); - slf - } -} - -impl HashScalar<'_> { - #[inline] - /// Utility method to hash the input and return a single Scalar - pub fn digest(input: impl AsRef<[u8]>, dst: Option<&[u8]>) -> Scalar { - let mut state = HashScalar::new(dst); - state.update(input.as_ref()); - state.finalize().next() - } - - #[inline] - /// Add more input to the hash state - pub fn update(&mut self, input: impl AsRef<[u8]>) { - self.hasher.update(input.as_ref()); - } - - /// Finalize the hasher and return a factory for Scalar values - pub fn finalize(mut self) -> HashScalarRead { - if let Some(dst) = self.dst { - self.hasher.update(dst); - } - HashScalarRead(self.hasher.finalize_xof()) - } -} - -impl WriteBuffer for HashScalar<'_> { - fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { - self.update(data); - Ok(()) - } -} - -/// The output of a HashScalar, allowing for multiple Scalar values to be read -pub struct HashScalarRead(Sha3XofReader); - -impl HashScalarRead { - /// Read the next non-zero Scalar value from the extensible hash output - pub fn next(&mut self) -> Scalar { - let mut buf = [0u8; 64]; - let mut s; - loop { - self.0.read(&mut buf); - s 
= Scalar::from_bytes_wide(&buf); - if !bool::from(s.ct_eq(&Scalar::zero())) { - break s; - } - } - } -} - -impl Debug for HashScalarRead { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("HashScalarRead").finish() - } -} diff --git a/askar-bbs/src/io.rs b/askar-bbs/src/io.rs deleted file mode 100644 index ff106740..00000000 --- a/askar-bbs/src/io.rs +++ /dev/null @@ -1,123 +0,0 @@ -//! Support for reading and writing structures as bytes - -use core::{ - array::TryFromSliceError, - convert::{TryFrom, TryInto}, - fmt::Debug, -}; - -use askar_crypto::buffer::WriteBuffer; -use bls12_381::{G1Affine, Scalar}; - -use crate::Error; - -/// Convert to and from a fixed-length byte array -pub trait FixedLengthBytes: Sized { - /// The length of the byte array - const LENGTH: usize; - - /// The type of the byte array - type Buffer: AsRef<[u8]> + Clone + Copy + Debug; - - /// Work with a reference to the byte array - fn with_bytes(&self, f: impl FnOnce(&Self::Buffer) -> R) -> R; - - /// Convert from a byte array - fn from_bytes(buf: &Self::Buffer) -> Result; - - /// Read an instance from a cursor - fn read_bytes(cur: &mut Cursor<'_>) -> Result - where - for<'a> &'a Self::Buffer: TryFrom<&'a [u8], Error = TryFromSliceError>, - { - let buf = cur.read(Self::LENGTH)?.try_into().unwrap(); - Self::from_bytes(buf) - } - - /// Write the byte array to a target - fn write_bytes(&self, buf: &mut dyn WriteBuffer) -> Result<(), Error> { - self.with_bytes(|b| buf.buffer_write(b.as_ref())) - } -} - -impl FixedLengthBytes for Scalar { - const LENGTH: usize = 32; - - type Buffer = [u8; 32]; - - fn from_bytes(buf: &Self::Buffer) -> Result { - let mut b = *buf; - b.reverse(); // into little-endian - if let Some(s) = bls12_381::Scalar::from_bytes(&b).into() { - Ok(s) - } else { - Err(err_msg!(Usage, "Scalar bytes not in canonical format")) - } - } - - fn with_bytes(&self, f: impl FnOnce(&Self::Buffer) -> R) -> R { - let mut b = self.to_bytes(); - b.reverse(); // into 
big-endian - f(&b) - } -} - -#[derive(Clone, Debug)] -/// A cursor for incrementally parsing a byte slice -pub struct Cursor<'r>(&'r [u8]); - -impl<'r> Cursor<'r> { - /// Create a new cursor instance - pub fn new(buf: &'r [u8]) -> Self { - Self(buf) - } -} - -impl Cursor<'_> { - /// The remaining length of the slice - pub fn len(&self) -> usize { - self.0.len() - } - - /// Read a number of bytes from the slice - pub fn read(&mut self, len: usize) -> Result<&[u8], Error> { - if self.0.len() < len { - Err(err_msg!(ExceededBuffer)) - } else { - let (pfx, rest) = self.0.split_at(len); - self.0 = rest; - Ok(pfx) - } - } - - /// Read a type-safe number of bytes from the slice - pub fn read_fixed(&mut self) -> Result<&[u8; L], Error> { - if self.0.len() < L { - Err(err_msg!(ExceededBuffer)) - } else { - let (pfx, rest) = self.0.split_at(L); - self.0 = rest; - Ok(pfx.try_into().unwrap()) - } - } -} - -pub(crate) trait CompressedBytes: Sized { - fn read_compressed(cur: &mut Cursor<'_>) -> Result; - - fn write_compressed(&self, buf: &mut dyn WriteBuffer) -> Result<(), Error>; -} - -impl CompressedBytes for G1Affine { - fn read_compressed(cur: &mut Cursor<'_>) -> Result { - if let Some(pt) = G1Affine::from_compressed(cur.read_fixed()?).into() { - Ok(pt) - } else { - Err(err_msg!(Invalid, "Invalid G1 element")) - } - } - - fn write_compressed(&self, buf: &mut dyn WriteBuffer) -> Result<(), Error> { - buf.buffer_write(&self.to_compressed()) - } -} diff --git a/askar-bbs/src/lib.rs b/askar-bbs/src/lib.rs deleted file mode 100644 index a3d746b7..00000000 --- a/askar-bbs/src/lib.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! 
BBS+ signature support for aries-askar - -#![cfg_attr(not(feature = "std"), no_std)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![deny(missing_debug_implementations, missing_docs, rust_2018_idioms)] - -#[cfg(feature = "alloc")] -extern crate alloc; - -pub use bls12_381; - -#[macro_use] -extern crate askar_crypto; -pub use askar_crypto::{Error, ErrorKind}; - -#[macro_use] -mod macros; - -mod challenge; -pub use challenge::{CreateChallenge, ProofChallenge}; - -pub mod collect; - -mod commitment; -pub use commitment::{ - Blinding, Commitment, CommitmentBuilder, CommitmentProof, CommitmentProofContext, - CommitmentProofVerifier, COMMITMENT_PROOF_DST_G1, -}; - -mod generators; -pub use generators::{DynGenerators, Generators, GeneratorsSeq, VecGenerators, GENERATORS_DST_G1}; - -pub mod hash; - -pub mod io; - -mod proof; -pub use proof::{ - SignatureProof, SignatureProofContext, SignatureProofVerifier, SignatureProver, - SIGNATURE_PROOF_DST_G1, -}; - -mod signature; -pub use signature::{Message, Signature, SignatureBuilder, SignatureVerifier}; - -mod util; -pub use util::Nonce; diff --git a/askar-bbs/src/macros.rs b/askar-bbs/src/macros.rs deleted file mode 100644 index d6d873a2..00000000 --- a/askar-bbs/src/macros.rs +++ /dev/null @@ -1,46 +0,0 @@ -macro_rules! 
impl_scalar_type { - ($type:ident, $doc:expr) => { - #[derive(Clone, Copy, Debug, PartialEq, Eq)] - #[doc = $doc] - pub struct $type(pub(crate) bls12_381::Scalar); - - impl $crate::io::FixedLengthBytes for $type { - const LENGTH: usize = 32; - - type Buffer = [u8; 32]; - - fn from_bytes(buf: &Self::Buffer) -> Result { - let s = ::from_bytes(buf)?; - Ok(Self(s)) - } - - fn with_bytes(&self, f: impl FnOnce(&Self::Buffer) -> R) -> R { - ::with_bytes(&self.0, f) - } - } - - impl subtle::ConstantTimeEq for $type { - fn ct_eq(&self, other: &Self) -> subtle::Choice { - self.0.ct_eq(&other.0) - } - } - - impl From<&bls12_381::Scalar> for $type { - fn from(s: &bls12_381::Scalar) -> Self { - Self(*s) - } - } - - impl From for $type { - fn from(s: bls12_381::Scalar) -> Self { - Self(s) - } - } - - impl From for $type { - fn from(s: u64) -> Self { - Self(bls12_381::Scalar::from(s)) - } - } - }; -} diff --git a/askar-bbs/src/proof.rs b/askar-bbs/src/proof.rs deleted file mode 100644 index 9617b77e..00000000 --- a/askar-bbs/src/proof.rs +++ /dev/null @@ -1,565 +0,0 @@ -#[cfg(feature = "alloc")] -use alloc::vec::Vec as StdVec; - -use askar_crypto::buffer::WriteBuffer; -use bls12_381::{pairing, G1Affine, G1Projective, G2Affine, Scalar}; -use rand::{CryptoRng, Rng}; -use subtle::ConstantTimeEq; - -#[cfg(feature = "getrandom")] -use askar_crypto::random::default_rng; - -use crate::{ - challenge::{CreateChallenge, ProofChallenge}, - collect::{DefaultSeq, Seq, Vec}, - commitment::Blinding, - generators::Generators, - io::{CompressedBytes, Cursor, FixedLengthBytes}, - signature::{Message, Signature}, - util::{random_scalar, AccumG1, Nonce}, - Error, -}; - -/// A standard domain-specific input for use in signature proofs of knowledge -pub const SIGNATURE_PROOF_DST_G1: &[u8] = b"BLS12381G1_BBS+_SIGNATURES_POK:1_0_0"; - -#[derive(Clone, Debug)] -/// Generate a signature proof of knowledge -pub struct SignatureProver<'g, G, S = DefaultSeq<128>> -where - G: Generators, - S: Seq<(Message, 
Blinding)>, -{ - accum_b: AccumG1, - accum_c2: AccumG1, - count: usize, - generators: &'g G, - hidden: Vec<(Message, Blinding), S>, - signature: Signature, -} - -impl<'g, G> SignatureProver<'g, G> -where - G: Generators, -{ - /// Create a new signature prover - pub fn new( - generators: &'g G, - signature: &Signature, - ) -> SignatureProver<'g, G, DefaultSeq<128>> { - Self::new_sized(generators, signature) - } -} - -impl<'g, G, S> SignatureProver<'g, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)>, -{ - /// Create a new signature prover with a specific backing sequence type - pub fn new_sized(generators: &'g G, signature: &Signature) -> Self { - Self { - accum_b: AccumG1::new_with(G1Projective::generator()), - accum_c2: AccumG1::zero(), - count: 0, - generators, - hidden: Vec::new(), - signature: *signature, - } - } -} - -impl SignatureProver<'_, G, S> -where - G: Generators, - S: Seq<(Message, Blinding)>, -{ - /// Push a revealed signed message - pub fn push_message(&mut self, message: Message) -> Result<(), Error> { - let c = self.count; - if c >= self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - self.accum_b.push(self.generators.message(c), message.0); - self.count = c + 1; - Ok(()) - } - - /// Push a sequence of revealed signed messages - pub fn append_messages( - &mut self, - messages: impl IntoIterator, - ) -> Result<(), Error> { - for msg in messages { - self.push_message(msg)?; - } - Ok(()) - } - - #[cfg(feature = "getrandom")] - /// Push a hidden signed message - pub fn push_hidden_message(&mut self, message: Message) -> Result<(), Error> { - self.push_hidden_message_with(message, Blinding::random()) - } - - /// Push a hidden signed message with a specific blinding value - pub fn push_hidden_message_with( - &mut self, - message: Message, - blinding: Blinding, - ) -> Result<(), Error> { - let c = self.count; - if c >= self.generators.message_count() { - return Err(err_msg!(Usage, 
"Message index exceeds generator count")); - } - let base = self.generators.message(c); - self.hidden.push((message, blinding))?; - self.accum_b.push(base, message.0); - self.accum_c2.push(base, blinding.0); - self.count = c + 1; - Ok(()) - } - - #[cfg(feature = "getrandom")] - /// Prepare the context for generating the final proof - pub fn prepare(self) -> Result, Error> { - self.prepare_with_rng(default_rng()) - } - - /// Prepare the context for generating the final proof - pub fn prepare_with_rng( - mut self, - mut rng: impl CryptoRng + Rng, - ) -> Result, Error> { - if self.count != self.generators.message_count() { - return Err(err_msg!( - Usage, - "Message count does not match generator count" - )); - } - - let Signature { a, e, s } = self.signature; - self.accum_b.push(self.generators.blinding(), s); - let b = self.accum_b.sum(); - let h0 = self.generators.blinding(); - let r1 = random_scalar(&mut rng); - let r2 = random_scalar(&mut rng); - let r3 = r1.invert().unwrap(); - let e_rand = random_scalar(&mut rng); - let r2_rand = random_scalar(&mut rng); - let r3_rand = random_scalar(&mut rng); - let s_rand = random_scalar(&mut rng); - - let b_r1 = b * r1; - let a_prime = a * r1; - let a_bar = a_prime * (-e) + b_r1; - let d = h0 * (-r2) + b_r1; - let s_prime = s - r2 * r3; - - let c1 = AccumG1::calc(&[(a_prime, e_rand), (h0, r2_rand)]); - self.accum_c2.append(&[(d, r3_rand), (h0, s_rand)][..]); - - let mut affine = [G1Affine::identity(); 5]; - G1Projective::batch_normalize( - &[a_prime, a_bar, d, c1, self.accum_c2.sum()], - &mut affine[..], - ); - - Ok(SignatureProofContext { - params: ProofPublicParams { - a_prime: affine[0], - a_bar: affine[1], - d: affine[2], - }, - c1: affine[3], - c2: affine[4], - e, - e_rand, - r2, - r2_rand, - r3, - r3_rand, - s_prime, - s_rand, - hidden: self.hidden, - }) - } - - #[cfg(feature = "getrandom")] - /// Complete an independent signature proof of knowledge - pub fn complete(self, nonce: Nonce) -> Result<(ProofChallenge, 
SignatureProof), Error> - where - S: Seq, - { - self.complete_with_rng(default_rng(), nonce) - } - - /// Complete an independent signature proof of knowledge with a given RNG - pub fn complete_with_rng( - self, - rng: impl CryptoRng + Rng, - nonce: Nonce, - ) -> Result<(ProofChallenge, SignatureProof), Error> - where - S: Seq, - { - let context = self.prepare_with_rng(rng)?; - let challenge = context.create_challenge(nonce, Some(SIGNATURE_PROOF_DST_G1))?; - let proof = context.complete(challenge)?; - Ok((challenge, proof)) - } -} - -#[derive(Clone, Debug)] -/// A prepared context for generating a signature proof of knowledge -pub struct SignatureProofContext -where - S: Seq<(Message, Blinding)>, -{ - params: ProofPublicParams, - c1: G1Affine, - c2: G1Affine, - e: Scalar, - e_rand: Scalar, - r2: Scalar, - r2_rand: Scalar, - r3: Scalar, - r3_rand: Scalar, - s_prime: Scalar, - s_rand: Scalar, - hidden: Vec<(Message, Blinding), S>, -} - -impl SignatureProofContext -where - S: Seq<(Message, Blinding)>, - S: Seq, -{ - /// Complete the signature proof of knowledge given a Fiat-Shamir challenge value - pub fn complete(&self, challenge: ProofChallenge) -> Result, Error> { - let c = challenge.0; - let mut m_resp = Vec::with_capacity(self.hidden.len()); - for (msg, m_rand) in self.hidden.iter() { - m_resp.push(m_rand.0 - c * msg.0)?; - } - Ok(SignatureProof { - params: self.params, - e_resp: self.e_rand + c * self.e, - r2_resp: self.r2_rand - c * self.r2, - r3_resp: self.r3_rand + c * self.r3, - s_resp: self.s_rand - c * self.s_prime, - m_resp, - }) - } -} - -impl CreateChallenge for SignatureProofContext -where - S: Seq<(Message, Blinding)>, -{ - fn write_challenge_bytes(&self, writer: &mut dyn WriteBuffer) -> Result<(), Error> { - self.params - .write_challenge_bytes(&self.c1, &self.c2, writer) - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -struct ProofPublicParams { - a_prime: G1Affine, - a_bar: G1Affine, - d: G1Affine, -} - -impl ProofPublicParams { - pub fn 
write_challenge_bytes( - &self, - c1: &G1Affine, - c2: &G1Affine, - writer: &mut dyn WriteBuffer, - ) -> Result<(), askar_crypto::Error> { - writer.buffer_write(&self.a_bar.to_uncompressed())?; - writer.buffer_write(&self.a_prime.to_uncompressed())?; - writer.buffer_write(&self.d.to_uncompressed())?; - writer.buffer_write(&c1.to_uncompressed())?; - writer.buffer_write(&c2.to_uncompressed())?; - Ok(()) - } -} - -#[derive(Clone, Debug)] -/// A signature proof of knowledge -pub struct SignatureProof -where - S: Seq, -{ - params: ProofPublicParams, - e_resp: Scalar, - r2_resp: Scalar, - r3_resp: Scalar, - s_resp: Scalar, - m_resp: Vec, -} - -impl SignatureProof> { - /// Convert a signature proof of knowledge from a byte slice - pub fn from_bytes(buf: &[u8]) -> Result { - Self::from_bytes_sized(buf) - } -} - -impl SignatureProof -where - S: Seq, -{ - /// Create a verifier for the signature proof of knowledge - pub fn verifier<'v, G>( - &'v self, - generators: &'v G, - challenge: ProofChallenge, - ) -> Result, Error> - where - G: Generators, - { - SignatureProofVerifier::new(generators, self, challenge) - } - - /// Write the signature proof of knowledge to an output buffer - pub fn write_bytes(&self, buf: &mut dyn WriteBuffer) -> Result<(), Error> { - self.params.a_prime.write_compressed(&mut *buf)?; - self.params.a_bar.write_compressed(&mut *buf)?; - self.params.d.write_compressed(&mut *buf)?; - self.e_resp.write_bytes(&mut *buf)?; - self.r2_resp.write_bytes(&mut *buf)?; - self.r3_resp.write_bytes(&mut *buf)?; - self.s_resp.write_bytes(&mut *buf)?; - buf.buffer_write(&(self.m_resp.len() as u32).to_be_bytes())?; - for resp in self.m_resp.iter() { - resp.write_bytes(&mut *buf)?; - } - Ok(()) - } - - #[cfg(feature = "alloc")] - /// Output the signature proof of knowledge as a byte vec - pub fn to_bytes(&self) -> Result, Error> { - let mut out = StdVec::with_capacity(48 * 3 + 32 * 5 + 4); - self.write_bytes(&mut out)?; - Ok(out) - } - - /// Convert a signature proof of 
knowledge from a byte slice - pub fn from_bytes_sized(buf: &[u8]) -> Result { - let mut cur = Cursor::new(buf); - let params = ProofPublicParams { - a_prime: G1Affine::read_compressed(&mut cur)?, - a_bar: G1Affine::read_compressed(&mut cur)?, - d: G1Affine::read_compressed(&mut cur)?, - }; - let e_resp = Scalar::read_bytes(&mut cur)?; - let r2_resp = Scalar::read_bytes(&mut cur)?; - let r3_resp = Scalar::read_bytes(&mut cur)?; - let s_resp = Scalar::read_bytes(&mut cur)?; - let m_len = u32::from_be_bytes(*cur.read_fixed()?) as usize; - let mut m_resp = Vec::with_capacity(m_len); - for _ in 0..m_len { - m_resp.push(Scalar::read_bytes(&mut cur)?)?; - } - if cur.len() != 0 { - return Err(err_msg!(Invalid, "Invalid length")); - } - Ok(Self { - params, - e_resp, - r2_resp, - r3_resp, - s_resp, - m_resp, - }) - } - - /// Get the response value from the post-challenge phase of the sigma protocol - /// for a given message index - pub fn get_response(&self, index: usize) -> Result { - self.m_resp - .get(index) - .map(Blinding::from) - .ok_or_else(|| err_msg!(Usage, "Invalid index for hidden message")) - } -} - -impl PartialEq> for SignatureProof -where - S: Seq, - T: Seq, -{ - fn eq(&self, other: &SignatureProof) -> bool { - self.params == other.params - && self.e_resp == other.e_resp - && self.r2_resp == other.r2_resp - && self.r3_resp == other.r3_resp - && self.s_resp == other.s_resp - && &*self.m_resp == &*other.m_resp - } -} -impl Eq for SignatureProof where S: Seq {} - -#[derive(Clone, Debug)] -/// A verifier for a signature proof of knowledge -pub struct SignatureProofVerifier<'v, G, S> -where - G: Generators, - S: Seq, -{ - generators: &'v G, - proof: &'v SignatureProof, - neg_challenge: Scalar, - c1: G1Projective, - accum_c2: AccumG1, - hidden_count: usize, - message_count: usize, -} - -impl<'v, G, S> SignatureProofVerifier<'v, G, S> -where - G: Generators, - S: Seq, -{ - pub(crate) fn new( - generators: &'v G, - proof: &'v SignatureProof, - challenge: 
ProofChallenge, - ) -> Result { - let ProofPublicParams { a_prime, a_bar, d } = proof.params; - let challenge = challenge.0; - let neg_challenge = -challenge; // negated early for multiplying - - let h0 = generators.blinding(); - let c1 = AccumG1::calc(&[ - (a_prime.into(), proof.e_resp), - (h0, proof.r2_resp), - (G1Projective::from(a_bar) - d, challenge), - ]); - let accum_c2 = AccumG1::from( - &[ - (d.into(), proof.r3_resp), - (h0, proof.s_resp), - (G1Projective::generator(), neg_challenge), - ][..], - ); - - Ok(Self { - generators, - proof, - neg_challenge, - c1, - accum_c2, - hidden_count: 0, - message_count: 0, - }) - } -} - -impl SignatureProofVerifier<'_, G, S> -where - G: Generators, - S: Seq, -{ - /// Push a revealed signed message - pub fn push_revealed(&mut self, message: Message) -> Result<(), Error> { - let c = self.message_count; - if c >= self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - self.accum_c2 - .push(self.generators.message(c), message.0 * self.neg_challenge); - self.message_count = c + 1; - Ok(()) - } - - /// Push a sequence of revealed signed messages - pub fn append_revealed( - &mut self, - messages: impl IntoIterator, - ) -> Result<(), Error> { - for msg in messages { - self.push_revealed(msg)?; - } - Ok(()) - } - - /// Push a number of hidden signed messages - pub fn push_hidden_count(&mut self, count: usize) -> Result<(), Error> { - let c = self.message_count + count; - if c > self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - if self.hidden_count + c > self.proof.m_resp.len() { - return Err(err_msg!( - Usage, - "Hidden message count exceeded response count" - )); - } - for index in self.message_count..c { - self.accum_c2.push( - self.generators.message(index), - self.proof.m_resp[self.hidden_count], - ); - self.hidden_count += 1; - } - self.message_count = c; - Ok(()) - } - - /// Complete the proof challenge value 
for an independent proof - pub fn complete(&self, nonce: Nonce) -> Result { - self.create_challenge(nonce, Some(SIGNATURE_PROOF_DST_G1)) - } - - /// Verify the signature proof of knowledge - pub fn verify(&self, challenge_v: ProofChallenge) -> Result<(), Error> { - if self.message_count != self.generators.message_count() { - return Err(err_msg!( - Invalid, - "Number of messages does not correspond with generators" - )); - } - if self.hidden_count != self.proof.m_resp.len() { - return Err(err_msg!( - Invalid, - "Number of hidden messages does not correspond with responses" - )); - } - // the challenge value is negated on this struct, so compare the sum to zero - if challenge_v.0 + self.neg_challenge != Scalar::zero() { - return Err(err_msg!( - Invalid, - "Signature proof of knowledge challenge mismatch" - )); - } - - let ProofPublicParams { a_prime, a_bar, .. } = self.proof.params; - let check_pair = pairing(&a_prime, &self.generators.public_key()) - .ct_eq(&pairing(&a_bar, &G2Affine::generator())); - - let verify: bool = (!a_prime.is_identity() & check_pair).into(); - if verify { - Ok(()) - } else { - Err(err_msg!(Invalid)) - } - } -} - -impl CreateChallenge for SignatureProofVerifier<'_, G, S> -where - G: Generators, - S: Seq, -{ - fn write_challenge_bytes(&self, writer: &mut dyn WriteBuffer) -> Result<(), Error> { - let mut checks = [G1Affine::identity(); 2]; - G1Projective::batch_normalize(&[self.c1, self.accum_c2.sum()], &mut checks[..]); - self.proof - .params - .write_challenge_bytes(&checks[0], &checks[1], writer) - } -} diff --git a/askar-bbs/src/signature.rs b/askar-bbs/src/signature.rs deleted file mode 100644 index ba638c8e..00000000 --- a/askar-bbs/src/signature.rs +++ /dev/null @@ -1,287 +0,0 @@ -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - buffer::Writer, -}; -use bls12_381::{pairing, G1Affine, G1Projective, G2Affine, G2Projective, Scalar}; -use group::Curve; -use subtle::ConstantTimeEq; - -use crate::{ - commitment::{Blinding, Commitment}, - 
generators::Generators, - hash::HashScalar, - io::{CompressedBytes, Cursor, FixedLengthBytes}, - proof::SignatureProver, - util::AccumG1, - Error, -}; - -const SIGNATURE_LENGTH: usize = 48 + 32 + 32; - -impl_scalar_type!(Message, "A message value used in a signature"); - -impl Message { - /// Generate a message value by hashing arbitrary length input - pub fn hash(input: impl AsRef<[u8]>) -> Self { - Self(HashScalar::digest(input, None)) - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -/// A BBS+ signature value -pub struct Signature { - pub(crate) a: G1Affine, - pub(crate) e: Scalar, - pub(crate) s: Scalar, -} - -impl Signature { - /// Create a prover for this signature - pub fn prover<'g, G>(&self, generators: &'g G) -> SignatureProver<'g, G> - where - G: Generators, - { - SignatureProver::new(generators, &self) - } - - /// Unblind a signature created against a commitment - pub fn unblind(self, blinding: Blinding) -> Self { - let Signature { a, e, s } = self; - Self { - a, - e, - s: s + blinding.0, - } - } - - /// Verify a signature with a set of known messages - pub fn verify( - &self, - generators: &G, - messages: impl IntoIterator, - ) -> Result<(), Error> - where - G: Generators, - { - let mut verifier = SignatureVerifier::new(generators, self); - verifier.append_messages(messages)?; - verifier.verify() - } - - /// Create a new signature verifier - pub fn verifier<'g, G>(&self, generators: &'g G) -> SignatureVerifier<'g, G> - where - G: Generators, - { - SignatureVerifier::new(generators, self) - } -} - -impl FixedLengthBytes for Signature { - const LENGTH: usize = SIGNATURE_LENGTH; - - type Buffer = [u8; SIGNATURE_LENGTH]; - - fn with_bytes(&self, f: impl FnOnce(&Self::Buffer) -> R) -> R { - let mut buf = [0u8; Self::LENGTH]; - let mut w = Writer::from_slice(&mut buf); - self.a.write_compressed(&mut w).unwrap(); - self.e.write_bytes(&mut w).unwrap(); - self.s.write_bytes(&mut w).unwrap(); - f(&buf) - } - - fn from_bytes(buf: &Self::Buffer) -> Result { 
- let mut cur = Cursor::new(buf); - let a = G1Affine::read_compressed(&mut cur)?; - let e = Scalar::read_bytes(&mut cur)?; - let s = Scalar::read_bytes(&mut cur)?; - Ok(Self { a, e, s }) - } -} - -#[derive(Clone, Debug)] -/// A builder for a signature -pub struct SignatureBuilder<'g, G: Generators> { - accum_b: AccumG1, - generators: &'g G, - hash_es: HashScalar<'static>, - key: &'g BlsKeyPair, - message_count: usize, -} - -impl<'g, G: Generators> SignatureBuilder<'g, G> { - /// Create a new signature builder - pub fn new(generators: &'g G, key: &'g BlsKeyPair) -> Self { - Self::from_accum(generators, key, G1Projective::generator()) - } - - /// Create a new signature builder with a blinded messages commitment value - pub fn from_commitment( - generators: &'g G, - key: &'g BlsKeyPair, - commitment: Commitment, - ) -> Self { - Self::from_accum(generators, key, G1Projective::generator() + commitment.0) - } - - /// Utility method to sign a set of messages with no blinded commitment - pub fn sign( - generators: &'g G, - key: &'g BlsKeyPair, - messages: impl IntoIterator, - ) -> Result { - let mut slf = Self::from_accum(generators, key, G1Projective::generator()); - slf.append_messages(messages)?; - slf.to_signature() - } - - #[inline] - fn from_accum(generators: &'g G, key: &'g BlsKeyPair, sum: G1Projective) -> Self { - Self { - accum_b: AccumG1::new_with(sum), - generators, - hash_es: HashScalar::new_with_input(&sum.to_affine().to_compressed(), None), - key, - message_count: 0, - } - } -} - -impl SignatureBuilder<'_, G> { - /// Push a message to be signed - pub fn push_message(&mut self, message: Message) -> Result<(), Error> { - let c = self.message_count; - if c >= self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - self.accum_b.push(self.generators.message(c), message.0); - self.hash_es.update(&message.0.to_bytes()); - self.message_count = c + 1; - Ok(()) - } - - /// Push a sequence of messages to be signed 
- pub fn append_messages( - &mut self, - messages: impl IntoIterator, - ) -> Result<(), Error> { - for msg in messages { - self.push_message(msg)?; - } - Ok(()) - } - - /// Push a number of blind (committed) messages - pub fn push_committed_count(&mut self, count: usize) -> Result<(), Error> { - let c = self.message_count + count; - if c > self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - self.message_count = c; - Ok(()) - } - - /// Get the current number of added messages - pub fn len(&self) -> usize { - self.message_count - } - - /// Create a signature from the builder - pub fn to_signature(&self) -> Result { - if self.message_count != self.generators.message_count() { - return Err(err_msg!( - Usage, - "Message count does not match generator count" - )); - } - let sk = self - .key - .bls_secret_scalar() - .ok_or_else(|| err_msg!(MissingSecretKey))?; - if sk == &Scalar::zero() { - return Err(err_msg!(MissingSecretKey)); - } - let mut hash_es = self.hash_es.clone(); - hash_es.update(sk.to_bytes()); - let mut hash_read = hash_es.finalize(); - let e = hash_read.next(); - let s = hash_read.next(); - let b = self.accum_b.sum_with(self.generators.blinding(), s); - let a = (b * (sk + e).invert().unwrap()).to_affine(); - Ok(Signature { a, e, s }) - } -} - -#[derive(Clone, Debug)] -/// A verifier for a BBS+ signature -pub struct SignatureVerifier<'g, G: Generators> { - accum_b: AccumG1, - generators: &'g G, - message_count: usize, - signature: Signature, -} - -impl<'g, G: Generators> SignatureVerifier<'g, G> { - /// Create a new signature verifier - pub fn new(generators: &'g G, signature: &Signature) -> Self { - Self { - accum_b: AccumG1::new_with(G1Projective::generator()), - generators, - message_count: 0, - signature: *signature, - } - } -} - -impl SignatureVerifier<'_, G> { - /// Push a signed message - pub fn push_message(&mut self, message: Message) -> Result<(), Error> { - let c = self.message_count; - 
if c >= self.generators.message_count() { - return Err(err_msg!(Usage, "Message index exceeds generator count")); - } - self.accum_b.push(self.generators.message(c), message.0); - self.message_count = c + 1; - Ok(()) - } - - /// Push a sequence of signed messages - pub fn append_messages( - &mut self, - messages: impl IntoIterator, - ) -> Result<(), Error> { - for msg in messages { - self.push_message(msg)?; - } - Ok(()) - } - - /// Get the current number of added messages - pub fn len(&self) -> usize { - self.message_count - } - - /// Verify a signature - pub fn verify(&self) -> Result<(), Error> { - if self.message_count != self.generators.message_count() { - return Err(err_msg!( - Usage, - "Message count does not match generator count" - )); - } - let Signature { a, e, s } = self.signature; - let b = self.accum_b.sum_with(self.generators.blinding(), s); - let valid: bool = pairing( - &a, - &(G2Projective::generator() * e + self.generators.public_key()).to_affine(), - ) - .ct_eq(&pairing(&b.to_affine(), &G2Affine::generator())) - .into(); - if valid { - Ok(()) - } else { - Err(err_msg!(Invalid)) - } - } -} diff --git a/askar-bbs/src/util.rs b/askar-bbs/src/util.rs deleted file mode 100644 index c783315a..00000000 --- a/askar-bbs/src/util.rs +++ /dev/null @@ -1,192 +0,0 @@ -use bls12_381::{G1Projective, Scalar}; -use rand::{CryptoRng, Rng}; -use subtle::ConstantTimeEq; - -#[cfg(feature = "getrandom")] -use askar_crypto::random::default_rng; - -pub(crate) fn random_scalar(mut rng: R) -> Scalar { - let mut buf = [0u8; 64]; - let mut s; - loop { - rng.fill_bytes(&mut buf); - s = Scalar::from_bytes_wide(&buf); - if !bool::from(s.ct_eq(&Scalar::zero())) { - break s; - } - } -} - -impl_scalar_type!(Nonce, "A nonce used in proof verification"); - -impl Nonce { - #[cfg(feature = "getrandom")] - /// Generate a new random nonce value - pub fn random() -> Self { - Self(random_scalar(default_rng())) - } - - /// Generate a new random nonce value from a specific RNG - pub fn 
random_with_rng(rng: impl CryptoRng + Rng) -> Self { - Self(random_scalar(rng)) - } -} - -// sum-of-products impl: - -// const ACCUM_BATCH: usize = 16; - -// #[derive(Clone, Debug)] -// pub(crate) struct AccumG1 { -// accum: G1Projective, -// stack_base: [G1Projective; ACCUM_BATCH], -// stack_factor: [Scalar; ACCUM_BATCH], -// stack_size: usize, -// } - -// impl AccumG1 { -// pub fn zero() -> Self { -// Self::new_with(G1Projective::identity()) -// } - -// pub fn new_with(accum: impl Into) -> Self { -// Self { -// accum: accum.into(), -// stack_base: [G1Projective::identity(); ACCUM_BATCH], -// stack_factor: [Scalar::zero(); ACCUM_BATCH], -// stack_size: 0, -// } -// } - -// pub fn calc(pairs: &[(G1Projective, Scalar)]) -> G1Projective { -// let mut acc = Self::zero(); -// acc.append(pairs); -// acc.sum() -// } - -// #[inline] -// fn rollup(&mut self) -> G1Projective { -// let sz = self.stack_size; -// G1Projective::sum_of_products_in_place(&self.stack_base[..sz], &mut self.stack_factor[..sz]) -// } - -// #[inline] -// pub fn push(&mut self, base: G1Projective, factor: Scalar) { -// let mut sz = self.stack_size; -// if sz == ACCUM_BATCH { -// let sum = self.rollup(); -// self.accum += sum; -// sz = 0; -// }; -// self.stack_base[sz] = base; -// self.stack_factor[sz] = factor; -// self.stack_size = sz + 1; -// } - -// pub fn append(&mut self, pairs: &[(G1Projective, Scalar)]) { -// for (base, factor) in pairs.into_iter().copied() { -// self.push(base, factor); -// } -// } - -// pub fn sum(&self) -> G1Projective { -// let mut sum = self.accum; -// let sz = self.stack_size; -// if sz > 0 { -// let mut factors = self.stack_factor; -// sum += -// G1Projective::sum_of_products_in_place(&self.stack_base[..sz], &mut factors[..sz]); -// } -// sum -// } - -// pub fn sum_mut(&mut self) -> G1Projective { -// if self.stack_size > 0 { -// let sum = self.rollup(); -// self.accum += sum; -// self.stack_size = 0; -// } -// self.accum -// } - -// pub fn sum_with(&self, base: 
G1Projective, factor: Scalar) -> G1Projective { -// let sum = self.accum; -// let mut sz = self.stack_size; -// if sz > 0 { -// let mut bases = [G1Projective::identity(); ACCUM_BATCH + 1]; -// let mut factors = [Scalar::zero(); ACCUM_BATCH + 1]; -// bases[..sz].copy_from_slice(&self.stack_base[..sz]); -// factors[..sz].copy_from_slice(&self.stack_factor[..sz]); -// bases[sz] = base; -// factors[sz] = factor; -// sz += 1; -// sum + G1Projective::sum_of_products_in_place(&bases[..sz], &mut factors[..sz]) -// } else { -// sum + base * factor -// } -// } -// } - -#[derive(Clone, Debug)] -pub(crate) struct AccumG1 { - accum: G1Projective, -} - -impl AccumG1 { - pub fn zero() -> Self { - Self::new_with(G1Projective::identity()) - } - - pub fn new_with(accum: impl Into) -> Self { - Self { - accum: accum.into(), - } - } - - pub fn calc(pairs: &[(G1Projective, Scalar)]) -> G1Projective { - let mut acc = Self::zero(); - acc.append(pairs); - acc.sum() - } - - #[inline] - pub fn push(&mut self, base: G1Projective, factor: Scalar) { - self.accum += base * factor; - } - - pub fn append(&mut self, pairs: &[(G1Projective, Scalar)]) { - for (base, factor) in pairs.into_iter().copied() { - self.push(base, factor); - } - } - - pub fn sum(&self) -> G1Projective { - self.accum - } - - pub fn sum_with(&self, base: G1Projective, factor: Scalar) -> G1Projective { - self.accum + base * factor - } -} - -impl From for AccumG1 { - fn from(accum: G1Projective) -> Self { - AccumG1::new_with(accum) - } -} - -impl From<(G1Projective, Scalar)> for AccumG1 { - fn from((base, factor): (G1Projective, Scalar)) -> Self { - let mut acc = AccumG1::zero(); - acc.push(base, factor); - acc - } -} - -impl From<&[(G1Projective, Scalar)]> for AccumG1 { - fn from(pairs: &[(G1Projective, Scalar)]) -> Self { - let mut acc = AccumG1::zero(); - acc.append(pairs); - acc - } -} diff --git a/askar-bbs/tests/blind_sign.rs b/askar-bbs/tests/blind_sign.rs deleted file mode 100644 index 3aa4af36..00000000 --- 
a/askar-bbs/tests/blind_sign.rs +++ /dev/null @@ -1,74 +0,0 @@ -#[cfg(feature = "getrandom")] -use askar_bbs::{ - CommitmentBuilder, CommitmentProof, DynGenerators, Message, Nonce, SignatureBuilder, -}; - -#[cfg(feature = "getrandom")] -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - buffer::Writer, - repr::KeyGen, -}; - -#[cfg(feature = "getrandom")] -#[test] -fn test_commitment_verify() { - let keypair = BlsKeyPair::::random().unwrap(); - let gens = DynGenerators::new(&keypair, 5); - let nonce = Nonce::random(); - let commit_messages = [(0, Message::hash(b"hello"))]; - let mut committer = CommitmentBuilder::new(&gens); - for (index, message) in commit_messages.iter().copied() { - committer.add_message(index, message).unwrap(); - } - let (challenge, _blinding, commitment, proof) = committer - .complete(nonce) - .expect("Error completing commitment"); - proof - .verify(&gens, commitment, [0].iter().copied(), challenge, nonce) - .expect("Error verifying commitment"); - - // test serialization round trip - let mut buf = [0u8; 1024]; - let mut w = Writer::from_slice(&mut buf); - proof.write_bytes(&mut w).expect("Error serializing proof"); - let proof_len = w.position(); - let proof_de = - CommitmentProof::from_bytes(&buf[..proof_len]).expect("Error deserializing proof"); - assert_eq!(proof, proof_de); -} - -#[cfg(feature = "getrandom")] -#[test] -fn test_blind_signature() { - let keypair = BlsKeyPair::::random().unwrap(); - let gens = DynGenerators::new(&keypair, 2); - let nonce = Nonce::random(); - let commit_messages = [(0, Message::hash(b"hello"))]; - let mut committer = CommitmentBuilder::new(&gens); - for (index, message) in commit_messages.iter().copied() { - committer.add_message(index, message).unwrap(); - } - let (challenge, blinding, commitment, proof) = committer - .complete(nonce) - .expect("Error completing commitment"); - proof - .verify(&gens, commitment, [0].iter().copied(), challenge, nonce) - .expect("Error verifying commitment"); - - let 
sign_messages = [Message::hash(b"world")]; - let mut signer = SignatureBuilder::from_commitment(&gens, &keypair, commitment); - signer.push_committed_count(1).unwrap(); - signer - .append_messages(sign_messages.iter().copied()) - .unwrap(); - let blind_signature = signer.to_signature().expect("Error creating signature"); - - let signature = blind_signature.unblind(blinding); - let mut verifier = signature.verifier(&gens); - verifier.push_message(commit_messages[0].1).unwrap(); - verifier - .append_messages(sign_messages.iter().copied()) - .unwrap(); - verifier.verify().expect("Error verifying signature"); -} diff --git a/askar-bbs/tests/generators.rs b/askar-bbs/tests/generators.rs deleted file mode 100644 index dffcfb74..00000000 --- a/askar-bbs/tests/generators.rs +++ /dev/null @@ -1,22 +0,0 @@ -use askar_bbs::{DynGenerators, Generators}; -use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeySecretBytes, -}; -use bls12_381::G1Projective; -use hex_literal::hex; - -#[test] -fn dyn_generators_expected() { - let keypair = BlsKeyPair::::from_secret_bytes(&hex!( - "0011223344556677889900112233445566778899001122334455667788990011" - )) - .unwrap(); - let message_count = 10; - let gens_count = message_count + 1; - let gens = DynGenerators::new(&keypair, message_count); - let iter = gens.iter(); - assert_eq!(iter.size_hint(), (gens_count, Some(gens_count))); - let hm: Vec = iter.collect(); - assert_eq!(hm.len(), gens_count); -} diff --git a/askar-bbs/tests/proof.rs b/askar-bbs/tests/proof.rs deleted file mode 100644 index 317a844c..00000000 --- a/askar-bbs/tests/proof.rs +++ /dev/null @@ -1,144 +0,0 @@ -#[cfg(feature = "getrandom")] -#[test] -fn prove_single_signature_hidden_message() { - use askar_bbs::{DynGenerators, Message, Nonce, SignatureBuilder, SignatureProof}; - use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - buffer::Writer, - repr::KeySecretBytes, - }; - use hex_literal::hex; - - let keypair = BlsKeyPair::::from_secret_bytes(&hex!( - 
"0011223344556677889900112233445566778899001122334455667788990011" - )) - .unwrap(); - let messages = [Message::hash("hello"), Message::hash("there")]; - let gens = DynGenerators::new(&keypair, messages.len()); - let mut builder = SignatureBuilder::new(&gens, &keypair); - builder - .append_messages(messages.iter().copied()) - .expect("Error building signature"); - let sig = builder.to_signature().expect("Error creating signature"); - - // verifier creates a nonce for the proof presentation - let nonce = Nonce::random(); - - // prover constructs the proof and challenge value for an independent proof - let mut prover = sig.prover(&gens); - prover.push_hidden_message(messages[0]).unwrap(); - prover.push_message(messages[1]).unwrap(); - let (challenge, proof) = prover - .complete(nonce) - .expect("Error creating signature pok"); - - // verifier checks the proof with the challenge value - let mut verifier = proof.verifier(&gens, challenge).unwrap(); - verifier.push_hidden_count(1).unwrap(); - verifier.push_revealed(messages[1]).unwrap(); - let challenge_v = verifier - .complete(nonce) - .expect("Error creating verification challenge"); - verifier - .verify(challenge_v) - .expect("Error verifying signature PoK"); - // double check challenge comparison for testing - assert_eq!(challenge, challenge_v); - - // test serialization round trip - let mut buf = [0u8; 1024]; - let mut w = Writer::from_slice(&mut buf); - proof.write_bytes(&mut w).expect("Error serializing proof"); - let proof_len = w.position(); - let proof_de = - SignatureProof::from_bytes(&buf[..proof_len]).expect("Error deserializing proof"); - assert_eq!(proof, proof_de); -} - -#[cfg(feature = "getrandom")] -#[test] -fn multi_proof_matching_hidden_message() { - use askar_bbs::{Blinding, DynGenerators, Message, Nonce, ProofChallenge, SignatureBuilder}; - use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - repr::KeySecretBytes, - }; - use hex_literal::hex; - - let test_proof_dst = b"test proof"; - let keypair = 
BlsKeyPair::::from_secret_bytes(&hex!( - "0011223344556677889900112233445566778899001122334455667788990011" - )) - .unwrap(); - let messages_1 = [Message::hash("hello"), Message::hash("there")]; - let messages_2 = [ - Message::hash("indeed"), - Message::hash("hello"), - Message::hash("stranger"), - ]; - let gens_1 = DynGenerators::new(&keypair, messages_1.len()); - let gens_2 = DynGenerators::new(&keypair, messages_2.len()); - let sig_1 = SignatureBuilder::sign(&gens_1, &keypair, messages_1.iter().copied()) - .expect("Error creating signature"); - let sig_2 = SignatureBuilder::sign(&gens_2, &keypair, messages_2.iter().copied()) - .expect("Error creating signature"); - - // verifier creates a nonce for the proof presentation - let nonce = Nonce::random(); - - // a common blinding value for the two messages to be proven equal - let msg_blind = Blinding::random(); - - // construct provers for the two signatures - let mut prover_1 = sig_1.prover(&gens_1); - prover_1 - .push_hidden_message_with(messages_1[0], msg_blind) - .unwrap(); - prover_1.push_message(messages_1[1]).unwrap(); - let prepare_1 = prover_1.prepare().unwrap(); - let mut prover_2 = sig_2.prover(&gens_2); - prover_2.push_hidden_message(messages_2[0]).unwrap(); - prover_2 - .push_hidden_message_with(messages_2[1], msg_blind) - .unwrap(); - prover_2.push_message(messages_2[2]).unwrap(); - let prepare_2 = prover_2.prepare().unwrap(); - - // prover creates a combined challenge value for the two sub-proofs - let challenge = ProofChallenge::create(&[&prepare_1, &prepare_2], nonce, Some(test_proof_dst)) - .expect("Error creating proof challenge"); - let proof_1 = prepare_1 - .complete(challenge) - .expect("Error completing signature pok"); - let proof_2 = prepare_2 - .complete(challenge) - .expect("Error completing signature pok"); - - // construct verifiers for the two sub-proofs - let mut verifier_1 = proof_1.verifier(&gens_1, challenge).unwrap(); - verifier_1.push_hidden_count(1).unwrap(); - 
verifier_1.push_revealed(messages_1[1]).unwrap(); - let mut verifier_2 = proof_2.verifier(&gens_2, challenge).unwrap(); - verifier_2.push_hidden_count(2).unwrap(); - verifier_2.push_revealed(messages_2[2]).unwrap(); - - // now verifier computes the challenge value - let challenge_v = - ProofChallenge::create(&[&verifier_1, &verifier_2], nonce, Some(test_proof_dst)) - .expect("Error creating proof challenge"); - // check the proofs - verifier_1 - .verify(challenge_v) - .expect("Error verifying signature PoK"); - verifier_2 - .verify(challenge_v) - .expect("Error verifying signature PoK"); - // double check challenge comparison for testing - assert_eq!(challenge, challenge_v); - - // check that the responses match, meaning that the hidden messages also match - assert_eq!( - proof_1.get_response(0).unwrap(), - proof_2.get_response(1).unwrap() - ); -} diff --git a/askar-bbs/tests/signature.rs b/askar-bbs/tests/signature.rs deleted file mode 100644 index 6666b919..00000000 --- a/askar-bbs/tests/signature.rs +++ /dev/null @@ -1,35 +0,0 @@ -#[test] -fn sign_verify_expected() { - use askar_bbs::{io::FixedLengthBytes, DynGenerators, Message, Signature, SignatureBuilder}; - use askar_crypto::{ - alg::bls::{BlsKeyPair, G2}, - buffer::Writer, - repr::KeySecretBytes, - }; - use hex_literal::hex; - - let keypair = BlsKeyPair::::from_secret_bytes(&hex!( - "0011223344556677889900112233445566778899001122334455667788990011" - )) - .unwrap(); - let messages = [Message::hash("hello")]; - let gens = DynGenerators::new(&keypair, messages.len()); - let sig = SignatureBuilder::sign(&gens, &keypair, messages.iter().copied()) - .expect("Error creating signature"); - - let mut verifier = sig.verifier(&gens); - verifier - .append_messages(messages.iter().copied()) - .expect("Error verifying signature"); - verifier.verify().expect("Error verifying signature"); - - // test serialization round trip - let mut buf = [0u8; 112]; - let mut w = Writer::from_slice(&mut buf); - sig.write_bytes(&mut w) 
- .expect("Error serializing signature"); - let sig_len = w.position(); - assert_eq!(sig_len, 112); - let sig_de = Signature::from_bytes(&buf).expect("Error deserializing signature"); - assert_eq!(sig, sig_de); -} diff --git a/askar-crypto/Cargo.toml b/askar-crypto/Cargo.toml index c308eb34..6802f1ae 100644 --- a/askar-crypto/Cargo.toml +++ b/askar-crypto/Cargo.toml @@ -9,7 +9,7 @@ readme = "README.md" repository = "https://github.com/hyperledger/aries-askar/" categories = ["cryptography", "no-std"] keywords = ["hyperledger", "aries", "didcomm", "ssi"] -rust-version = "1.60" +rust-version = "1.65" [package.metadata.docs.rs] features = ["argon2", "std"] @@ -21,18 +21,18 @@ alloc = [] std = ["alloc", "serde/std", "serde-json-core/std", "std_rng"] all_keys = ["aes", "bls", "chacha", "ec_curves", "ed25519"] any_key = ["alloc"] -aes = ["aes-core", "aes-gcm", "block-modes", "hmac"] +aes = ["aes-core", "aes-gcm", "block-modes", "cbc", "cipher", "hmac"] bls = ["bls12_381", "hkdf"] chacha = ["chacha20poly1305"] crypto_box = ["alloc", "crypto_box_rs", "ed25519", "getrandom"] -ec_curves = ["elliptic-curve", "k256", "p256"] +ec_curves = ["elliptic-curve", "k256", "p256", "p384"] ed25519 = ["curve25519-dalek", "ed25519-dalek", "x25519-dalek"] getrandom = ["rand/getrandom"] std_rng = ["getrandom", "rand/std", "rand/std_rng"] [dev-dependencies] -base64 = { version = "0.13", default-features = false, features = ["alloc"] } -criterion = "0.4" +base64 = { version = "0.21", default-features = false, features = ["alloc"] } +criterion = "0.5" hex-literal = "0.4" serde_cbor = "0.11" serde-json-core = { version = "0.5", default-features = false, features = ["std"] } @@ -46,27 +46,30 @@ name = "kdf" harness = false [dependencies] -aead = "0.4" -aes-core = { package = "aes", version = "0.7", default-features = false, optional = true } -aes-gcm = { version = "0.9", default-features = false, features = ["aes"], optional = true } +aead = "0.5" +aes-core = { package = "aes", version = "0.8", 
default-features = false, optional = true } +aes-gcm = { version = "0.10", default-features = false, features = ["aes"], optional = true } arbitrary = { version = "1.0", optional = true, features = ["derive"] } -argon2 = { version = "0.3", default-features = false, features = ["alloc", "password-hash"], optional = true } -base64 = { version = "0.13", default-features = false } +argon2 = { version = "0.5", default-features = false, features = ["alloc", "password-hash"], optional = true } +base64 = { version = "0.21", default-features = false } blake2 = { version = "0.10", default-features = false } -block-modes = { version = "0.8", default-features = false, optional = true } -bls12_381 = { version = "0.6", default-features = false, features = ["groups", "zeroize"], optional = true } -chacha20 = { version = "0.8" } # should match dependency of chacha20poly1305 -chacha20poly1305 = { version = "0.9", default-features = false, optional = true } -crypto_box_rs = { package = "crypto_box", version = "0.6", default-features = false, features = ["u64_backend"], optional = true } +block-modes = { version = "0.9", default-features = false, optional = true } +bls12_381 = { version = "0.8", default-features = false, features = ["groups", "zeroize"], optional = true } +cbc = { version = "0.1", default-features = false, optional = true } +chacha20 = { version = "0.9" } # should match dependency of chacha20poly1305 +chacha20poly1305 = { version = "0.10", default-features = false, optional = true } +cipher = { version = "0.4", default-features = false, features = ["block-padding"], optional = true } +crypto_box_rs = { package = "crypto_box", version = "0.8", default-features = false, features = ["u64_backend"], optional = true } curve25519-dalek = { version = "3.1", default-features = false, features = ["u64_backend"], optional = true } ed25519-dalek = { version = "1.0", default-features = false, features = ["u64_backend"], optional = true } -elliptic-curve = { version = "0.11", 
optional = true } +elliptic-curve = { version = "0.13", optional = true } digest = "0.10" -group = "0.11" +group = "0.13" hkdf = { version = "0.12", optional = true } hmac = { version = "0.12", optional = true } -k256 = { version = "0.10", default-features = false, features = ["arithmetic", "ecdsa", "ecdh", "sha256"], optional = true } -p256 = { version = "0.10", default-features = false, features = ["arithmetic", "ecdsa", "ecdh"], optional = true } +k256 = { version = "0.13", default-features = false, features = ["arithmetic", "ecdsa", "ecdh", "sha256"], optional = true } +p256 = { version = "0.13", default-features = false, features = ["arithmetic", "ecdsa", "ecdh"], optional = true } +p384 = { version = "0.13", default-features = false, features = ["arithmetic", "ecdsa", "ecdh"], optional = true } rand = { version = "0.8", default-features = false } serde = { version = "1.0", default-features = false, features = ["derive"] } serde-json-core = { version = "0.5", default-features = false } diff --git a/askar-crypto/src/alg/aes/cbc_hmac.rs b/askar-crypto/src/alg/aes/cbc_hmac.rs index 81348e2f..27533296 100644 --- a/askar-crypto/src/alg/aes/cbc_hmac.rs +++ b/askar-crypto/src/alg/aes/cbc_hmac.rs @@ -4,10 +4,9 @@ use core::marker::PhantomData; use aead::generic_array::ArrayLength; use aes_core::{Aes128, Aes256}; -use block_modes::{ - block_padding::Pkcs7, - cipher::{BlockCipher, BlockDecrypt, BlockEncrypt, NewBlockCipher}, - BlockMode, Cbc, +use cbc::{Decryptor as CbcDec, Encryptor as CbcEnc}; +use cipher::{ + block_padding::Pkcs7, BlockCipher, BlockDecryptMut, BlockEncryptMut, KeyInit, KeyIvInit, }; use digest::{crypto_common::BlockSizeUser, Digest}; use hmac::{Mac, SimpleHmac}; @@ -60,7 +59,7 @@ where impl KeyAeadMeta for AesKey> where AesCbcHmac: AesType, - C: BlockCipher + NewBlockCipher, + C: BlockCipher + KeyInit, { type NonceSize = C::BlockSize; type TagSize = C::KeySize; @@ -69,7 +68,7 @@ where impl KeyAeadInPlace for AesKey> where AesCbcHmac: AesType, - C: 
BlockCipher + NewBlockCipher + BlockEncrypt + BlockDecrypt, + C: BlockCipher + KeyInit + BlockEncryptMut + BlockDecryptMut, D: Digest + BlockSizeUser, C::KeySize: core::ops::Shl, >::Output: ArrayLength, @@ -101,12 +100,12 @@ where let pad_len = AesCbcHmac::::padding_length(msg_len); buffer.buffer_extend(pad_len + TagSize::::USIZE)?; let enc_key = GenericArray::from_slice(&self.0[C::KeySize::USIZE..]); - Cbc::::new_fix(enc_key, GenericArray::from_slice(nonce)) - .encrypt(buffer.as_mut(), msg_len) + as KeyIvInit>::new(enc_key, GenericArray::from_slice(nonce)) + .encrypt_padded_mut::(buffer.as_mut(), msg_len) .map_err(|_| err_msg!(Encryption, "AES-CBC encryption error"))?; let ctext_end = msg_len + pad_len; - let mut hmac = SimpleHmac::::new_from_slice(&self.0[..C::KeySize::USIZE]) + let mut hmac = as Mac>::new_from_slice(&self.0[..C::KeySize::USIZE]) .expect("Incompatible HMAC key length"); hmac.update(aad); hmac.update(nonce.as_ref()); @@ -141,7 +140,7 @@ where let ctext_end = buf_len - TagSize::::USIZE; let tag = GenericArray::>::from_slice(&buffer.as_ref()[ctext_end..]); - let mut hmac = SimpleHmac::::new_from_slice(&self.0[..C::KeySize::USIZE]) + let mut hmac = as Mac>::new_from_slice(&self.0[..C::KeySize::USIZE]) .expect("Incompatible HMAC key length"); hmac.update(aad); hmac.update(nonce.as_ref()); @@ -151,8 +150,8 @@ where let tag_match = tag.as_ref().ct_eq(&mac[..TagSize::::USIZE]); let enc_key = GenericArray::from_slice(&self.0[C::KeySize::USIZE..]); - let dec_len = Cbc::::new_fix(enc_key, GenericArray::from_slice(nonce)) - .decrypt(&mut buffer.as_mut()[..ctext_end]) + let dec_len = as KeyIvInit>::new(enc_key, GenericArray::from_slice(nonce)) + .decrypt_padded_mut::(&mut buffer.as_mut()[..ctext_end]) .map_err(|_| err_msg!(Encryption, "AES-CBC decryption error"))? 
.len(); buffer.buffer_resize(dec_len)?; @@ -178,10 +177,12 @@ where #[cfg(test)] mod tests { + use base64::Engine; + use std::string::ToString; + use super::*; use crate::buffer::SecretBytes; use crate::repr::KeySecretBytes; - use std::string::ToString; #[test] fn encrypt_expected_cbc_128_hmac_256() { @@ -246,15 +247,17 @@ mod tests { \"apu\":\"QWxpY2U\",\"apv\":\"Qm9iIGFuZCBDaGFybGll\",\"epk\":{\ \"kty\":\"OKP\",\"crv\":\"X25519\",\ \"x\":\"k9of_cpAajy0poW5gaixXGs9nHkwg1AFqUAFa39dyBc\"}}"; - let aad = base64::encode_config(protected, base64::URL_SAFE_NO_PAD); + let aad = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(protected); let input = b"Three is a magic number."; let key = AesKey::::from_secret_bytes(key_data).unwrap(); let mut buffer = SecretBytes::from_slice(input); let ct_len = key .encrypt_in_place(&mut buffer, &nonce[..], aad.as_bytes()) .unwrap(); - let ctext = base64::encode_config(&buffer.as_ref()[..ct_len], base64::URL_SAFE_NO_PAD); - let tag = base64::encode_config(&buffer.as_ref()[ct_len..], base64::URL_SAFE_NO_PAD); + let ctext = + base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&buffer.as_ref()[..ct_len]); + let tag = + base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(&buffer.as_ref()[ct_len..]); assert_eq!(ctext, "Az2IWsISEMDJvyc5XRL-3-d-RgNBOGolCsxFFoUXFYw"); assert_eq!(tag, "HLb4fTlm8spGmij3RyOs2gJ4DpHM4hhVRwdF_hGb3WQ"); key.decrypt_in_place(&mut buffer, &nonce[..], aad.as_bytes()) diff --git a/askar-crypto/src/alg/aes/key_wrap.rs b/askar-crypto/src/alg/aes/key_wrap.rs index 12f3b2cd..15580cfa 100644 --- a/askar-crypto/src/alg/aes/key_wrap.rs +++ b/askar-crypto/src/alg/aes/key_wrap.rs @@ -2,8 +2,10 @@ use core::marker::PhantomData; -use aes_core::{Aes128, Aes256}; -use block_modes::cipher::{BlockCipher, BlockDecrypt, BlockEncrypt, NewBlockCipher}; +use aes_core::{ + cipher::{BlockCipher, BlockDecrypt, BlockEncrypt, KeyInit, KeySizeUser}, + Aes128, Aes256, +}; use subtle::ConstantTimeEq; use super::{AesKey, AesType, 
NonceSize, TagSize}; @@ -24,7 +26,7 @@ const AES_KW_DEFAULT_IV: [u8; 8] = [166, 166, 166, 166, 166, 166, 166, 166]; pub type A128Kw = AesKeyWrap; impl AesType for A128Kw { - type KeySize = ::KeySize; + type KeySize = ::KeySize; const ALG_TYPE: AesTypes = AesTypes::A128Kw; const JWK_ALG: &'static str = "A128KW"; } @@ -33,7 +35,7 @@ impl AesType for A128Kw { pub type A256Kw = AesKeyWrap; impl AesType for A256Kw { - type KeySize = ::KeySize; + type KeySize = ::KeySize; const ALG_TYPE: AesTypes = AesTypes::A256Kw; const JWK_ALG: &'static str = "A256KW"; } @@ -53,7 +55,8 @@ where impl KeyAeadInPlace for AesKey> where AesKeyWrap: AesType, - C: NewBlockCipher as AesType>::KeySize> + C: KeyInit + + KeySizeUser as AesType>::KeySize> + BlockCipher + BlockDecrypt + BlockEncrypt, diff --git a/askar-crypto/src/alg/aes/mod.rs b/askar-crypto/src/alg/aes/mod.rs index e4bfc821..12013120 100644 --- a/askar-crypto/src/alg/aes/mod.rs +++ b/askar-crypto/src/alg/aes/mod.rs @@ -2,7 +2,7 @@ use core::fmt::{self, Debug, Formatter}; -use aead::{generic_array::ArrayLength, AeadCore, AeadInPlace, NewAead}; +use aead::{generic_array::ArrayLength, AeadCore, AeadInPlace, KeyInit, KeySizeUser}; use aes_gcm::{Aes128Gcm, Aes256Gcm}; use serde::{Deserialize, Serialize}; use zeroize::Zeroize; @@ -157,7 +157,7 @@ where pub type A128Gcm = Aes128Gcm; impl AesType for A128Gcm { - type KeySize = ::KeySize; + type KeySize = ::KeySize; const ALG_TYPE: AesTypes = AesTypes::A128Gcm; const JWK_ALG: &'static str = "A128GCM"; @@ -167,7 +167,7 @@ impl AesType for A128Gcm { pub type A256Gcm = Aes256Gcm; impl AesType for A256Gcm { - type KeySize = ::KeySize; + type KeySize = ::KeySize; const ALG_TYPE: AesTypes = AesTypes::A256Gcm; const JWK_ALG: &'static str = "A256GCM"; @@ -182,7 +182,7 @@ impl KeyAeadMeta for AesKey { // generic implementation applying to AesGcm impl KeyAeadInPlace for AesKey where - T: NewAead + AeadInPlace + AesType::KeySize>, + T: KeyInit + AeadInPlace + AesType::KeySize>, { /// Encrypt a 
secret value in place, appending the verification tag fn encrypt_in_place( @@ -194,7 +194,7 @@ where if nonce.len() != T::NonceSize::USIZE { return Err(err_msg!(InvalidNonce)); } - let enc = ::new(self.0.as_ref()); + let enc = ::new(self.0.as_ref()); let tag = enc .encrypt_in_place_detached(GenericArray::from_slice(nonce), aad, buffer.as_mut()) .map_err(|_| err_msg!(Encryption, "AEAD encryption error"))?; @@ -220,7 +220,7 @@ where let tag_start = buf_len - T::TagSize::USIZE; let mut tag = GenericArray::default(); tag.clone_from_slice(&buffer.as_ref()[tag_start..]); - let enc = ::new(self.0.as_ref()); + let enc = ::new(self.0.as_ref()); enc.decrypt_in_place_detached( GenericArray::from_slice(nonce), aad, @@ -286,7 +286,8 @@ mod tests { let mut buffer = [0u8; 255]; buffer[0..message.len()].copy_from_slice(&message[..]); let mut writer = Writer::from_slice_position(&mut buffer, message.len()); - key.encrypt_in_place(&mut writer, &nonce, &[]).unwrap(); + key.encrypt_in_place(&mut writer, nonce.as_slice(), &[]) + .unwrap(); } #[test] diff --git a/askar-crypto/src/alg/any.rs b/askar-crypto/src/alg/any.rs index c4864a15..93e53a5f 100644 --- a/askar-crypto/src/alg/any.rs +++ b/askar-crypto/src/alg/any.rs @@ -34,6 +34,9 @@ use super::k256::{self, K256KeyPair}; #[cfg(feature = "p256")] use super::p256::{self, P256KeyPair}; +#[cfg(feature = "p384")] +use super::p384::{self, P384KeyPair}; + use super::{HasKeyAlg, KeyAlg}; use crate::{ buffer::{ResizeBuffer, WriteBuffer}, @@ -46,7 +49,7 @@ use crate::{ sign::{KeySigVerify, KeySign, SignatureType}, }; -#[cfg(any(feature = "k256", feature = "p256"))] +#[cfg(any(feature = "k256", feature = "p256", feature = "p384"))] use super::EcCurves; #[cfg(any(feature = "aes", feature = "chacha"))] @@ -227,6 +230,8 @@ fn generate_any(alg: KeyAlg, rng: impl KeyMaterial) -> Result K256KeyPair::generate(rng).map(R::alloc_key), #[cfg(feature = "p256")] KeyAlg::EcCurve(EcCurves::Secp256r1) => P256KeyPair::generate(rng).map(R::alloc_key), + 
#[cfg(feature = "p384")] + KeyAlg::EcCurve(EcCurves::Secp384r1) => P384KeyPair::generate(rng).map(R::alloc_key), #[allow(unreachable_patterns)] _ => Err(err_msg!( Unsupported, @@ -262,6 +267,10 @@ fn from_public_bytes_any(alg: KeyAlg, public: &[u8]) -> Result { P256KeyPair::from_public_bytes(public).map(R::alloc_key) } + #[cfg(feature = "p384")] + KeyAlg::EcCurve(EcCurves::Secp384r1) => { + P384KeyPair::from_public_bytes(public).map(R::alloc_key) + } #[allow(unreachable_patterns)] _ => Err(err_msg!( Unsupported, @@ -329,6 +338,10 @@ fn from_secret_bytes_any(alg: KeyAlg, secret: &[u8]) -> Result { P256KeyPair::from_secret_bytes(secret).map(R::alloc_key) } + #[cfg(feature = "p384")] + KeyAlg::EcCurve(EcCurves::Secp384r1) => { + P384KeyPair::from_secret_bytes(secret).map(R::alloc_key) + } #[allow(unreachable_patterns)] _ => Err(err_msg!( Unsupported, @@ -520,6 +533,8 @@ fn from_jwk_any(jwk: JwkParts<'_>) -> Result { ("EC", c) if c == k256::JWK_CURVE => K256KeyPair::from_jwk_parts(jwk).map(R::alloc_key), #[cfg(feature = "p256")] ("EC", c) if c == p256::JWK_CURVE => P256KeyPair::from_jwk_parts(jwk).map(R::alloc_key), + #[cfg(feature = "p384")] + ("EC", c) if c == p384::JWK_CURVE => P384KeyPair::from_jwk_parts(jwk).map(R::alloc_key), // FIXME implement symmetric keys? _ => Err(err_msg!(Unsupported, "Unsupported JWK for key import")), } @@ -617,6 +632,13 @@ macro_rules! 
match_key_alg { } match_key_alg!(@ $($rest)*; $key, $alg) }}; + (@ P384 $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "p384")] + if $alg == KeyAlg::EcCurve(EcCurves::Secp384r1) { + return Ok($key.assume::()) + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; } impl AnyKey { @@ -640,6 +662,7 @@ impl AnyKey { Ed25519, K256, P256, + P384, X25519, "Secret key export is not supported for this key type" } @@ -653,6 +676,7 @@ impl AnyKey { Ed25519, K256, P256, + P384, X25519, "Public key export is not supported for this key type" } @@ -697,6 +721,10 @@ impl KeyExchange for AnyKey { KeyAlg::EcCurve(EcCurves::Secp256r1) => Ok(self .assume::() .write_key_exchange(other.assume::(), out)?), + #[cfg(feature = "p384")] + KeyAlg::EcCurve(EcCurves::Secp384r1) => Ok(self + .assume::() + .write_key_exchange(other.assume::(), out)?), #[allow(unreachable_patterns)] _ => { let _ = out; @@ -753,6 +781,7 @@ impl ToJwk for AnyKey { Ed25519, K256, P256, + P384, X25519, "JWK export is not supported for this key type" }?; @@ -773,6 +802,7 @@ impl KeySign for AnyKey { Ed25519, K256, P256, + P384, "Signing is not supported for this key type" }?; key.write_signature(message, sig_type, out) @@ -792,6 +822,7 @@ impl KeySigVerify for AnyKey { Ed25519, K256, P256, + P384, "Signature verification is not supported for this key type" }?; key.verify_signature(message, signature, sig_type) diff --git a/askar-crypto/src/alg/bls.rs b/askar-crypto/src/alg/bls.rs index bf05aa5c..97d07ff9 100644 --- a/askar-crypto/src/alg/bls.rs +++ b/askar-crypto/src/alg/bls.rs @@ -446,9 +446,11 @@ pub struct G1G2Pair(G1Affine, G2Affine); #[cfg(test)] mod tests { + use base64::Engine; + use std::string::ToString; + use super::*; use crate::repr::{ToPublicBytes, ToSecretBytes}; - use std::string::ToString; // test against EIP-2333 (as updated for signatures draft 4) #[test] @@ -532,7 +534,9 @@ mod tests { assert_eq!(jwk.crv, G1::JWK_CURVE); assert_eq!( jwk.x, - base64::encode_config(test_pub_g1, 
base64::URL_SAFE_NO_PAD).as_str() + base64::engine::general_purpose::URL_SAFE_NO_PAD + .encode(test_pub_g1) + .as_str() ); assert_eq!(jwk.d, None); let pk_load = BlsKeyPair::::from_jwk_parts(jwk).unwrap(); @@ -544,11 +548,15 @@ mod tests { assert_eq!(jwk.crv, G1::JWK_CURVE); assert_eq!( jwk.x, - base64::encode_config(test_pub_g1, base64::URL_SAFE_NO_PAD).as_str() + base64::engine::general_purpose::URL_SAFE_NO_PAD + .encode(test_pub_g1) + .as_str() ); assert_eq!( jwk.d, - base64::encode_config(test_pvt, base64::URL_SAFE_NO_PAD).as_str() + base64::engine::general_purpose::URL_SAFE_NO_PAD + .encode(test_pvt) + .as_str() ); let _sk_load = BlsKeyPair::::from_jwk_parts(jwk).unwrap(); // assert_eq!( diff --git a/askar-crypto/src/alg/chacha20.rs b/askar-crypto/src/alg/chacha20.rs index 56766712..e61d4164 100644 --- a/askar-crypto/src/alg/chacha20.rs +++ b/askar-crypto/src/alg/chacha20.rs @@ -2,7 +2,7 @@ use core::fmt::{self, Debug, Formatter}; -use aead::{AeadCore, AeadInPlace, NewAead}; +use aead::{AeadCore, AeadInPlace, KeyInit, KeySizeUser}; use chacha20poly1305::{ChaCha20Poly1305, XChaCha20Poly1305}; use serde::{Deserialize, Serialize}; use zeroize::Zeroize; @@ -25,7 +25,7 @@ pub static JWK_KEY_TYPE: &str = "oct"; /// Trait implemented by supported ChaCha20 algorithms pub trait Chacha20Type: 'static { /// The AEAD implementation - type Aead: NewAead + AeadCore + AeadInPlace; + type Aead: KeyInit + AeadCore + AeadInPlace; /// The associated algorithm type const ALG_TYPE: Chacha20Types; @@ -55,7 +55,7 @@ impl Chacha20Type for XC20P { const JWK_ALG: &'static str = "XC20P"; } -type KeyType = ArrayKey<<::Aead as NewAead>::KeySize>; +type KeyType = ArrayKey<<::Aead as KeySizeUser>::KeySize>; type NonceSize = <::Aead as AeadCore>::NonceSize; @@ -112,7 +112,7 @@ impl HasKeyAlg for Chacha20Key { } impl KeyMeta for Chacha20Key { - type KeySize = ::KeySize; + type KeySize = ::KeySize; } impl KeyGen for Chacha20Key { diff --git a/askar-crypto/src/alg/ec_common.rs 
b/askar-crypto/src/alg/ec_common.rs index d35eea2b..91246778 100644 --- a/askar-crypto/src/alg/ec_common.rs +++ b/askar-crypto/src/alg/ec_common.rs @@ -4,14 +4,14 @@ use elliptic_curve::{ }; pub fn write_sk(sk: &SecretKey, out: &mut [u8]) { - let limbs = sk.as_scalar_core().as_limbs(); - debug_assert_eq!(out.len(), Limb::BYTE_SIZE * limbs.len()); + let limbs = sk.as_scalar_primitive().as_limbs(); + debug_assert_eq!(out.len(), Limb::BYTES * limbs.len()); for (src, dst) in limbs .iter() .rev() .cloned() - .zip(out.chunks_exact_mut(Limb::BYTE_SIZE)) + .zip(out.chunks_exact_mut(Limb::BYTES)) { dst.copy_from_slice(&src.to_be_bytes()); } diff --git a/askar-crypto/src/alg/ed25519.rs b/askar-crypto/src/alg/ed25519.rs index a5aa5da5..4183fac6 100644 --- a/askar-crypto/src/alg/ed25519.rs +++ b/askar-crypto/src/alg/ed25519.rs @@ -317,6 +317,8 @@ impl Debug for Ed25519SigningKey<'_> { #[cfg(test)] mod tests { + use base64::Engine; + use super::*; use crate::repr::{ToPublicBytes, ToSecretBytes}; @@ -361,7 +363,9 @@ mod tests { // } let test_pvt_b64 = "nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A"; let test_pub_b64 = "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"; - let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode(test_pvt_b64) + .unwrap(); let kp = Ed25519KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); let jwk = kp .to_jwk_public(None) diff --git a/askar-crypto/src/alg/k256.rs b/askar-crypto/src/alg/k256.rs index f9588657..e90a3e9f 100644 --- a/askar-crypto/src/alg/k256.rs +++ b/askar-crypto/src/alg/k256.rs @@ -1,7 +1,5 @@ //! 
Elliptic curve ECDH and ECDSA support on curve secp256k1 -use core::convert::{TryFrom, TryInto}; - use k256::{ ecdsa::{ signature::{Signer, Verifier}, @@ -51,7 +49,7 @@ pub static JWK_KEY_TYPE: &str = "EC"; /// The 'crv' value of a K-256 key JWK pub static JWK_CURVE: &str = "secp256k1"; -type FieldSize = elliptic_curve::FieldSize; +type FieldSize = elliptic_curve::FieldBytesSize; /// A K-256 (secp256k1) public key or keypair #[derive(Clone, Debug)] @@ -87,7 +85,7 @@ impl K256KeyPair { pub fn sign(&self, message: &[u8]) -> Option<[u8; ES256K_SIGNATURE_LENGTH]> { if let Some(skey) = self.to_signing_key() { let sig: Signature = skey.sign(message); - let sigb: [u8; 64] = sig.as_ref().try_into().unwrap(); + let sigb: [u8; 64] = sig.to_bytes().try_into().unwrap(); Some(sigb) } else { None @@ -97,7 +95,7 @@ impl K256KeyPair { /// Verify a signature with the public key pub fn verify_signature(&self, message: &[u8], signature: &[u8]) -> bool { if let Ok(sig) = Signature::try_from(signature) { - let vk = VerifyingKey::from(self.public.as_affine()); + let vk = VerifyingKey::from(&self.public); vk.verify(message, &sig).is_ok() } else { false @@ -119,7 +117,7 @@ impl KeyGen for K256KeyPair { fn generate(mut rng: impl KeyMaterial) -> Result { ArrayKey::::temp(|buf| loop { rng.read_okm(buf); - if let Ok(key) = SecretKey::from_be_bytes(buf) { + if let Ok(key) = SecretKey::from_bytes(buf) { return Ok(Self::from_secret_key(key)); } }) @@ -128,9 +126,12 @@ impl KeyGen for K256KeyPair { impl KeySecretBytes for K256KeyPair { fn from_secret_bytes(key: &[u8]) -> Result { - Ok(Self::from_secret_key( - SecretKey::from_be_bytes(key).map_err(|_| err_msg!(InvalidKeyData))?, - )) + if let Ok(key) = key.try_into() { + if let Ok(sk) = SecretKey::from_bytes(key) { + return Ok(Self::from_secret_key(sk)); + } + } + Err(err_msg!(InvalidKeyData)) } fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { @@ -310,7 +311,7 @@ impl KeyExchange for K256KeyPair { match self.secret.as_ref() { 
Some(sk) => { let xk = diffie_hellman(sk.to_nonzero_scalar(), other.public.as_affine()); - out.buffer_write(xk.as_bytes().as_ref())?; + out.buffer_write(xk.raw_secret_bytes().as_ref())?; Ok(()) } None => Err(err_msg!(MissingSecretKey)), @@ -320,6 +321,8 @@ impl KeyExchange for K256KeyPair { #[cfg(test)] mod tests { + use base64::Engine; + use super::*; use crate::repr::ToPublicBytes; @@ -339,7 +342,9 @@ mod tests { "dWCvM4fTdeM0KmloF57zxtBPXTOythHPMm1HCLrdd3A", "36uMVGM7hnw-N6GnjFcihWE3SkrhMLzzLCdPMXPEXlA", ); - let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode(test_pvt_b64) + .unwrap(); let sk = K256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); @@ -373,11 +378,9 @@ mod tests { "a2a3affbe18cda8c5a7b6375f05b304c2303ab8beb21428709a43a519f8f946f 6ffa7966afdb337e9b1f70bb575282e71d4fe5bbe6bfa97b229d6bd7e97df1e5" ); - let test_pvt = base64::decode_config( - "jv_VrhPomm6_WOzb74xF4eMI0hu9p0W1Zlxi0nz8AFs", - base64::URL_SAFE_NO_PAD, - ) - .unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode("jv_VrhPomm6_WOzb74xF4eMI0hu9p0W1Zlxi0nz8AFs") + .unwrap(); let kp = K256KeyPair::from_secret_bytes(&test_pvt).unwrap(); let sig = kp.sign(&test_msg[..]).unwrap(); assert_eq!(sig, &test_sig[..]); diff --git a/askar-crypto/src/alg/mod.rs b/askar-crypto/src/alg/mod.rs index ef586f9f..be92488b 100644 --- a/askar-crypto/src/alg/mod.rs +++ b/askar-crypto/src/alg/mod.rs @@ -50,6 +50,10 @@ pub mod k256; #[cfg_attr(docsrs, doc(cfg(feature = "p256")))] pub mod p256; +#[cfg(feature = "p384")] +#[cfg_attr(docsrs, doc(cfg(feature = "p384")))] +pub mod p384; + /// Supported key algorithms #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] @@ -60,9 +64,9 @@ pub enum KeyAlg { 
Bls12_381(BlsCurves), /// (X)ChaCha20-Poly1305 Chacha20(Chacha20Types), - /// Curve25519 signing key + /// Ed25519 signing key Ed25519, - /// Curve25519 diffie-hellman key exchange key + /// Curve25519 elliptic curve key exchange key X25519, /// Elliptic Curve key for signing or key exchange EcCurve(EcCurves), @@ -87,6 +91,7 @@ impl KeyAlg { Self::X25519 => "x25519", Self::EcCurve(EcCurves::Secp256k1) => "k256", Self::EcCurve(EcCurves::Secp256r1) => "p256", + Self::EcCurve(EcCurves::Secp384r1) => "p384", } } } @@ -123,6 +128,7 @@ impl FromStr for KeyAlg { a if a == "x25519" => Ok(Self::X25519), a if a == "k256" || a == "secp256k1" => Ok(Self::EcCurve(EcCurves::Secp256k1)), a if a == "p256" || a == "secp256r1" => Ok(Self::EcCurve(EcCurves::Secp256r1)), + a if a == "p384" || a == "secp384r1" => Ok(Self::EcCurve(EcCurves::Secp384r1)), _ => Err(err_msg!(Unsupported, "Unknown key algorithm")), } } @@ -248,6 +254,8 @@ pub enum EcCurves { Secp256r1, /// Koblitz 256 curve Secp256k1, + /// NIST P-384 curve + Secp384r1, } /// A trait for accessing the algorithm of a key, used when diff --git a/askar-crypto/src/alg/p256.rs b/askar-crypto/src/alg/p256.rs index 0ef59219..c74ebe85 100644 --- a/askar-crypto/src/alg/p256.rs +++ b/askar-crypto/src/alg/p256.rs @@ -1,6 +1,6 @@ //! 
Elliptic curve ECDH and ECDSA support on curve secp256r1 -use core::convert::{TryFrom, TryInto}; +use core::convert::TryFrom; use p256::{ ecdsa::{ @@ -51,7 +51,7 @@ pub static JWK_KEY_TYPE: &str = "EC"; /// The 'crv' value of a P-256 key JWK pub static JWK_CURVE: &str = "P-256"; -type FieldSize = elliptic_curve::FieldSize; +type FieldSize = elliptic_curve::FieldBytesSize; /// A P-256 (secp256r1) public key or keypair #[derive(Clone, Debug)] @@ -87,7 +87,7 @@ impl P256KeyPair { pub fn sign(&self, message: &[u8]) -> Option<[u8; ES256_SIGNATURE_LENGTH]> { if let Some(skey) = self.to_signing_key() { let sig: Signature = skey.sign(message); - let sigb: [u8; 64] = sig.as_ref().try_into().unwrap(); + let sigb: [u8; 64] = sig.to_bytes().try_into().unwrap(); Some(sigb) } else { None @@ -119,7 +119,7 @@ impl KeyGen for P256KeyPair { fn generate(mut rng: impl KeyMaterial) -> Result { ArrayKey::::temp(|buf| loop { rng.read_okm(buf); - if let Ok(key) = SecretKey::from_be_bytes(buf) { + if let Ok(key) = SecretKey::from_bytes(buf) { return Ok(Self::from_secret_key(key)); } }) @@ -128,9 +128,12 @@ impl KeyGen for P256KeyPair { impl KeySecretBytes for P256KeyPair { fn from_secret_bytes(key: &[u8]) -> Result { - Ok(Self::from_secret_key( - SecretKey::from_be_bytes(key).map_err(|_| err_msg!(InvalidKeyData))?, - )) + if let Ok(key) = key.try_into() { + if let Ok(sk) = SecretKey::from_bytes(key) { + return Ok(Self::from_secret_key(sk)); + } + } + Err(err_msg!(InvalidKeyData)) } fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { @@ -310,7 +313,7 @@ impl KeyExchange for P256KeyPair { match self.secret.as_ref() { Some(sk) => { let xk = diffie_hellman(sk.to_nonzero_scalar(), other.public.as_affine()); - out.buffer_write(xk.as_bytes().as_ref())?; + out.buffer_write(xk.raw_secret_bytes().as_ref())?; Ok(()) } None => Err(err_msg!(MissingSecretKey)), @@ -320,6 +323,8 @@ impl KeyExchange for P256KeyPair { #[cfg(test)] mod tests { + use base64::Engine; + use super::*; use 
crate::repr::ToPublicBytes; @@ -337,7 +342,9 @@ mod tests { "f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU", "x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0", ); - let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode(test_pvt_b64) + .unwrap(); let sk = P256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); @@ -388,11 +395,9 @@ mod tests { "241f765f19d4e6148452f2249d2fa69882244a6ad6e70aadb8848a6409d20712 4e85faf9587100247de7bdace13a3073b47ec8a531ca91c1375b2b6134344413" ); - let test_pvt = base64::decode_config( - "jpsQnnGQmL-YBIffH1136cspYG6-0iY7X1fCE9-E9LI", - base64::URL_SAFE_NO_PAD, - ) - .unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode("jpsQnnGQmL-YBIffH1136cspYG6-0iY7X1fCE9-E9LI") + .unwrap(); let kp = P256KeyPair::from_secret_bytes(&test_pvt).unwrap(); let sig = kp.sign(&test_msg[..]).unwrap(); assert_eq!(sig, &test_sig[..]); diff --git a/askar-crypto/src/alg/p384.rs b/askar-crypto/src/alg/p384.rs new file mode 100644 index 00000000..8a9fca02 --- /dev/null +++ b/askar-crypto/src/alg/p384.rs @@ -0,0 +1,433 @@ +//! 
Elliptic curve ECDH and ECDSA support on curve secp384r1 + +use core::convert::{TryFrom, TryInto}; + +use p384::{ + ecdsa::{ + signature::{Signer, Verifier}, + Signature, SigningKey, VerifyingKey, + }, + elliptic_curve::{ + self, + ecdh::diffie_hellman, + sec1::{Coordinates, FromEncodedPoint, ToEncodedPoint}, + }, + EncodedPoint, PublicKey, SecretKey, +}; +use subtle::ConstantTimeEq; + +use super::{ec_common, EcCurves, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, WriteBuffer}, + error::Error, + generic_array::typenum::{U48, U49, U97}, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + kdf::KeyExchange, + random::KeyMaterial, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairBytes, KeypairMeta}, + sign::{KeySigVerify, KeySign, SignatureType}, +}; + +// SECURITY: PublicKey contains a p384::AffinePoint, which is always checked +// to be on the curve when loaded. +// The identity point is rejected when converting into a p384::PublicKey. +// This satisfies 5.6.2.3.4 ECC Partial Public-Key Validation Routine from +// NIST SP 800-56A: _Recommendation for Pair-Wise Key-Establishment Schemes +// Using Discrete Logarithm Cryptography_. 
+ +/// The length of an ES384 signature +pub const ES384_SIGNATURE_LENGTH: usize = 96; + +/// The length of a compressed public key in bytes +pub const PUBLIC_KEY_LENGTH: usize = 49; +/// The length of a secret key +pub const SECRET_KEY_LENGTH: usize = 48; +/// The length of a keypair in bytes +pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; + +/// The 'kty' value of an elliptic curve key JWK +pub static JWK_KEY_TYPE: &str = "EC"; +/// The 'crv' value of a P-384 key JWK +pub static JWK_CURVE: &str = "P-384"; + +type FieldSize = elliptic_curve::FieldBytesSize; + +/// A P-384 (secp384r1) public key or keypair +#[derive(Clone, Debug)] +pub struct P384KeyPair { + // SECURITY: SecretKey zeroizes on drop + secret: Option, + public: PublicKey, +} + +impl P384KeyPair { + #[inline] + pub(crate) fn from_secret_key(sk: SecretKey) -> Self { + let pk = sk.public_key(); + Self { + secret: Some(sk), + public: pk, + } + } + + pub(crate) fn check_public_bytes(&self, pk: &[u8]) -> Result<(), Error> { + if self.with_public_bytes(|slf| slf.ct_eq(pk)).into() { + Ok(()) + } else { + Err(err_msg!(InvalidKeyData, "invalid p384 keypair")) + } + } + + pub(crate) fn to_signing_key(&self) -> Option { + self.secret.clone().map(SigningKey::from) + } + + /// Sign a message with the secret key + pub fn sign(&self, message: &[u8]) -> Option<[u8; ES384_SIGNATURE_LENGTH]> { + if let Some(skey) = self.to_signing_key() { + let sig: Signature = skey.sign(message); + let mut sigb = [0u8; 96]; + sigb.copy_from_slice(&sig.to_bytes()); + Some(sigb) + } else { + None + } + } + + /// Verify a signature with the public key + pub fn verify_signature(&self, message: &[u8], signature: &[u8]) -> bool { + if let Ok(sig) = Signature::try_from(signature) { + let vk = VerifyingKey::from(&self.public); + vk.verify(message, &sig).is_ok() + } else { + false + } + } +} + +impl HasKeyAlg for P384KeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::EcCurve(EcCurves::Secp384r1) + } +} + +impl 
KeyMeta for P384KeyPair { + type KeySize = U48; +} + +impl KeyGen for P384KeyPair { + fn generate(mut rng: impl KeyMaterial) -> Result { + ArrayKey::::temp(|buf| loop { + rng.read_okm(buf); + if let Ok(key) = SecretKey::from_bytes(buf) { + return Ok(Self::from_secret_key(key)); + } + }) + } +} + +impl KeySecretBytes for P384KeyPair { + fn from_secret_bytes(key: &[u8]) -> Result { + if let Ok(key) = key.try_into() { + if let Ok(sk) = SecretKey::from_bytes(key) { + return Ok(Self::from_secret_key(sk)); + } + } + Err(err_msg!(InvalidKeyData)) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + ArrayKey::::temp(|arr| { + ec_common::write_sk(sk, &mut arr[..]); + f(Some(arr)) + }) + } else { + f(None) + } + } +} + +impl KeypairMeta for P384KeyPair { + type PublicKeySize = U49; + type KeypairSize = U97; +} + +impl KeypairBytes for P384KeyPair { + fn from_keypair_bytes(kp: &[u8]) -> Result { + if kp.len() != KEYPAIR_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + let result = P384KeyPair::from_secret_bytes(&kp[..SECRET_KEY_LENGTH]) + .map_err(|_| err_msg!(InvalidKeyData))?; + result.check_public_bytes(&kp[SECRET_KEY_LENGTH..])?; + Ok(result) + } + + fn with_keypair_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + ArrayKey::<::KeypairSize>::temp(|arr| { + ec_common::write_sk(sk, &mut arr[..SECRET_KEY_LENGTH]); + let pk_enc = self.public.to_encoded_point(true); + arr[SECRET_KEY_LENGTH..].copy_from_slice(pk_enc.as_bytes()); + f(Some(&*arr)) + }) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for P384KeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + let pk = PublicKey::from_sec1_bytes(key).map_err(|_| err_msg!(InvalidKeyData))?; + Ok(Self { + secret: None, + public: pk, + }) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + f(self.public.to_encoded_point(true).as_bytes()) + } +} + +impl KeySign for 
P384KeyPair { + fn write_signature( + &self, + message: &[u8], + sig_type: Option<SignatureType>, + out: &mut dyn WriteBuffer, + ) -> Result<(), Error> { + match sig_type { + None | Some(SignatureType::ES384) => { + if let Some(sig) = self.sign(message) { + out.buffer_write(&sig[..])?; + Ok(()) + } else { + Err(err_msg!(Unsupported, "Undefined secret key")) + } + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl KeySigVerify for P384KeyPair { + fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option<SignatureType>, + ) -> Result<bool, Error> { + match sig_type { + None | Some(SignatureType::ES384) => Ok(self.verify_signature(message, signature)), + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl ToJwk for P384KeyPair { + fn encode_jwk(&self, enc: &mut dyn JwkEncoder) -> Result<(), Error> { + let pk_enc = self.public.to_encoded_point(false); + let (x, y) = match pk_enc.coordinates() { + Coordinates::Identity => { + return Err(err_msg!( + Unsupported, + "Cannot convert identity point to JWK" + )) + } + Coordinates::Uncompressed { x, y } => (x, y), + Coordinates::Compressed { .. } | Coordinates::Compact { .. } => unreachable!(), + }; + + enc.add_str("crv", JWK_CURVE)?; + enc.add_str("kty", JWK_KEY_TYPE)?; + enc.add_as_base64("x", &x[..])?; + enc.add_as_base64("y", &y[..])?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for P384KeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result<Self, Error> { + if jwk.kty != JWK_KEY_TYPE { + return Err(err_msg!(InvalidKeyData, "Unsupported key type")); + } + if jwk.crv != JWK_CURVE { + return Err(err_msg!(InvalidKeyData, "Unsupported key algorithm")); + } + let pk_x = ArrayKey::<FieldSize>::try_new_with(|arr| { + if jwk.x.decode_base64(arr)?
!= arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk_y = ArrayKey::::try_new_with(|arr| { + if jwk.y.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk = Option::from(PublicKey::from_encoded_point( + &EncodedPoint::from_affine_coordinates(pk_x.as_ref(), pk_y.as_ref(), false), + )) + .ok_or_else(|| err_msg!(InvalidKeyData))?; + if jwk.d.is_some() { + ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + let kp = P384KeyPair::from_secret_bytes(arr)?; + if kp.public != pk { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(kp) + } + } + }) + } else { + Ok(Self { + secret: None, + public: pk, + }) + } + } +} + +impl KeyExchange for P384KeyPair { + fn write_key_exchange(&self, other: &Self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + match self.secret.as_ref() { + Some(sk) => { + let xk = diffie_hellman(sk.to_nonzero_scalar(), other.public.as_affine()); + out.buffer_write(xk.raw_secret_bytes().as_ref())?; + Ok(()) + } + None => Err(err_msg!(MissingSecretKey)), + } + } +} + +#[cfg(test)] +mod tests { + use base64::Engine; + + use super::*; + use crate::repr::ToPublicBytes; + + #[test] + fn jwk_expected() { + // { + // "kty": "EC", + // "x": "p3ZI8DAmxn8BJ3936Y5MHRLXTAg6SxCNhuH6JBEuieuicUY9wqZk8C63SZIj4htA", + // "y": "eqSjvs1X7eI9V2o8sYUpsrj6WUKOymqFtkCxMwWQuDPtZKOHC3fSWkjQvf_73GH-", + // "crv": "P-384", + // "d": "rgFYq-b_toGb-wN3URCk_e-6Sj2PtUvoefF284q9oKnVCi7sglAmCZkOv-2nOAeE" + // } + let test_pvt_b64 = "rgFYq-b_toGb-wN3URCk_e-6Sj2PtUvoefF284q9oKnVCi7sglAmCZkOv-2nOAeE"; + let test_pub_b64 = ( + "p3ZI8DAmxn8BJ3936Y5MHRLXTAg6SxCNhuH6JBEuieuicUY9wqZk8C63SZIj4htA", + "eqSjvs1X7eI9V2o8sYUpsrj6WUKOymqFtkCxMwWQuDPtZKOHC3fSWkjQvf_73GH-", + ); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode(test_pvt_b64) + .unwrap(); + let sk = P384KeyPair::from_secret_bytes(&test_pvt).expect("Error creating 
signing key"); + + let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); + let jwk = JwkParts::try_from_str(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, JWK_KEY_TYPE); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, None); + let pk_load = P384KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!(sk.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = sk.to_jwk_secret(None).expect("Error converting key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, JWK_KEY_TYPE); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, test_pvt_b64); + let sk_load = P384KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!( + sk.to_keypair_bytes().unwrap(), + sk_load.to_keypair_bytes().unwrap() + ); + } + + #[test] + fn jwk_thumbprint() { + let pk = P384KeyPair::from_jwk( + r#"{ + "kty": "EC", + "x": "p3ZI8DAmxn8BJ3936Y5MHRLXTAg6SxCNhuH6JBEuieuicUY9wqZk8C63SZIj4htA", + "y": "eqSjvs1X7eI9V2o8sYUpsrj6WUKOymqFtkCxMwWQuDPtZKOHC3fSWkjQvf_73GH-", + "crv": "P-384" + }"#, + ) + .unwrap(); + assert_eq!( + pk.to_jwk_thumbprint(None).unwrap(), + "4zlc15_l012-r5pFk7mnEFs6MghkhSAkdMeNeyL00u4" + ); + } + + #[test] + fn sign_verify_expected() { + let test_msg = b"This is a dummy message for use with tests"; + let test_sig = &hex!( + "acf7e9f0975738d446b26aa1651ad699cac490a496d6f70221126c35d8e4fcc5a28f63f611557be9d4c321d8fa24dbf2 + 846e3bcbea2e45eff577974664b1e98fffdad8ddbe7bfa792c17a9981915aa63755cfd338fd28874de02c42d966ece67" + ); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode("rgFYq-b_toGb-wN3URCk_e-6Sj2PtUvoefF284q9oKnVCi7sglAmCZkOv-2nOAeE") + .unwrap(); + let kp = P384KeyPair::from_secret_bytes(&test_pvt).unwrap(); + let sig = kp.sign(&test_msg[..]).unwrap(); + assert_eq!(sig, &test_sig[..]); + 
assert!(kp.verify_signature(&test_msg[..], &sig[..])); + assert!(!kp.verify_signature(b"Not the message", &sig[..])); + assert!(!kp.verify_signature(&test_msg[..], &[0u8; 96])); + } + + #[test] + fn key_exchange_random() { + let kp1 = P384KeyPair::random().unwrap(); + let kp2 = P384KeyPair::random().unwrap(); + assert_ne!( + kp1.to_keypair_bytes().unwrap(), + kp2.to_keypair_bytes().unwrap() + ); + + let xch1 = kp1.key_exchange_bytes(&kp2).unwrap(); + let xch2 = kp2.key_exchange_bytes(&kp1).unwrap(); + assert_eq!(xch1.len(), 48); + assert_eq!(xch1, xch2); + } + + #[test] + fn round_trip_bytes() { + let kp = P384KeyPair::random().unwrap(); + let cmp = P384KeyPair::from_keypair_bytes(&kp.to_keypair_bytes().unwrap()).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + cmp.to_keypair_bytes().unwrap() + ); + } +} diff --git a/askar-crypto/src/alg/x25519.rs b/askar-crypto/src/alg/x25519.rs index 768fedc0..370d40a4 100644 --- a/askar-crypto/src/alg/x25519.rs +++ b/askar-crypto/src/alg/x25519.rs @@ -242,6 +242,8 @@ impl TryFrom<&Ed25519KeyPair> for X25519KeyPair { #[cfg(test)] mod tests { + use base64::Engine; + use super::*; use crate::repr::ToPublicBytes; @@ -255,7 +257,9 @@ mod tests { // "x": "tGskN_ae61DP4DLY31_fjkbvnKqf-ze7kA6Cj2vyQxU" // } let test_pvt_b64 = "qL25gw-HkNJC9m4EsRzCoUx1KntjwHPzxo6a2xUcyFQ"; - let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let test_pvt = base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode(test_pvt_b64) + .unwrap(); let kp = X25519KeyPair::from_secret_bytes(&test_pvt).expect("Error creating x25519 keypair"); let jwk = kp diff --git a/askar-crypto/src/jwk/encode.rs b/askar-crypto/src/jwk/encode.rs index 7265b9bb..73054ebd 100644 --- a/askar-crypto/src/jwk/encode.rs +++ b/askar-crypto/src/jwk/encode.rs @@ -16,7 +16,10 @@ fn write_hex_buffer(mut buffer: impl Write, value: &[u8]) -> Result<(), Error> { write!( buffer, "{}", - base64::display::Base64Display::with_config(value, 
base64::URL_SAFE_NO_PAD) + base64::display::Base64Display::new( + value, + &base64::engine::general_purpose::URL_SAFE_NO_PAD + ) ) .map_err(|_| err_msg!(Unexpected, "Error writing to JWK buffer")) } diff --git a/askar-crypto/src/jwk/mod.rs b/askar-crypto/src/jwk/mod.rs index 7cc7648f..bd93d321 100644 --- a/askar-crypto/src/jwk/mod.rs +++ b/askar-crypto/src/jwk/mod.rs @@ -3,6 +3,7 @@ #[cfg(feature = "alloc")] use alloc::{string::String, vec::Vec}; +use base64::Engine; use sha2::Sha256; #[cfg(feature = "alloc")] @@ -71,7 +72,9 @@ pub fn write_jwk_thumbprint( buf.finalize()?; let hash = hasher.finalize(); let mut buf = [0u8; 43]; - let len = base64::encode_config_slice(hash, base64::URL_SAFE_NO_PAD, &mut buf); + let len = base64::engine::general_purpose::URL_SAFE_NO_PAD + .encode_slice(hash, &mut buf) + .map_err(|_| err_msg!(Unexpected, "Base64 encoding error"))?; output.buffer_write(&buf[..len])?; Ok(()) } diff --git a/askar-crypto/src/jwk/parts.rs b/askar-crypto/src/jwk/parts.rs index d042ee21..055464af 100644 --- a/askar-crypto/src/jwk/parts.rs +++ b/askar-crypto/src/jwk/parts.rs @@ -5,6 +5,7 @@ use core::{ #[cfg(feature = "arbitrary")] use arbitrary::Arbitrary; +use base64::Engine; use serde::{ de::{Deserialize, Deserializer, MapAccess, Visitor}, ser::{Serialize, SerializeMap, Serializer}, @@ -77,7 +78,8 @@ impl OptAttr<'_> { if s.len() > max_input { Err(err_msg!(Invalid, "Base64 length exceeds max")) } else { - base64::decode_config_slice(s, base64::URL_SAFE_NO_PAD, output) + base64::engine::general_purpose::URL_SAFE_NO_PAD + .decode_slice_unchecked(s, output) .map_err(|_| err_msg!(Invalid, "Base64 decoding error")) } } else { diff --git a/askar-crypto/src/kdf/argon2.rs b/askar-crypto/src/kdf/argon2.rs index aac17486..c05d1346 100644 --- a/askar-crypto/src/kdf/argon2.rs +++ b/askar-crypto/src/kdf/argon2.rs @@ -69,12 +69,13 @@ impl KeyDerivation for Argon2<'_> { )); } let mut pbuild = argon2::ParamsBuilder::new(); - pbuild.m_cost(self.params.mem_cost).unwrap(); - 
pbuild.t_cost(self.params.time_cost).unwrap(); + pbuild + .m_cost(self.params.mem_cost) + .t_cost(self.params.time_cost); argon2::Argon2::new( self.params.alg, self.params.version, - pbuild.params().unwrap(), + pbuild.build().unwrap(), ) .hash_password_into(self.password, self.salt, key_output) .map_err(|_| err_msg!(Unexpected, "Error deriving key")) diff --git a/askar-crypto/src/random.rs b/askar-crypto/src/random.rs index 0d9e2dc1..dc387caf 100644 --- a/askar-crypto/src/random.rs +++ b/askar-crypto/src/random.rs @@ -4,7 +4,7 @@ use core::fmt::{self, Debug, Formatter}; use aead::generic_array::{typenum::Unsigned, GenericArray}; use chacha20::{ - cipher::{NewCipher, StreamCipher}, + cipher::{KeyIvInit, KeySizeUser, StreamCipher}, ChaCha20, }; use rand::{CryptoRng, RngCore, SeedableRng}; @@ -14,7 +14,7 @@ use crate::buffer::SecretBytes; use crate::error::Error; /// The expected length of a seed for `fill_random_deterministic` -pub const DETERMINISTIC_SEED_LENGTH: usize = ::KeySize::USIZE; +pub const DETERMINISTIC_SEED_LENGTH: usize = ::KeySize::USIZE; /// Combined trait for CryptoRng and RngCore pub trait Rng: CryptoRng + RngCore + Debug {} diff --git a/askar-crypto/src/sign.rs b/askar-crypto/src/sign.rs index abdd6b41..e00e23ca 100644 --- a/askar-crypto/src/sign.rs +++ b/askar-crypto/src/sign.rs @@ -53,6 +53,8 @@ pub enum SignatureType { ES256, /// Elliptic curve DSA using K-256 and SHA-256 ES256K, + /// Elliptic curve DSA using P-384 and SHA-384 + ES384, } impl FromStr for SignatureType { @@ -63,6 +65,7 @@ impl FromStr for SignatureType { a if a == "eddsa" => Ok(Self::EdDSA), a if a == "es256" => Ok(Self::ES256), a if a == "es256k" => Ok(Self::ES256K), + a if a == "es384" => Ok(Self::ES384), _ => Err(err_msg!(Unsupported, "Unknown signature algorithm")), } } @@ -73,6 +76,7 @@ impl SignatureType { pub const fn signature_length(&self) -> usize { match self { Self::EdDSA | Self::ES256 | Self::ES256K => 64, + Self::ES384 => 96, } } } diff --git 
a/askar-storage/Cargo.toml b/askar-storage/Cargo.toml index 46521985..14d80863 100644 --- a/askar-storage/Cargo.toml +++ b/askar-storage/Cargo.toml @@ -21,7 +21,7 @@ default = ["all_backends", "log"] all_backends = ["any", "postgres", "sqlite"] any = [] migration = ["rmp-serde", "sqlx/macros"] -postgres = ["sqlx", "sqlx/postgres", "sqlx/tls"] +postgres = ["sqlx", "sqlx/postgres", "sqlx/tls-rustls"] sqlite = ["sqlx", "sqlx/sqlite"] pg_test = ["postgres"] @@ -29,13 +29,13 @@ pg_test = ["postgres"] arc-swap = "1.6" async-lock = "2.5" async-stream = "0.3" -bs58 = "0.4" +bs58 = "0.5" chrono = "0.4" digest = "0.10" futures-lite = "1.11" hex = "0.4" hmac = "0.12" -itertools = "0.10" +itertools = "0.11" log = { version = "0.4", optional = true } once_cell = "1.5" percent-encoding = "2.0" @@ -44,7 +44,7 @@ serde = { version = "1.0", features = ["derive"] } serde_cbor = "0.11" serde_json = "1.0" sha2 = "0.10" -tokio = { version = "1.5", features = ["time"] } +tokio = { version = "1.5", features = ["rt-multi-thread", "time"] } url = { version = "2.1", default-features = false } uuid = { version = "1.2", features = ["v4"] } zeroize = "1.5" @@ -56,13 +56,13 @@ default-features = false features = ["alloc", "argon2", "chacha", "std_rng"] [dependencies.sqlx] -version = "0.6.2" +version = "0.7.1" default-features = false -features = ["chrono", "runtime-tokio-rustls"] +features = ["chrono", "runtime-tokio"] optional = true [dev-dependencies] -env_logger = "0.9" +env_logger = "0.10" hex-literal = "0.4" rand = { version = "0.8" } diff --git a/askar-storage/src/backend/db_utils.rs b/askar-storage/src/backend/db_utils.rs index 4fe057ba..e3e447cc 100644 --- a/askar-storage/src/backend/db_utils.rs +++ b/askar-storage/src/backend/db_utils.rs @@ -23,6 +23,8 @@ pub const PAGE_SIZE: usize = 32; pub type Expiry = chrono::DateTime; +pub(crate) type Connection = ::Connection; + #[derive(Debug)] pub(crate) enum DbSessionState { Active { conn: PoolConnection }, @@ -98,10 +100,10 @@ impl DbSession 
{ I: for<'a> GetProfileKey<'a, DB>, { if let DbSessionState::Pending { pool, transaction } = &self.state { - info!("Acquire pool connection"); + debug!("Acquire pool connection"); let mut conn = pool.acquire().await?; if *transaction { - info!("Start transaction"); + debug!("Start transaction"); DB::start_transaction(&mut conn, false).await?; self.txn_depth += 1; } @@ -141,10 +143,10 @@ impl DbSession { self.txn_depth = 0; if let Some(conn) = self.connection_mut() { if commit { - info!("Commit transaction on close"); + debug!("Commit transaction on close"); DB::TransactionManager::commit(conn).await } else { - info!("Roll-back transaction on close"); + debug!("Roll-back transaction on close"); DB::TransactionManager::rollback(conn).await } .map_err(err_map!(Backend, "Error closing transaction"))?; @@ -159,11 +161,11 @@ impl Drop for DbSession { if self.txn_depth > 0 { self.txn_depth = 0; if let Some(conn) = self.connection_mut() { - info!("Dropped transaction: roll-back"); + debug!("Dropped transaction: roll-back"); DB::TransactionManager::start_rollback(conn); } } else { - info!("Dropped pool connection") + debug!("Dropped pool connection") } } } @@ -208,7 +210,7 @@ pub(crate) enum DbSessionKey { pub trait ExtDatabase: Database { fn start_transaction( - conn: &mut PoolConnection, + conn: &mut Connection, _nested: bool, ) -> BoxFuture<'_, Result<(), SqlxError>> { ::TransactionManager::begin(conn) @@ -247,8 +249,8 @@ pub(crate) struct DbSessionActive<'a, DB: ExtDatabase> { impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { #[inline] - pub fn connection_mut(&mut self) -> &mut PoolConnection { - self.inner.connection_mut().unwrap() + pub fn connection_mut(&mut self) -> &mut Connection { + self.inner.connection_mut().unwrap().as_mut() } #[allow(unused)] @@ -261,7 +263,7 @@ impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { where 'q: 't, { - info!("Start nested transaction"); + debug!("Start nested transaction"); DB::start_transaction(self.connection_mut(), 
true).await?; self.inner.txn_depth += 1; Ok(DbSessionTxn { @@ -276,7 +278,7 @@ impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { 'q: 't, { if self.inner.txn_depth == 0 { - info!("Start transaction"); + debug!("Start transaction"); DB::start_transaction(self.connection_mut(), false).await?; self.inner.txn_depth += 1; Ok(DbSessionTxn { @@ -301,8 +303,8 @@ pub(crate) struct DbSessionTxn<'a, DB: ExtDatabase> { } impl<'a, DB: ExtDatabase> DbSessionTxn<'a, DB> { - pub fn connection_mut(&mut self) -> &mut PoolConnection { - self.inner.connection_mut().unwrap() + pub fn connection_mut(&mut self) -> &mut Connection { + self.inner.connection_mut().unwrap().as_mut() } pub async fn commit(mut self) -> Result<(), Error> { @@ -310,7 +312,7 @@ impl<'a, DB: ExtDatabase> DbSessionTxn<'a, DB> { self.rollback = false; self.inner.txn_depth -= 1; let conn = self.connection_mut(); - info!("Commit transaction"); + debug!("Commit transaction"); DB::TransactionManager::commit(conn).await?; } Ok(()) @@ -321,7 +323,7 @@ impl<'a, DB: ExtDatabase> Drop for DbSessionTxn<'a, DB> { fn drop(&mut self) { if self.rollback { self.inner.txn_depth -= 1; - info!("Roll-back dropped nested transaction"); + debug!("Roll-back dropped nested transaction"); DB::TransactionManager::start_rollback(self.connection_mut()); } } diff --git a/askar-storage/src/backend/postgres/mod.rs b/askar-storage/src/backend/postgres/mod.rs index 868f0342..0077b684 100644 --- a/askar-storage/src/backend/postgres/mod.rs +++ b/askar-storage/src/backend/postgres/mod.rs @@ -130,7 +130,7 @@ impl Backend for PostgresBackend { ) .bind(&name) .bind(enc_key) - .fetch_optional(&mut conn) + .fetch_optional(conn.as_mut()) .await? { self.key_cache @@ -152,7 +152,7 @@ impl Backend for PostgresBackend { let mut conn = self.conn_pool.acquire().await?; Ok(sqlx::query("DELETE FROM profiles WHERE name=$1") .bind(&name) - .execute(&mut conn) + .execute(conn.as_mut()) .await? 
.rows_affected() != 0) @@ -169,7 +169,7 @@ impl Backend for PostgresBackend { let (store_key, store_key_ref) = unblock(move || method.resolve(pass_key)).await?; let store_key = Arc::new(store_key); let mut txn = self.conn_pool.begin().await?; - let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(&mut txn); + let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(txn.as_mut()); let mut upd_keys = BTreeMap::>::new(); while let Some(row) = rows.next().await { let row = row?; @@ -188,7 +188,7 @@ impl Backend for PostgresBackend { if sqlx::query("UPDATE profiles SET profile_key=$1 WHERE id=$2") .bind(key) .bind(pid) - .execute(&mut txn) + .execute(txn.as_mut()) .await? .rows_affected() != 1 @@ -198,7 +198,7 @@ impl Backend for PostgresBackend { } if sqlx::query("UPDATE config SET value=$1 WHERE name='key'") .bind(store_key_ref.into_uri()) - .execute(&mut txn) + .execute(txn.as_mut()) .await? .rows_affected() != 1 @@ -568,7 +568,7 @@ async fn resolve_profile_key( Ok((pid, key)) } else if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=$1") .bind(profile.as_str()) - .fetch_optional(conn) + .fetch_optional(conn.as_mut()) .await? 
{ let pid = row.try_get(0)?; diff --git a/askar-storage/src/backend/postgres/provision.rs b/askar-storage/src/backend/postgres/provision.rs index 336afd9b..19285afe 100644 --- a/askar-storage/src/backend/postgres/provision.rs +++ b/askar-storage/src/backend/postgres/provision.rs @@ -115,8 +115,9 @@ impl PostgresStoreOptions { let mut conn_opts = PgConnectOptions::from_str(self.uri.as_str())?; #[cfg(feature = "log")] { - conn_opts.log_statements(log::LevelFilter::Debug); - conn_opts.log_slow_statements(log::LevelFilter::Debug, Default::default()); + conn_opts = conn_opts + .log_statements(log::LevelFilter::Debug) + .log_slow_statements(log::LevelFilter::Debug, Default::default()); } PgPoolOptions::default() .acquire_timeout(self.connect_timeout) @@ -184,7 +185,7 @@ impl PostgresStoreOptions { "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='public' AND table_name='config'", ) - .fetch_one(&mut txn) + .fetch_one(txn.as_mut()) .await? == 1 { @@ -354,14 +355,14 @@ pub(crate) async fn init_db<'t>( .persistent(false) .bind(profile_name) .bind(store_key_ref) - .execute(&mut txn) + .execute(txn.as_mut()) .await?; let profile_id = sqlx::query_scalar("INSERT INTO profiles (name, profile_key) VALUES ($1, $2) RETURNING id") .bind(profile_name) .bind(enc_profile_key) - .fetch_one(&mut txn) + .fetch_one(txn.as_mut()) .await?; txn.commit().await?; @@ -399,7 +400,7 @@ pub(crate) async fn open_db( r#"SELECT name, value FROM config WHERE name IN ('default_profile', 'key', 'version')"#, ) - .fetch_all(&mut conn) + .fetch_all(conn.as_mut()) .await?; for row in config { match row.try_get(0)? 
{ @@ -444,7 +445,7 @@ pub(crate) async fn open_db( let row = sqlx::query("SELECT id, profile_key FROM profiles WHERE name = $1") .bind(&profile) - .fetch_one(&mut conn) + .fetch_one(conn.as_mut()) .await?; let profile_id = row.try_get(0)?; let profile_key = key_cache.load_key(row.try_get(1)?).await?; diff --git a/askar-storage/src/backend/postgres/test_db.rs b/askar-storage/src/backend/postgres/test_db.rs index 00295f16..318cde31 100644 --- a/askar-storage/src/backend/postgres/test_db.rs +++ b/askar-storage/src/backend/postgres/test_db.rs @@ -87,7 +87,9 @@ impl TestDB { mut inst: Option, ) -> Result<(), Error> { if let Some(lock_txn) = lock_txn.take() { - lock_txn.close().await?; + if let Err(e) = lock_txn.close().await { + warn!("Error closing lock transaction: {}", e); + } } if let Some(inst) = inst.take() { timeout(Duration::from_secs(30), inst.close()) diff --git a/askar-storage/src/backend/sqlite/mod.rs b/askar-storage/src/backend/sqlite/mod.rs index 32b2336a..69dbf4c2 100644 --- a/askar-storage/src/backend/sqlite/mod.rs +++ b/askar-storage/src/backend/sqlite/mod.rs @@ -17,8 +17,9 @@ use sqlx::{ use super::{ db_utils::{ decode_tags, decrypt_scan_batch, encode_profile_key, encode_tag_filter, expiry_timestamp, - extend_query, prepare_tags, random_profile_name, DbSession, DbSessionActive, DbSessionRef, - DbSessionTxn, EncScanEntry, ExtDatabase, QueryParams, QueryPrepare, PAGE_SIZE, + extend_query, prepare_tags, random_profile_name, Connection, DbSession, DbSessionActive, + DbSessionRef, DbSessionTxn, EncScanEntry, ExtDatabase, QueryParams, QueryPrepare, + PAGE_SIZE, }, Backend, BackendSession, }; @@ -121,7 +122,7 @@ impl Backend for SqliteBackend { sqlx::query("INSERT OR IGNORE INTO profiles (name, profile_key) VALUES (?1, ?2)") .bind(&name) .bind(enc_key) - .execute(&mut conn) + .execute(conn.as_mut()) .await?; if done.rows_affected() == 0 { return Err(err_msg!(Duplicate, "Duplicate profile name")); @@ -146,7 +147,7 @@ impl Backend for SqliteBackend { let mut 
conn = self.conn_pool.acquire().await?; Ok(sqlx::query("DELETE FROM profiles WHERE name=?") .bind(&name) - .execute(&mut conn) + .execute(conn.as_mut()) .await? .rows_affected() != 0) @@ -163,7 +164,7 @@ impl Backend for SqliteBackend { let (store_key, store_key_ref) = unblock(move || method.resolve(pass_key)).await?; let store_key = Arc::new(store_key); let mut txn = self.conn_pool.begin().await?; - let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(&mut txn); + let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(txn.as_mut()); let mut upd_keys = BTreeMap::>::new(); while let Some(row) = rows.next().await { let row = row?; @@ -182,7 +183,7 @@ impl Backend for SqliteBackend { if sqlx::query("UPDATE profiles SET profile_key=?1 WHERE id=?2") .bind(key) .bind(pid) - .execute(&mut txn) + .execute(txn.as_mut()) .await? .rows_affected() != 1 @@ -192,7 +193,7 @@ impl Backend for SqliteBackend { } if sqlx::query("UPDATE config SET value=?1 WHERE name='key'") .bind(store_key_ref.into_uri()) - .execute(&mut txn) + .execute(txn.as_mut()) .await? .rows_affected() != 1 @@ -487,18 +488,18 @@ impl BackendSession for DbSession { impl ExtDatabase for Sqlite { fn start_transaction( - conn: &mut PoolConnection, + conn: &mut Connection, nested: bool, ) -> BoxFuture<'_, std::result::Result<(), SqlxError>> { // FIXME - this is a horrible workaround because there is currently // no good way to start an immediate transaction with sqlx. Without this // adjustment, updates will run into 'database is locked' errors. 
Box::pin(async move { - ::TransactionManager::begin(&mut *conn).await?; + ::TransactionManager::begin(conn).await?; if !nested { // a no-op write transaction sqlx::query("DELETE FROM config WHERE 0") - .execute(&mut *conn) + .execute(conn) .await?; } Ok(()) @@ -532,7 +533,7 @@ async fn resolve_profile_key( Ok((pid, key)) } else if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=?1") .bind(profile.as_str()) - .fetch_optional(conn) + .fetch_optional(conn.as_mut()) .await? { let pid = row.try_get(0)?; diff --git a/askar-storage/src/backend/sqlite/provision.rs b/askar-storage/src/backend/sqlite/provision.rs index 54bb06bd..a75b1830 100644 --- a/askar-storage/src/backend/sqlite/provision.rs +++ b/askar-storage/src/backend/sqlite/provision.rs @@ -137,8 +137,9 @@ impl SqliteStoreOptions { .synchronous(self.synchronous); #[cfg(feature = "log")] { - conn_opts.log_statements(log::LevelFilter::Debug); - conn_opts.log_slow_statements(log::LevelFilter::Debug, Default::default()); + conn_opts = conn_opts + .log_statements(log::LevelFilter::Debug) + .log_slow_statements(log::LevelFilter::Debug, Default::default()); } SqlitePoolOptions::default() // maintains at least 1 connection. @@ -346,7 +347,7 @@ async fn init_db( .bind(profile_name) .bind(store_key_ref) .bind(enc_profile_key) - .execute(&mut conn) + .execute(conn.as_mut()) .await?; let mut key_cache = KeyCache::new(store_key); @@ -354,7 +355,7 @@ async fn init_db( let row = sqlx::query("SELECT id FROM profiles WHERE name = ?1") .persistent(false) .bind(profile_name) - .fetch_one(&mut conn) + .fetch_one(conn.as_mut()) .await?; key_cache.add_profile_mut(profile_name.to_string(), row.try_get(0)?, profile_key); @@ -377,7 +378,7 @@ async fn open_db( r#"SELECT name, value FROM config WHERE name IN ("default_profile", "key", "version")"#, ) - .fetch_all(&mut conn) + .fetch_all(conn.as_mut()) .await?; for row in config { match row.try_get(0)? 
{ @@ -422,7 +423,7 @@ async fn open_db( let row = sqlx::query("SELECT id, profile_key FROM profiles WHERE name = ?1") .bind(&profile) - .fetch_one(&mut conn) + .fetch_one(conn.as_mut()) .await?; let profile_id = row.try_get(0)?; let profile_key = key_cache.load_key(row.try_get(1)?).await?; diff --git a/askar-storage/src/protect/store_key.rs b/askar-storage/src/protect/store_key.rs index a70842e3..b365dbe3 100644 --- a/askar-storage/src/protect/store_key.rs +++ b/askar-storage/src/protect/store_key.rs @@ -33,7 +33,7 @@ pub fn generate_raw_store_key(seed: Option<&[u8]>) -> Result, E pub fn parse_raw_store_key(raw_key: &str) -> Result { ArrayKey::<::KeySize>::temp(|key| { let key_len = bs58::decode(raw_key) - .into(&mut *key) + .onto(key.as_mut_slice()) .map_err(|_| err_msg!(Input, "Error parsing raw key as base58 value"))?; if key_len != key.len() { Err(err_msg!(Input, "Incorrect length for encoded raw key")) diff --git a/src/ffi/store.rs b/src/ffi/store.rs index dfb124bd..9139216d 100644 --- a/src/ffi/store.rs +++ b/src/ffi/store.rs @@ -164,7 +164,7 @@ pub extern "C" fn askar_store_provision( let cb = EnsureCallback::new(move |result| match result { Ok(sid) => { - info!("Provisioned store {}", sid); + debug!("Provisioned store {}", sid); cb(cb_id, ErrorCode::Success, sid) } Err(err) => cb(cb_id, set_last_error(Some(err)), StoreHandle::invalid()), @@ -209,7 +209,7 @@ pub extern "C" fn askar_store_open( let cb = EnsureCallback::new(move |result| match result { Ok(sid) => { - info!("Opened store {}", sid); + debug!("Opened store {}", sid); cb(cb_id, ErrorCode::Success, sid) } Err(err) => cb(cb_id, set_last_error(Some(err)), StoreHandle::invalid()), @@ -398,7 +398,7 @@ pub extern "C" fn askar_store_close( FFI_SESSIONS.remove_all(handle).await?; FFI_SCANS.remove_all(handle).await?; store.close().await?; - info!("Closed store {}", handle); + debug!("Closed store {}", handle); Ok(()) }.await; if let Some(cb) = cb { @@ -432,7 +432,7 @@ pub extern "C" fn askar_scan_start( 
let cb = EnsureCallback::new(move |result: Result| match result { Ok(scan_handle) => { - info!("Started scan {} on store {}", scan_handle, handle); + debug!("Started scan {} on store {}", scan_handle, handle); cb(cb_id, ErrorCode::Success, scan_handle) } Err(err) => cb(cb_id, set_last_error(Some(err)), ScanHandle::invalid()), @@ -489,9 +489,9 @@ pub extern "C" fn askar_scan_free(handle: ScanHandle) -> ErrorCode { // the Scan may have been removed due to the Store being closed if let Some(scan) = FFI_SCANS.remove(handle).await { scan.ok(); - info!("Closed scan {}", handle); + debug!("Closed scan {}", handle); } else { - info!("Scan not found for closing: {}", handle); + debug!("Scan not found for closing: {}", handle); } }); Ok(ErrorCode::Success) @@ -513,7 +513,7 @@ pub extern "C" fn askar_session_start( let cb = EnsureCallback::new(move |result: Result| match result { Ok(sess_handle) => { - info!("Started session {} on store {} (txn: {})", sess_handle, handle, as_transaction != 0); + debug!("Started session {} on store {} (txn: {})", sess_handle, handle, as_transaction != 0); cb(cb_id, ErrorCode::Success, sess_handle) } Err(err) => cb(cb_id, set_last_error(Some(err)), SessionHandle::invalid()), @@ -985,9 +985,9 @@ pub extern "C" fn askar_session_close( } else { session.commit().await?; } - info!("Closed session {}", handle); + debug!("Closed session {}", handle); } else { - info!("Session not found for closing: {}", handle); + debug!("Session not found for closing: {}", handle); } Ok(()) }.await; diff --git a/wrappers/python/aries_askar/types.py b/wrappers/python/aries_askar/types.py index 03bc688d..ac212c59 100644 --- a/wrappers/python/aries_askar/types.py +++ b/wrappers/python/aries_askar/types.py @@ -18,6 +18,7 @@ class KeyAlg(Enum): X25519 = "x25519" K256 = "k256" P256 = "p256" + P384 = "p384" @classmethod def from_key_alg(cls, alg: str) -> Optional["KeyAlg"]: diff --git a/wrappers/python/tests/test_keys.py b/wrappers/python/tests/test_keys.py index 
7c8c6ada..49a79f9e 100644 --- a/wrappers/python/tests/test_keys.py +++ b/wrappers/python/tests/test_keys.py @@ -1,5 +1,7 @@ import json +import pytest + from aries_askar import ( KeyAlg, Key, @@ -77,3 +79,28 @@ def test_ed25519(): jwk = json.loads(key.get_jwk_secret()) assert jwk["kty"] == "OKP" assert jwk["crv"] == "Ed25519" + + +@pytest.mark.parametrize( + "key_alg", + [KeyAlg.K256, KeyAlg.P256, KeyAlg.P384], +) +def test_ec_curves(key_alg: KeyAlg): + key = Key.generate(key_alg) + assert key.algorithm == key_alg + message = b"test message" + sig = key.sign_message(message) + assert key.verify_signature(message, sig) + + jwk = json.loads(key.get_jwk_public()) + assert jwk["kty"] == "EC" + assert jwk["crv"] + assert jwk["x"] + assert jwk["y"] + + jwk = json.loads(key.get_jwk_secret()) + assert jwk["kty"] == "EC" + assert jwk["crv"] + assert jwk["x"] + assert jwk["y"] + assert jwk["d"]