diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e361442b..a9222103 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -39,6 +39,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: check + args: --workspace - name: Cargo fmt uses: actions-rs/cargo@v1 @@ -56,6 +57,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: test + args: --workspace build-manylinux: name: Build Library diff --git a/Cargo.toml b/Cargo.toml index 60d549de..4c81bddf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,13 @@ +[workspace] +members = ["askar-crypto"] + [package] name = "aries-askar" -version = "0.1.3" +version = "0.2.0-pre.2" authors = ["Hyperledger Aries Contributors "] edition = "2018" description = "Hyperledger Aries Askar secure storage" -license = "MIT/Apache-2.0" +license = "MIT OR Apache-2.0" readme = "README.md" repository = "https://github.com/hyperledger/aries-askar/" categories = ["cryptography", "database"] @@ -16,75 +19,62 @@ path = "src/lib.rs" crate-type = ["staticlib", "rlib", "cdylib"] [package.metadata.docs.rs] -features = ["all"] +features = ["all_backends"] no-default-features = true rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["all", "ffi", "logger"] -all = ["any", "postgres", "sqlite"] +default = ["all_backends", "ffi", "logger"] +all_backends = ["any", "postgres", "sqlite"] any = [] ffi = ["any", "ffi-support", "logger"] -indy_compat = ["sqlx", "sqlx/sqlite"] jemalloc = ["jemallocator"] logger = ["env_logger", "log"] postgres = ["sqlx", "sqlx/postgres", "sqlx/tls"] sqlite = ["num_cpus", "sqlx", "sqlx/sqlite"] pg_test = ["postgres"] +[dev-dependencies] +hex-literal = "0.3" + [dependencies] -aead = "0.3" -async-global-executor = { version = "1.4", features = ["async-io"] } +askar-crypto = { version = "0.2.0-pre.2", path = "./askar-crypto", features = ["argon2", "std"] } async-mutex = "1.4" async-stream = "0.3" -async-std = "1.7" # temporary addition to encourage common dependencies with sqlx -blocking = "1.0" -chacha20poly1305 = { version = "0.7", default-features = false, features = ["alloc", "chacha20"] } +bs58 = "0.4" chrono = "0.4" +digest = "0.9" env_logger = { version = "0.7", optional = true } ffi-support = { version = "0.4", optional = true } futures-lite = "1.7" hex = "0.4" hmac = "0.10" +indy-wql = "0.4" itertools = "0.9" jemallocator = { version = "0.3", optional = true } log = { version = "0.4", optional = true } num_cpus = { version = "1.0", optional = true } once_cell = "1.5" percent-encoding = "2.0" -rand = "0.7" -rmp-serde = "0.14" -rust-argon2 = "0.8" serde = { version = "1.0", features = ["derive"] } serde_bytes = "0.11" +serde_cbor = "0.11" serde_json = "1.0" sha2 = "0.9" +tokio = { version = "1.5", features = ["time"] } url = { version = "2.1", default-features = false } uuid = { version = "0.8", features = ["v4"] } -zeroize = { version = "1.1.0", features = ["zeroize_derive"] } - -[dependencies.indy-utils] -version = "=0.3.9" -default-features = false -features = ["ed25519", "pack", "serde_support", "wql"] +zeroize = "1.1" [dependencies.sqlx] -version = "0.5.1" +version = "=0.5.1" default-features = false -features = ["chrono", "runtime-async-std-rustls"] +features = ["chrono", "runtime-tokio-rustls"] optional = true -# [target.'cfg(target_os = "macos")'.dependencies] -# keychain-services = { path = "../keychain-services.rs" } -# keychain-services = { git = "https://github.com/iqlusioninc/keychain-services.rs", rev = "7410fb8baf4ecdf04cdcd7d06d02658f4f158d77" } - [profile.release] lto = true 
codegen-units = 1 [[test]] name = "backends" - -[[test]] -name = "faber" -required-features = ["indy_compat"] diff --git a/README.md b/README.md index 2c171687..d4e39e2b 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,15 @@ # aries-askar -Aries-Askar, secure storage designed for Hyperledger Aries agents. +[![Unit Tests](https://github.com/hyperledger/aries-askar/workflows/Aries-Askar/badge.svg)](https://github.com/hyperledger/aries-askar/actions) +[![Rust Crate](https://img.shields.io/crates/v/aries-askar.svg)](https://crates.io/crates/aries-askar) +[![Rust Documentation](https://docs.rs/aries-askar/badge.svg)](https://docs.rs/aries-askar) +[![Python Package](https://img.shields.io/pypi/v/aries_askar)](https://pypi.org/project/aries-askar/) + +Secure storage and cryptographic support designed for Hyperledger Aries agents. ## Credit -The initial implementation of `aries-askar` was developed by the Verifiable Organizations Network (VON) team based at the Province of British Columbia. The database structure derives largely from the wallet design within [Hyperledger Indy-SDK](https://github.com/hyperledger/indy-sdk). To learn more about VON and what's happening with decentralized identity in British Columbia, please go to [https://vonx.io](https://vonx.io). +The initial implementation of `aries-askar` was developed by the Verifiable Organizations Network (VON) team based at the Province of British Columbia, and inspired by the wallet design within [Hyperledger Indy-SDK](https://github.com/hyperledger/indy-sdk). To learn more about VON and what's happening with decentralized identity in British Columbia, please go to [https://vonx.io](https://vonx.io). ## Contributing diff --git a/askar-crypto/Cargo.toml b/askar-crypto/Cargo.toml new file mode 100644 index 00000000..ad82df92 --- /dev/null +++ b/askar-crypto/Cargo.toml @@ -0,0 +1,70 @@ +[package] +name = "askar-crypto" +version = "0.2.0-pre.2" +authors = ["Hyperledger Aries Contributors "] +edition = "2018" +description = "Hyperledger Aries Askar cryptography" +license = "MIT OR Apache-2.0" +readme = "README.md" +repository = "https://github.com/hyperledger/aries-askar/" +categories = ["cryptography", "no-std"] +keywords = ["hyperledger", "aries", "didcomm", "ssi"] + +[package.metadata.docs.rs] +features = ["argon2", "std"] +rustdoc-args = ["--cfg", "docsrs"] + +[features] +default = ["alloc", "any_key", "all_keys", "crypto_box"] +alloc = [] +std = ["alloc"] +all_keys = ["aes", "bls", "chacha", "ec_curves", "ed25519"] +any_key = ["alloc"] +aes = ["aes-core", "aes-gcm", "block-modes", "hmac"] +bls = ["bls12_381", "hkdf"] +chacha = ["chacha20poly1305"] +crypto_box = ["alloc", "crypto_box_rs", "ed25519"] +ec_curves = ["k256", "p256"] +ed25519 = ["curve25519-dalek", "ed25519-dalek", "x25519-dalek"] + +[dev-dependencies] +base64 = { version = "0.13", default-features = false, features = ["alloc"] } +criterion = "0.3" +hex-literal = "0.3" +serde_cbor = "0.11" + +[[bench]] +name = "enc" +harness = false + +[[bench]] +name = "kdf" +harness = false + +[dependencies] +aead = "0.3" +aes-core = { package = "aes", version = "0.6", default-features = false, optional = true } +aes-gcm = { version = "0.8", default-features = false, features = ["aes"], optional = true } +argon2 = { version = "0.1", default-features = false, features = ["password-hash"], optional = true } +base64 = { version = "0.13", default-features = false } +blake2 = { version = "0.9", default-features = false } +block-modes = { version = "0.7", default-features = false, optional = true } 
+bls12_381 = { version = "0.4", default-features = false, features = ["groups"], optional = true } +chacha20 = { version = "0.6" } # should match chacha20poly1305 +chacha20poly1305 = { version = "0.7", default-features = false, features = ["chacha20", "xchacha20poly1305"], optional = true } +crypto_box_rs = { package = "crypto_box", version = "0.5", default-features = false, features = ["u64_backend"], optional = true } +curve25519-dalek = { version = "3.1", default-features = false, features = ["u64_backend"], optional = true } +ed25519-dalek = { version = "1.0", default-features = false, features = ["u64_backend"], optional = true } +digest = "0.9" +group = "0.9" +hkdf = { version = "0.11", optional = true } +hmac = { version = "0.11", optional = true } +k256 = { version = "0.8", default-features = false, features = ["arithmetic", "ecdsa", "ecdh", "sha256", "zeroize"], optional = true } +p256 = { version = "0.8", default-features = false, features = ["arithmetic", "ecdsa", "ecdh", "zeroize"], optional = true } +rand = { version = "0.8", default-features = false, features = ["getrandom"] } +serde = { version = "1.0", default-features = false, features = ["derive"] } +serde_json = { version = "1.0", default-features = false } +subtle = "2.4" +sha2 = { version = "0.9", default-features = false } +x25519-dalek = { version = "1.1", default-features = false, features = ["u64_backend"], optional = true } +zeroize = { version = "1.1.0", features = ["zeroize_derive"] } diff --git a/askar-crypto/README.md b/askar-crypto/README.md new file mode 100644 index 00000000..686b5ece --- /dev/null +++ b/askar-crypto/README.md @@ -0,0 +1,31 @@ +# askar-crypto + +[![Rust Crate](https://img.shields.io/crates/v/askar-crypto.svg)](https://crates.io/crates/askar-crypto) +[![Rust Documentation](https://docs.rs/askar-crypto/badge.svg)](https://docs.rs/askar-crypto) + +The `askar-crypto` crate provides the basic key representations and cryptographic operations used by [`aries-askar`](https://github.com/hyperledger/aries-askar). + +## Supported Key Types + +| Key Type | Feature | Operations | Notes | +| -------------------- | --------- | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------- | +| AES-GCM | `aes` | AEAD encryption
<br>JWK export | A128GCM and A256GCM |
+| AES-CBC-HMAC-SHA2 | `aes` | AEAD encryption<br>JWK export | A128CBC-HS256 and A256CBC-HS512 |
+| (X)ChaCha20-Poly1305 | `chacha` | AEAD encryption<br>JWK export | aka C20P, XC20P |
+| BLS12-381 | `bls` | [`bls-signature`](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04)-compatible key generation<br>JWK import/export | G1, G2, and G1G2 key types |
+| Ed25519 | `ed25519` | EdDSA signatures<br>JWK import/export | |
+| X25519 | `ed25519` | DH key exchange<br>JWK import/export | |
+| K-256 | `k256` | ECDSA signatures<br>DH key exchange<br>JWK import/export | aka secp256k1 |
+| P-256 | `p256` | ECDSA signatures<br>DH key exchange<br>
JWK import/export | aka nist256p1, secp256r1 | + +## 'Any' Key support + +The `any_key` feature (which depends on `alloc`) provides a generic interface for creating and working with any supported key type. + +## JOSE Authenticated Encryption + +This crate provides implementations of the [ECDH-ES](https://tools.ietf.org/html/rfc7518#section-4.6) and [ECDH-1PU (draft 4)](https://tools.ietf.org/html/draft-madden-jose-ecdh-1pu-04) key agreement operations, for use in deriving the CEK or key wrapping key when producing or consuming JWE envelopes using these protection algorithms. + +## no-std + +This crate supports the optional `alloc` feature, gating types and operations that depend on a global allocator. The `std` feature depends on `alloc`, and adds support for `std::error::Error`. diff --git a/askar-crypto/benches/enc.rs b/askar-crypto/benches/enc.rs new file mode 100644 index 00000000..d56838b2 --- /dev/null +++ b/askar-crypto/benches/enc.rs @@ -0,0 +1,66 @@ +#[macro_use] +extern crate criterion; + +#[macro_use] +extern crate hex_literal; + +use askar_crypto::{ + alg::{ + chacha20::{Chacha20Key, C20P}, + AnyKey, AnyKeyCreate, Chacha20Types, KeyAlg, + }, + buffer::{SecretBytes, WriteBuffer, Writer}, + encrypt::{KeyAeadInPlace, KeyAeadMeta}, + random::fill_random, + repr::KeySecretBytes, +}; + +use criterion::{black_box, Criterion}; + +fn criterion_benchmark(c: &mut Criterion) { + { + let message = b"test message for encrypting"; + + let key = &hex!("451b5b8e8725321541954997781de51f4142e4a56bab68d24f6a6b92615de5ee"); + + c.bench_function(&format!("chacha20-poly1305 encrypt"), move |b| { + b.iter(|| { + let key = Chacha20Key::::from_secret_bytes(&key[..]).unwrap(); + let mut buffer = [0u8; 255]; + buffer[0..message.len()].copy_from_slice(black_box(&message[..])); + let nonce = Chacha20Key::::random_nonce(); + let mut writer = Writer::from_slice_position(&mut buffer, message.len()); + key.encrypt_in_place(&mut writer, &nonce, &[]).unwrap(); + }) + }); + c.bench_function(&format!("chacha20-poly1305 encrypt alloc"), move |b| { + b.iter(|| { + let key = Chacha20Key::::from_secret_bytes(&key[..]).unwrap(); + let mut buffer = SecretBytes::with_capacity(255); + buffer.buffer_write(black_box(&message[..])).unwrap(); + let nonce = Chacha20Key::::random_nonce(); + key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); + }) + }); + c.bench_function(&format!("chacha20-poly1305 encrypt as any"), move |b| { + b.iter(|| { + let key = Box::::from_secret_bytes( + KeyAlg::Chacha20(Chacha20Types::C20P), + &key[..], + ) + .unwrap(); + let mut buffer = [0u8; 255]; + buffer[0..message.len()].copy_from_slice(black_box(&message[..])); + let mut nonce = [0u8; 255]; + let nonce_len = key.aead_params().nonce_length; + fill_random(&mut nonce[..nonce_len]); + let mut writer = Writer::from_slice_position(&mut buffer, message.len()); + key.encrypt_in_place(&mut writer, &nonce[..nonce_len], &[]) + .unwrap(); + }) + }); + } +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/askar-crypto/benches/kdf.rs b/askar-crypto/benches/kdf.rs new file mode 100644 index 00000000..861869f9 --- /dev/null +++ b/askar-crypto/benches/kdf.rs @@ -0,0 +1,32 @@ +#[macro_use] +extern crate criterion; + +use askar_crypto::kdf::concat::{ConcatKDF, ConcatKDFParams}; +use sha2::Sha256; + +use criterion::{black_box, Criterion}; + +fn criterion_benchmark(c: &mut Criterion) { + { + let message = b"test message for encrypting"; + + let params = ConcatKDFParams { + alg: b"A256GCM", + apu: b"sender name", + apv: 
b"recipient name", + pub_info: &(256u32).to_be_bytes(), + prv_info: &[], + }; + + c.bench_function(&format!("concat kdf sha256"), move |b| { + b.iter(|| { + let mut output = [0u8; 32]; + ConcatKDF::::derive_key(black_box(message), black_box(params), &mut output) + .unwrap(); + }) + }); + } +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/askar-crypto/src/alg/aes.rs b/askar-crypto/src/alg/aes.rs new file mode 100644 index 00000000..a53db408 --- /dev/null +++ b/askar-crypto/src/alg/aes.rs @@ -0,0 +1,577 @@ +//! AES-GCM key representations with AEAD support + +use core::{ + fmt::{self, Debug, Formatter}, + marker::PhantomData, +}; + +use aead::{generic_array::ArrayLength, AeadInPlace, NewAead}; +use aes_gcm::{Aes128Gcm, Aes256Gcm}; +use block_modes::{ + block_padding::Pkcs7, + cipher::{BlockCipher, NewBlockCipher}, + BlockMode, Cbc, +}; +use digest::{BlockInput, FixedOutput, Reset, Update}; +use hmac::{Hmac, Mac, NewMac}; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroize; + +use super::{AesTypes, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, ResizeBuffer, Writer}, + encrypt::{KeyAeadInPlace, KeyAeadMeta, KeyAeadParams}, + error::Error, + generic_array::{ + typenum::{self, Unsigned}, + GenericArray, + }, + jwk::{JwkEncoder, ToJwk}, + kdf::{FromKeyDerivation, FromKeyExchange, KeyDerivation, KeyExchange}, + random::fill_random_deterministic, + repr::{KeyGen, KeyMeta, KeySecretBytes, Seed, SeedMethod}, +}; + +/// The 'kty' value of a symmetric key JWK +pub static JWK_KEY_TYPE: &'static str = "oct"; + +/// Trait implemented by supported AES authenticated encryption algorithms +pub trait AesType: 'static { + /// The AEAD implementation + type Aead: AesAead; + + /// The associated algorithm type + const ALG_TYPE: AesTypes; + /// The associated JWK algorithm name + const JWK_ALG: &'static str; +} + +type KeyType = ArrayKey<<::Aead as AesAead>::KeySize>; + +type NonceSize = <::Aead as AesAead>::NonceSize; + +type TagSize = <::Aead as AesAead>::TagSize; + +/// An AES-GCM symmetric encryption key +#[derive(Serialize, Deserialize, Zeroize)] +#[serde( + transparent, + bound( + deserialize = "KeyType: for<'a> Deserialize<'a>", + serialize = "KeyType: Serialize" + ) +)] +// SECURITY: ArrayKey is zeroized on drop +pub struct AesKey(KeyType); + +impl AesKey { + /// The length of the secret key in bytes + pub const KEY_LENGTH: usize = KeyType::::SIZE; + /// The length of the AEAD encryption nonce + pub const NONCE_LENGTH: usize = NonceSize::::USIZE; + /// The length of the AEAD encryption tag + pub const TAG_LENGTH: usize = TagSize::::USIZE; +} + +impl Clone for AesKey { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Debug for AesKey { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("AesKey") + .field("alg", &T::JWK_ALG) + .field("key", &self.0) + .finish() + } +} + +impl PartialEq for AesKey { + fn eq(&self, other: &Self) -> bool { + other.0 == self.0 + } +} + +impl Eq for AesKey {} + +impl HasKeyAlg for AesKey { + fn algorithm(&self) -> KeyAlg { + KeyAlg::Aes(T::ALG_TYPE) + } +} + +impl KeyMeta for AesKey { + type KeySize = ::KeySize; +} + +impl KeyGen for AesKey { + fn generate() -> Result { + Ok(AesKey(KeyType::::random())) + } + + fn from_seed(seed: Seed<'_>) -> Result + where + Self: Sized, + { + match seed { + Seed::Bytes(ikm, SeedMethod::Preferred) | Seed::Bytes(ikm, SeedMethod::RandomDet) => { + Ok(Self(KeyType::::try_new_with(|arr| { + fill_random_deterministic(ikm, arr) + })?)) + } + 
#[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported)), + } + } +} + +impl KeySecretBytes for AesKey { + fn from_secret_bytes(key: &[u8]) -> Result { + if key.len() != KeyType::::SIZE { + return Err(err_msg!(InvalidKeyData)); + } + Ok(Self(KeyType::::from_slice(key))) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + f(Some(self.0.as_ref())) + } +} + +impl FromKeyDerivation for AesKey { + fn from_key_derivation(mut derive: D) -> Result + where + Self: Sized, + { + Ok(Self(KeyType::::try_new_with(|arr| { + derive.derive_key_bytes(arr) + })?)) + } +} + +impl KeyAeadMeta for AesKey { + type NonceSize = NonceSize; + type TagSize = TagSize; +} + +impl KeyAeadInPlace for AesKey { + /// Encrypt a secret value in place, appending the verification tag + fn encrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + if nonce.len() != NonceSize::::USIZE { + return Err(err_msg!(InvalidNonce)); + } + T::Aead::aes_encrypt_in_place( + self.0.as_ref(), + buffer, + GenericArray::from_slice(nonce), + aad, + ) + } + + /// Decrypt an encrypted (verification tag appended) value in place + fn decrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + if nonce.len() != NonceSize::::USIZE { + return Err(err_msg!(InvalidNonce)); + } + T::Aead::aes_decrypt_in_place( + self.0.as_ref(), + buffer, + GenericArray::from_slice(nonce), + aad, + ) + } + + fn aead_params(&self) -> KeyAeadParams { + KeyAeadParams { + nonce_length: NonceSize::::USIZE, + tag_length: TagSize::::USIZE, + } + } +} + +impl ToJwk for AesKey { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + if enc.is_public() { + return Err(err_msg!(Unsupported, "Cannot export as a public key")); + } + if !enc.is_thumbprint() { + enc.add_str("alg", T::JWK_ALG)?; + } + enc.add_as_base64("k", self.0.as_ref())?; + enc.add_str("kty", JWK_KEY_TYPE)?; + Ok(()) + } +} + +// for direct key agreement (not used currently) +impl FromKeyExchange for AesKey +where + Lhs: KeyExchange + ?Sized, + Rhs: ?Sized, + T: AesType, +{ + fn from_key_exchange(lhs: &Lhs, rhs: &Rhs) -> Result { + Ok(Self(KeyType::::try_new_with(|arr| { + let mut buf = Writer::from_slice(arr); + lhs.write_key_exchange(rhs, &mut buf)?; + if buf.position() != Self::KEY_LENGTH { + return Err(err_msg!(Usage, "Invalid length for key exchange output")); + } + Ok(()) + })?)) + } +} + +/// 128 bit AES-GCM +#[derive(Debug)] +pub struct A128Gcm; + +impl AesType for A128Gcm { + type Aead = Aes128Gcm; + + const ALG_TYPE: AesTypes = AesTypes::A128Gcm; + const JWK_ALG: &'static str = "A128GCM"; +} + +/// 256 bit AES-GCM +#[derive(Debug)] +pub struct A256Gcm; + +impl AesType for A256Gcm { + type Aead = Aes256Gcm; + + const ALG_TYPE: AesTypes = AesTypes::A256Gcm; + const JWK_ALG: &'static str = "A256GCM"; +} + +/// Specialized trait for performing AEAD encryption +pub trait AesAead { + /// The size of the associated key + type KeySize: ArrayLength; + /// The size of the nonce + type NonceSize: ArrayLength; + /// The size of the authentication tag + type TagSize: ArrayLength; + + /// Perform AEAD encryption + fn aes_encrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + key: &GenericArray, + aad: &[u8], + ) -> Result<(), Error>; + + /// Perform AEAD decryption + fn aes_decrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + nonce: &GenericArray, + aad: &[u8], + ) -> Result<(), Error>; + + /// Calculate 
padding length for a plaintext length + fn aes_padding_length(len: usize) -> usize; +} + +// Generic implementation for AesGcm +impl AesAead for T +where + T: NewAead + AeadInPlace, +{ + type KeySize = T::KeySize; + type NonceSize = T::NonceSize; + type TagSize = T::TagSize; + + fn aes_encrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + nonce: &GenericArray, + aad: &[u8], + ) -> Result<(), Error> { + let enc = ::new(key); + let tag = enc + .encrypt_in_place_detached(nonce, aad, buffer.as_mut()) + .map_err(|_| err_msg!(Encryption, "AEAD encryption error"))?; + buffer.buffer_write(&tag[..])?; + Ok(()) + } + + fn aes_decrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + nonce: &GenericArray, + aad: &[u8], + ) -> Result<(), Error> { + let buf_len = buffer.as_ref().len(); + if buf_len < Self::TagSize::USIZE { + return Err(err_msg!(Encryption, "Invalid size for encrypted data")); + } + let tag_start = buf_len - Self::TagSize::USIZE; + let mut tag = GenericArray::default(); + tag.clone_from_slice(&buffer.as_ref()[tag_start..]); + let enc = ::new(key); + enc.decrypt_in_place_detached(nonce, aad, &mut buffer.as_mut()[..tag_start], &tag) + .map_err(|_| err_msg!(Encryption, "AEAD decryption error"))?; + buffer.buffer_resize(tag_start)?; + Ok(()) + } + + fn aes_padding_length(_len: usize) -> usize { + 0 + } +} + +/// 128 bit AES-CBC with HMAC-256 +#[derive(Debug)] +pub struct A128CbcHs256; + +impl AesType for A128CbcHs256 { + type Aead = AesCbcHmac; + + const ALG_TYPE: AesTypes = AesTypes::A128CbcHs256; + const JWK_ALG: &'static str = "A128CBC-HS256"; +} + +/// 256 bit AES-CBC with HMAC-512 +#[derive(Debug)] +pub struct A256CbcHs512; + +impl AesType for A256CbcHs512 { + type Aead = AesCbcHmac; + + const ALG_TYPE: AesTypes = AesTypes::A256CbcHs512; + const JWK_ALG: &'static str = "A256CBC-HS512"; +} + +/// AES-CBC-HMAC implementation +#[derive(Debug)] +pub struct AesCbcHmac(PhantomData<(C, D)>); + +// Specific implementation, cannot implement normal AeadInPlace trait +impl AesAead for AesCbcHmac +where + C: BlockCipher + NewBlockCipher, + D: Update + BlockInput + FixedOutput + Reset + Default + Clone, + C::KeySize: core::ops::Shl, + >::Output: ArrayLength, +{ + type KeySize = typenum::Double; + type NonceSize = C::BlockSize; + type TagSize = C::KeySize; + + fn aes_encrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + nonce: &GenericArray, + aad: &[u8], + ) -> Result<(), Error> { + // this should be optimized unless it matters + if Self::TagSize::USIZE > D::OutputSize::USIZE { + return Err(err_msg!( + Encryption, + "AES-CBC-HMAC tag size exceeds maximum supported" + )); + } + + if aad.len() as u64 > u64::MAX / 8 { + return Err(err_msg!( + Encryption, + "AES-CBC-HMAC aad size exceeds maximum supported" + )); + } + + let msg_len = buffer.as_ref().len(); + let pad_len = Self::aes_padding_length(msg_len); + buffer.buffer_extend(pad_len + Self::TagSize::USIZE)?; + let enc_key = GenericArray::from_slice(&key[C::KeySize::USIZE..]); + Cbc::::new_fix(enc_key, nonce) + .encrypt(buffer.as_mut(), msg_len) + .map_err(|_| err_msg!(Encryption, "AES-CBC encryption error"))?; + let ctext_end = msg_len + pad_len; + + let mut hmac = Hmac::::new_from_slice(&key[..C::KeySize::USIZE]) + .expect("Incompatible HMAC key length"); + hmac.update(aad); + hmac.update(nonce.as_ref()); + hmac.update(&buffer.as_ref()[..ctext_end]); + hmac.update(&((aad.len() as u64) * 8).to_be_bytes()); + let mac = hmac.finalize().into_bytes(); + 
buffer.as_mut()[ctext_end..].copy_from_slice(&mac[..Self::TagSize::USIZE]); + + Ok(()) + } + + fn aes_decrypt_in_place( + key: &GenericArray, + buffer: &mut dyn ResizeBuffer, + nonce: &GenericArray, + aad: &[u8], + ) -> Result<(), Error> { + let buf_len = buffer.as_ref().len(); + if buf_len < Self::TagSize::USIZE { + return Err(err_msg!(Encryption, "Invalid size for encrypted data")); + } + let ctext_end = buf_len - Self::TagSize::USIZE; + let tag = GenericArray::::from_slice(&buffer.as_ref()[ctext_end..]); + + let mut hmac = Hmac::::new_from_slice(&key[..C::KeySize::USIZE]) + .expect("Incompatible HMAC key length"); + hmac.update(aad); + hmac.update(nonce.as_ref()); + hmac.update(&buffer.as_ref()[..ctext_end]); + hmac.update(&(aad.len() as u64).to_be_bytes()); + let mac = hmac.finalize().into_bytes(); + let tag_match = + subtle::ConstantTimeEq::ct_eq(tag.as_ref(), &mac[..Self::TagSize::USIZE]).unwrap_u8(); + + let enc_key = GenericArray::from_slice(&key[C::KeySize::USIZE..]); + let dec_len = Cbc::::new_fix(enc_key, nonce) + .decrypt(&mut buffer.as_mut()[..ctext_end]) + .map_err(|_| err_msg!(Encryption, "AES-CBC decryption error"))? + .len(); + buffer.buffer_resize(dec_len)?; + + if tag_match != 1 { + Err(err_msg!(Encryption, "AEAD decryption error")) + } else { + Ok(()) + } + } + + #[inline] + fn aes_padding_length(len: usize) -> usize { + Self::NonceSize::USIZE - (len % Self::NonceSize::USIZE) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::buffer::SecretBytes; + use crate::repr::ToSecretBytes; + use std::string::ToString; + + #[test] + fn encrypt_round_trip() { + fn test_encrypt() { + let input = b"hello"; + let key = AesKey::::generate().unwrap(); + let mut buffer = SecretBytes::from_slice(input); + let pad_len = T::Aead::aes_padding_length(input.len()); + let nonce = AesKey::::random_nonce(); + key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); + let enc_len = buffer.len(); + assert_eq!(enc_len, input.len() + pad_len + AesKey::::TAG_LENGTH); + assert_ne!(&buffer[..], input); + let mut dec = buffer.clone(); + key.decrypt_in_place(&mut dec, &nonce, &[]).unwrap(); + assert_eq!(&dec[..], input); + + // test tag validation + buffer.as_mut()[enc_len - 1] = buffer.as_mut()[enc_len - 1].wrapping_add(1); + assert!(key.decrypt_in_place(&mut buffer, &nonce, &[]).is_err()); + } + test_encrypt::(); + test_encrypt::(); + test_encrypt::(); + test_encrypt::(); + } + + #[test] + fn serialize_round_trip() { + fn test_serialize() { + let key = AesKey::::generate().unwrap(); + let sk = key.to_secret_bytes().unwrap(); + let bytes = serde_cbor::to_vec(&key).unwrap(); + let deser: &[u8] = serde_cbor::from_slice(bytes.as_ref()).unwrap(); + assert_eq!(deser, sk.as_ref()); + } + test_serialize::(); + test_serialize::(); + test_serialize::(); + test_serialize::(); + } + + #[test] + fn encrypt_expected_cbc_hmac_128() { + let key_data = &hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"); + let input = b"A cipher system must not be required to be secret, and it must be able to fall into the hands of the enemy without inconvenience"; + let nonce = &hex!("1af38c2dc2b96ffdd86694092341bc04"); + let aad = b"The second principle of Auguste Kerckhoffs"; + let key = AesKey::::from_secret_bytes(key_data).unwrap(); + let mut buffer = SecretBytes::from_slice(input); + key.encrypt_in_place(&mut buffer, &nonce[..], &aad[..]) + .unwrap(); + + assert_eq!( + buffer.as_hex().to_string(), + "c80edfa32ddf39d5ef00c0b468834279a2e46a1b8049f792f76bfe54b903a9c9\ + 
a94ac9b47ad2655c5f10f9aef71427e2fc6f9b3f399a221489f16362c7032336\ + 09d45ac69864e3321cf82935ac4096c86e133314c54019e8ca7980dfa4b9cf1b\ + 384c486f3a54c51078158ee5d79de59fbd34d848b3d69550a67646344427ade5\ + 4b8851ffb598f7f80074b9473c82e2db\ + 652c3fa36b0a7c5b3219fab3a30bc1c4" + ) + } + + #[test] + fn encrypt_expected_cbc_hmac_256() { + let key_data = &hex!( + "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f + 202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f" + ); + let input = b"A cipher system must not be required to be secret, and it must be able to fall into the hands of the enemy without inconvenience"; + let nonce = &hex!("1af38c2dc2b96ffdd86694092341bc04"); + let aad = b"The second principle of Auguste Kerckhoffs"; + let key = AesKey::::from_secret_bytes(key_data).unwrap(); + let mut buffer = SecretBytes::from_slice(input); + key.encrypt_in_place(&mut buffer, &nonce[..], &aad[..]) + .unwrap(); + + assert_eq!( + buffer.as_hex().to_string(), + "4affaaadb78c31c5da4b1b590d10ffbd3dd8d5d302423526912da037ecbcc7bd\ + 822c301dd67c373bccb584ad3e9279c2e6d12a1374b77f077553df829410446b\ + 36ebd97066296ae6427ea75c2e0846a11a09ccf5370dc80bfecbad28c73f09b3\ + a3b75e662a2594410ae496b2e2e6609e31e6e02cc837f053d21f37ff4f51950b\ + be2638d09dd7a4930930806d0703b1f6\ + 4dd3b4c088a7f45c216839645b2012bf2e6269a8c56a816dbc1b267761955bc5" + ) + } + + #[test] + fn encrypt_expected_cbc_hmac_1pu() { + let key_data = &hex!( + "fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0 + dfdedddcdbdad9d8d7d6d5d4d3d2d1d0cfcecdcccbcac9c8c7c6c5c4c3c2c1c0" + ); + let nonce = &hex!("000102030405060708090a0b0c0d0e0f"); + let protected = "{\"alg\":\"ECDH-1PU+A128KW\",\"enc\":\"A256CBC-HS512\",\ + \"apu\":\"QWxpY2U\",\"apv\":\"Qm9iIGFuZCBDaGFybGll\",\"epk\":{\ + \"kty\":\"OKP\",\"crv\":\"X25519\",\ + \"x\":\"k9of_cpAajy0poW5gaixXGs9nHkwg1AFqUAFa39dyBc\"}}"; + let aad = base64::encode_config(protected, base64::URL_SAFE_NO_PAD); + let input = b"Three is a magic number."; + let key = AesKey::::from_secret_bytes(key_data).unwrap(); + let mut buffer = SecretBytes::from_slice(input); + key.encrypt_in_place(&mut buffer, &nonce[..], aad.as_bytes()) + .unwrap(); + let ct_len = buffer.len() - key.aead_params().tag_length; + let ctext = base64::encode_config(&buffer.as_ref()[..ct_len], base64::URL_SAFE_NO_PAD); + let tag = base64::encode_config(&buffer.as_ref()[ct_len..], base64::URL_SAFE_NO_PAD); + assert_eq!(ctext, "Az2IWsISEMDJvyc5XRL-3-d-RgNBOGolCsxFFoUXFYw"); + assert_eq!(tag, "HLb4fTlm8spGmij3RyOs2gJ4DpHM4hhVRwdF_hGb3WQ"); + } +} diff --git a/askar-crypto/src/alg/any.rs b/askar-crypto/src/alg/any.rs new file mode 100644 index 00000000..cb737743 --- /dev/null +++ b/askar-crypto/src/alg/any.rs @@ -0,0 +1,864 @@ +use alloc::{boxed::Box, sync::Arc}; +#[cfg(feature = "ed25519")] +use core::convert::TryFrom; +use core::{ + any::{Any, TypeId}, + fmt::Debug, +}; + +#[cfg(feature = "aes")] +use super::{ + aes::{A128CbcHs256, A128Gcm, A256CbcHs512, A256Gcm, AesKey}, + AesTypes, +}; + +#[cfg(feature = "bls")] +use super::{ + bls::{BlsKeyPair, BlsPublicKeyType, G1, G1G2, G2}, + BlsCurves, +}; + +#[cfg(feature = "chacha")] +use super::{ + chacha20::{Chacha20Key, C20P, XC20P}, + Chacha20Types, +}; + +#[cfg(feature = "ed25519")] +use super::ed25519::{self, Ed25519KeyPair}; +#[cfg(feature = "ed25519")] +use super::x25519::{self, X25519KeyPair}; + +#[cfg(feature = "k256")] +use super::k256::{self, K256KeyPair}; + +#[cfg(feature = "p256")] +use super::p256::{self, P256KeyPair}; + +use super::{HasKeyAlg, KeyAlg}; 
+use crate::{ + buffer::{ResizeBuffer, WriteBuffer}, + encrypt::{KeyAeadInPlace, KeyAeadParams}, + error::Error, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + kdf::{KeyDerivation, KeyExchange}, + repr::{KeyGen, KeyPublicBytes, KeySecretBytes, Seed, ToPublicBytes, ToSecretBytes}, + sign::{KeySigVerify, KeySign, SignatureType}, +}; + +#[cfg(any(feature = "k256", feature = "p256"))] +use super::EcCurves; + +#[cfg(any(feature = "aes", feature = "chacha"))] +use crate::kdf::{FromKeyDerivation, FromKeyExchange}; + +#[derive(Debug)] +pub struct KeyT(T); + +/// The type-erased representation for a concrete key instance +pub type AnyKey = KeyT; + +impl AnyKey { + pub fn algorithm(&self) -> KeyAlg { + self.0.algorithm() + } + + fn assume(&self) -> &K { + self.downcast_ref().expect("Error assuming key type") + } + + #[inline] + pub fn downcast_ref(&self) -> Option<&K> { + self.0.as_any().downcast_ref() + } + + #[inline] + fn key_type_id(&self) -> TypeId { + self.0.as_any().type_id() + } +} + +// key instances are immutable +#[cfg(feature = "std")] +impl std::panic::UnwindSafe for AnyKey {} +#[cfg(feature = "std")] +impl std::panic::RefUnwindSafe for AnyKey {} + +/// Create `AnyKey` instances from various sources +pub trait AnyKeyCreate: Sized { + /// Generate a new random key for the given key algorithm. + fn generate(alg: KeyAlg) -> Result; + + /// Generate a new deterministic key for the given key algorithm. + fn from_seed(alg: KeyAlg, seed: Seed<'_>) -> Result; + + /// Load a public key from its byte representation + fn from_public_bytes(alg: KeyAlg, public: &[u8]) -> Result; + + /// Load a secret key or keypair from its byte representation + fn from_secret_bytes(alg: KeyAlg, secret: &[u8]) -> Result; + + /// Convert from a concrete key instance + fn from_key(key: K) -> Self; + + /// Create a new key instance from a key exchange + fn from_key_exchange(alg: KeyAlg, secret: &Sk, public: &Pk) -> Result + where + Sk: KeyExchange + ?Sized, + Pk: ?Sized; + + /// Create a new key instance from a key derivation + fn from_key_derivation(alg: KeyAlg, derive: impl KeyDerivation) -> Result; + + /// Derive the corresponding key for the provided key algorithm + fn convert_key(&self, alg: KeyAlg) -> Result; +} + +impl AnyKeyCreate for Box { + fn generate(alg: KeyAlg) -> Result { + generate_any(alg) + } + + fn from_seed(alg: KeyAlg, seed: Seed<'_>) -> Result { + from_seed_any(alg, seed) + } + + fn from_public_bytes(alg: KeyAlg, public: &[u8]) -> Result { + from_public_bytes_any(alg, public) + } + + fn from_secret_bytes(alg: KeyAlg, secret: &[u8]) -> Result { + from_secret_bytes_any(alg, secret) + } + + #[inline(always)] + fn from_key(key: K) -> Self { + Box::new(KeyT(key)) + } + + fn from_key_exchange(alg: KeyAlg, secret: &Sk, public: &Pk) -> Result + where + Sk: KeyExchange + ?Sized, + Pk: ?Sized, + { + from_key_exchange_any(alg, secret, public) + } + + fn from_key_derivation(alg: KeyAlg, derive: impl KeyDerivation) -> Result { + from_key_derivation_any(alg, derive) + } + + fn convert_key(&self, alg: KeyAlg) -> Result { + convert_key_any(self, alg) + } +} + +impl AnyKeyCreate for Arc { + fn generate(alg: KeyAlg) -> Result { + generate_any(alg) + } + + fn from_seed(alg: KeyAlg, seed: Seed<'_>) -> Result { + from_seed_any(alg, seed) + } + + fn from_public_bytes(alg: KeyAlg, public: &[u8]) -> Result { + from_public_bytes_any(alg, public) + } + + fn from_secret_bytes(alg: KeyAlg, secret: &[u8]) -> Result { + from_secret_bytes_any(alg, secret) + } + + #[inline(always)] + fn from_key(key: K) -> Self { + 
Arc::new(KeyT(key)) + } + + fn from_key_exchange(alg: KeyAlg, secret: &Sk, public: &Pk) -> Result + where + Sk: KeyExchange + ?Sized, + Pk: ?Sized, + { + from_key_exchange_any(alg, secret, public) + } + + fn from_key_derivation(alg: KeyAlg, derive: impl KeyDerivation) -> Result { + from_key_derivation_any(alg, derive) + } + + fn convert_key(&self, alg: KeyAlg) -> Result { + convert_key_any(self, alg) + } +} + +#[inline] +fn generate_any(alg: KeyAlg) -> Result { + match alg { + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128Gcm) => AesKey::::generate().map(R::alloc_key), + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256Gcm) => AesKey::::generate().map(R::alloc_key), + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128CbcHs256) => AesKey::::generate().map(R::alloc_key), + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256CbcHs512) => AesKey::::generate().map(R::alloc_key), + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1) => BlsKeyPair::::generate().map(R::alloc_key), + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G2) => BlsKeyPair::::generate().map(R::alloc_key), + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1G2) => BlsKeyPair::::generate().map(R::alloc_key), + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::C20P) => Chacha20Key::::generate().map(R::alloc_key), + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::XC20P) => { + Chacha20Key::::generate().map(R::alloc_key) + } + #[cfg(feature = "ed25519")] + KeyAlg::Ed25519 => Ed25519KeyPair::generate().map(R::alloc_key), + #[cfg(feature = "ed25519")] + KeyAlg::X25519 => X25519KeyPair::generate().map(R::alloc_key), + #[cfg(feature = "k256")] + KeyAlg::EcCurve(EcCurves::Secp256k1) => K256KeyPair::generate().map(R::alloc_key), + #[cfg(feature = "p256")] + KeyAlg::EcCurve(EcCurves::Secp256r1) => P256KeyPair::generate().map(R::alloc_key), + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for key generation" + )) + } + } +} + +#[inline] +fn from_seed_any(alg: KeyAlg, seed: Seed<'_>) -> Result { + match alg { + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128Gcm) => AesKey::::from_seed(seed).map(R::alloc_key), + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256Gcm) => AesKey::::from_seed(seed).map(R::alloc_key), + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128CbcHs256) => { + AesKey::::from_seed(seed).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256CbcHs512) => { + AesKey::::from_seed(seed).map(R::alloc_key) + } + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1) => BlsKeyPair::::from_seed(seed).map(R::alloc_key), + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G2) => BlsKeyPair::::from_seed(seed).map(R::alloc_key), + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1G2) => BlsKeyPair::::from_seed(seed).map(R::alloc_key), + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::C20P) => { + Chacha20Key::::from_seed(seed).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::XC20P) => { + Chacha20Key::::from_seed(seed).map(R::alloc_key) + } + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for public key import" + )) + } + } +} + +#[inline] +fn from_public_bytes_any(alg: KeyAlg, public: &[u8]) -> Result { + match alg { + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1) => { + BlsKeyPair::::from_public_bytes(public).map(R::alloc_key) + } + #[cfg(feature = "bls")] 
+ KeyAlg::Bls12_381(BlsCurves::G2) => { + BlsKeyPair::::from_public_bytes(public).map(R::alloc_key) + } + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1G2) => { + BlsKeyPair::::from_public_bytes(public).map(R::alloc_key) + } + #[cfg(feature = "ed25519")] + KeyAlg::Ed25519 => Ed25519KeyPair::from_public_bytes(public).map(R::alloc_key), + #[cfg(feature = "ed25519")] + KeyAlg::X25519 => X25519KeyPair::from_public_bytes(public).map(R::alloc_key), + #[cfg(feature = "k256")] + KeyAlg::EcCurve(EcCurves::Secp256k1) => { + K256KeyPair::from_public_bytes(public).map(R::alloc_key) + } + #[cfg(feature = "p256")] + KeyAlg::EcCurve(EcCurves::Secp256r1) => { + P256KeyPair::from_public_bytes(public).map(R::alloc_key) + } + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for public key import" + )) + } + } +} + +#[inline] +fn from_secret_bytes_any(alg: KeyAlg, secret: &[u8]) -> Result { + match alg { + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128Gcm) => { + AesKey::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256Gcm) => { + AesKey::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128CbcHs256) => { + AesKey::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256CbcHs512) => { + AesKey::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1) => { + BlsKeyPair::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G2) => { + BlsKeyPair::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "bls")] + KeyAlg::Bls12_381(BlsCurves::G1G2) => { + BlsKeyPair::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::C20P) => { + Chacha20Key::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::XC20P) => { + Chacha20Key::::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "ed25519")] + KeyAlg::Ed25519 => Ed25519KeyPair::from_secret_bytes(secret).map(R::alloc_key), + #[cfg(feature = "ed25519")] + KeyAlg::X25519 => X25519KeyPair::from_secret_bytes(secret).map(R::alloc_key), + #[cfg(feature = "k256")] + KeyAlg::EcCurve(EcCurves::Secp256k1) => { + K256KeyPair::from_secret_bytes(secret).map(R::alloc_key) + } + #[cfg(feature = "p256")] + KeyAlg::EcCurve(EcCurves::Secp256r1) => { + P256KeyPair::from_secret_bytes(secret).map(R::alloc_key) + } + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for secret key import" + )) + } + } +} + +#[cfg(any(feature = "aes", feature = "chacha"))] +#[inline] +fn from_key_exchange_any(alg: KeyAlg, secret: &Sk, public: &Pk) -> Result +where + R: AllocKey, + Sk: KeyExchange + ?Sized, + Pk: ?Sized, +{ + match alg { + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128Gcm) => { + AesKey::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256Gcm) => { + AesKey::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128CbcHs256) => { + AesKey::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256CbcHs512) => { + AesKey::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[cfg(feature = 
"chacha")] + KeyAlg::Chacha20(Chacha20Types::C20P) => { + Chacha20Key::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::XC20P) => { + Chacha20Key::::from_key_exchange(secret, public).map(R::alloc_key) + } + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for key exchange" + )); + } + } +} + +#[cfg(not(any(feature = "aes", feature = "chacha")))] +#[inline] +fn from_key_exchange_any( + _alg: KeyAlg, + _secret: &Sk, + _public: &Pk, +) -> Result { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for key exchange" + )); +} + +#[cfg(any(feature = "aes", feature = "chacha"))] +#[inline] +fn from_key_derivation_any( + alg: KeyAlg, + derive: impl KeyDerivation, +) -> Result { + match alg { + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128Gcm) => { + AesKey::::from_key_derivation(derive).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256Gcm) => { + AesKey::::from_key_derivation(derive).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A128CbcHs256) => { + AesKey::::from_key_derivation(derive).map(R::alloc_key) + } + #[cfg(feature = "aes")] + KeyAlg::Aes(AesTypes::A256CbcHs512) => { + AesKey::::from_key_derivation(derive).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::C20P) => { + Chacha20Key::::from_key_derivation(derive).map(R::alloc_key) + } + #[cfg(feature = "chacha")] + KeyAlg::Chacha20(Chacha20Types::XC20P) => { + Chacha20Key::::from_key_derivation(derive).map(R::alloc_key) + } + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for key derivation" + )); + } + } +} + +#[cfg(not(any(feature = "aes", feature = "chacha")))] +fn from_key_derivation_any( + _alg: KeyAlg, + _derive: impl KeyDerivation, +) -> Result { + return Err(err_msg!( + Unsupported, + "Unsupported algorithm for key derivation" + )); +} + +#[inline] +fn convert_key_any(key: &AnyKey, alg: KeyAlg) -> Result { + match (key.algorithm(), alg) { + #[cfg(feature = "bls")] + (KeyAlg::Bls12_381(BlsCurves::G1G2), KeyAlg::Bls12_381(BlsCurves::G1)) => Ok(R::alloc_key( + BlsKeyPair::::from(key.assume::>()), + )), + #[cfg(feature = "bls")] + (KeyAlg::Bls12_381(BlsCurves::G1G2), KeyAlg::Bls12_381(BlsCurves::G2)) => Ok(R::alloc_key( + BlsKeyPair::::from(key.assume::>()), + )), + #[cfg(feature = "ed25519")] + (KeyAlg::Ed25519, KeyAlg::X25519) => Ok(>::try_from( + key.assume::(), + ) + .map(R::alloc_key)?), + #[allow(unreachable_patterns)] + _ => { + return Err(err_msg!( + Unsupported, + "Unsupported key conversion operation" + )) + } + } +} + +impl FromJwk for Box { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + from_jwk_any(jwk) + } +} + +impl FromJwk for Arc { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + from_jwk_any(jwk) + } +} + +#[inline] +fn from_jwk_any(jwk: JwkParts<'_>) -> Result { + match (jwk.kty, jwk.crv.as_ref()) { + #[cfg(feature = "ed25519")] + ("OKP", c) if c == ed25519::JWK_CURVE => { + Ed25519KeyPair::from_jwk_parts(jwk).map(R::alloc_key) + } + #[cfg(feature = "ed25519")] + ("OKP", c) if c == x25519::JWK_CURVE => { + X25519KeyPair::from_jwk_parts(jwk).map(R::alloc_key) + } + #[cfg(feature = "bls")] + ("EC", c) if c == G1::JWK_CURVE => BlsKeyPair::::from_jwk_parts(jwk).map(R::alloc_key), + #[cfg(feature = "bls")] + ("EC", c) if c == G2::JWK_CURVE => BlsKeyPair::::from_jwk_parts(jwk).map(R::alloc_key), + #[cfg(feature = "bls")] + ("EC", 
c) if c == G1G2::JWK_CURVE => { + BlsKeyPair::::from_jwk_parts(jwk).map(R::alloc_key) + } + #[cfg(feature = "k256")] + ("EC", c) if c == k256::JWK_CURVE => K256KeyPair::from_jwk_parts(jwk).map(R::alloc_key), + #[cfg(feature = "p256")] + ("EC", c) if c == p256::JWK_CURVE => P256KeyPair::from_jwk_parts(jwk).map(R::alloc_key), + // FIXME implement symmetric keys? + _ => Err(err_msg!(Unsupported, "Unsupported JWK for key import")), + } +} + +macro_rules! match_key_alg { + ($slf:expr, $ty:ty, $($kty:ident),+ $(,$errmsg:literal)?) => {{ + fn matcher(key: &AnyKey) -> Result<$ty, Error> { + #[allow(unused_variables)] + let alg = key.algorithm(); + match_key_alg!(@ $($kty)+ ; key, alg); + return Err(err_msg!(Unsupported $(,$errmsg)?)) + } + matcher($slf) + }}; + (@ ; $key:ident, $alg:ident) => {()}; + (@ Aes $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "aes")] + if $alg == KeyAlg::Aes(AesTypes::A128Gcm) { + return Ok($key.assume::>()); + } + #[cfg(feature = "aes")] + if $alg == KeyAlg::Aes(AesTypes::A256Gcm) { + return Ok($key.assume::>()); + } + #[cfg(feature = "aes")] + if $alg == KeyAlg::Aes(AesTypes::A128CbcHs256) { + return Ok($key.assume::>()); + } + #[cfg(feature = "aes")] + if $alg == KeyAlg::Aes(AesTypes::A256CbcHs512) { + return Ok($key.assume::>()); + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ Bls $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "bls")] + if $alg == KeyAlg::Bls12_381(BlsCurves::G1) { + return Ok($key.assume::>()); + } + #[cfg(feature = "bls")] + if $alg == KeyAlg::Bls12_381(BlsCurves::G2) { + return Ok($key.assume::>()); + } + #[cfg(feature = "bls")] + if $alg == KeyAlg::Bls12_381(BlsCurves::G1G2) { + return Ok($key.assume::>()); + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ Chacha $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "chacha")] + if $alg == KeyAlg::Chacha20(Chacha20Types::C20P) { + return Ok($key.assume::>()); + } + #[cfg(feature = "chacha")] + if $alg == KeyAlg::Chacha20(Chacha20Types::XC20P) { + return Ok($key.assume::>()); + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ Ed25519 $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "ed25519")] + if $alg == KeyAlg::Ed25519 { + return Ok($key.assume::()) + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ X25519 $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "ed25519")] + if $alg == KeyAlg::X25519 { + return Ok($key.assume::()) + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ K256 $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "k256")] + if $alg == KeyAlg::EcCurve(EcCurves::Secp256k1) { + return Ok($key.assume::()) + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; + (@ P256 $($rest:ident)*; $key:ident, $alg:ident) => {{ + #[cfg(feature = "p256")] + if $alg == KeyAlg::EcCurve(EcCurves::Secp256r1) { + return Ok($key.assume::()) + } + match_key_alg!(@ $($rest)*; $key, $alg) + }}; +} + +impl ToPublicBytes for AnyKey { + fn write_public_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + let key = match_key_alg! { + self, + &dyn ToPublicBytes, + Bls, + Ed25519, + K256, + P256, + X25519, + "Public key export is not supported for this key type" + }?; + key.write_public_bytes(out) + } +} + +impl ToSecretBytes for AnyKey { + fn write_secret_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + let key = match_key_alg! 
{ + self, + &dyn ToSecretBytes, + Aes, + Bls, + Chacha, + Ed25519, + K256, + P256, + X25519, + "Secret key export is not supported for this key type" + }?; + key.write_secret_bytes(out) + } +} + +impl KeyExchange for AnyKey { + fn write_key_exchange(&self, other: &AnyKey, out: &mut dyn WriteBuffer) -> Result<(), Error> { + if self.key_type_id() != other.key_type_id() { + return Err(err_msg!(Unsupported, "Unsupported key exchange")); + } + match self.algorithm() { + #[cfg(feature = "ed25519")] + KeyAlg::X25519 => Ok(self + .assume::() + .write_key_exchange(other.assume::(), out)?), + #[cfg(feature = "k256")] + KeyAlg::EcCurve(EcCurves::Secp256k1) => Ok(self + .assume::() + .write_key_exchange(other.assume::(), out)?), + #[cfg(feature = "p256")] + KeyAlg::EcCurve(EcCurves::Secp256r1) => Ok(self + .assume::() + .write_key_exchange(other.assume::(), out)?), + #[allow(unreachable_patterns)] + _ => { + let _ = out; + return Err(err_msg!(Unsupported, "Unsupported key exchange")); + } + } + } +} + +impl AnyKey { + fn key_as_aead(&self) -> Result<&dyn KeyAeadInPlace, Error> { + match_key_alg! { + self, + &dyn KeyAeadInPlace, + Aes, + Chacha, + "AEAD is not supported for this key type" + } + } +} + +impl KeyAeadInPlace for AnyKey { + fn encrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + self.key_as_aead()?.encrypt_in_place(buffer, nonce, aad) + } + + fn decrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + self.key_as_aead()?.decrypt_in_place(buffer, nonce, aad) + } + + fn aead_params(&self) -> KeyAeadParams { + if let Ok(key) = self.key_as_aead() { + key.aead_params() + } else { + KeyAeadParams::default() + } + } +} + +impl ToJwk for AnyKey { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + let key = match_key_alg! { + self, + &dyn ToJwk, + Aes, + Bls, + Chacha, + Ed25519, + K256, + P256, + X25519, + "JWK export is not supported for this key type" + }?; + key.encode_jwk(enc) + } +} + +impl KeySign for AnyKey { + fn write_signature( + &self, + message: &[u8], + sig_type: Option, + out: &mut dyn WriteBuffer, + ) -> Result<(), Error> { + let key = match_key_alg! { + self, + &dyn KeySign, + Ed25519, + K256, + P256, + "Signing is not supported for this key type" + }?; + key.write_signature(message, sig_type, out) + } +} + +impl KeySigVerify for AnyKey { + fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option, + ) -> Result { + let key = match_key_alg! 
{ + self, + &dyn KeySigVerify, + Ed25519, + K256, + P256, + "Signature verification is not supported for this key type" + }?; + key.verify_signature(message, signature, sig_type) + } +} + +// may want to implement in-place initialization to avoid copies +trait AllocKey { + fn alloc_key(key: K) -> Self; +} + +impl AllocKey for Arc { + #[inline(always)] + fn alloc_key(key: K) -> Self { + Self::from_key(key) + } +} + +impl AllocKey for Box { + #[inline(always)] + fn alloc_key(key: K) -> Self { + Self::from_key(key) + } +} + +pub trait AnyKeyAlg: HasKeyAlg + 'static { + fn as_any(&self) -> &dyn Any; +} + +// implement for all concrete key types +impl AnyKeyAlg for K { + fn as_any(&self) -> &dyn Any { + self + } +} + +#[cfg(test)] +mod tests { + #[allow(unused_imports)] + use super::*; + + // FIXME - add a custom key type for testing, to allow feature independence + + #[cfg(feature = "ed25519")] + #[test] + fn ed25519_as_any() { + let key = Box::::generate(KeyAlg::Ed25519).unwrap(); + assert_eq!(key.algorithm(), KeyAlg::Ed25519); + assert_eq!(key.key_type_id(), TypeId::of::()); + let _ = key.to_jwk_public(None).unwrap(); + } + + #[cfg(feature = "aes")] + #[test] + fn key_exchange_any() { + let alice = Box::::generate(KeyAlg::X25519).unwrap(); + let bob = Box::::generate(KeyAlg::X25519).unwrap(); + let exch_a = alice.key_exchange_bytes(&bob).unwrap(); + let exch_b = bob.key_exchange_bytes(&alice).unwrap(); + assert_eq!(exch_a, exch_b); + + let _aes_key = + Box::::from_key_exchange(KeyAlg::Aes(AesTypes::A256Gcm), &*alice, &*bob) + .unwrap(); + } + + #[cfg(feature = "chacha")] + #[test] + fn key_encrypt_any() { + use crate::buffer::SecretBytes; + let message = b"test message"; + let mut data = SecretBytes::from(&message[..]); + + let key = Box::::generate(KeyAlg::Chacha20(Chacha20Types::XC20P)).unwrap(); + let nonce = [0u8; 24]; // size varies by algorithm + key.encrypt_in_place(&mut data, &nonce, &[]).unwrap(); + assert_ne!(data, &message[..]); + key.decrypt_in_place(&mut data, &nonce, &[]).unwrap(); + assert_eq!(data, &message[..]); + } +} diff --git a/askar-crypto/src/alg/bls.rs b/askar-crypto/src/alg/bls.rs new file mode 100644 index 00000000..147c4284 --- /dev/null +++ b/askar-crypto/src/alg/bls.rs @@ -0,0 +1,517 @@ +//! 
BLS12-381 key support + +use core::{ + convert::TryInto, + fmt::{self, Debug, Formatter}, + ops::Add, +}; + +use blake2::Digest; +use bls12_381::{G1Affine, G1Projective, G2Affine, G2Projective, Scalar}; +use group::GroupEncoding; +use sha2::Sha256; +use zeroize::Zeroizing; + +use crate::generic_array::{ + typenum::{self, Unsigned, U144, U32, U48, U96}, + ArrayLength, +}; + +use super::{BlsCurves, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::ArrayKey, + error::Error, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + random::fill_random, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairMeta, Seed, SeedMethod}, +}; + +/// The 'kty' value of a BLS key JWK +pub const JWK_KEY_TYPE: &'static str = "EC"; + +/// A BLS12-381 key pair +#[derive(Clone)] +pub struct BlsKeyPair { + secret: Option, + public: Pk::Buffer, +} + +impl BlsKeyPair { + #[inline] + fn from_secret_key(sk: BlsSecretKey) -> Self { + let public = Pk::from_secret_scalar(&sk.0); + Self { + secret: Some(sk), + public, + } + } +} + +impl Debug for BlsKeyPair { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("BlsKeyPair") + .field("crv", &Pk::JWK_CURVE) + .field("secret", &self.secret) + .field("public", &self.public) + .finish() + } +} + +impl PartialEq for BlsKeyPair { + fn eq(&self, other: &Self) -> bool { + other.secret == self.secret && other.public == self.public + } +} + +impl Eq for BlsKeyPair {} + +impl HasKeyAlg for BlsKeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::Bls12_381(Pk::ALG_TYPE) + } +} + +impl KeyMeta for BlsKeyPair { + type KeySize = U32; +} + +impl KeypairMeta for BlsKeyPair +where + Pk: BlsPublicKeyType, + U32: Add, + >::Output: ArrayLength, +{ + type PublicKeySize = Pk::BufferSize; + type KeypairSize = typenum::Sum; +} + +impl KeyGen for BlsKeyPair { + fn generate() -> Result { + let secret = BlsSecretKey::generate()?; + Ok(Self::from_secret_key(secret)) + } + + fn from_seed(seed: Seed<'_>) -> Result + where + Self: Sized, + { + match seed { + Seed::Bytes(ikm, SeedMethod::Preferred) + | Seed::Bytes(ikm, SeedMethod::BlsKeyGenDraft4) => { + Ok(Self::from_secret_key(BlsSecretKey::from_seed(ikm)?)) + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported seed method for BLS key")), + } + } +} + +impl KeySecretBytes for BlsKeyPair { + fn from_secret_bytes(key: &[u8]) -> Result + where + Self: Sized, + { + let sk = BlsSecretKey::from_bytes(key)?; + Ok(Self::from_secret_key(sk)) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + let mut skb = Zeroizing::new(sk.0.to_bytes()); + skb.reverse(); // into big-endian + f(Some(&*skb)) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for BlsKeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + Ok(Self { + secret: None, + public: Pk::from_public_bytes(key)?, + }) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + Pk::with_bytes(&self.public, None, f) + } +} + +impl ToJwk for BlsKeyPair { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + enc.add_str("crv", Pk::get_jwk_curve(enc.alg()))?; + enc.add_str("kty", JWK_KEY_TYPE)?; + Pk::with_bytes(&self.public, enc.alg(), |buf| enc.add_as_base64("x", buf))?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for BlsKeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + let pk = ArrayKey::::temp(|arr| { + if 
jwk.x.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Pk::from_public_bytes(arr) + } + })?; + let sk = if jwk.d.is_some() { + Some(ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + BlsSecretKey::from_bytes(arr) + } + })?) + } else { + None + }; + Ok(Self { + secret: sk, + public: pk, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +#[repr(transparent)] +struct BlsSecretKey(Scalar); + +impl BlsSecretKey { + pub fn generate() -> Result { + let mut secret = Zeroizing::new([0u8; 64]); + fill_random(&mut secret[..]); + Ok(Self(Scalar::from_bytes_wide(&secret))) + } + + // bls-signatures draft 4 version (incompatible with earlier) + pub fn from_seed(ikm: &[u8]) -> Result { + const SALT: &[u8] = b"BLS-SIG-KEYGEN-SALT-"; + if ikm.len() < 32 { + return Err(err_msg!(Usage, "Insufficient length for seed")); + } + + let mut salt = Sha256::digest(SALT); + Ok(Self(loop { + let mut okm = Zeroizing::new([0u8; 64]); + let mut extract = hkdf::HkdfExtract::::new(Some(salt.as_ref())); + extract.input_ikm(ikm); + extract.input_ikm(&[0u8]); + let (_, hkdf) = extract.finalize(); + hkdf.expand(&(48 as u16).to_be_bytes(), &mut okm[16..]) + .expect("HDKF extract failure"); + okm.reverse(); // into little endian + let scalar = Scalar::from_bytes_wide(&okm); + if scalar != Scalar::zero() { + break scalar; + } + salt = Sha256::digest(salt.as_ref()); + })) + } + + pub fn from_bytes(sk: &[u8]) -> Result { + if sk.len() != 32 { + return Err(err_msg!(InvalidKeyData)); + } + let mut skb = Zeroizing::new([0u8; 32]); + skb.copy_from_slice(sk); + skb.reverse(); // into little endian + let result: Option = Scalar::from_bytes(&skb).into(); + Ok(Self(result.ok_or_else(|| err_msg!(InvalidKeyData))?)) + } +} + +/// Trait implemented by supported BLS public key types +pub trait BlsPublicKeyType: 'static { + /// The concrete key representation + type Buffer: Clone + Debug + PartialEq + Sized; + + /// The size of the serialized public key + type BufferSize: ArrayLength; + + /// The associated algorithm type + const ALG_TYPE: BlsCurves; + /// The associated JWK curve name + const JWK_CURVE: &'static str; + + /// Get the JWK curve for a specific key algorithm + fn get_jwk_curve(_alg: Option) -> &'static str { + Self::JWK_CURVE + } + + /// Initialize from the secret scalar + fn from_secret_scalar(secret: &Scalar) -> Self::Buffer; + + /// Initialize from the compressed bytes + fn from_public_bytes(key: &[u8]) -> Result; + + /// Access the bytes of the public key + fn with_bytes(buf: &Self::Buffer, alg: Option, f: impl FnOnce(&[u8]) -> O) -> O; +} + +/// G1 curve +#[derive(Debug)] +pub struct G1; + +impl BlsPublicKeyType for G1 { + type Buffer = G1Affine; + type BufferSize = U48; + + const ALG_TYPE: BlsCurves = BlsCurves::G1; + const JWK_CURVE: &'static str = "BLS12381_G1"; + + #[inline] + fn from_secret_scalar(secret: &Scalar) -> Self::Buffer { + G1Affine::from(G1Projective::generator() * secret) + } + + fn from_public_bytes(key: &[u8]) -> Result { + let buf: Option = G1Affine::from_compressed( + TryInto::<&[u8; 48]>::try_into(key).map_err(|_| err_msg!(InvalidKeyData))?, + ) + .into(); + buf.ok_or_else(|| err_msg!(InvalidKeyData)) + } + + fn with_bytes(buf: &Self::Buffer, _alg: Option, f: impl FnOnce(&[u8]) -> O) -> O { + f(buf.to_bytes().as_ref()) + } +} + +/// G2 curve +#[derive(Debug)] +pub struct G2; + +impl BlsPublicKeyType for G2 { + type Buffer = G2Affine; + type BufferSize = U96; + + const ALG_TYPE: BlsCurves = BlsCurves::G2; + 
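+ // As with G1, the public key is the shared secret scalar multiplied by the group
+ // generator (see from_secret_scalar below); only the compressed point size
+ // (96 bytes here vs 48 for G1) and the JWK curve name differ.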
const JWK_CURVE: &'static str = "BLS12381_G2"; + + #[inline] + fn from_secret_scalar(secret: &Scalar) -> Self::Buffer { + G2Affine::from(G2Projective::generator() * secret) + } + + fn from_public_bytes(key: &[u8]) -> Result { + let buf: Option = G2Affine::from_compressed( + TryInto::<&[u8; 96]>::try_into(key).map_err(|_| err_msg!(InvalidKeyData))?, + ) + .into(); + buf.ok_or_else(|| err_msg!(InvalidKeyData)) + } + + fn with_bytes(buf: &Self::Buffer, _alg: Option, f: impl FnOnce(&[u8]) -> O) -> O { + f(buf.to_bytes().as_ref()) + } +} + +/// G1 + G2 curves +#[derive(Debug)] +pub struct G1G2; + +impl BlsPublicKeyType for G1G2 { + type Buffer = (G1Affine, G2Affine); + type BufferSize = U144; + + const ALG_TYPE: BlsCurves = BlsCurves::G1G2; + const JWK_CURVE: &'static str = "BLS12381_G1G2"; + + fn get_jwk_curve(alg: Option) -> &'static str { + if alg == Some(KeyAlg::Bls12_381(BlsCurves::G1)) { + G1::JWK_CURVE + } else if alg == Some(KeyAlg::Bls12_381(BlsCurves::G2)) { + G2::JWK_CURVE + } else { + Self::JWK_CURVE + } + } + + #[inline] + fn from_secret_scalar(secret: &Scalar) -> Self::Buffer { + ( + G1Affine::from(G1Projective::generator() * secret), + G2Affine::from(G2Projective::generator() * secret), + ) + } + + fn from_public_bytes(key: &[u8]) -> Result { + if key.len() != Self::BufferSize::USIZE { + return Err(err_msg!(InvalidKeyData)); + } + let g1: Option = + G1Affine::from_compressed(TryInto::<&[u8; 48]>::try_into(&key[..48]).unwrap()).into(); + let g2: Option = + G2Affine::from_compressed(TryInto::<&[u8; 96]>::try_into(&key[48..]).unwrap()).into(); + if let (Some(g1), Some(g2)) = (g1, g2) { + Ok((g1, g2)) + } else { + Err(err_msg!(InvalidKeyData)) + } + } + + fn with_bytes(buf: &Self::Buffer, alg: Option, f: impl FnOnce(&[u8]) -> O) -> O { + if alg == Some(KeyAlg::Bls12_381(BlsCurves::G1)) { + ArrayKey::::temp(|arr| { + arr.copy_from_slice(buf.0.to_bytes().as_ref()); + f(&arr[..]) + }) + } else if alg == Some(KeyAlg::Bls12_381(BlsCurves::G2)) { + ArrayKey::::temp(|arr| { + arr.copy_from_slice(buf.1.to_bytes().as_ref()); + f(&arr[..]) + }) + } else { + ArrayKey::::temp(|arr| { + arr[0..48].copy_from_slice(buf.0.to_bytes().as_ref()); + arr[48..].copy_from_slice(buf.1.to_bytes().as_ref()); + f(&arr[..]) + }) + } + } +} + +impl From<&BlsKeyPair> for BlsKeyPair { + fn from(kp: &BlsKeyPair) -> Self { + BlsKeyPair { + secret: kp.secret.clone(), + public: kp.public.0.clone(), + } + } +} + +impl From<&BlsKeyPair> for BlsKeyPair { + fn from(kp: &BlsKeyPair) -> Self { + BlsKeyPair { + secret: kp.secret.clone(), + public: kp.public.1.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::repr::{ToPublicBytes, ToSecretBytes}; + use std::string::ToString; + + // test against EIP-2333 (as updated for signatures draft 4) + #[test] + fn key_gen_expected() { + let seed = &hex!( + "c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e5349553 + 1f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04" + ); + let sk = BlsSecretKey::from_seed(&seed[..]).unwrap(); + let kp = BlsKeyPair::::from_secret_key(sk); + let sk = kp.to_secret_bytes().unwrap(); + assert_eq!( + sk.as_hex().to_string(), + "0d7359d57963ab8fbbde1852dcf553fedbc31f464d80ee7d40ae683122b45070" + ); + } + + #[test] + fn g1_key_expected() { + let sk = hex!("0d7359d57963ab8fbbde1852dcf553fedbc31f464d80ee7d40ae683122b45070"); + let kp = BlsKeyPair::::from_secret_bytes(&sk[..]).unwrap(); + let pk = kp.to_public_bytes().unwrap(); + assert_eq!( + pk.as_hex().to_string(), + 
"a2c975348667926acf12f3eecb005044e08a7a9b7d95f30bd281b55445107367a2e5d0558be7943c8bd13f9a1a7036fb" + ); + assert_eq!( + BlsKeyPair::::from_public_bytes(pk.as_ref()) + .unwrap() + .to_public_bytes() + .unwrap(), + pk + ); + } + + #[test] + fn g2_key_expected() { + let sk = hex!("0d7359d57963ab8fbbde1852dcf553fedbc31f464d80ee7d40ae683122b45070"); + let kp = BlsKeyPair::::from_secret_bytes(&sk[..]).unwrap(); + let pk = kp.to_public_bytes().unwrap(); + assert_eq!( + pk.as_hex().to_string(), + "a5e43d5ecb7b8c01ceb3b91f7413b628ef02c6859dc42a4354b21f9195531988a648655037faafd1bac2fd2d7d9466180baa3705a45a6c597853db51eaf431616057fd8049c6bee8764292f9a104200a45a63ceae9d3c368643ab9e5ff0f8810" + ); + assert_eq!( + BlsKeyPair::::from_public_bytes(pk.as_ref()) + .unwrap() + .to_public_bytes() + .unwrap(), + pk + ); + } + + #[test] + fn g1g2_key_expected() { + let sk = hex!("0d7359d57963ab8fbbde1852dcf553fedbc31f464d80ee7d40ae683122b45070"); + let kp = BlsKeyPair::::from_secret_bytes(&sk[..]).unwrap(); + let pk = kp.to_public_bytes().unwrap(); + assert_eq!( + pk.as_hex().to_string(), + "a2c975348667926acf12f3eecb005044e08a7a9b7d95f30bd281b55445107367a2e5d0558be7943c8bd13f9a1a7036fb\ + a5e43d5ecb7b8c01ceb3b91f7413b628ef02c6859dc42a4354b21f9195531988a648655037faafd1bac2fd2d7d9466180baa3705a45a6c597853db51eaf431616057fd8049c6bee8764292f9a104200a45a63ceae9d3c368643ab9e5ff0f8810" + ); + assert_eq!( + BlsKeyPair::::from_public_bytes(pk.as_ref()) + .unwrap() + .to_public_bytes() + .unwrap(), + pk + ); + } + + #[test] + fn jwk_expected() { + let test_pvt = &hex!("0d7359d57963ab8fbbde1852dcf553fedbc31f464d80ee7d40ae683122b45070"); + let test_pub_g1 = &hex!("a2c975348667926acf12f3eecb005044e08a7a9b7d95f30bd281b55445107367a2e5d0558be7943c8bd13f9a1a7036fb"); + let kp = BlsKeyPair::::from_secret_bytes(&test_pvt[..]).expect("Error creating key"); + + let jwk = kp.to_jwk_public(None).expect("Error converting key to JWK"); + let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, G1::JWK_CURVE); + assert_eq!( + jwk.x, + base64::encode_config(test_pub_g1, base64::URL_SAFE_NO_PAD).as_str() + ); + assert_eq!(jwk.d, None); + let pk_load = BlsKeyPair::::from_jwk_parts(jwk).unwrap(); + assert_eq!(kp.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = kp.to_jwk_secret().expect("Error converting key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, G1::JWK_CURVE); + assert_eq!( + jwk.x, + base64::encode_config(test_pub_g1, base64::URL_SAFE_NO_PAD).as_str() + ); + assert_eq!( + jwk.d, + base64::encode_config(test_pvt, base64::URL_SAFE_NO_PAD).as_str() + ); + let _sk_load = BlsKeyPair::::from_jwk_parts(jwk).unwrap(); + // assert_eq!( + // kp.to_keypair_bytes().unwrap(), + // sk_load.to_keypair_bytes().unwrap() + // ); + } +} diff --git a/askar-crypto/src/alg/chacha20.rs b/askar-crypto/src/alg/chacha20.rs new file mode 100644 index 00000000..0ae3260f --- /dev/null +++ b/askar-crypto/src/alg/chacha20.rs @@ -0,0 +1,290 @@ +//! 
ChaCha20 and XChaCha20 stream ciphers with AEAD + +use core::fmt::{self, Debug, Formatter}; + +use aead::{Aead, AeadInPlace, NewAead}; +use chacha20poly1305::{ChaCha20Poly1305, XChaCha20Poly1305}; +use serde::{Deserialize, Serialize}; +use zeroize::Zeroize; + +use super::{Chacha20Types, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, ResizeBuffer, Writer}, + encrypt::{KeyAeadInPlace, KeyAeadMeta, KeyAeadParams}, + error::Error, + generic_array::{typenum::Unsigned, GenericArray}, + jwk::{JwkEncoder, ToJwk}, + kdf::{FromKeyDerivation, FromKeyExchange, KeyDerivation, KeyExchange}, + random::fill_random_deterministic, + repr::{KeyGen, KeyMeta, KeySecretBytes, Seed, SeedMethod}, +}; + +/// The 'kty' value of a symmetric key JWK +pub static JWK_KEY_TYPE: &'static str = "oct"; + +/// Trait implemented by supported ChaCha20 algorithms +pub trait Chacha20Type: 'static { + /// The AEAD implementation + type Aead: NewAead + Aead + AeadInPlace; + + /// The associated algorithm type + const ALG_TYPE: Chacha20Types; + /// The associated JWK algorithm name + const JWK_ALG: &'static str; +} + +/// ChaCha20-Poly1305 +#[derive(Debug)] +pub struct C20P; + +impl Chacha20Type for C20P { + type Aead = ChaCha20Poly1305; + + const ALG_TYPE: Chacha20Types = Chacha20Types::C20P; + const JWK_ALG: &'static str = "C20P"; +} + +/// XChaCha20-Poly1305 +#[derive(Debug)] +pub struct XC20P; + +impl Chacha20Type for XC20P { + type Aead = XChaCha20Poly1305; + + const ALG_TYPE: Chacha20Types = Chacha20Types::XC20P; + const JWK_ALG: &'static str = "XC20P"; +} + +type KeyType = ArrayKey<<::Aead as NewAead>::KeySize>; + +type NonceSize = <::Aead as Aead>::NonceSize; + +type TagSize = <::Aead as Aead>::TagSize; + +/// A ChaCha20 symmetric encryption key +#[derive(Serialize, Deserialize, Zeroize)] +#[serde( + transparent, + bound( + deserialize = "KeyType: for<'a> Deserialize<'a>", + serialize = "KeyType: Serialize" + ) +)] +// SECURITY: ArrayKey is zeroized on drop +pub struct Chacha20Key(KeyType); + +impl Chacha20Key { + /// The length of the secret key in bytes + pub const KEY_LENGTH: usize = KeyType::::SIZE; + /// The length of the AEAD encryption nonce + pub const NONCE_LENGTH: usize = NonceSize::::USIZE; + /// The length of the AEAD encryption tag + pub const TAG_LENGTH: usize = TagSize::::USIZE; +} + +impl Clone for Chacha20Key { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Debug for Chacha20Key { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("Chacha20Key") + .field("alg", &T::JWK_ALG) + .field("key", &self.0) + .finish() + } +} + +impl PartialEq for Chacha20Key { + fn eq(&self, other: &Self) -> bool { + other.0 == self.0 + } +} + +impl Eq for Chacha20Key {} + +impl HasKeyAlg for Chacha20Key { + fn algorithm(&self) -> KeyAlg { + KeyAlg::Chacha20(T::ALG_TYPE) + } +} + +impl KeyMeta for Chacha20Key { + type KeySize = ::KeySize; +} + +impl KeyGen for Chacha20Key { + fn generate() -> Result { + Ok(Chacha20Key(KeyType::::random())) + } + + fn from_seed(seed: Seed<'_>) -> Result + where + Self: Sized, + { + match seed { + Seed::Bytes(ikm, SeedMethod::Preferred) | Seed::Bytes(ikm, SeedMethod::RandomDet) => { + Ok(Self(KeyType::::try_new_with(|arr| { + fill_random_deterministic(ikm, arr) + })?)) + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported)), + } + } +} + +impl KeySecretBytes for Chacha20Key { + fn from_secret_bytes(key: &[u8]) -> Result { + if key.len() != KeyType::::SIZE { + return Err(err_msg!(InvalidKeyData)); + } + 
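+ // from_slice copies the caller's bytes into a new fixed-size ArrayKey
+ // (zeroized on drop); the caller remains responsible for wiping its own copy.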
Ok(Self(KeyType::::from_slice(key))) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + f(Some(self.0.as_ref())) + } +} + +impl FromKeyDerivation for Chacha20Key { + fn from_key_derivation(mut derive: D) -> Result + where + Self: Sized, + { + Ok(Self(KeyType::::try_new_with(|arr| { + derive.derive_key_bytes(arr) + })?)) + } +} + +impl KeyAeadMeta for Chacha20Key { + type NonceSize = NonceSize; + type TagSize = TagSize; +} + +impl KeyAeadInPlace for Chacha20Key { + /// Encrypt a secret value in place, appending the verification tag + fn encrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + if nonce.len() != NonceSize::::USIZE { + return Err(err_msg!(InvalidNonce)); + } + let nonce = GenericArray::from_slice(nonce); + let chacha = T::Aead::new(self.0.as_ref()); + let tag = chacha + .encrypt_in_place_detached(nonce, aad, buffer.as_mut()) + .map_err(|_| err_msg!(Encryption, "AEAD encryption error"))?; + buffer.buffer_write(&tag[..])?; + Ok(()) + } + + /// Decrypt an encrypted (verification tag appended) value in place + fn decrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error> { + if nonce.len() != NonceSize::::USIZE { + return Err(err_msg!(InvalidNonce)); + } + let nonce = GenericArray::from_slice(nonce); + let buf_len = buffer.as_ref().len(); + if buf_len < TagSize::::USIZE { + return Err(err_msg!(InvalidData, "Invalid size for encrypted data")); + } + let tag_start = buf_len - TagSize::::USIZE; + let mut tag = GenericArray::default(); + tag.clone_from_slice(&buffer.as_ref()[tag_start..]); + let chacha = T::Aead::new(self.0.as_ref()); + chacha + .decrypt_in_place_detached(nonce, aad, &mut buffer.as_mut()[..tag_start], &tag) + .map_err(|_| err_msg!(Encryption, "AEAD decryption error"))?; + buffer.buffer_resize(tag_start)?; + Ok(()) + } + + fn aead_params(&self) -> KeyAeadParams { + KeyAeadParams { + nonce_length: NonceSize::::USIZE, + tag_length: TagSize::::USIZE, + } + } +} + +impl ToJwk for Chacha20Key { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + if enc.is_public() { + return Err(err_msg!(Unsupported, "Cannot export as a public key")); + } + if !enc.is_thumbprint() { + enc.add_str("alg", T::JWK_ALG)?; + } + enc.add_as_base64("k", self.0.as_ref())?; + enc.add_str("kty", JWK_KEY_TYPE)?; + Ok(()) + } +} + +// for direct key agreement (not used currently) +impl FromKeyExchange for Chacha20Key +where + Lhs: KeyExchange + ?Sized, + Rhs: ?Sized, + T: Chacha20Type, +{ + fn from_key_exchange(lhs: &Lhs, rhs: &Rhs) -> Result { + Ok(Self(KeyType::::try_new_with(|arr| { + let mut buf = Writer::from_slice(arr); + lhs.write_key_exchange(rhs, &mut buf)?; + if buf.position() != Self::KEY_LENGTH { + return Err(err_msg!(Usage, "Invalid length for key exchange output")); + } + Ok(()) + })?)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::buffer::SecretBytes; + use crate::repr::ToSecretBytes; + + #[test] + fn encrypt_round_trip() { + fn test_encrypt() { + let input = b"hello"; + let key = Chacha20Key::::generate().unwrap(); + let mut buffer = SecretBytes::from_slice(input); + let nonce = Chacha20Key::::random_nonce(); + key.encrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); + assert_eq!(buffer.len(), input.len() + Chacha20Key::::TAG_LENGTH); + assert_ne!(&buffer[..], input); + key.decrypt_in_place(&mut buffer, &nonce, &[]).unwrap(); + assert_eq!(&buffer[..], input); + } + test_encrypt::(); + test_encrypt::(); 
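+ // Both supported ciphers (ChaCha20-Poly1305 and XChaCha20-Poly1305) are exercised
+ // above: the tag is appended on encrypt and stripped again on decrypt.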
+ } + + #[test] + fn serialize_round_trip() { + fn test_serialize() { + let key = Chacha20Key::::generate().unwrap(); + let sk = key.to_secret_bytes().unwrap(); + let bytes = serde_cbor::to_vec(&key).unwrap(); + let deser: &[u8] = serde_cbor::from_slice(bytes.as_ref()).unwrap(); + assert_eq!(deser, sk.as_ref()); + } + test_serialize::(); + test_serialize::(); + } +} diff --git a/askar-crypto/src/alg/ed25519.rs b/askar-crypto/src/alg/ed25519.rs new file mode 100644 index 00000000..d3207c60 --- /dev/null +++ b/askar-crypto/src/alg/ed25519.rs @@ -0,0 +1,416 @@ +//! Ed25519 signature and verification key support + +use core::{ + convert::{TryFrom, TryInto}, + fmt::{self, Debug, Formatter}, +}; + +use curve25519_dalek::edwards::CompressedEdwardsY; +use ed25519_dalek::{ExpandedSecretKey, PublicKey, SecretKey, Signature}; +use sha2::{self, Digest}; +use x25519_dalek::{PublicKey as XPublicKey, StaticSecret as XSecretKey}; + +use super::{x25519::X25519KeyPair, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, WriteBuffer}, + error::Error, + generic_array::typenum::{U32, U64}, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairBytes, KeypairMeta}, + sign::{KeySigVerify, KeySign, SignatureType}, +}; + +// FIXME - check for low-order points when loading public keys? +// https://github.com/tendermint/tmkms/pull/279 + +/// The length of an EdDSA signature +pub const EDDSA_SIGNATURE_LENGTH: usize = 64; + +/// The length of a public key in bytes +pub const PUBLIC_KEY_LENGTH: usize = 32; +/// The length of a secret key in bytes +pub const SECRET_KEY_LENGTH: usize = 32; +/// The length of a keypair in bytes +pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; + +/// The 'kty' value of an Ed25519 JWK +pub static JWK_KEY_TYPE: &'static str = "OKP"; +/// The 'crv' value of an Ed25519 JWK +pub static JWK_CURVE: &'static str = "Ed25519"; + +/// An Ed25519 public key or keypair +pub struct Ed25519KeyPair { + // SECURITY: SecretKey zeroizes on drop + secret: Option, + public: PublicKey, +} + +impl Ed25519KeyPair { + #[inline] + pub(crate) fn from_secret_key(sk: SecretKey) -> Self { + let public = PublicKey::from(&sk); + Self { + secret: Some(sk), + public, + } + } + + /// Create a signing key from the secret key + pub fn to_signing_key(&self) -> Option> { + self.secret + .as_ref() + .map(|sk| Ed25519SigningKey(ExpandedSecretKey::from(sk), &self.public)) + } + + /// Convert this keypair to an X25519 keypair + pub fn to_x25519_keypair(&self) -> X25519KeyPair { + if let Some(secret) = self.secret.as_ref() { + let hash = sha2::Sha512::digest(secret.as_bytes()); + // clamp result + let secret = XSecretKey::from(TryInto::<[u8; 32]>::try_into(&hash[..32]).unwrap()); + let public = XPublicKey::from(&secret); + X25519KeyPair::new(Some(secret), public) + } else { + let public = XPublicKey::from( + CompressedEdwardsY(self.public.to_bytes()) + .decompress() + .unwrap() + .to_montgomery() + .to_bytes(), + ); + X25519KeyPair::new(None, public) + } + } + + /// Sign a message with the secret key + pub fn sign(&self, message: &[u8]) -> Option<[u8; EDDSA_SIGNATURE_LENGTH]> { + self.to_signing_key().map(|sk| sk.sign(message)) + } + + /// Verify a signature against the public key + pub fn verify_signature(&self, message: &[u8], signature: &[u8]) -> bool { + if let Ok(sig) = Signature::try_from(signature) { + self.public.verify_strict(message, &sig).is_ok() + } else { + false + } + } +} + +impl Clone for Ed25519KeyPair { + fn clone(&self) -> Self { 
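+ // ed25519-dalek's SecretKey does not implement Clone, so the secret half is
+ // reconstructed from its canonical 32-byte encoding; from_bytes only fails on a
+ // wrong length, so the unwrap below cannot panic.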
+ Self { + secret: self + .secret + .as_ref() + .map(|sk| SecretKey::from_bytes(&sk.as_bytes()[..]).unwrap()), + public: self.public.clone(), + } + } +} + +impl Debug for Ed25519KeyPair { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("Ed25519KeyPair") + .field( + "secret", + if self.secret.is_some() { + &"" + } else { + &"None" + }, + ) + .field("public", &self.public) + .finish() + } +} + +impl KeyGen for Ed25519KeyPair { + fn generate() -> Result { + let sk = ArrayKey::::random(); + // NB: from_bytes is infallible if the slice is the right length + Ok(Self::from_secret_key( + SecretKey::from_bytes(sk.as_ref()).unwrap(), + )) + } +} + +impl HasKeyAlg for Ed25519KeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::Ed25519 + } +} + +impl KeyMeta for Ed25519KeyPair { + type KeySize = U32; +} + +impl KeySecretBytes for Ed25519KeyPair { + fn from_secret_bytes(key: &[u8]) -> Result { + if key.len() != SECRET_KEY_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + let sk = SecretKey::from_bytes(key).expect("Error loading ed25519 key"); + Ok(Self::from_secret_key(sk)) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + f(self.secret.as_ref().map(|sk| &sk.as_bytes()[..])) + } +} + +impl KeypairMeta for Ed25519KeyPair { + type PublicKeySize = U32; + type KeypairSize = U64; +} + +impl KeypairBytes for Ed25519KeyPair { + fn from_keypair_bytes(kp: &[u8]) -> Result { + if kp.len() != KEYPAIR_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + // NB: this is infallible if the slice is the right length + let sk = SecretKey::from_bytes(&kp[..SECRET_KEY_LENGTH]).unwrap(); + let pk = PublicKey::from_bytes(&kp[SECRET_KEY_LENGTH..]) + .map_err(|_| err_msg!(InvalidKeyData))?; + + Ok(Self { + secret: Some(sk), + public: pk, + }) + } + + fn with_keypair_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(secret) = self.secret.as_ref() { + ArrayKey::<::KeypairSize>::temp(|arr| { + arr[..SECRET_KEY_LENGTH].copy_from_slice(secret.as_bytes()); + arr[SECRET_KEY_LENGTH..].copy_from_slice(self.public.as_bytes()); + f(Some(&*arr)) + }) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for Ed25519KeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + if key.len() != PUBLIC_KEY_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + Ok(Self { + secret: None, + public: PublicKey::from_bytes(key).map_err(|_| err_msg!(InvalidKeyData))?, + }) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + f(&self.public.to_bytes()[..]) + } +} + +impl KeySign for Ed25519KeyPair { + fn write_signature( + &self, + message: &[u8], + sig_type: Option, + out: &mut dyn WriteBuffer, + ) -> Result<(), Error> { + match sig_type { + None | Some(SignatureType::EdDSA) => { + if let Some(signer) = self.to_signing_key() { + let sig = signer.sign(message); + out.buffer_write(&sig[..])?; + Ok(()) + } else { + Err(err_msg!(MissingSecretKey)) + } + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl KeySigVerify for Ed25519KeyPair { + fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option, + ) -> Result { + match sig_type { + None | Some(SignatureType::EdDSA) => Ok(self.verify_signature(message, signature)), + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl ToJwk for Ed25519KeyPair { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + enc.add_str("crv", 
JWK_CURVE)?; + enc.add_str("kty", JWK_KEY_TYPE)?; + self.with_public_bytes(|buf| enc.add_as_base64("x", buf))?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for Ed25519KeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + let pk = ArrayKey::::temp(|arr| { + if jwk.x.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + PublicKey::from_bytes(&*arr).map_err(|_| err_msg!(InvalidKeyData)) + } + })?; + let sk = if jwk.d.is_some() { + Some(ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + SecretKey::from_bytes(&*arr).map_err(|_| err_msg!(InvalidKeyData)) + } + })?) + } else { + None + }; + Ok(Self { + secret: sk, + public: pk, + }) + } +} + +/// An Ed25519 expanded secret key used for signing +// SECURITY: ExpandedSecretKey zeroizes on drop +pub struct Ed25519SigningKey<'p>(ExpandedSecretKey, &'p PublicKey); + +impl Ed25519SigningKey<'_> { + /// Sign a message with the secret key + pub fn sign(&self, message: &[u8]) -> [u8; EDDSA_SIGNATURE_LENGTH] { + self.0.sign(message, &self.1).to_bytes() + } +} + +impl Debug for Ed25519SigningKey<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("Ed25519SigningKey") + .field("secret", &"") + .field("public", &self.1) + .finish() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::repr::{ToPublicBytes, ToSecretBytes}; + + #[test] + fn expand_keypair() { + let seed = b"000000000000000000000000Trustee1"; + let test_sk = &hex!("3030303030303030303030303030303030303030303030305472757374656531e33aaf381fffa6109ad591fdc38717945f8fabf7abf02086ae401c63e9913097"); + + let kp = Ed25519KeyPair::from_secret_bytes(seed).unwrap(); + assert_eq!(kp.to_keypair_bytes().unwrap(), &test_sk[..]); + assert_eq!(kp.to_secret_bytes().unwrap(), &seed[..]); + + // test round trip + let cmp = Ed25519KeyPair::from_keypair_bytes(test_sk).unwrap(); + assert_eq!(cmp.to_keypair_bytes().unwrap(), &test_sk[..]); + } + + #[test] + fn ed25519_to_x25519() { + let test_keypair = &hex!("1c1179a560d092b90458fe6ab8291215a427fcd6b3927cb240701778ef55201927c96646f2d4632d4fc241f84cbc427fbc3ecaa95becba55088d6c7b81fc5bbf"); + let x_sk = &hex!("08e7286c232ec71b37918533ea0229bf0c75d3db4731df1c5c03c45bc909475f"); + let x_pk = &hex!("9b4260484c889158c128796103dc8d8b883977f2ef7efb0facb12b6ca9b2ae3d"); + let x_pair = Ed25519KeyPair::from_keypair_bytes(test_keypair) + .unwrap() + .to_x25519_keypair() + .to_keypair_bytes() + .unwrap(); + assert_eq!(&x_pair[..32], x_sk); + assert_eq!(&x_pair[32..], x_pk); + } + + #[test] + fn jwk_expected() { + // from https://www.connect2id.com/blog/nimbus-jose-jwt-6 + // { + // "kty" : "OKP", + // "crv" : "Ed25519", + // "x" : "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo", + // "d" : "nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A" + // "use" : "sig", + // "kid" : "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ" + // } + let test_pvt_b64 = "nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A"; + let test_pub_b64 = "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"; + let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let kp = Ed25519KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); + let jwk = kp + .to_jwk_public(None) + .expect("Error converting public key to JWK"); + let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK output"); + assert_eq!(jwk.kty, 
JWK_KEY_TYPE); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"); + let pk_load = Ed25519KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!(kp.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = kp + .to_jwk_secret() + .expect("Error converting private key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK output"); + assert_eq!(jwk.kty, "OKP"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64); + assert_eq!(jwk.d, test_pvt_b64); + let sk_load = Ed25519KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + sk_load.to_keypair_bytes().unwrap() + ); + } + + #[test] + fn sign_verify_expected() { + let test_msg = b"This is a dummy message for use with tests"; + let test_sig = &hex!( + "451b5b8e8725321541954997781de51f4142e4a56bab68d24f6a6b92615de5ee + fb74134138315859a32c7cf5fe5a488bc545e2e08e5eedfd1fb10188d532d808" + ); + let test_keypair = &hex!( + "1c1179a560d092b90458fe6ab8291215a427fcd6b3927cb240701778ef552019 + 27c96646f2d4632d4fc241f84cbc427fbc3ecaa95becba55088d6c7b81fc5bbf" + ); + let kp = Ed25519KeyPair::from_keypair_bytes(test_keypair).unwrap(); + let sig = &kp.sign(test_msg).unwrap(); + assert_eq!(sig, test_sig); + assert_eq!(kp.verify_signature(test_msg, &sig[..]), true); + assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), false); + assert_eq!(kp.verify_signature(test_msg, &[0u8; 64]), false); + } + + #[test] + fn round_trip_bytes() { + let kp = Ed25519KeyPair::generate().unwrap(); + let cmp = Ed25519KeyPair::from_keypair_bytes(&kp.to_keypair_bytes().unwrap()).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + cmp.to_keypair_bytes().unwrap() + ); + } +} diff --git a/askar-crypto/src/alg/k256.rs b/askar-crypto/src/alg/k256.rs new file mode 100644 index 00000000..4a326779 --- /dev/null +++ b/askar-crypto/src/alg/k256.rs @@ -0,0 +1,384 @@ +//! 
Elliptic curve ECDH and ECDSA support on curve secp256k1 + +use core::convert::{TryFrom, TryInto}; + +use k256::{ + ecdsa::{ + signature::{Signer, Verifier}, + Signature, SigningKey, VerifyingKey, + }, + elliptic_curve::{ecdh::diffie_hellman, sec1::Coordinates, Curve}, + EncodedPoint, PublicKey, SecretKey, +}; + +use super::{EcCurves, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, WriteBuffer}, + error::Error, + generic_array::typenum::{U32, U33, U65}, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + kdf::KeyExchange, + random::with_rng, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairBytes, KeypairMeta}, + sign::{KeySigVerify, KeySign, SignatureType}, +}; + +/// The length of an ES256K signature +pub const ES256K_SIGNATURE_LENGTH: usize = 64; + +/// The length of a compressed public key in bytes +pub const PUBLIC_KEY_LENGTH: usize = 33; +/// The length of a secret key +pub const SECRET_KEY_LENGTH: usize = 32; +/// The length of a keypair in bytes +pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; + +/// The 'kty' value of an elliptic curve key JWK +pub static JWK_KEY_TYPE: &'static str = "EC"; +/// The 'crv' value of a K-256 key JWK +pub static JWK_CURVE: &'static str = "secp256k1"; + +type FieldSize = ::FieldSize; + +/// A K-256 (secp256k1) public key or keypair +#[derive(Clone, Debug)] +pub struct K256KeyPair { + // SECURITY: SecretKey zeroizes on drop + secret: Option, + public: PublicKey, +} + +impl K256KeyPair { + #[inline] + pub(crate) fn from_secret_key(sk: SecretKey) -> Self { + let pk = sk.public_key(); + Self { + secret: Some(sk), + public: pk, + } + } + + pub(crate) fn to_signing_key(&self) -> Option { + self.secret.as_ref().map(SigningKey::from) + } + + /// Sign a message with the secret key + pub fn sign(&self, message: &[u8]) -> Option<[u8; ES256K_SIGNATURE_LENGTH]> { + if let Some(skey) = self.to_signing_key() { + let sig: Signature = skey.sign(message); + let sigb: [u8; 64] = sig.as_ref().try_into().unwrap(); + Some(sigb) + } else { + None + } + } + + /// Verify a signature with the public key + pub fn verify_signature(&self, message: &[u8], signature: &[u8]) -> bool { + if let Ok(sig) = Signature::try_from(signature) { + let vk = VerifyingKey::from(self.public.as_affine()); + vk.verify(message, &sig).is_ok() + } else { + false + } + } +} + +impl HasKeyAlg for K256KeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::EcCurve(EcCurves::Secp256k1) + } +} + +impl KeyMeta for K256KeyPair { + type KeySize = U32; +} + +impl KeyGen for K256KeyPair { + fn generate() -> Result { + Ok(Self::from_secret_key(with_rng(|r| SecretKey::random(r)))) + } +} + +impl KeySecretBytes for K256KeyPair { + fn from_secret_bytes(key: &[u8]) -> Result { + Ok(Self::from_secret_key( + SecretKey::from_bytes(key).map_err(|_| err_msg!(InvalidKeyData))?, + )) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + let b = k256::SecretBytes::from(sk.to_bytes()); + f(Some(&b[..])) + } else { + f(None) + } + } +} + +impl KeypairMeta for K256KeyPair { + type PublicKeySize = U33; + type KeypairSize = U65; +} + +impl KeypairBytes for K256KeyPair { + fn from_keypair_bytes(kp: &[u8]) -> Result { + if kp.len() != KEYPAIR_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + let sk = SecretKey::from_bytes(&kp[..SECRET_KEY_LENGTH]) + .map_err(|_| err_msg!(InvalidKeyData))?; + let pk = EncodedPoint::from_bytes(&kp[SECRET_KEY_LENGTH..]) + .and_then(|pt| pt.decode()) + .map_err(|_| 
err_msg!(InvalidKeyData))?; + + Ok(Self { + secret: Some(sk), + public: pk, + }) + } + + fn with_keypair_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(secret) = self.secret.as_ref() { + ArrayKey::<::KeypairSize>::temp(|arr| { + let sk_b = k256::SecretBytes::from(secret.to_bytes()); + let pk_enc = EncodedPoint::encode(self.public, true); + arr[..SECRET_KEY_LENGTH].copy_from_slice(&sk_b[..]); + arr[SECRET_KEY_LENGTH..].copy_from_slice(pk_enc.as_ref()); + f(Some(&*arr)) + }) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for K256KeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + let pk = EncodedPoint::from_bytes(key) + .and_then(|pt| pt.decode()) + .map_err(|_| err_msg!(InvalidKeyData))?; + Ok(Self { + secret: None, + public: pk, + }) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + let pt = EncodedPoint::encode(self.public, true); + f(pt.as_ref()) + } +} + +impl KeySign for K256KeyPair { + fn write_signature( + &self, + message: &[u8], + sig_type: Option, + out: &mut dyn WriteBuffer, + ) -> Result<(), Error> { + match sig_type { + None | Some(SignatureType::ES256K) => { + if let Some(sig) = self.sign(message) { + out.buffer_write(&sig[..])?; + Ok(()) + } else { + Err(err_msg!(Unsupported, "Undefined secret key")) + } + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl KeySigVerify for K256KeyPair { + fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option, + ) -> Result { + match sig_type { + None | Some(SignatureType::ES256K) => Ok(self.verify_signature(message, signature)), + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl ToJwk for K256KeyPair { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + let pk_enc = EncodedPoint::encode(self.public, false); + let (x, y) = match pk_enc.coordinates() { + Coordinates::Identity => { + return Err(err_msg!( + Unsupported, + "Cannot convert identity point to JWK" + )) + } + Coordinates::Uncompressed { x, y } => (x, y), + Coordinates::Compressed { .. } => unreachable!(), + }; + + enc.add_str("crv", JWK_CURVE)?; + enc.add_str("kty", JWK_KEY_TYPE)?; + enc.add_as_base64("x", &x[..])?; + enc.add_as_base64("y", &y[..])?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for K256KeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + let pk_x = ArrayKey::::try_new_with(|arr| { + if jwk.x.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk_y = ArrayKey::::try_new_with(|arr| { + if jwk.y.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk = EncodedPoint::from_affine_coordinates(pk_x.as_ref(), pk_y.as_ref(), false) + .decode() + .map_err(|_| err_msg!(InvalidKeyData))?; + let sk = if jwk.d.is_some() { + Some(ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + SecretKey::from_bytes(&*arr).map_err(|_| err_msg!(InvalidKeyData)) + } + })?) 
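+ // SecretKey::from_bytes also validates the decoded scalar (rejecting zero and
+ // out-of-range values), so a well-formed but unusable 'd' still surfaces as
+ // InvalidKeyData via the map_err above.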
+ } else { + None + }; + Ok(Self { + secret: sk, + public: pk, + }) + } +} + +impl KeyExchange for K256KeyPair { + fn write_key_exchange(&self, other: &Self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + match self.secret.as_ref() { + Some(sk) => { + let xk = diffie_hellman(sk.secret_scalar(), other.public.as_affine()); + out.buffer_write(xk.as_bytes())?; + Ok(()) + } + None => Err(err_msg!(MissingSecretKey)), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::repr::ToPublicBytes; + + #[test] + fn jwk_expected() { + // from https://identity.foundation/EcdsaSecp256k1RecoverySignature2020/ + // {"kty":"EC", + // "crv":"secp256k1", + // "d": "rhYFsBPF9q3-uZThy7B3c4LDF_8wnozFUAEm5LLC4Zw", + // "kid": "JUvpllMEYUZ2joO59UNui_XYDqxVqiFLLAJ8klWuPBw", + // "kty": "EC", + // "x": "dWCvM4fTdeM0KmloF57zxtBPXTOythHPMm1HCLrdd3A", + // "y": "36uMVGM7hnw-N6GnjFcihWE3SkrhMLzzLCdPMXPEXlA" + // } + let test_pvt_b64 = "rhYFsBPF9q3-uZThy7B3c4LDF_8wnozFUAEm5LLC4Zw"; + let test_pub_b64 = ( + "dWCvM4fTdeM0KmloF57zxtBPXTOythHPMm1HCLrdd3A", + "36uMVGM7hnw-N6GnjFcihWE3SkrhMLzzLCdPMXPEXlA", + ); + let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let sk = K256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); + + let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); + let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, None); + let pk_load = K256KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!(sk.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = sk.to_jwk_secret().expect("Error converting key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, test_pvt_b64); + let sk_load = K256KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!( + sk.to_keypair_bytes().unwrap(), + sk_load.to_keypair_bytes().unwrap() + ); + } + + #[test] + fn sign_verify_expected() { + let test_msg = b"This is a dummy message for use with tests"; + let test_sig = &hex!( + "a2a3affbe18cda8c5a7b6375f05b304c2303ab8beb21428709a43a519f8f946f + 6ffa7966afdb337e9b1f70bb575282e71d4fe5bbe6bfa97b229d6bd7e97df1e5" + ); + let test_pvt = base64::decode_config( + "jv_VrhPomm6_WOzb74xF4eMI0hu9p0W1Zlxi0nz8AFs", + base64::URL_SAFE_NO_PAD, + ) + .unwrap(); + let kp = K256KeyPair::from_secret_bytes(&test_pvt).unwrap(); + let sig = kp.sign(&test_msg[..]).unwrap(); + assert_eq!(sig, &test_sig[..]); + assert_eq!(kp.verify_signature(&test_msg[..], &sig[..]), true); + assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), false); + assert_eq!(kp.verify_signature(&test_msg[..], &[0u8; 64]), false); + } + + #[test] + fn key_exchange_random() { + let kp1 = K256KeyPair::generate().unwrap(); + let kp2 = K256KeyPair::generate().unwrap(); + assert_ne!( + kp1.to_keypair_bytes().unwrap(), + kp2.to_keypair_bytes().unwrap() + ); + + let xch1 = kp1.key_exchange_bytes(&kp2).unwrap(); + let xch2 = kp2.key_exchange_bytes(&kp1).unwrap(); + assert_eq!(xch1.len(), 32); + assert_eq!(xch1, xch2); + } + + #[test] + fn round_trip_bytes() { + let kp = K256KeyPair::generate().unwrap(); + let cmp = K256KeyPair::from_keypair_bytes(&kp.to_keypair_bytes().unwrap()).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + 
cmp.to_keypair_bytes().unwrap() + ); + } +} diff --git a/askar-crypto/src/alg/mod.rs b/askar-crypto/src/alg/mod.rs new file mode 100644 index 00000000..d7724892 --- /dev/null +++ b/askar-crypto/src/alg/mod.rs @@ -0,0 +1,253 @@ +//! Supported key algorithms + +use core::{ + fmt::{self, Debug, Display, Formatter}, + str::FromStr, +}; + +use zeroize::Zeroize; + +use crate::{ + buffer::{WriteBuffer, Writer}, + error::Error, +}; + +#[cfg(any(test, feature = "any_key"))] +mod any; +#[cfg(any(test, feature = "any_key"))] +#[cfg_attr(docsrs, doc(cfg(feature = "any_key")))] +pub use any::{AnyKey, AnyKeyCreate}; + +#[cfg(feature = "aes")] +#[cfg_attr(docsrs, doc(cfg(feature = "aes")))] +pub mod aes; + +#[cfg(feature = "bls")] +#[cfg_attr(docsrs, doc(cfg(feature = "bls")))] +pub mod bls; + +#[cfg(feature = "chacha")] +#[cfg_attr(docsrs, doc(cfg(feature = "chacha")))] +pub mod chacha20; + +#[cfg(feature = "ed25519")] +#[cfg_attr(docsrs, doc(cfg(feature = "ed25519")))] +pub mod ed25519; +#[cfg(feature = "ed25519")] +#[cfg_attr(docsrs, doc(cfg(feature = "ed25519")))] +pub mod x25519; + +#[cfg(feature = "k256")] +#[cfg_attr(docsrs, doc(cfg(feature = "k256")))] +pub mod k256; + +#[cfg(feature = "p256")] +#[cfg_attr(docsrs, doc(cfg(feature = "p256")))] +pub mod p256; + +/// Supported key algorithms +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub enum KeyAlg { + /// AES + Aes(AesTypes), + /// BLS12-381 + Bls12_381(BlsCurves), + /// (X)ChaCha20-Poly1305 + Chacha20(Chacha20Types), + /// Curve25519 signing key + Ed25519, + /// Curve25519 diffie-hellman key exchange key + X25519, + /// Elliptic Curve key for signing or key exchange + EcCurve(EcCurves), +} + +impl KeyAlg { + /// Get a reference to a string representing the `KeyAlg` + pub fn as_str(&self) -> &'static str { + match self { + Self::Aes(AesTypes::A128Gcm) => "a128gcm", + Self::Aes(AesTypes::A256Gcm) => "a256gcm", + Self::Aes(AesTypes::A128CbcHs256) => "a128cbchs256", + Self::Aes(AesTypes::A256CbcHs512) => "a256cbchs512", + Self::Bls12_381(BlsCurves::G1) => "bls12381g1", + Self::Bls12_381(BlsCurves::G2) => "bls12381g2", + Self::Bls12_381(BlsCurves::G1G2) => "bls12381g1g2", + Self::Chacha20(Chacha20Types::C20P) => "c20p", + Self::Chacha20(Chacha20Types::XC20P) => "xc20p", + Self::Ed25519 => "ed25519", + Self::X25519 => "x25519", + Self::EcCurve(EcCurves::Secp256k1) => "k256", + Self::EcCurve(EcCurves::Secp256r1) => "p256", + } + } +} + +impl AsRef for KeyAlg { + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl FromStr for KeyAlg { + type Err = Error; + + fn from_str(s: &str) -> Result { + match normalize_alg(s)? 
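+ // Matching below is done on the normalized form: normalize_alg lower-cases the
+ // input and drops '-', '_' and spaces, so "AES-128-GCM" and "a128gcm" both
+ // select AesTypes::A128Gcm.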
{ + a if a == "a128gcm" || a == "aes128gcm" => Ok(Self::Aes(AesTypes::A128Gcm)), + a if a == "a256gcm" || a == "aes256gcm" => Ok(Self::Aes(AesTypes::A256Gcm)), + a if a == "a128cbchs256" || a == "aes128cbchs256" => { + Ok(Self::Aes(AesTypes::A128CbcHs256)) + } + a if a == "a256cbchs512" || a == "aes256cbchs512" => { + Ok(Self::Aes(AesTypes::A256CbcHs512)) + } + a if a == "bls12381g1" => Ok(Self::Bls12_381(BlsCurves::G1)), + a if a == "bls12381g2" => Ok(Self::Bls12_381(BlsCurves::G2)), + a if a == "bls12381g1g2" => Ok(Self::Bls12_381(BlsCurves::G1G2)), + a if a == "c20p" || a == "chacha20poly1305" => Ok(Self::Chacha20(Chacha20Types::C20P)), + a if a == "xc20p" || a == "xchacha20poly1305" => { + Ok(Self::Chacha20(Chacha20Types::XC20P)) + } + a if a == "ed25519" => Ok(Self::Ed25519), + a if a == "x25519" => Ok(Self::X25519), + a if a == "k256" || a == "secp256k1" => Ok(Self::EcCurve(EcCurves::Secp256k1)), + a if a == "p256" || a == "secp256r1" => Ok(Self::EcCurve(EcCurves::Secp256r1)), + _ => Err(err_msg!(Unsupported, "Unknown key algorithm")), + } + } +} + +#[inline(always)] +pub(crate) fn normalize_alg(alg: &str) -> Result { + NormalizedAlg::new(alg) +} + +// Going through some hoops to avoid allocating. +// This struct stores up to 64 bytes of a normalized +// algorithm name in order to speed up comparisons +// when matching. +pub(crate) struct NormalizedAlg { + len: usize, + buf: [u8; 64], +} + +impl NormalizedAlg { + fn new(val: &str) -> Result { + let mut slf = Self { + len: 0, + buf: [0; 64], + }; + let mut cu = [0u8; 4]; + let mut writer = Writer::from_slice(slf.buf.as_mut()); + for c in NormalizedIter::new(val) { + let s = c.encode_utf8(&mut cu); + writer.buffer_write(s.as_bytes())?; + } + slf.len = writer.position(); + Ok(slf) + } +} + +impl AsRef<[u8]> for NormalizedAlg { + fn as_ref(&self) -> &[u8] { + &self.buf[..self.len] + } +} + +impl> PartialEq for NormalizedAlg { + fn eq(&self, other: &T) -> bool { + self.as_ref() == other.as_ref() + } +} + +struct NormalizedIter<'a> { + chars: core::str::Chars<'a>, +} + +impl<'a> NormalizedIter<'a> { + pub fn new(val: &'a str) -> Self { + Self { chars: val.chars() } + } +} + +impl Iterator for NormalizedIter<'_> { + type Item = char; + fn next(&mut self) -> Option { + while let Some(c) = self.chars.next() { + if c != '-' && c != '_' && c != ' ' { + return Some(c.to_ascii_lowercase()); + } + } + None + } +} + +impl Display for KeyAlg { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +/// Supported algorithms for AES +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub enum AesTypes { + /// 128-bit AES-GCM + A128Gcm, + /// 256-bit AES-GCM + A256Gcm, + /// 128-bit AES-CBC with HMAC-256 + A128CbcHs256, + /// 256-bit AES-CBC with HMAC-512 + A256CbcHs512, +} + +/// Supported public key types for Bls12_381 +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub enum BlsCurves { + /// G1 curve + G1, + /// G2 curve + G2, + /// G1 + G2 curves + G1G2, +} + +/// Supported algorithms for (X)ChaCha20-Poly1305 +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub enum Chacha20Types { + /// ChaCha20-Poly1305 + C20P, + /// XChaCha20-Poly1305 + XC20P, +} + +/// Supported curves for ECC operations +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub enum EcCurves { + /// NIST P-256 curve + Secp256r1, + /// Koblitz 256 curve + Secp256k1, +} + +/// A trait for accessing the algorithm of a key, used 
when +/// converting to generic `AnyKey` instances. +pub trait HasKeyAlg: Debug { + /// Get the corresponding key algorithm. + fn algorithm(&self) -> KeyAlg; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn cmp_normalize() { + assert_eq!(normalize_alg("Test").unwrap() == "test", true); + assert_eq!(normalize_alg("t-e-s-t").unwrap() == "test", true); + assert_eq!(normalize_alg("--TE__ST--").unwrap() == "test", true); + assert_eq!(normalize_alg("t-e-s-t").unwrap() == "tes", false); + assert_eq!(normalize_alg("t-e-s-t").unwrap() == "testt", false); + } +} diff --git a/askar-crypto/src/alg/p256.rs b/askar-crypto/src/alg/p256.rs new file mode 100644 index 00000000..b92addf1 --- /dev/null +++ b/askar-crypto/src/alg/p256.rs @@ -0,0 +1,399 @@ +//! Elliptic curve ECDH and ECDSA support on curve secp256r1 + +use core::convert::{TryFrom, TryInto}; + +use p256::{ + ecdsa::{ + signature::{Signer, Verifier}, + Signature, SigningKey, VerifyingKey, + }, + elliptic_curve::{ecdh::diffie_hellman, sec1::Coordinates, Curve}, + EncodedPoint, PublicKey, SecretKey, +}; + +use super::{EcCurves, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, WriteBuffer}, + error::Error, + generic_array::typenum::{U32, U33, U65}, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + kdf::KeyExchange, + random::with_rng, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairBytes, KeypairMeta}, + sign::{KeySigVerify, KeySign, SignatureType}, +}; + +/// The length of an ES256 signature +pub const ES256_SIGNATURE_LENGTH: usize = 64; + +/// The length of a compressed public key in bytes +pub const PUBLIC_KEY_LENGTH: usize = 33; +/// The length of a secret key +pub const SECRET_KEY_LENGTH: usize = 32; +/// The length of a keypair in bytes +pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; + +/// The 'kty' value of an elliptic curve key JWK +pub static JWK_KEY_TYPE: &'static str = "EC"; +/// The 'crv' value of a P-256 key JWK +pub static JWK_CURVE: &'static str = "P-256"; + +type FieldSize = ::FieldSize; + +/// A P-256 (secp256r1) public key or keypair +#[derive(Clone, Debug)] +pub struct P256KeyPair { + // SECURITY: SecretKey zeroizes on drop + secret: Option, + public: PublicKey, +} + +impl P256KeyPair { + #[inline] + pub(crate) fn from_secret_key(sk: SecretKey) -> Self { + let pk = sk.public_key(); + Self { + secret: Some(sk), + public: pk, + } + } + + pub(crate) fn to_signing_key(&self) -> Option { + self.secret.clone().map(SigningKey::from) + } + + /// Sign a message with the secret key + pub fn sign(&self, message: &[u8]) -> Option<[u8; ES256_SIGNATURE_LENGTH]> { + if let Some(skey) = self.to_signing_key() { + let sig: Signature = skey.sign(message); + let sigb: [u8; 64] = sig.as_ref().try_into().unwrap(); + Some(sigb) + } else { + None + } + } + + /// Verify a signature with the public key + pub fn verify_signature(&self, message: &[u8], signature: &[u8]) -> bool { + if let Ok(sig) = Signature::try_from(signature) { + let vk = VerifyingKey::from(&self.public); + vk.verify(message, &sig).is_ok() + } else { + false + } + } +} + +impl HasKeyAlg for P256KeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::EcCurve(EcCurves::Secp256r1) + } +} + +impl KeyMeta for P256KeyPair { + type KeySize = U32; +} + +impl KeyGen for P256KeyPair { + fn generate() -> Result { + Ok(Self::from_secret_key(with_rng(|r| SecretKey::random(r)))) + } +} + +impl KeySecretBytes for P256KeyPair { + fn from_secret_bytes(key: &[u8]) -> Result { + Ok(Self::from_secret_key( + SecretKey::from_bytes(key).map_err(|_| 
err_msg!(InvalidKeyData))?, + )) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + let b = p256::SecretBytes::from(sk.to_bytes()); + f(Some(&b[..])) + } else { + f(None) + } + } +} + +impl KeypairMeta for P256KeyPair { + type PublicKeySize = U33; + type KeypairSize = U65; +} + +impl KeypairBytes for P256KeyPair { + fn from_keypair_bytes(kp: &[u8]) -> Result { + if kp.len() != KEYPAIR_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + let sk = SecretKey::from_bytes(&kp[..SECRET_KEY_LENGTH]) + .map_err(|_| err_msg!(InvalidKeyData))?; + let pk = EncodedPoint::from_bytes(&kp[SECRET_KEY_LENGTH..]) + .and_then(|pt| pt.decode()) + .map_err(|_| err_msg!(InvalidKeyData))?; + + Ok(Self { + secret: Some(sk), + public: pk, + }) + } + + fn with_keypair_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(secret) = self.secret.as_ref() { + ArrayKey::<::KeypairSize>::temp(|arr| { + let sk_b = p256::SecretBytes::from(secret.to_bytes()); + let pk_enc = EncodedPoint::encode(self.public, true); + arr[..SECRET_KEY_LENGTH].copy_from_slice(&sk_b[..]); + arr[SECRET_KEY_LENGTH..].copy_from_slice(pk_enc.as_ref()); + f(Some(&*arr)) + }) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for P256KeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + let pk = EncodedPoint::from_bytes(key) + .and_then(|pt| pt.decode()) + .map_err(|_| err_msg!(InvalidKeyData))?; + Ok(Self { + secret: None, + public: pk, + }) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + let pt = EncodedPoint::encode(self.public, true); + f(pt.as_ref()) + } +} + +impl KeySign for P256KeyPair { + fn write_signature( + &self, + message: &[u8], + sig_type: Option, + out: &mut dyn WriteBuffer, + ) -> Result<(), Error> { + match sig_type { + None | Some(SignatureType::ES256K) => { + if let Some(sig) = self.sign(message) { + out.buffer_write(&sig[..])?; + Ok(()) + } else { + Err(err_msg!(Unsupported, "Undefined secret key")) + } + } + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl KeySigVerify for P256KeyPair { + fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option, + ) -> Result { + match sig_type { + None | Some(SignatureType::ES256) => Ok(self.verify_signature(message, signature)), + #[allow(unreachable_patterns)] + _ => Err(err_msg!(Unsupported, "Unsupported signature type")), + } + } +} + +impl ToJwk for P256KeyPair { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + let pk_enc = EncodedPoint::encode(self.public, false); + let (x, y) = match pk_enc.coordinates() { + Coordinates::Identity => { + return Err(err_msg!( + Unsupported, + "Cannot convert identity point to JWK" + )) + } + Coordinates::Uncompressed { x, y } => (x, y), + Coordinates::Compressed { .. } => unreachable!(), + }; + + enc.add_str("crv", JWK_CURVE)?; + enc.add_str("kty", JWK_KEY_TYPE)?; + enc.add_as_base64("x", &x[..])?; + enc.add_as_base64("y", &y[..])?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for P256KeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + let pk_x = ArrayKey::::try_new_with(|arr| { + if jwk.x.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk_y = ArrayKey::::try_new_with(|arr| { + if jwk.y.decode_base64(arr)? 
!= arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(()) + } + })?; + let pk = EncodedPoint::from_affine_coordinates(pk_x.as_ref(), pk_y.as_ref(), false) + .decode() + .map_err(|_| err_msg!(InvalidKeyData))?; + let sk = if jwk.d.is_some() { + Some(ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + SecretKey::from_bytes(&*arr).map_err(|_| err_msg!(InvalidKeyData)) + } + })?) + } else { + None + }; + Ok(Self { + secret: sk, + public: pk, + }) + } +} + +impl KeyExchange for P256KeyPair { + fn write_key_exchange(&self, other: &Self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + match self.secret.as_ref() { + Some(sk) => { + let xk = diffie_hellman(sk.secret_scalar(), other.public.as_affine()); + out.buffer_write(xk.as_bytes())?; + Ok(()) + } + None => Err(err_msg!(MissingSecretKey)), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::repr::ToPublicBytes; + + #[test] + fn jwk_expected() { + // from JWS RFC https://tools.ietf.org/html/rfc7515 + // {"kty":"EC", + // "crv":"P-256", + // "x":"f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU", + // "y":"x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0", + // "d":"jpsQnnGQmL-YBIffH1136cspYG6-0iY7X1fCE9-E9LI" + // } + let test_pvt_b64 = "jpsQnnGQmL-YBIffH1136cspYG6-0iY7X1fCE9-E9LI"; + let test_pub_b64 = ( + "f83OJ3D2xF1Bg8vub9tLe1gHMzV76e8Tus9uPHvRVEU", + "x_FEzRu9m36HLN_tue659LNpXW6pCyStikYjKIWI5a0", + ); + let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let sk = P256KeyPair::from_secret_bytes(&test_pvt).expect("Error creating signing key"); + + let jwk = sk.to_jwk_public(None).expect("Error converting key to JWK"); + let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, None); + let pk_load = P256KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!(sk.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = sk.to_jwk_secret().expect("Error converting key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK"); + assert_eq!(jwk.kty, "EC"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, test_pub_b64.0); + assert_eq!(jwk.y, test_pub_b64.1); + assert_eq!(jwk.d, test_pvt_b64); + let sk_load = P256KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!( + sk.to_keypair_bytes().unwrap(), + sk_load.to_keypair_bytes().unwrap() + ); + } + + #[test] + fn jwk_thumbprint() { + let pk = P256KeyPair::from_jwk( + r#"{ + "kty": "EC", + "crv": "P-256", + "x": "tDeeYABgKEAbWicYPCEEI8sP4SRIhHKcHDW7VqrB4LA", + "y": "J08HOoIZ0rX2Me3bNFZUltfxIk1Hrc8FsLu8VaSxsMI" + }"#, + ) + .unwrap(); + assert_eq!( + pk.to_jwk_thumbprint(None).unwrap(), + "8fm8079s3nu4FLV_7dVJoJ69A8XCXn7Za2mtaWCnxR4" + ); + } + + #[test] + fn sign_verify_expected() { + let test_msg = b"This is a dummy message for use with tests"; + let test_sig = &hex!( + "241f765f19d4e6148452f2249d2fa69882244a6ad6e70aadb8848a6409d20712 + 4e85faf9587100247de7bdace13a3073b47ec8a531ca91c1375b2b6134344413" + ); + let test_pvt = base64::decode_config( + "jpsQnnGQmL-YBIffH1136cspYG6-0iY7X1fCE9-E9LI", + base64::URL_SAFE_NO_PAD, + ) + .unwrap(); + let kp = P256KeyPair::from_secret_bytes(&test_pvt).unwrap(); + let sig = kp.sign(&test_msg[..]).unwrap(); + assert_eq!(sig, &test_sig[..]); + assert_eq!(kp.verify_signature(&test_msg[..], &sig[..]), true); + assert_eq!(kp.verify_signature(b"Not the message", &sig[..]), 
false); + assert_eq!(kp.verify_signature(&test_msg[..], &[0u8; 64]), false); + } + + #[test] + fn key_exchange_random() { + let kp1 = P256KeyPair::generate().unwrap(); + let kp2 = P256KeyPair::generate().unwrap(); + assert_ne!( + kp1.to_keypair_bytes().unwrap(), + kp2.to_keypair_bytes().unwrap() + ); + + let xch1 = kp1.key_exchange_bytes(&kp2).unwrap(); + let xch2 = kp2.key_exchange_bytes(&kp1).unwrap(); + assert_eq!(xch1.len(), 32); + assert_eq!(xch1, xch2); + } + + #[test] + fn round_trip_bytes() { + let kp = P256KeyPair::generate().unwrap(); + let cmp = P256KeyPair::from_keypair_bytes(&kp.to_keypair_bytes().unwrap()).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + cmp.to_keypair_bytes().unwrap() + ); + } +} diff --git a/askar-crypto/src/alg/x25519.rs b/askar-crypto/src/alg/x25519.rs new file mode 100644 index 00000000..2f600489 --- /dev/null +++ b/askar-crypto/src/alg/x25519.rs @@ -0,0 +1,321 @@ +//! X25519 key exchange support on Curve25519 + +use core::{ + convert::{TryFrom, TryInto}, + fmt::{self, Debug, Formatter}, +}; + +use x25519_dalek::{PublicKey, StaticSecret as SecretKey}; +use zeroize::Zeroizing; + +use super::{ed25519::Ed25519KeyPair, HasKeyAlg, KeyAlg}; +use crate::{ + buffer::{ArrayKey, WriteBuffer}, + error::Error, + generic_array::typenum::{U32, U64}, + jwk::{FromJwk, JwkEncoder, JwkParts, ToJwk}, + kdf::KeyExchange, + repr::{KeyGen, KeyMeta, KeyPublicBytes, KeySecretBytes, KeypairBytes, KeypairMeta}, +}; + +/// The length of a public key in bytes +pub const PUBLIC_KEY_LENGTH: usize = 32; +/// The length of a secret key in bytes +pub const SECRET_KEY_LENGTH: usize = 32; +/// The length of a keypair in bytes +pub const KEYPAIR_LENGTH: usize = SECRET_KEY_LENGTH + PUBLIC_KEY_LENGTH; + +/// The 'kty' value of an X25519 JWK +pub static JWK_KEY_TYPE: &'static str = "OKP"; +/// The 'crv' value of an X25519 JWK +pub static JWK_CURVE: &'static str = "X25519"; + +/// An X25519 public key or keypair +#[derive(Clone)] +pub struct X25519KeyPair { + // SECURITY: SecretKey (StaticSecret) zeroizes on drop + pub(crate) secret: Option, + pub(crate) public: PublicKey, +} + +impl X25519KeyPair { + #[inline(always)] + pub(crate) fn new(sk: Option, pk: PublicKey) -> Self { + Self { + secret: sk, + public: pk, + } + } + + #[inline] + pub(crate) fn from_secret_key(sk: SecretKey) -> Self { + let public = PublicKey::from(&sk); + Self { + secret: Some(sk), + public, + } + } +} + +impl Debug for X25519KeyPair { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("X25519KeyPair") + .field( + "secret", + if self.secret.is_some() { + &"" + } else { + &"None" + }, + ) + .field("public", &self.public) + .finish() + } +} + +impl HasKeyAlg for X25519KeyPair { + fn algorithm(&self) -> KeyAlg { + KeyAlg::X25519 + } +} + +impl KeyMeta for X25519KeyPair { + type KeySize = U32; +} + +impl KeyGen for X25519KeyPair { + fn generate() -> Result { + let sk = ArrayKey::::random(); + let sk = SecretKey::from( + TryInto::<[u8; SECRET_KEY_LENGTH]>::try_into(&sk.as_ref()[..]).unwrap(), + ); + let pk = PublicKey::from(&sk); + Ok(Self::new(Some(sk), pk)) + } +} + +impl KeySecretBytes for X25519KeyPair { + fn from_secret_bytes(key: &[u8]) -> Result { + if key.len() != SECRET_KEY_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + + // pre-check key to ensure that clamping has no effect + if key[0] & 7 != 0 || (key[31] & 127 | 64) != key[31] { + return Err(err_msg!(InvalidKeyData)); + } + + let sk = SecretKey::from(TryInto::<[u8; SECRET_KEY_LENGTH]>::try_into(key).unwrap()); + + // 
post-check key + // let mut check = sk.to_bytes(); + // if &check[..] != key { + // return Err(err_msg!("invalid x25519 secret key")); + // } + // check.zeroize(); + + Ok(Self::from_secret_key(sk)) + } + + fn with_secret_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(sk) = self.secret.as_ref() { + let b = Zeroizing::new(sk.to_bytes()); + f(Some(&b[..])) + } else { + f(None) + } + } +} + +impl KeypairMeta for X25519KeyPair { + type PublicKeySize = U32; + type KeypairSize = U64; +} + +impl KeypairBytes for X25519KeyPair { + fn from_keypair_bytes(kp: &[u8]) -> Result { + if kp.len() != KEYPAIR_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + let sk = SecretKey::from( + TryInto::<[u8; SECRET_KEY_LENGTH]>::try_into(&kp[..SECRET_KEY_LENGTH]).unwrap(), + ); + let pk = PublicKey::from( + TryInto::<[u8; PUBLIC_KEY_LENGTH]>::try_into(&kp[SECRET_KEY_LENGTH..]).unwrap(), + ); + + Ok(Self { + secret: Some(sk), + public: pk, + }) + } + + fn with_keypair_bytes(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O { + if let Some(secret) = self.secret.as_ref() { + ArrayKey::<::KeypairSize>::temp(|arr| { + let b = Zeroizing::new(secret.to_bytes()); + arr[..SECRET_KEY_LENGTH].copy_from_slice(&b[..]); + arr[SECRET_KEY_LENGTH..].copy_from_slice(self.public.as_bytes()); + f(Some(&*arr)) + }) + } else { + f(None) + } + } +} + +impl KeyPublicBytes for X25519KeyPair { + fn from_public_bytes(key: &[u8]) -> Result { + if key.len() != PUBLIC_KEY_LENGTH { + return Err(err_msg!(InvalidKeyData)); + } + Ok(Self::new( + None, + PublicKey::from(TryInto::<[u8; PUBLIC_KEY_LENGTH]>::try_into(key).unwrap()), + )) + } + + fn with_public_bytes(&self, f: impl FnOnce(&[u8]) -> O) -> O { + f(&self.public.to_bytes()[..]) + } +} + +impl ToJwk for X25519KeyPair { + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error> { + enc.add_str("crv", JWK_CURVE)?; + enc.add_str("kty", JWK_KEY_TYPE)?; + self.with_public_bytes(|buf| enc.add_as_base64("x", buf))?; + if enc.is_secret() { + self.with_secret_bytes(|buf| { + if let Some(sk) = buf { + enc.add_as_base64("d", sk) + } else { + Ok(()) + } + })?; + } + Ok(()) + } +} + +impl FromJwk for X25519KeyPair { + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result { + let pk = ArrayKey::::temp(|arr| { + if jwk.x.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(PublicKey::from( + TryInto::<[u8; PUBLIC_KEY_LENGTH]>::try_into(&*arr).unwrap(), + )) + } + })?; + let sk = if jwk.d.is_some() { + Some(ArrayKey::::temp(|arr| { + if jwk.d.decode_base64(arr)? != arr.len() { + Err(err_msg!(InvalidKeyData)) + } else { + Ok(SecretKey::from( + TryInto::<[u8; SECRET_KEY_LENGTH]>::try_into(&*arr).unwrap(), + )) + } + })?) 
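// NOTE (editorial addition, not part of this patch): a usage sketch for the KeyExchange
// impl just below. Import paths are assumptions; the trait methods are as defined here.
use askar_crypto::{alg::x25519::X25519KeyPair, kdf::KeyExchange, repr::KeyGen};

fn x25519_ecdh_example() -> Result<(), askar_crypto::Error> {
    let alice = X25519KeyPair::generate()?;
    let bob = X25519KeyPair::generate()?;
    let (mut s1, mut s2) = (Vec::new(), Vec::new());
    alice.write_key_exchange(&bob, &mut s1)?;   // DH(alice secret, bob public)
    bob.write_key_exchange(&alice, &mut s2)?;   // DH(bob secret, alice public)
    assert_eq!(s1, s2);                         // both parties derive the same 32-byte secret
    Ok(())
}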
+ } else { + None + }; + Ok(Self { + secret: sk, + public: pk, + }) + } +} + +impl KeyExchange for X25519KeyPair { + fn write_key_exchange(&self, other: &Self, out: &mut dyn WriteBuffer) -> Result<(), Error> { + match self.secret.as_ref() { + Some(sk) => { + let xk = sk.diffie_hellman(&other.public); + out.buffer_write(xk.as_bytes())?; + Ok(()) + } + None => Err(err_msg!(MissingSecretKey)), + } + } +} + +impl TryFrom<&Ed25519KeyPair> for X25519KeyPair { + type Error = Error; + + fn try_from(value: &Ed25519KeyPair) -> Result { + Ok(value.to_x25519_keypair()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::repr::ToPublicBytes; + + #[test] + fn jwk_expected() { + // { + // "kty": "OKP", + // "d": "qL25gw-HkNJC9m4EsRzCoUx1KntjwHPzxo6a2xUcyFQ", + // "use": "enc", + // "crv": "X25519", + // "x": "tGskN_ae61DP4DLY31_fjkbvnKqf-ze7kA6Cj2vyQxU" + // } + let test_pvt_b64 = "qL25gw-HkNJC9m4EsRzCoUx1KntjwHPzxo6a2xUcyFQ"; + let test_pvt = base64::decode_config(test_pvt_b64, base64::URL_SAFE).unwrap(); + let kp = + X25519KeyPair::from_secret_bytes(&test_pvt).expect("Error creating x25519 keypair"); + let jwk = kp + .to_jwk_public(None) + .expect("Error converting public key to JWK"); + let jwk = JwkParts::from_str(&jwk).expect("Error parsing JWK output"); + assert_eq!(jwk.kty, "OKP"); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, "tGskN_ae61DP4DLY31_fjkbvnKqf-ze7kA6Cj2vyQxU"); + assert_eq!(jwk.d, None); + let pk_load = X25519KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!(kp.to_public_bytes(), pk_load.to_public_bytes()); + + let jwk = kp + .to_jwk_secret() + .expect("Error converting private key to JWK"); + let jwk = JwkParts::from_slice(&jwk).expect("Error parsing JWK output"); + assert_eq!(jwk.kty, JWK_KEY_TYPE); + assert_eq!(jwk.crv, JWK_CURVE); + assert_eq!(jwk.x, "tGskN_ae61DP4DLY31_fjkbvnKqf-ze7kA6Cj2vyQxU"); + assert_eq!(jwk.d, test_pvt_b64); + let sk_load = X25519KeyPair::from_jwk_parts(jwk).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + sk_load.to_keypair_bytes().unwrap() + ); + } + + #[test] + fn key_exchange_random() { + let kp1 = X25519KeyPair::generate().unwrap(); + let kp2 = X25519KeyPair::generate().unwrap(); + assert_ne!( + kp1.to_keypair_bytes().unwrap(), + kp2.to_keypair_bytes().unwrap() + ); + + let xch1 = kp1.key_exchange_bytes(&kp2).unwrap(); + let xch2 = kp2.key_exchange_bytes(&kp1).unwrap(); + assert_eq!(xch1.len(), 32); + assert_eq!(xch1, xch2); + } + + #[test] + fn round_trip_bytes() { + let kp = X25519KeyPair::generate().unwrap(); + let cmp = X25519KeyPair::from_keypair_bytes(&kp.to_keypair_bytes().unwrap()).unwrap(); + assert_eq!( + kp.to_keypair_bytes().unwrap(), + cmp.to_keypair_bytes().unwrap() + ); + } +} diff --git a/askar-crypto/src/buffer/array.rs b/askar-crypto/src/buffer/array.rs new file mode 100644 index 00000000..697be8c7 --- /dev/null +++ b/askar-crypto/src/buffer/array.rs @@ -0,0 +1,204 @@ +use core::{ + fmt::{self, Debug, Formatter}, + marker::{PhantomData, PhantomPinned}, + ops::Deref, +}; + +use crate::generic_array::{ArrayLength, GenericArray}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use subtle::{Choice, ConstantTimeEq}; +use zeroize::Zeroize; + +use super::HexRepr; +use crate::{ + error::Error, + kdf::{FromKeyDerivation, KeyDerivation}, + random::fill_random, +}; + +/// A secure representation for fixed-length keys +#[derive(Clone, Hash)] +#[repr(transparent)] +pub struct ArrayKey>( + GenericArray, + // ensure that the type does not implement Unpin + PhantomPinned, +); + +impl> ArrayKey { 
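// NOTE (editorial addition, not part of this patch): ArrayKey is a fixed-length key buffer
// that is zeroized on drop. A sketch of the constructors defined in the impl block below;
// import paths are assumptions.
use askar_crypto::{buffer::ArrayKey, generic_array::typenum::U32};

fn array_key_example() {
    let key = ArrayKey::<U32>::random();               // 32 random bytes, wiped on drop
    println!("{}", key.as_hex());                      // hex Display without leaking via Debug
    // Borrow a temporary buffer that is wiped as soon as the closure returns:
    let first = ArrayKey::<U32>::temp(|buf| buf[0]);   // buffer starts zero-filled
    assert_eq!(first, 0);
}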
+ /// The array length in bytes + pub const SIZE: usize = L::USIZE; + + /// Create a new buffer using an initializer for the data + pub fn new_with(f: impl FnOnce(&mut [u8])) -> Self { + let mut slf = Self::default(); + f(slf.0.as_mut()); + slf + } + + /// Create a new buffer using a fallible initializer for the data + pub fn try_new_with(f: impl FnOnce(&mut [u8]) -> Result<(), E>) -> Result { + let mut slf = Self::default(); + f(slf.0.as_mut())?; + Ok(slf) + } + + /// Temporarily allocate and use a key + pub fn temp(f: impl FnOnce(&mut [u8]) -> R) -> R { + let mut slf = Self::default(); + f(slf.0.as_mut()) + } + + /// Convert this array to a non-zeroing GenericArray instance + #[inline] + pub fn extract(self) -> GenericArray { + self.0.clone() + } + + /// Create a new array instance from a slice of bytes. + /// Like <&GenericArray>::from_slice, panics if the length of the slice + /// is incorrect. + #[inline] + pub fn from_slice(data: &[u8]) -> Self { + Self::from(GenericArray::from_slice(data)) + } + + /// Get the length of the array + #[inline] + pub fn len() -> usize { + Self::SIZE + } + + /// Create a new array of random bytes + #[inline] + pub fn random() -> Self { + Self::new_with(fill_random) + } + + /// Get a hex formatter for the key data + pub fn as_hex(&self) -> HexRepr<&[u8]> { + HexRepr(self.0.as_ref()) + } +} + +impl> AsRef> for ArrayKey { + #[inline(always)] + fn as_ref(&self) -> &GenericArray { + &self.0 + } +} + +impl> Deref for ArrayKey { + type Target = [u8]; + + #[inline(always)] + fn deref(&self) -> &[u8] { + self.0.as_ref() + } +} + +impl> Default for ArrayKey { + #[inline(always)] + fn default() -> Self { + Self(GenericArray::default(), PhantomPinned) + } +} + +impl> From<&GenericArray> for ArrayKey { + #[inline(always)] + fn from(key: &GenericArray) -> Self { + Self(key.clone(), PhantomPinned) + } +} + +impl> From> for ArrayKey { + #[inline(always)] + fn from(key: GenericArray) -> Self { + Self(key, PhantomPinned) + } +} + +impl> Debug for ArrayKey { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if cfg!(test) { + f.debug_tuple("ArrayKey").field(&*self).finish() + } else { + f.debug_tuple("ArrayKey").field(&"").finish() + } + } +} + +impl> ConstantTimeEq for ArrayKey { + fn ct_eq(&self, other: &Self) -> Choice { + ConstantTimeEq::ct_eq(self.0.as_ref(), other.0.as_ref()) + } +} + +impl> PartialEq for ArrayKey { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.ct_eq(other).unwrap_u8() == 1 + } +} +impl> Eq for ArrayKey {} + +impl> Serialize for ArrayKey { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_bytes(self.as_ref()) + } +} + +impl<'de, L: ArrayLength> Deserialize<'de> for ArrayKey { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_bytes(KeyVisitor { _pd: PhantomData }) + } +} + +impl> Zeroize for ArrayKey { + fn zeroize(&mut self) { + self.0.zeroize(); + } +} + +impl> Drop for ArrayKey { + fn drop(&mut self) { + self.zeroize(); + } +} + +struct KeyVisitor> { + _pd: PhantomData, +} + +impl<'de, L: ArrayLength> de::Visitor<'de> for KeyVisitor { + type Value = ArrayKey; + + fn expecting(&self, formatter: &mut Formatter<'_>) -> fmt::Result { + formatter.write_str("ArrayKey") + } + + fn visit_bytes(self, value: &[u8]) -> Result + where + E: de::Error, + { + if value.len() != L::USIZE { + return Err(E::invalid_length(value.len(), &self)); + } + Ok(ArrayKey::from_slice(value)) + } +} + +impl> FromKeyDerivation for ArrayKey { + fn 
from_key_derivation(mut derive: D) -> Result + where + Self: Sized, + { + Self::try_new_with(|buf| derive.derive_key_bytes(buf)) + } +} diff --git a/askar-crypto/src/buffer/hash.rs b/askar-crypto/src/buffer/hash.rs new file mode 100644 index 00000000..0807ed62 --- /dev/null +++ b/askar-crypto/src/buffer/hash.rs @@ -0,0 +1,30 @@ +use core::fmt::Debug; + +use digest::Digest; + +use crate::generic_array::GenericArray; + +use crate::{buffer::WriteBuffer, error::Error}; + +/// A `WriteBuffer` implementation which hashes its input +#[derive(Debug)] +pub struct HashBuffer(D); + +impl HashBuffer { + /// Create a new instance + pub fn new() -> Self { + Self(D::new()) + } + + /// Finalize the hasher and extract the result + pub fn finalize(self) -> GenericArray { + self.0.finalize() + } +} + +impl WriteBuffer for HashBuffer { + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { + self.0.update(data); + Ok(()) + } +} diff --git a/askar-crypto/src/buffer/mod.rs b/askar-crypto/src/buffer/mod.rs new file mode 100644 index 00000000..7eccdf0c --- /dev/null +++ b/askar-crypto/src/buffer/mod.rs @@ -0,0 +1,113 @@ +//! Structures and traits for representing byte ranges in memory + +#[cfg(feature = "alloc")] +use alloc::vec::Vec; +use core::{fmt::Debug, ops::Range}; + +use crate::error::Error; + +mod array; +pub use self::array::ArrayKey; + +mod hash; +pub use self::hash::HashBuffer; + +#[cfg(feature = "alloc")] +mod secret; +#[cfg(feature = "alloc")] +#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] +pub use self::secret::SecretBytes; + +mod string; +pub use self::string::HexRepr; + +mod writer; +pub use self::writer::Writer; + +/// Support for writing to a byte buffer +pub trait WriteBuffer: Debug { + /// Append a slice to the buffer + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error>; +} + +/// Support for writing to, accessing, and resizing a byte buffer +pub trait ResizeBuffer: WriteBuffer + AsRef<[u8]> + AsMut<[u8]> { + /// Insert a slice at the given position in the buffer + fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error>; + + /// Remove an exclusive range from the buffer + fn buffer_remove(&mut self, range: Range) -> Result<(), Error>; + + /// Resize the buffer, truncating or padding it with zeroes + fn buffer_resize(&mut self, len: usize) -> Result<(), Error>; + + /// Extend the buffer with `len` bytes of zeroes and return + /// a mutable reference to the slice + fn buffer_extend(&mut self, len: usize) -> Result<&mut [u8], Error> { + let pos = self.as_ref().len(); + let end = pos + len; + self.buffer_resize(end)?; + Ok(&mut self.as_mut()[pos..end]) + } +} + +#[cfg(feature = "alloc")] +#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] +impl WriteBuffer for Vec { + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { + self.extend_from_slice(data); + Ok(()) + } +} + +#[cfg(feature = "alloc")] +#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] +impl ResizeBuffer for Vec { + fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { + self.splice(pos..pos, data.into_iter().cloned()); + Ok(()) + } + + fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { + self.drain(range); + Ok(()) + } + + fn buffer_resize(&mut self, len: usize) -> Result<(), Error> { + self.resize(len, 0u8); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + pub(crate) fn test_write_buffer>(mut w: B) { + w.buffer_write(b"he").unwrap(); + w.buffer_write(b"y").unwrap(); + assert_eq!(&w.as_ref()[..], b"hey"); + } + + pub(crate) fn 
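// NOTE (editorial addition, not part of this patch): the WriteBuffer trait above lets the
// same code target a growable Vec<u8>, a fixed slice, or a streaming hasher. A sketch using
// HashBuffer<Sha256> from buffer/hash.rs; import paths are assumptions.
use askar_crypto::buffer::{HashBuffer, WriteBuffer};
use sha2::Sha256;

fn hash_buffer_example() -> Result<(), askar_crypto::Error> {
    let mut hasher = HashBuffer::<Sha256>::new();
    hasher.buffer_write(b"hello ")?;
    hasher.buffer_write(b"world")?;
    let digest = hasher.finalize();        // SHA-256("hello world"), a 32-byte GenericArray
    assert_eq!(digest.len(), 32);
    Ok(())
}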
test_resize_buffer(mut w: B) { + w.buffer_write(b"hello").unwrap(); + w.buffer_insert(1, b"world").unwrap(); + assert_eq!(&w.as_ref()[..], b"hworldello"); + w.buffer_resize(12).unwrap(); + assert_eq!(&w.as_ref()[..], b"hworldello\0\0"); + w.buffer_resize(6).unwrap(); + assert_eq!(&w.as_ref()[..], b"hworld"); + w.buffer_insert(1, b"ello").unwrap(); + assert_eq!(&w.as_ref()[..], b"helloworld"); + } + + #[test] + fn write_buffer_vec() { + test_write_buffer(Vec::new()); + } + + #[test] + fn resize_buffer_vec() { + test_resize_buffer(Vec::new()); + } +} diff --git a/askar-crypto/src/buffer/secret.rs b/askar-crypto/src/buffer/secret.rs new file mode 100644 index 00000000..8abc4b95 --- /dev/null +++ b/askar-crypto/src/buffer/secret.rs @@ -0,0 +1,307 @@ +use alloc::{boxed::Box, string::String, vec::Vec}; +use core::{ + fmt::{self, Debug, Formatter}, + mem, + ops::{Deref, Range}, +}; + +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use subtle::{Choice, ConstantTimeEq}; +use zeroize::Zeroize; + +use super::{string::MaybeStr, HexRepr, ResizeBuffer, WriteBuffer}; +use crate::error::Error; + +/// A heap-allocated, zeroized byte buffer +#[derive(Clone, Default, Hash, Zeroize)] +pub struct SecretBytes(Vec); + +impl SecretBytes { + /// Create a new buffer using an initializer for the data + pub fn new_with(len: usize, f: impl FnOnce(&mut [u8])) -> Self { + let mut slf = Self::with_capacity(len); + slf.0.resize(len, 0u8); + f(slf.0.as_mut()); + slf + } + + /// Create a new, empty buffer with an initial capacity + #[inline] + pub fn with_capacity(max_len: usize) -> Self { + Self(Vec::with_capacity(max_len)) + } + + /// Create a new buffer from a slice + #[inline] + pub fn from_slice(data: &[u8]) -> Self { + let mut v = Vec::with_capacity(data.len()); + v.extend_from_slice(data); + Self(v) + } + + /// Create a new buffer from a slice, with extra space reserved + #[inline] + pub fn from_slice_reserve(data: &[u8], reserve: usize) -> Self { + let mut v = Vec::with_capacity(data.len() + reserve); + v.extend_from_slice(data); + Self(v) + } + + /// Accessor for the length of the buffer contents + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + /// Try to convert the buffer value to a string reference + pub fn as_opt_str(&self) -> Option<&str> { + core::str::from_utf8(self.0.as_slice()).ok() + } + + /// Ensure that data can be appended to the buffer without resizing + pub fn ensure_capacity(&mut self, min_cap: usize) { + let cap = self.0.capacity(); + if cap == 0 { + self.0.reserve(min_cap); + } else if cap > 0 && min_cap >= cap { + // allocate a new buffer and copy the secure data over + let new_cap = min_cap.max(cap * 2).max(32); + let mut buf = SecretBytes::with_capacity(new_cap); + buf.0.extend_from_slice(&self.0[..]); + mem::swap(&mut buf, self); + // old buf zeroized on drop + } + } + + /// Extend the buffer from a byte slice + #[inline] + pub fn extend_from_slice(&mut self, data: &[u8]) { + self.reserve(data.len()); + self.0.extend_from_slice(data); + } + + /// Reserve extra space in the buffer + #[inline] + pub fn reserve(&mut self, extra: usize) { + self.ensure_capacity(self.len() + extra) + } + + /// Convert this buffer into a boxed slice + pub fn into_boxed_slice(mut self) -> Box<[u8]> { + let len = self.0.len(); + if self.0.capacity() > len { + // copy to a smaller buffer (capacity is not tracked for boxed slice) + // and proceed with the normal zeroize on drop + let mut v = Vec::with_capacity(len); + v.append(&mut self.0); + v.into_boxed_slice() + } else { + // no 
realloc and copy needed + self.into_vec().into_boxed_slice() + } + } + + /// Unwrap this buffer into a Vec + #[inline] + pub fn into_vec(mut self) -> Vec { + // FIXME zeroize extra capacity in case it was used previously? + let mut v = Vec::new(); // note: no heap allocation for empty vec + mem::swap(&mut v, &mut self.0); + mem::forget(self); + v + } + + #[cfg(feature = "crypto_box")] + pub(crate) fn as_vec_mut(&mut self) -> &mut Vec { + &mut self.0 + } + + pub(crate) fn splice( + &mut self, + range: Range, + iter: impl Iterator + ExactSizeIterator, + ) -> Result<(), Error> { + assert!(range.end >= range.start); + let rem_len = range.len(); + let ins_len = iter.len(); + if ins_len > rem_len { + self.reserve(ins_len - rem_len); + } + self.0.splice(range, iter); + Ok(()) + } + + /// Get a hex formatter for the secret data + pub fn as_hex(&self) -> HexRepr<&[u8]> { + HexRepr(self.0.as_ref()) + } +} + +impl Debug for SecretBytes { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if cfg!(test) { + f.debug_tuple("Secret") + .field(&MaybeStr(self.0.as_slice())) + .finish() + } else { + f.write_str("") + } + } +} + +impl AsRef<[u8]> for SecretBytes { + fn as_ref(&self) -> &[u8] { + self.0.as_slice() + } +} + +impl AsMut<[u8]> for SecretBytes { + fn as_mut(&mut self) -> &mut [u8] { + self.0.as_mut_slice() + } +} + +impl Deref for SecretBytes { + type Target = [u8]; + + fn deref(&self) -> &Self::Target { + self.0.as_slice() + } +} + +impl Drop for SecretBytes { + fn drop(&mut self) { + self.zeroize(); + } +} + +impl ConstantTimeEq for SecretBytes { + fn ct_eq(&self, other: &Self) -> Choice { + ConstantTimeEq::ct_eq(self.0.as_slice(), other.0.as_slice()) + } +} + +impl PartialEq for SecretBytes { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.ct_eq(other).unwrap_u8() == 1 + } +} +impl Eq for SecretBytes {} + +impl From<&[u8]> for SecretBytes { + fn from(inner: &[u8]) -> Self { + Self(inner.to_vec()) + } +} + +impl From<&str> for SecretBytes { + fn from(inner: &str) -> Self { + Self(inner.as_bytes().to_vec()) + } +} + +impl From for SecretBytes { + fn from(inner: String) -> Self { + Self(inner.into_bytes()) + } +} + +impl From> for SecretBytes { + fn from(inner: Box<[u8]>) -> Self { + Self(inner.into()) + } +} + +impl From> for SecretBytes { + fn from(inner: Vec) -> Self { + Self(inner) + } +} + +impl PartialEq<&[u8]> for SecretBytes { + fn eq(&self, other: &&[u8]) -> bool { + self.0.eq(other) + } +} + +impl PartialEq> for SecretBytes { + fn eq(&self, other: &Vec) -> bool { + self.0.eq(other) + } +} + +impl WriteBuffer for SecretBytes { + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { + self.extend_from_slice(data); + Ok(()) + } +} + +impl ResizeBuffer for SecretBytes { + fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { + self.splice(pos..pos, data.into_iter().cloned()) + } + + fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { + self.0.drain(range); + Ok(()) + } + + fn buffer_resize(&mut self, len: usize) -> Result<(), Error> { + self.ensure_capacity(len); + self.0.resize(len, 0u8); + Ok(()) + } +} + +impl Serialize for SecretBytes { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_bytes(self.as_ref()) + } +} + +impl<'de> Deserialize<'de> for SecretBytes { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_bytes(SecVisitor) + } +} + +struct SecVisitor; + +impl<'de> de::Visitor<'de> for SecVisitor { + type Value = 
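// NOTE (editorial addition, not part of this patch): a usage sketch for SecretBytes, the
// heap-allocated counterpart to ArrayKey. It grows like Vec<u8> but zeroizes its contents
// on drop and when reallocating. Import paths are assumptions.
use askar_crypto::buffer::{SecretBytes, WriteBuffer};

fn secret_bytes_example() -> Result<(), askar_crypto::Error> {
    let mut secret = SecretBytes::with_capacity(32);
    secret.buffer_write(b"top secret")?;               // append; reallocation wipes the old buffer
    assert_eq!(secret.len(), 10);
    assert_eq!(secret.as_opt_str(), Some("top secret"));
    println!("{}", secret.as_hex());                   // hex rendering for logging/tests
    Ok(())                                             // contents zeroized on drop
}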
SecretBytes; + + fn expecting(&self, formatter: &mut Formatter<'_>) -> fmt::Result { + formatter.write_str("bytes") + } + + fn visit_bytes(self, value: &[u8]) -> Result + where + E: de::Error, + { + Ok(SecretBytes::from_slice(value)) + } +} + +#[cfg(test)] +mod tests { + use super::super::tests::{test_resize_buffer, test_write_buffer}; + use super::*; + + #[test] + fn write_buffer_secret() { + test_write_buffer(SecretBytes::with_capacity(10)); + } + + #[test] + fn resize_buffer_secret() { + test_resize_buffer(SecretBytes::with_capacity(10)); + } +} diff --git a/askar-crypto/src/buffer/string.rs b/askar-crypto/src/buffer/string.rs new file mode 100644 index 00000000..05205727 --- /dev/null +++ b/askar-crypto/src/buffer/string.rs @@ -0,0 +1,76 @@ +use core::fmt::{self, Debug, Display, Formatter, Write}; + +/// A utility type used to print or serialize a byte string as hex +#[derive(Debug)] +pub struct HexRepr(pub B); + +impl> Display for HexRepr { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + for c in self.0.as_ref() { + f.write_fmt(format_args!("{:02x}", c))?; + } + Ok(()) + } +} + +// Compare to another hex value as [u8] +impl> PartialEq<[u8]> for HexRepr { + fn eq(&self, other: &[u8]) -> bool { + struct CmpWrite<'s>(::core::slice::Iter<'s, u8>); + + impl Write for CmpWrite<'_> { + fn write_str(&mut self, s: &str) -> fmt::Result { + for c in s.as_bytes() { + if self.0.next() != Some(c) { + return Err(fmt::Error); + } + } + Ok(()) + } + } + + write!(&mut CmpWrite(other.into_iter()), "{}", self).is_ok() + } +} + +impl> PartialEq<&str> for HexRepr { + fn eq(&self, other: &&str) -> bool { + self == other.as_bytes() + } +} + +/// A utility type for debug printing of byte strings +pub struct MaybeStr<'a>(pub &'a [u8]); + +impl Debug for MaybeStr<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if let Ok(sval) = core::str::from_utf8(self.0) { + write!(f, "{:?}", sval) + } else { + write!(f, "<{}>", HexRepr(self.0)) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn hex_repr_output() { + assert_eq!(format!("{}", HexRepr(&[])), ""); + assert_eq!(format!("{}", HexRepr(&[255, 0, 255, 0])), "ff00ff00"); + } + + #[test] + fn hex_repr_cmp() { + assert_eq!(HexRepr(&[0, 255, 0, 255]), "00ff00ff"); + assert_ne!(HexRepr(&[100, 101, 102]), "00ff00ff"); + } + + #[test] + fn maybe_str_output() { + assert_eq!(format!("{:?}", MaybeStr(&[])), "\"\""); + assert_eq!(format!("{:?}", MaybeStr(&[255, 0])), ""); + } +} diff --git a/askar-crypto/src/buffer/writer.rs b/askar-crypto/src/buffer/writer.rs new file mode 100644 index 00000000..b7cfd297 --- /dev/null +++ b/askar-crypto/src/buffer/writer.rs @@ -0,0 +1,195 @@ +#[cfg(feature = "alloc")] +use alloc::vec::Vec; +use core::{fmt, ops::Range}; + +use super::{ResizeBuffer, WriteBuffer}; +use crate::error::Error; + +/// A structure wrapping a mutable pointer to a buffer +#[derive(Debug)] +pub struct Writer<'w, B: ?Sized> { + inner: &'w mut B, + pos: usize, +} + +impl Writer<'_, B> { + /// Accessor for the writer position + pub fn position(&self) -> usize { + self.pos + } +} + +impl<'w> Writer<'w, [u8]> { + /// Create a new writer from a mutable byte slice + #[inline] + pub fn from_slice(slice: &'w mut [u8]) -> Self { + Writer { + inner: slice, + pos: 0, + } + } + + /// Create a new writer from a mutable byte slice, skipping a prefix + #[inline] + pub fn from_slice_position(slice: &'w mut [u8], pos: usize) -> Self { + Writer { inner: slice, pos } + } +} + +impl Writer<'_, [u8]> { + pub(crate) fn splice( + &mut self, + 
range: Range, + mut iter: impl Iterator + ExactSizeIterator, + ) -> Result<(), Error> { + assert!(range.end >= range.start); + let rem_len = range.len(); + let ins_len = iter.len(); + if ins_len > rem_len { + let diff = ins_len - rem_len; + if self.pos + diff > self.inner.len() { + return Err(err_msg!(ExceededBuffer)); + } + self.inner + .copy_within((range.end - diff)..self.pos, range.end); + self.pos += diff; + } else if ins_len < rem_len { + let diff = rem_len - ins_len; + self.inner + .copy_within(range.end..self.pos, range.end - diff); + self.pos -= diff; + } + for idx in 0..ins_len { + self.inner[range.start + idx] = iter.next().unwrap(); + } + Ok(()) + } +} + +impl AsRef<[u8]> for Writer<'_, [u8]> { + fn as_ref(&self) -> &[u8] { + &self.inner[..self.pos] + } +} + +impl AsMut<[u8]> for Writer<'_, [u8]> { + fn as_mut(&mut self) -> &mut [u8] { + &mut self.inner[..self.pos] + } +} + +impl WriteBuffer for Writer<'_, [u8]> { + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { + let total = self.inner.len(); + let end = self.pos + data.len(); + if end > total { + return Err(err_msg!(ExceededBuffer)); + } + self.inner[self.pos..end].copy_from_slice(data); + self.pos += data.len(); + Ok(()) + } +} + +impl ResizeBuffer for Writer<'_, [u8]> { + fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { + self.splice(pos..pos, data.into_iter().cloned()) + } + + fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { + assert!(range.end >= range.start); + let diff = range.end - range.start; + self.inner.copy_within(range.end..self.pos, range.start); + self.pos -= diff; + Ok(()) + } + + fn buffer_resize(&mut self, len: usize) -> Result<(), Error> { + let len = self.pos + len; + if len > self.inner.len() { + return Err(err_msg!(ExceededBuffer)); + } + self.pos = len; + Ok(()) + } +} + +#[cfg(feature = "alloc")] +#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] +impl<'w> Writer<'w, Vec> { + /// Create a new writer from a mutable Vec pointer + #[inline] + pub fn from_vec(vec: &'w mut Vec) -> Self { + Writer { inner: vec, pos: 0 } + } + + /// Create a new writer from a mutable Vec pointer, skipping a prefix + #[inline] + pub fn from_vec_skip(vec: &'w mut Vec, pos: usize) -> Self { + Writer { inner: vec, pos } + } +} + +impl WriteBuffer for Writer<'_, B> { + fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> { + self.inner.buffer_write(data) + } +} + +impl AsRef<[u8]> for Writer<'_, B> { + fn as_ref(&self) -> &[u8] { + &self.inner.as_ref()[self.pos..] + } +} + +impl AsMut<[u8]> for Writer<'_, B> { + fn as_mut(&mut self) -> &mut [u8] { + &mut self.inner.as_mut()[self.pos..] 
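// NOTE (editorial addition, not part of this patch): Writer adapts a plain mutable slice
// (or an existing buffer) to WriteBuffer/ResizeBuffer, keeping callers allocation-free.
// Import paths are assumptions.
use askar_crypto::buffer::{WriteBuffer, Writer};

fn writer_example() -> Result<(), askar_crypto::Error> {
    let mut buf = [0u8; 8];
    let mut w = Writer::from_slice(&mut buf);
    w.buffer_write(b"hi")?;                            // succeeds while capacity remains
    assert_eq!(w.position(), 2);
    assert!(w.buffer_write(&[0u8; 16]).is_err());      // ExceededBuffer error instead of a panic
    assert_eq!(&buf[..2], b"hi");
    Ok(())
}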
+ } +} + +impl ResizeBuffer for Writer<'_, B> { + fn buffer_insert(&mut self, pos: usize, data: &[u8]) -> Result<(), Error> { + self.inner.buffer_insert(self.pos + pos, data) + } + + fn buffer_remove(&mut self, range: Range) -> Result<(), Error> { + self.inner + .buffer_remove((self.pos + range.start)..(self.pos + range.end)) + } + + fn buffer_resize(&mut self, len: usize) -> Result<(), Error> { + self.inner.buffer_resize(self.pos + len) + } +} + +impl<'b, B: ?Sized> Writer<'b, B> { + /// Create a new writer from a pointer to a buffer implementation + pub fn from_buffer(buf: &'b mut B) -> Writer<'b, B> { + Writer { inner: buf, pos: 0 } + } +} + +impl<'b, B: ?Sized> fmt::Write for Writer<'b, B> +where + Writer<'b, B>: WriteBuffer, +{ + fn write_str(&mut self, s: &str) -> fmt::Result { + self.buffer_write(s.as_bytes()).map_err(|_| fmt::Error) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn write_buffer_slice() { + let mut buf = [0u8; 10]; + let mut w = Writer::from_slice(&mut buf); + w.buffer_write(b"he").unwrap(); + w.buffer_write(b"y").unwrap(); + assert_eq!(w.position(), 3); + assert_eq!(&buf[..3], b"hey"); + } +} diff --git a/askar-crypto/src/encrypt/crypto_box.rs b/askar-crypto/src/encrypt/crypto_box.rs new file mode 100644 index 00000000..ee70678d --- /dev/null +++ b/askar-crypto/src/encrypt/crypto_box.rs @@ -0,0 +1,184 @@ +//! Compatibility with libsodium's crypto_box construct + +use crate::{ + buffer::Writer, + generic_array::{typenum::Unsigned, GenericArray}, +}; +use aead::AeadInPlace; +use blake2::{digest::Update, digest::VariableOutput, VarBlake2b}; +use crypto_box_rs::{self as cbox, SalsaBox}; + +use crate::{ + alg::x25519::X25519KeyPair, + buffer::{ResizeBuffer, SecretBytes, WriteBuffer}, + error::Error, + repr::{KeyGen, KeyPublicBytes}, +}; + +/// The length of the salsa box nonce +pub const CBOX_NONCE_LENGTH: usize = NonceSize::::USIZE; +/// The length of the salsa box key (x25519 public key) +pub const CBOX_KEY_LENGTH: usize = crate::alg::x25519::PUBLIC_KEY_LENGTH; +/// The length of the salsa box tag +pub const CBOX_TAG_LENGTH: usize = TagSize::::USIZE; + +type NonceSize = ::NonceSize; + +type TagSize = ::TagSize; + +#[inline] +fn secret_key_from(kp: &X25519KeyPair) -> Result { + if let Some(sk) = kp.secret.as_ref() { + Ok(cbox::SecretKey::from(sk.to_bytes())) + } else { + Err(err_msg!(MissingSecretKey)) + } +} + +#[inline] +fn nonce_from(nonce: &[u8]) -> Result<&GenericArray>, Error> { + if nonce.len() == NonceSize::::USIZE { + Ok(GenericArray::from_slice(nonce)) + } else { + Err(err_msg!(InvalidNonce)) + } +} + +/// Encrypt a message into a crypto box with a given nonce +pub fn crypto_box( + recip_pk: &X25519KeyPair, + sender_sk: &X25519KeyPair, + buffer: &mut B, + nonce: &[u8], +) -> Result<(), Error> { + let sender_sk = secret_key_from(sender_sk)?; + let nonce = nonce_from(nonce)?; + let box_inst = SalsaBox::new(&recip_pk.public, &sender_sk); + let tag = box_inst + .encrypt_in_place_detached(nonce, &[], buffer.as_mut()) + .map_err(|_| err_msg!(Encryption, "Crypto box AEAD encryption error"))?; + buffer.buffer_insert(0, &tag[..])?; + Ok(()) +} + +/// Unencrypt a crypto box +pub fn crypto_box_open( + recip_sk: &X25519KeyPair, + sender_pk: &X25519KeyPair, + buffer: &mut B, + nonce: &[u8], +) -> Result<(), Error> { + let recip_sk = secret_key_from(recip_sk)?; + let nonce = nonce_from(nonce)?; + let buf_len = buffer.as_ref().len(); + if buf_len < CBOX_TAG_LENGTH { + return Err(err_msg!(Encryption, "Invalid size for encrypted data")); + } + // the tag is 
prepended + let tag = GenericArray::clone_from_slice(&buffer.as_ref()[..CBOX_TAG_LENGTH]); + let box_inst = SalsaBox::new(&sender_pk.public, &recip_sk); + box_inst + .decrypt_in_place_detached(nonce, &[], &mut buffer.as_mut()[CBOX_TAG_LENGTH..], &tag) + .map_err(|_| err_msg!(Encryption, "Crypto box AEAD decryption error"))?; + buffer.buffer_remove(0..CBOX_TAG_LENGTH)?; + Ok(()) +} + +/// Construct a deterministic nonce for an ephemeral and recipient key +pub fn crypto_box_seal_nonce( + ephemeral_pk: &[u8], + recip_pk: &[u8], +) -> Result<[u8; CBOX_NONCE_LENGTH], Error> { + let mut key_hash = VarBlake2b::new(CBOX_NONCE_LENGTH).unwrap(); + key_hash.update(ephemeral_pk); + key_hash.update(recip_pk); + let mut nonce = [0u8; CBOX_NONCE_LENGTH]; + key_hash.finalize_variable(|hash| nonce.copy_from_slice(hash)); + Ok(nonce) +} + +/// Encrypt a message for a recipient using an ephemeral key and deterministic nonce +// Could add a non-alloc version, if needed +pub fn crypto_box_seal(recip_pk: &X25519KeyPair, message: &[u8]) -> Result { + let ephem_kp = X25519KeyPair::generate()?; + let ephem_pk_bytes = ephem_kp.public.as_bytes(); + let buf_len = CBOX_KEY_LENGTH + CBOX_TAG_LENGTH + message.len(); + let mut buffer = SecretBytes::with_capacity(buf_len); + buffer.buffer_write(ephem_pk_bytes)?; + buffer.buffer_write(message)?; + let mut writer = Writer::from_vec_skip(buffer.as_vec_mut(), CBOX_KEY_LENGTH); + let nonce = crypto_box_seal_nonce(ephem_pk_bytes, recip_pk.public.as_bytes())?.to_vec(); + crypto_box(recip_pk, &ephem_kp, &mut writer, &nonce[..])?; + Ok(buffer) +} + +/// Unseal a sealed crypto box +pub fn crypto_box_seal_open( + recip_sk: &X25519KeyPair, + ciphertext: &[u8], +) -> Result { + let ephem_pk = X25519KeyPair::from_public_bytes(&ciphertext[..CBOX_KEY_LENGTH])?; + let mut buffer = SecretBytes::from_slice(&ciphertext[CBOX_KEY_LENGTH..]); + let nonce = crypto_box_seal_nonce(ephem_pk.public.as_bytes(), recip_sk.public.as_bytes())?; + crypto_box_open(recip_sk, &ephem_pk, &mut buffer, &nonce)?; + Ok(buffer) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::buffer::SecretBytes; + use crate::repr::{KeySecretBytes, ToPublicBytes}; + + #[test] + fn crypto_box_round_trip_expected() { + let sk = X25519KeyPair::from_secret_bytes(&hex!( + "a8bdb9830f8790d242f66e04b11cc2a14c752a7b63c073f3c68e9adb151cc854" + )) + .unwrap(); + let pk = X25519KeyPair::from_public_bytes(&hex!( + "07d0b594683bdb6af5f4eacb1a392687d580a58db196a752dca316dedb7d251c" + )) + .unwrap(); + let message = b"hello there"; + let nonce = b"012345678912012345678912"; + let mut buffer = SecretBytes::from_slice(message); + crypto_box(&pk, &sk, &mut buffer, nonce).unwrap(); + assert_eq!( + buffer, + &hex!("848dc97d373f7aa2223b57780c60f7731cc8721d567baa8f2b5583")[..] 
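// NOTE (editorial addition, not part of this patch): a usage sketch for the sealed-box
// helpers above (requires the `crypto_box` and `alloc` features); import paths are assumptions.
// The sender uses only the recipient's public key; only the recipient can decrypt.
use askar_crypto::{
    alg::x25519::X25519KeyPair,
    encrypt::crypto_box::{crypto_box_seal, crypto_box_seal_open, CBOX_KEY_LENGTH, CBOX_TAG_LENGTH},
    repr::KeyGen,
};

fn sealed_box_example() -> Result<(), askar_crypto::Error> {
    let recip = X25519KeyPair::generate()?;
    let sealed = crypto_box_seal(&recip, b"hello")?;     // ephemeral public key + tag + ciphertext
    assert_eq!(sealed.len(), CBOX_KEY_LENGTH + CBOX_TAG_LENGTH + 5);
    let opened = crypto_box_seal_open(&recip, &sealed)?;
    assert_eq!(&opened[..], b"hello");
    Ok(())
}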
+ ); + + crypto_box_open(&sk, &pk, &mut buffer, nonce).unwrap(); + assert_eq!(buffer, &message[..]); + } + + #[test] + fn crypto_box_seal_round_trip() { + let recip = X25519KeyPair::generate().unwrap(); + + let recip_public = + X25519KeyPair::from_public_bytes(recip.to_public_bytes().unwrap().as_ref()).unwrap(); + + let message = b"hello there"; + let sealed = crypto_box_seal(&recip_public, message).unwrap(); + assert_ne!(sealed, &message[..]); + + let open = crypto_box_seal_open(&recip, &sealed).unwrap(); + assert_eq!(open, &message[..]); + } + + #[test] + fn crypto_box_unseal_expected() { + use crate::alg::ed25519::Ed25519KeyPair; + let recip = Ed25519KeyPair::from_secret_bytes(b"testseed000000000000000000000001") + .unwrap() + .to_x25519_keypair(); + let ciphertext = hex!( + "ed443c0377a579857f2f00543e0da0f2585b6119cd9e43c871e4f1114c7ce9050b + a8811edf39d257bbeec0d423a0a7ff98d424fbfa9d52e0c5b3f674738f75d8e727f + 5526296482fd0fd013d71d50ce4ce5ebe9c2fa1c230298419a9" + ); + crypto_box_seal_open(&recip, &ciphertext).unwrap(); + } +} diff --git a/askar-crypto/src/encrypt/mod.rs b/askar-crypto/src/encrypt/mod.rs new file mode 100644 index 00000000..9e70c2b6 --- /dev/null +++ b/askar-crypto/src/encrypt/mod.rs @@ -0,0 +1,58 @@ +//! AEAD encryption traits and parameters + +use crate::{ + buffer::ResizeBuffer, + error::Error, + generic_array::{ArrayLength, GenericArray}, + random::fill_random, +}; + +#[cfg(feature = "crypto_box")] +#[cfg_attr(docsrs, doc(cfg(feature = "crypto_box")))] +pub mod crypto_box; + +/// Trait for key types which perform AEAD encryption +pub trait KeyAeadInPlace { + /// Encrypt a secret value in place, appending the verification tag + fn encrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error>; + + /// Decrypt an encrypted (verification tag appended) value in place + fn decrypt_in_place( + &self, + buffer: &mut dyn ResizeBuffer, + nonce: &[u8], + aad: &[u8], + ) -> Result<(), Error>; + + /// Get the nonce and tag length for encryption + fn aead_params(&self) -> KeyAeadParams; +} + +/// For concrete key types with fixed nonce and tag sizes +pub trait KeyAeadMeta { + /// The size of the AEAD nonce + type NonceSize: ArrayLength; + /// The size of the AEAD tag + type TagSize: ArrayLength; + + /// Generate a new random nonce + fn random_nonce() -> GenericArray { + let mut nonce = GenericArray::default(); + fill_random(nonce.as_mut_slice()); + nonce + } +} + +/// A structure combining the AEAD parameters +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +pub struct KeyAeadParams { + /// The length of the nonce + pub nonce_length: usize, + /// The length of the tag + pub tag_length: usize, +} diff --git a/askar-crypto/src/error.rs b/askar-crypto/src/error.rs new file mode 100644 index 00000000..aa90ef54 --- /dev/null +++ b/askar-crypto/src/error.rs @@ -0,0 +1,163 @@ +#[cfg(feature = "std")] +use alloc::boxed::Box; +use core::fmt::{self, Display, Formatter}; + +#[cfg(feature = "std")] +use std::error::Error as StdError; + +/// The possible kinds of error produced by the crate +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ErrorKind { + /// An encryption or decryption operation failed + Encryption, + + /// Out of space in provided buffer + ExceededBuffer, + + /// The provided input was invalid + InvalidData, + + /// The provided key was invalid + InvalidKeyData, + + /// The provided nonce was invalid (bad length) + InvalidNonce, + + /// A secret key is required but not present + MissingSecretKey, + + /// An 
unexpected error occurred + Unexpected, + + /// The input parameters to the method were incorrect + Usage, + + /// An unsupported operation was requested + Unsupported, +} + +impl ErrorKind { + /// Convert the error kind to a string reference + pub fn as_str(&self) -> &'static str { + match self { + Self::Encryption => "Encryption error", + Self::ExceededBuffer => "Exceeded allocated buffer", + Self::InvalidData => "Invalid data", + Self::InvalidNonce => "Invalid encryption nonce", + Self::InvalidKeyData => "Invalid key data", + Self::MissingSecretKey => "Missing secret key", + Self::Unexpected => "Unexpected error", + Self::Usage => "Usage error", + Self::Unsupported => "Unsupported", + } + } +} + +impl Display for ErrorKind { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +/// The standard crate error type +#[derive(Debug)] +pub struct Error { + pub(crate) kind: ErrorKind, + #[cfg(feature = "std")] + pub(crate) cause: Option>, + pub(crate) message: Option<&'static str>, +} + +impl Error { + /// Create a new error instance with message text + pub fn from_msg(kind: ErrorKind, msg: &'static str) -> Self { + Self { + kind, + #[cfg(feature = "std")] + cause: None, + message: Some(msg), + } + } + + /// Accessor for the error kind + pub fn kind(&self) -> ErrorKind { + self.kind + } + + /// Accessor for the error message + pub fn message(&self) -> &'static str { + self.message.unwrap_or_else(|| self.kind.as_str()) + } + + #[cfg(feature = "std")] + pub(crate) fn with_cause>>(mut self, err: T) -> Self { + self.cause = Some(err.into()); + self + } +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if let Some(msg) = self.message { + f.write_str(msg)?; + } else { + f.write_str(self.kind.as_str())?; + } + #[cfg(feature = "std")] + if let Some(cause) = self.cause.as_ref() { + write!(f, "\nCaused by: {}", cause)?; + } + Ok(()) + } +} + +#[cfg(feature = "std")] +impl StdError for Error { + fn source(&self) -> Option<&(dyn StdError + 'static)> { + // the transmute operation here is only removing Send and Sync markers + #[allow(unsafe_code)] + self.cause + .as_ref() + .map(|err| unsafe { std::mem::transmute(&**err) }) + } +} + +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + self.kind == other.kind && self.message == other.message + } +} + +impl From for Error { + fn from(kind: ErrorKind) -> Self { + Self { + kind, + #[cfg(feature = "std")] + cause: None, + message: None, + } + } +} + +macro_rules! err_msg { + ($kind:ident) => { + $crate::error::Error::from($crate::error::ErrorKind::$kind) + }; + ($kind:ident, $msg:expr) => { + $crate::error::Error::from_msg($crate::error::ErrorKind::$kind, $msg) + }; +} + +#[cfg(feature = "std")] +macro_rules! err_map { + ($($params:tt)*) => { + |err| err_msg!($($params)*).with_cause(err) + }; +} + +#[cfg(not(feature = "std"))] +macro_rules! 
err_map { + ($($params:tt)*) => { + |_| err_msg!($($params)*) + }; +} diff --git a/askar-crypto/src/jwk/encode.rs b/askar-crypto/src/jwk/encode.rs new file mode 100644 index 00000000..410dabc5 --- /dev/null +++ b/askar-crypto/src/jwk/encode.rs @@ -0,0 +1,134 @@ +use core::fmt::Write; + +use super::ops::KeyOpsSet; +use crate::{ + alg::KeyAlg, + buffer::{WriteBuffer, Writer}, + error::Error, +}; + +/// Supported modes for JWK encoding +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum JwkEncoderMode { + /// Encoding a public key + PublicKey, + /// Encoding a secret key + SecretKey, + /// Encoding a public key thumbprint + Thumbprint, +} + +/// A helper structure which writes a JWK to a buffer +#[derive(Debug)] +pub struct JwkEncoder<'b> { + alg: Option, + buffer: &'b mut dyn WriteBuffer, + empty: bool, + mode: JwkEncoderMode, +} + +impl<'b> JwkEncoder<'b> { + /// Create a new instance + pub fn new( + alg: Option, + buffer: &'b mut B, + mode: JwkEncoderMode, + ) -> Result { + Ok(Self { + alg, + buffer, + empty: true, + mode, + }) + } +} + +impl JwkEncoder<'_> { + /// Get the requested algorithm for the JWK + pub fn alg(&self) -> Option { + self.alg + } + + fn start_attr(&mut self, key: &str) -> Result<(), Error> { + let buffer = &mut *self.buffer; + if self.empty { + buffer.buffer_write(b"{\"")?; + self.empty = false; + } else { + buffer.buffer_write(b",\"")?; + } + buffer.buffer_write(key.as_bytes())?; + buffer.buffer_write(b"\":")?; + Ok(()) + } + + /// Add a string attribute + pub fn add_str(&mut self, key: &str, value: &str) -> Result<(), Error> { + self.start_attr(key)?; + let buffer = &mut *self.buffer; + buffer.buffer_write(b"\"")?; + buffer.buffer_write(value.as_bytes())?; + buffer.buffer_write(b"\"")?; + Ok(()) + } + + /// Add a binary attribute to be encoded as unpadded base64-URL + pub fn add_as_base64(&mut self, key: &str, value: &[u8]) -> Result<(), Error> { + self.start_attr(key)?; + let buffer = &mut *self.buffer; + buffer.buffer_write(b"\"")?; + write!( + Writer::from_buffer(&mut *buffer), + "{}", + base64::display::Base64Display::with_config(value, base64::URL_SAFE_NO_PAD) + ) + .map_err(|_| err_msg!(Unexpected, "Error writing to JWK buffer"))?; + buffer.buffer_write(b"\"")?; + Ok(()) + } + + /// Add key operations to the JWK + pub fn add_key_ops(&mut self, ops: impl Into) -> Result<(), Error> { + self.start_attr("key_ops")?; + let buffer = &mut *self.buffer; + for (idx, op) in ops.into().into_iter().enumerate() { + if idx > 0 { + buffer.buffer_write(b",\"")?; + } else { + buffer.buffer_write(b"\"")?; + } + buffer.buffer_write(op.as_str().as_bytes())?; + buffer.buffer_write(b"\"")?; + } + buffer.buffer_write(b"]")?; + Ok(()) + } + + /// Accessor for the encoder mode + pub fn mode(&self) -> JwkEncoderMode { + self.mode + } + + /// Check if the mode is public + pub fn is_public(&self) -> bool { + matches!(self.mode, JwkEncoderMode::PublicKey) + } + + /// Check if the mode is secret + pub fn is_secret(&self) -> bool { + matches!(self.mode, JwkEncoderMode::SecretKey) + } + + /// Check if the mode is thumbprint + pub fn is_thumbprint(&self) -> bool { + matches!(self.mode, JwkEncoderMode::Thumbprint) + } + + /// Complete the JWK + pub fn finalize(self) -> Result<(), Error> { + if !self.empty { + self.buffer.buffer_write(b"}")?; + } + Ok(()) + } +} diff --git a/askar-crypto/src/jwk/mod.rs b/askar-crypto/src/jwk/mod.rs new file mode 100644 index 00000000..b57046a8 --- /dev/null +++ b/askar-crypto/src/jwk/mod.rs @@ -0,0 +1,91 @@ +//! 
JSON Web Key (JWK) support + +#[cfg(feature = "alloc")] +use alloc::{string::String, vec::Vec}; + +use sha2::Sha256; + +#[cfg(feature = "alloc")] +use crate::buffer::SecretBytes; +use crate::{ + alg::KeyAlg, + buffer::{HashBuffer, ResizeBuffer}, + error::Error, +}; + +mod encode; +pub use self::encode::{JwkEncoder, JwkEncoderMode}; + +mod ops; +pub use self::ops::{KeyOps, KeyOpsSet}; + +mod parts; +pub use self::parts::JwkParts; + +/// Support for converting a key into a JWK +pub trait ToJwk { + /// Write the JWK representation to an encoder + fn encode_jwk(&self, enc: &mut JwkEncoder<'_>) -> Result<(), Error>; + + /// Create the JWK thumbprint of the key + #[cfg(feature = "alloc")] + #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] + fn to_jwk_thumbprint(&self, alg: Option) -> Result { + let mut v = Vec::with_capacity(43); + write_jwk_thumbprint(self, alg, &mut v)?; + Ok(String::from_utf8(v).unwrap()) + } + + /// Create a JWK of the public key + #[cfg(feature = "alloc")] + #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] + fn to_jwk_public(&self, alg: Option) -> Result { + let mut v = Vec::with_capacity(128); + let mut buf = JwkEncoder::new(alg, &mut v, JwkEncoderMode::PublicKey)?; + self.encode_jwk(&mut buf)?; + buf.finalize()?; + Ok(String::from_utf8(v).unwrap()) + } + + /// Create a JWK of the secret key + #[cfg(feature = "alloc")] + #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] + fn to_jwk_secret(&self) -> Result { + let mut v = SecretBytes::with_capacity(128); + let mut buf = JwkEncoder::new(None, &mut v, JwkEncoderMode::SecretKey)?; + self.encode_jwk(&mut buf)?; + buf.finalize()?; + Ok(v) + } +} + +/// Encode a key's JWK into a buffer +pub fn write_jwk_thumbprint( + key: &K, + alg: Option, + output: &mut dyn ResizeBuffer, +) -> Result<(), Error> { + let mut hasher = HashBuffer::::new(); + let mut buf = JwkEncoder::new(alg, &mut hasher, JwkEncoderMode::Thumbprint)?; + key.encode_jwk(&mut buf)?; + buf.finalize()?; + let hash = hasher.finalize(); + base64::encode_config_slice(&hash, base64::URL_SAFE_NO_PAD, output.buffer_extend(43)?); + Ok(()) +} + +/// Support for loading a key instance from a JWK +pub trait FromJwk: Sized { + /// Import the key from a JWK string reference + fn from_jwk(jwk: &str) -> Result { + JwkParts::from_str(jwk).and_then(Self::from_jwk_parts) + } + + /// Import the key from a JWK byte slice + fn from_jwk_slice(jwk: &[u8]) -> Result { + JwkParts::from_slice(jwk).and_then(Self::from_jwk_parts) + } + + /// Import the key from a pre-parsed JWK + fn from_jwk_parts(jwk: JwkParts<'_>) -> Result; +} diff --git a/askar-crypto/src/jwk/ops.rs b/askar-crypto/src/jwk/ops.rs new file mode 100644 index 00000000..fb34504f --- /dev/null +++ b/askar-crypto/src/jwk/ops.rs @@ -0,0 +1,224 @@ +use core::{ + fmt::{self, Debug, Display, Formatter}, + ops::{BitAnd, BitOr}, +}; + +static OPS: &[KeyOps] = &[ + KeyOps::Encrypt, + KeyOps::Decrypt, + KeyOps::Sign, + KeyOps::Verify, + KeyOps::WrapKey, + KeyOps::UnwrapKey, + KeyOps::DeriveKey, + KeyOps::DeriveBits, +]; + +/// Supported JWK key operations +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +#[repr(usize)] +pub enum KeyOps { + /// Allows encryption + Encrypt = 1 << 0, + /// Allows decryption + Decrypt = 1 << 1, + /// Allows signature creation + Sign = 1 << 2, + /// Allows signature verification + Verify = 1 << 3, + /// Allows key wrapping + WrapKey = 1 << 4, + /// Allows key unwrapping + UnwrapKey = 1 << 5, + /// Allows key derivation + DeriveKey = 1 << 6, + /// Allows derivation of bytes + DeriveBits = 1 << 7, +} 
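// NOTE (editorial addition, not part of this patch): a round-trip sketch for the ToJwk and
// FromJwk traits defined in jwk/mod.rs above (String output needs the `alloc` feature);
// import paths are assumptions.
use askar_crypto::{
    alg::x25519::X25519KeyPair,
    jwk::{FromJwk, ToJwk},
    repr::{KeyGen, ToPublicBytes},
};

fn jwk_round_trip() -> Result<(), askar_crypto::Error> {
    let kp = X25519KeyPair::generate()?;
    let jwk = kp.to_jwk_public(None)?;                  // {"crv":"X25519","kty":"OKP","x":"..."}
    let loaded = X25519KeyPair::from_jwk(&jwk)?;        // public-only copy of the key
    assert_eq!(kp.to_public_bytes()?, loaded.to_public_bytes()?);
    let _thumbprint = kp.to_jwk_thumbprint(None)?;      // base64url SHA-256 over the public JWK
    Ok(())
}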
+ +impl Display for KeyOps { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +impl KeyOps { + /// String representation of the key operation + pub fn as_str(&self) -> &'static str { + match self { + Self::Encrypt => "encrypt", + Self::Decrypt => "decrypt", + Self::Sign => "sign", + Self::Verify => "verify", + Self::WrapKey => "wrapKey", + Self::UnwrapKey => "unwrapKey", + Self::DeriveKey => "deriveKey", + Self::DeriveBits => "deriveBits", + } + } + + /// Parse a key operation from a string reference + pub fn from_str(key: &str) -> Option { + match key { + "sign" => Some(Self::Sign), + "verify" => Some(Self::Verify), + "encrypt" => Some(Self::Encrypt), + "decrypt" => Some(Self::Decrypt), + "wrapKey" => Some(Self::WrapKey), + "unwrapKey" => Some(Self::UnwrapKey), + "deriveKey" => Some(Self::DeriveKey), + "deriveBits" => Some(Self::DeriveBits), + _ => None, + } + } +} + +impl BitOr for KeyOps { + type Output = KeyOpsSet; + + fn bitor(self, rhs: Self) -> Self::Output { + KeyOpsSet { + value: self as usize | rhs as usize, + } + } +} + +/// A set of key operations +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(transparent)] +pub struct KeyOpsSet { + value: usize, +} + +impl KeyOpsSet { + /// Create a new, empty operation set + pub const fn new() -> Self { + Self { value: 0 } + } + + /// Check if an operation set is empty + pub fn is_empty(&self) -> bool { + self.value == 0 + } +} + +impl Default for KeyOpsSet { + fn default() -> Self { + Self::new() + } +} + +impl BitOr for KeyOpsSet { + type Output = Self; + + fn bitor(self, rhs: Self) -> Self::Output { + KeyOpsSet { + value: self.value | rhs.value, + } + } +} + +impl BitOr for KeyOpsSet { + type Output = KeyOpsSet; + + fn bitor(self, rhs: KeyOps) -> Self::Output { + KeyOpsSet { + value: self.value | rhs as usize, + } + } +} + +impl BitAnd for KeyOpsSet { + type Output = bool; + + fn bitand(self, rhs: KeyOps) -> Self::Output { + self.value & rhs as usize != 0 + } +} + +impl BitAnd for KeyOpsSet { + type Output = bool; + + fn bitand(self, rhs: Self) -> Self::Output { + self.value & rhs.value != 0 + } +} + +impl Debug for KeyOpsSet { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + let mut vals = &mut f.debug_set(); + for op in self { + vals = vals.entry(&op.as_str()); + } + vals.finish() + } +} + +impl From for KeyOpsSet { + fn from(op: KeyOps) -> Self { + Self { value: op as usize } + } +} + +impl IntoIterator for &KeyOpsSet { + type IntoIter = KeyOpsIter; + type Item = KeyOps; + + fn into_iter(self) -> Self::IntoIter { + KeyOpsIter { + index: 0, + value: *self, + } + } +} + +#[derive(Debug)] +pub struct KeyOpsIter { + index: usize, + value: KeyOpsSet, +} + +impl Iterator for KeyOpsIter { + type Item = KeyOps; + + fn next(&mut self) -> Option { + while self.index < OPS.len() { + let op = OPS[self.index]; + self.index += 1; + if self.value & op { + return Some(op); + } + } + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn invariants() { + assert_eq!(KeyOpsSet::new().is_empty(), true); + assert_eq!(KeyOpsSet::from(KeyOps::Decrypt).is_empty(), false); + assert_eq!(KeyOpsSet::new(), KeyOpsSet::new()); + assert_ne!(KeyOpsSet::from(KeyOps::Decrypt), KeyOpsSet::new()); + assert_ne!(KeyOps::Decrypt, KeyOps::Encrypt); + assert_ne!( + KeyOpsSet::from(KeyOps::Decrypt), + KeyOpsSet::from(KeyOps::Encrypt) + ); + assert_eq!( + KeyOps::Decrypt | KeyOps::Encrypt, + KeyOps::Encrypt | KeyOps::Decrypt + ); + } + + #[test] + fn debug_format() { + assert_eq!( + 
format!("{:?}", KeyOps::Decrypt | KeyOps::Encrypt), + "{\"encrypt\", \"decrypt\"}" + ); + } +} diff --git a/askar-crypto/src/jwk/parts.rs b/askar-crypto/src/jwk/parts.rs new file mode 100644 index 00000000..34ab8a64 --- /dev/null +++ b/askar-crypto/src/jwk/parts.rs @@ -0,0 +1,255 @@ +use core::{ + fmt::{self, Debug, Formatter}, + marker::PhantomData, +}; + +use serde::de::{Deserialize, Deserializer, MapAccess, SeqAccess, Visitor}; + +use super::ops::{KeyOps, KeyOpsSet}; +use crate::error::Error; + +/// A parsed JWK +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct JwkParts<'a> { + /// Key type + pub kty: &'a str, + /// Key ID + pub kid: OptAttr<'a>, + /// Curve type + pub crv: OptAttr<'a>, + /// Curve key public x coordinate + pub x: OptAttr<'a>, + /// Curve key public y coordinate + pub y: OptAttr<'a>, + /// Curve key private key bytes + pub d: OptAttr<'a>, + /// Used by symmetric keys like AES + pub k: OptAttr<'a>, + /// Recognized key operations + pub key_ops: Option, +} + +impl<'de> JwkParts<'de> { + /// Parse a JWK from a string reference + pub fn from_str(jwk: &'de str) -> Result { + serde_json::from_str(jwk).map_err(err_map!(InvalidData, "Error parsing JWK")) + } + + /// Parse a JWK from a byte slice + pub fn from_slice(jwk: &'de [u8]) -> Result { + serde_json::from_slice(jwk).map_err(err_map!(InvalidData, "Error parsing JWK")) + } +} + +#[derive(Copy, Clone, Default, PartialEq, Eq)] +#[repr(transparent)] +pub struct OptAttr<'a>(Option<&'a str>); + +impl OptAttr<'_> { + pub fn is_none(&self) -> bool { + self.0.is_none() + } + + pub fn is_some(&self) -> bool { + self.0.is_some() + } + + pub fn to_option(&self) -> Option<&str> { + self.0 + } + + pub fn decode_base64(&self, output: &mut [u8]) -> Result { + if let Some(s) = self.0 { + let max_input = (output.len() * 4 + 2) / 3; // ceil(4*n/3) + if s.len() > max_input { + Err(err_msg!(InvalidData, "Base64 length exceeds max")) + } else { + base64::decode_config_slice(s, base64::URL_SAFE_NO_PAD, output) + .map_err(|_| err_msg!(InvalidData, "Base64 decode error")) + } + } else { + Err(err_msg!(InvalidData, "Empty attribute")) + } + } +} + +impl Debug for OptAttr<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self.0 { + None => f.write_str("None"), + Some(s) => f.write_fmt(format_args!("{:?}", s)), + } + } +} + +impl AsRef for OptAttr<'_> { + fn as_ref(&self) -> &str { + self.0.unwrap_or_default() + } +} + +impl<'o> From<&'o str> for OptAttr<'o> { + fn from(s: &'o str) -> Self { + Self(Some(s)) + } +} + +impl<'o> From> for OptAttr<'o> { + fn from(s: Option<&'o str>) -> Self { + Self(s) + } +} + +impl PartialEq> for OptAttr<'_> { + fn eq(&self, other: &Option<&str>) -> bool { + self.0 == *other + } +} + +impl PartialEq<&str> for OptAttr<'_> { + fn eq(&self, other: &&str) -> bool { + match self.0 { + None => false, + Some(s) => (*other) == s, + } + } +} + +struct JwkMapVisitor<'de>(PhantomData<&'de ()>); + +impl<'de> Visitor<'de> for JwkMapVisitor<'de> { + type Value = JwkParts<'de>; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("an object representing a JWK") + } + + fn visit_map(self, mut access: M) -> Result + where + M: MapAccess<'de>, + { + let mut kty = None; + let mut kid = None; + let mut crv = None; + let mut x = None; + let mut y = None; + let mut d = None; + let mut k = None; + let mut key_ops = None; + + while let Some(key) = access.next_key::<&str>()? 
{ + match key { + "kty" => kty = Some(access.next_value()?), + "kid" => kid = Some(access.next_value()?), + "crv" => crv = Some(access.next_value()?), + "x" => x = Some(access.next_value()?), + "y" => y = Some(access.next_value()?), + "d" => d = Some(access.next_value()?), + "k" => k = Some(access.next_value()?), + "use" => { + let ops = match access.next_value()? { + "enc" => { + KeyOps::Encrypt | KeyOps::Decrypt | KeyOps::WrapKey | KeyOps::UnwrapKey + } + "sig" => KeyOps::Sign | KeyOps::Verify, + _ => KeyOpsSet::new(), + }; + if !ops.is_empty() { + key_ops = Some(key_ops.unwrap_or_default() | ops); + } + } + "key_ops" => key_ops = Some(access.next_value()?), + _ => (), + } + } + + if let Some(kty) = kty { + Ok(JwkParts { + kty, + kid: kid.into(), + crv: crv.into(), + x: x.into(), + y: y.into(), + d: d.into(), + k: k.into(), + key_ops, + }) + } else { + Err(serde::de::Error::missing_field("kty")) + } + } +} + +impl<'de> Deserialize<'de> for JwkParts<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_map(JwkMapVisitor(PhantomData)) + } +} + +struct KeyOpsVisitor; + +impl<'de> Visitor<'de> for KeyOpsVisitor { + type Value = KeyOpsSet; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("an array of key operations") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let mut ops = KeyOpsSet::new(); + while let Some(op) = seq.next_element()? { + if let Some(op) = KeyOps::from_str(op) { + if ops & op { + return Err(serde::de::Error::duplicate_field(op.as_str())); + } else { + ops = ops | op; + } + } + } + Ok(ops) + } +} + +impl<'de> Deserialize<'de> for KeyOpsSet { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_seq(KeyOpsVisitor) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_sample_okp() { + let jwk = r#"{ + "kty": "OKP", + "crv": "Ed25519", + "x": "11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo", + "d": "nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A", + "key_ops": ["sign", "verify"], + "kid": "FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ" + }"#; + let parts = serde_json::from_str::>(jwk).unwrap(); + assert_eq!(parts.kty, "OKP"); + assert_eq!( + parts.kid, + Some("FdFYFzERwC2uCBB46pZQi4GG85LujR8obt-KWRBICVQ") + ); + assert_eq!(parts.crv, Some("Ed25519")); + assert_eq!(parts.x, Some("11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo")); + assert_eq!(parts.y, None); + assert_eq!(parts.d, Some("nWGxne_9WmC6hEr0kuwsxERJxWl7MmkZcDusAxyuf2A")); + assert_eq!(parts.k, None); + assert_eq!(parts.key_ops, Some(KeyOps::Sign | KeyOps::Verify)); + } +} diff --git a/askar-crypto/src/kdf/argon2.rs b/askar-crypto/src/kdf/argon2.rs new file mode 100644 index 00000000..9fb62b45 --- /dev/null +++ b/askar-crypto/src/kdf/argon2.rs @@ -0,0 +1,103 @@ +//! 
Argon2 key derivation from a password + +pub use argon2::{Algorithm, Version}; + +use super::KeyDerivation; +use crate::{ + error::Error, + generic_array::typenum::{Unsigned, U16}, +}; + +/// The size of the password salt +pub type SaltSize = U16; + +/// The length of the password salt +pub const SALT_LENGTH: usize = SaltSize::USIZE; + +/// Standard parameters for 'interactive' level +pub const PARAMS_INTERACTIVE: Params = Params { + alg: Algorithm::Argon2i, + version: Version::V0x13, + mem_cost: 32768, + time_cost: 4, +}; +/// Standard parameters for 'moderate' level +pub const PARAMS_MODERATE: Params = Params { + alg: Algorithm::Argon2i, + version: Version::V0x13, + mem_cost: 131072, + time_cost: 6, +}; + +/// Parameters to the argon2 key derivation +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Params { + alg: Algorithm, + version: Version, + mem_cost: u32, + time_cost: u32, +} + +/// Struct wrapping the KDF functionality +#[derive(Debug)] +pub struct Argon2<'a> { + password: &'a [u8], + salt: &'a [u8], + params: Params, +} + +impl<'a> Argon2<'a> { + /// Create a new Argon2 key derivation instance + pub fn new(password: &'a [u8], salt: &'a [u8], params: Params) -> Result { + if salt.len() < SALT_LENGTH { + return Err(err_msg!(Usage, "Invalid salt for argon2i hash")); + } + Ok(Self { + password, + salt, + params, + }) + } +} + +impl KeyDerivation for Argon2<'_> { + fn derive_key_bytes(&mut self, key_output: &mut [u8]) -> Result<(), Error> { + if key_output.len() > u32::MAX as usize { + return Err(err_msg!( + Usage, + "Output length exceeds max for argon2i hash" + )); + } + let context = argon2::Argon2::new( + None, + self.params.time_cost, + self.params.mem_cost, + 1, + self.params.version, + ) + .map_err(|_| err_msg!(Unexpected, "Error creating hasher"))?; + context + .hash_password_into(self.params.alg, self.password, self.salt, &[], key_output) + .map_err(|_| err_msg!(Unexpected, "Error deriving key")) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn expected() { + let pass = b"my password"; + let salt = b"long enough salt"; + let mut output = [0u8; 32]; + Argon2::new(pass, salt, PARAMS_INTERACTIVE) + .unwrap() + .derive_key_bytes(&mut output) + .unwrap(); + assert_eq!( + output, + hex!("9ef87bcf828c46c0136a0d1d9e391d713f75b327c6dc190455bd36c1bae33259") + ); + } +} diff --git a/askar-crypto/src/kdf/concat.rs b/askar-crypto/src/kdf/concat.rs new file mode 100644 index 00000000..4a3b803b --- /dev/null +++ b/askar-crypto/src/kdf/concat.rs @@ -0,0 +1,143 @@ +//! 
ConcatKDF from NIST 800-56ar for ECDH-ES / ECDH-1PU
+
+use core::{fmt::Debug, marker::PhantomData};
+
+use digest::Digest;
+
+use crate::generic_array::{typenum::Unsigned, GenericArray};
+
+use crate::{buffer::WriteBuffer, error::Error};
+
+/// A struct providing the key derivation for a particular hash function
+#[derive(Clone, Copy, Debug)]
+pub struct ConcatKDF<H>(PhantomData<H>);
+
+/// Parameters for the key derivation
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+pub struct ConcatKDFParams<'p> {
+    /// The algorithm name
+    pub alg: &'p [u8],
+    /// Sender identifier (PartyUInfo)
+    pub apu: &'p [u8],
+    /// Recipient identifier (PartyVInfo)
+    pub apv: &'p [u8],
+    /// SuppPubInfo as defined by the application
+    pub pub_info: &'p [u8],
+    /// SuppPrivInfo as defined by the application
+    pub prv_info: &'p [u8],
+}
+
+impl<H> ConcatKDF<H>
+where
+    H: Digest,
+{
+    /// Perform the key derivation and write the result to the provided buffer
+    pub fn derive_key(
+        message: &[u8],
+        params: ConcatKDFParams<'_>,
+        mut output: &mut [u8],
+    ) -> Result<(), Error> {
+        let output_len = output.len();
+        if output_len > H::OutputSize::USIZE * (u32::MAX as usize) - 1 {
+            return Err(err_msg!(Usage, "Exceeded max output size for concat KDF"));
+        }
+        let mut hasher = ConcatKDFHash::<H>::new();
+        let mut remain = output_len;
+        while remain > 0 {
+            hasher.start_pass();
+            hasher.hash_message(message);
+            hasher.hash_params(params);
+            let hashed = hasher.finish_pass();
+            let cp_size = hashed.len().min(remain);
+            output[..cp_size].copy_from_slice(&hashed[..cp_size]);
+            output = &mut output[cp_size..];
+            remain -= cp_size;
+        }
+        Ok(())
+    }
+}
+
+/// Core hashing implementation of the multi-pass key derivation
+#[derive(Debug)]
+pub struct ConcatKDFHash<H: Digest> {
+    hasher: H,
+    counter: u32,
+}
+
+impl<H: Digest> ConcatKDFHash<H> {
+    /// Create a new instance
+    pub fn new() -> Self {
+        Self {
+            hasher: H::new(),
+            counter: 1,
+        }
+    }
+
+    /// Start a new pass of the key derivation
+    pub fn start_pass(&mut self) {
+        self.hasher.update(self.counter.to_be_bytes());
+        self.counter += 1;
+    }
+
+    /// Hash input to the key derivation
+    pub fn hash_message(&mut self, data: &[u8]) {
+        self.hasher.update(data);
+    }
+
+    /// Hash the parameters of the key derivation
+    pub fn hash_params(&mut self, params: ConcatKDFParams<'_>) {
+        let hash = &mut self.hasher;
+        hash.update((params.alg.len() as u32).to_be_bytes());
+        hash.update(params.alg);
+        hash.update((params.apu.len() as u32).to_be_bytes());
+        hash.update(params.apu);
+        hash.update((params.apv.len() as u32).to_be_bytes());
+        hash.update(params.apv);
+        hash.update(params.pub_info);
+        hash.update(params.prv_info);
+    }
+
+    /// Complete this pass of the key derivation, returning the result
+    pub fn finish_pass(&mut self) -> GenericArray<u8, H::OutputSize> {
+        self.hasher.finalize_reset()
+    }
+}
+
+impl<H: Digest> WriteBuffer for ConcatKDFHash<H> {
+    fn buffer_write(&mut self, data: &[u8]) -> Result<(), Error> {
+        self.hasher.update(data);
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use sha2::Sha256;
+
+    #[test]
+    // testing with ConcatKDF - single pass via ConcatKDFHash is tested elsewhere
+    fn expected_1pu_output() {
+        let z = hex!(
+            "9e56d91d817135d372834283bf84269cfb316ea3da806a48f6daa7798cfe90c4
+            e3ca3474384c9f62b30bfd4c688b3e7d4110a1b4badc3cc54ef7b81241efd50d"
+        );
+        let mut output = [0u8; 32];
+        ConcatKDF::<Sha256>::derive_key(
+            &z,
+            ConcatKDFParams {
+                alg: b"A256GCM",
+                apu: b"Alice",
+                apv: b"Bob",
+                pub_info: &(256u32).to_be_bytes(),
+                prv_info: &[],
+            },
+            &mut output,
+        )
+        .unwrap();
+        assert_eq!(
+            output,
hex!("6caf13723d14850ad4b42cd6dde935bffd2fff00a9ba70de05c203a5e1722ca7") + ); + } +} diff --git a/askar-crypto/src/kdf/ecdh_1pu.rs b/askar-crypto/src/kdf/ecdh_1pu.rs new file mode 100644 index 00000000..5286097b --- /dev/null +++ b/askar-crypto/src/kdf/ecdh_1pu.rs @@ -0,0 +1,200 @@ +//! ECDH-1PU key derivation + +use sha2::Sha256; +use zeroize::Zeroize; + +use super::{ + concat::{ConcatKDFHash, ConcatKDFParams}, + KeyDerivation, KeyExchange, +}; +use crate::{ + buffer::{WriteBuffer, Writer}, + error::Error, +}; + +/// An instantiation of the ECDH-1PU key derivation +#[derive(Debug)] +pub struct Ecdh1PU<'d, Key: KeyExchange + ?Sized> { + ephem_key: &'d Key, + send_key: &'d Key, + recip_key: &'d Key, + alg: &'d [u8], + apu: &'d [u8], + apv: &'d [u8], + cc_tag: &'d [u8], +} + +impl<'d, Key: KeyExchange + ?Sized> Ecdh1PU<'d, Key> { + /// Create a new KDF instance + pub fn new( + ephem_key: &'d Key, + send_key: &'d Key, + recip_key: &'d Key, + alg: &'d [u8], + apu: &'d [u8], + apv: &'d [u8], + cc_tag: &'d [u8], + ) -> Self { + Self { + ephem_key, + send_key, + recip_key, + alg, + apu, + apv, + cc_tag, + } + } +} + +impl KeyDerivation for Ecdh1PU<'_, Key> { + fn derive_key_bytes(&mut self, key_output: &mut [u8]) -> Result<(), Error> { + let output_len = key_output.len(); + // one-pass KDF only produces 256 bits of output + if output_len > 32 { + return Err(err_msg!(Unsupported, "Exceeded maximum output length")); + } + if self.cc_tag.len() > 128 { + return Err(err_msg!(Unsupported, "Exceeded maximum length for cc_tag")); + } + let mut kdf = ConcatKDFHash::::new(); + kdf.start_pass(); + + // hash Zs and Ze directly into the KDF + self.ephem_key + .write_key_exchange(self.recip_key, &mut kdf)?; + self.send_key.write_key_exchange(self.recip_key, &mut kdf)?; + + // the authentication tag is appended to pub_info, if any. 
+ let mut pub_info = [0u8; 132]; + let mut pub_w = Writer::from_slice(&mut pub_info[..]); + pub_w.buffer_write(&((output_len as u32) * 8).to_be_bytes())?; // output length in bits + if !self.cc_tag.is_empty() { + pub_w.buffer_write(&(self.cc_tag.len() as u32).to_be_bytes())?; + pub_w.buffer_write(&self.cc_tag)?; + } + #[cfg(test)] + println!("w: {:?}", pub_w.as_ref()); + + kdf.hash_params(ConcatKDFParams { + alg: self.alg, + apu: self.apu, + apv: self.apv, + pub_info: pub_w.as_ref(), + prv_info: &[], + }); + + let mut key = kdf.finish_pass(); + key_output.copy_from_slice(&key[..output_len]); + key.zeroize(); + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + #[allow(unused_imports)] + use super::*; + + #[cfg(feature = "p256")] + #[test] + // from RFC: https://tools.ietf.org/html/draft-madden-jose-ecdh-1pu-03#appendix-A + fn expected_1pu_direct_output() { + use crate::alg::p256::P256KeyPair; + use crate::jwk::FromJwk; + + let alice_sk = P256KeyPair::from_jwk( + r#"{"kty":"EC", + "crv":"P-256", + "x":"WKn-ZIGevcwGIyyrzFoZNBdaq9_TsqzGl96oc0CWuis", + "y":"y77t-RvAHRKTsSGdIYUfweuOvwrvDD-Q3Hv5J0fSKbE", + "d":"Hndv7ZZjs_ke8o9zXYo3iq-Yr8SewI5vrqd0pAvEPqg"}"#, + ) + .unwrap(); + let bob_sk = P256KeyPair::from_jwk( + r#"{"kty":"EC", + "crv":"P-256", + "x":"weNJy2HscCSM6AEDTDg04biOvhFhyyWvOHQfeF_PxMQ", + "y":"e8lnCO-AlStT-NJVX-crhB7QRYhiix03illJOVAOyck", + "d":"VEmDZpDXXK8p8N0Cndsxs924q6nS1RXFASRl6BfUqdw"}"#, + ) + .unwrap(); + let ephem_sk = P256KeyPair::from_jwk( + r#"{"kty":"EC", + "crv":"P-256", + "x":"gI0GAILBdu7T53akrFmMyGcsF3n5dO7MmwNBHKW5SV0", + "y":"SLW_xSffzlPWrHEVI30DHM_4egVwt3NQqeUD7nMFpps", + "d":"0_NxaRPUMQoAJt50Gz8YiTr8gRTwyEaCumd-MToTmIo"}"#, + ) + .unwrap(); + + let mut key_output = [0u8; 32]; + + Ecdh1PU::new( + &ephem_sk, + &alice_sk, + &bob_sk, + b"A256GCM", + b"Alice", + b"Bob", + &[], + ) + .derive_key_bytes(&mut key_output) + .unwrap(); + + assert_eq!( + key_output, + hex!("6caf13723d14850ad4b42cd6dde935bffd2fff00a9ba70de05c203a5e1722ca7") + ); + } + + #[cfg(feature = "ed25519")] + #[test] + // from RFC: https://tools.ietf.org/html/draft-madden-jose-ecdh-1pu-04#appendix-B + fn expected_1pu_wrapped_output() { + use crate::alg::x25519::X25519KeyPair; + use crate::jwk::FromJwk; + + let alice_sk = X25519KeyPair::from_jwk( + r#"{"kty": "OKP", + "crv": "X25519", + "x": "Knbm_BcdQr7WIoz-uqit9M0wbcfEr6y-9UfIZ8QnBD4", + "d": "i9KuFhSzEBsiv3PKVL5115OCdsqQai5nj_Flzfkw5jU"}"#, + ) + .unwrap(); + let bob_sk = X25519KeyPair::from_jwk( + r#"{"kty": "OKP", + "crv": "X25519", + "x": "BT7aR0ItXfeDAldeeOlXL_wXqp-j5FltT0vRSG16kRw", + "d": "1gDirl_r_Y3-qUa3WXHgEXrrEHngWThU3c9zj9A2uBg"}"#, + ) + .unwrap(); + let ephem_sk = X25519KeyPair::from_jwk( + r#"{"kty": "OKP", + "crv": "X25519", + "x": "k9of_cpAajy0poW5gaixXGs9nHkwg1AFqUAFa39dyBc", + "d": "x8EVZH4Fwk673_mUujnliJoSrLz0zYzzCWp5GUX2fc8"}"#, + ) + .unwrap(); + + let mut key_output = [0u8; 16]; + + Ecdh1PU::new( + &ephem_sk, + &alice_sk, + &bob_sk, + b"ECDH-1PU+A128KW", + b"Alice", + b"Bob and Charlie", + &hex!( + "1cb6f87d3966f2ca469a28f74723acda + 02780e91cce21855470745fe119bdd64" + ), + ) + .derive_key_bytes(&mut key_output) + .unwrap(); + + assert_eq!(key_output, hex!("df4c37a0668306a11e3d6b0074b5d8df")); + } +} diff --git a/askar-crypto/src/kdf/ecdh_es.rs b/askar-crypto/src/kdf/ecdh_es.rs new file mode 100644 index 00000000..e707ff66 --- /dev/null +++ b/askar-crypto/src/kdf/ecdh_es.rs @@ -0,0 +1,117 @@ +//! 
ECDH-ES key derivation + +use sha2::Sha256; +use zeroize::Zeroize; + +use super::{ + concat::{ConcatKDFHash, ConcatKDFParams}, + KeyDerivation, KeyExchange, +}; +use crate::error::Error; + +/// An instantiation of the ECDH-ES key derivation +#[derive(Debug)] +pub struct EcdhEs<'d, Key> +where + Key: KeyExchange + ?Sized, +{ + ephem_key: &'d Key, + recip_key: &'d Key, + alg: &'d [u8], + apu: &'d [u8], + apv: &'d [u8], +} + +impl<'d, Key: KeyExchange + ?Sized> EcdhEs<'d, Key> { + /// Create a new KDF instance + pub fn new( + ephem_key: &'d Key, + recip_key: &'d Key, + alg: &'d [u8], + apu: &'d [u8], + apv: &'d [u8], + ) -> Self { + Self { + ephem_key, + recip_key, + alg, + apu, + apv, + } + } +} + +impl KeyDerivation for EcdhEs<'_, Key> { + fn derive_key_bytes(&mut self, key_output: &mut [u8]) -> Result<(), Error> { + let output_len = key_output.len(); + // one-pass KDF only produces 256 bits of output + if output_len > 32 { + return Err(err_msg!(Unsupported, "Exceeded maximum output length")); + } + let mut kdf = ConcatKDFHash::::new(); + kdf.start_pass(); + + // hash Z directly into the KDF + self.ephem_key + .write_key_exchange(self.recip_key, &mut kdf)?; + + kdf.hash_params(ConcatKDFParams { + alg: self.alg, + apu: self.apu, + apv: self.apv, + pub_info: &((output_len as u32) * 8).to_be_bytes(), // output length in bits + prv_info: &[], + }); + + let mut key = kdf.finish_pass(); + key_output.copy_from_slice(&key[..output_len]); + key.zeroize(); + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + #[allow(unused_imports)] + use super::*; + + #[cfg(feature = "ed25519")] + #[test] + // based on RFC sample keys + // https://tools.ietf.org/html/rfc8037#appendix-A.6 + fn expected_es_direct_output() { + use crate::alg::x25519::X25519KeyPair; + use crate::jwk::FromJwk; + + let bob_pk = X25519KeyPair::from_jwk( + r#"{"kty":"OKP","crv":"X25519","kid":"Bob", + "x":"3p7bfXt9wbTTW2HC7OQ1Nz-DQ8hbeGdNrfx-FG-IK08"}"#, + ) + .unwrap(); + let ephem_sk = X25519KeyPair::from_jwk( + r#"{"kty":"OKP","crv":"X25519", + "d":"dwdtCnMYpX08FsFyUbJmRd9ML4frwJkqsXf7pR25LCo", + "x":"hSDwCYkwp1R0i33ctD73Wg2_Og0mOBr066SpjqqbTmo"} + "#, + ) + .unwrap(); + + let xk = ephem_sk.key_exchange_bytes(&bob_pk).unwrap(); + assert_eq!( + xk, + &hex!("4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742")[..] + ); + + let mut key_output = [0u8; 32]; + + EcdhEs::new(&ephem_sk, &bob_pk, b"A256GCM", b"Alice", b"Bob") + .derive_key_bytes(&mut key_output) + .unwrap(); + + assert_eq!( + key_output, + hex!("2f3636918ddb57fe0b3569113f19c4b6c518c2843f8930f05db25cd55dee53c1") + ); + } +} diff --git a/askar-crypto/src/kdf/mod.rs b/askar-crypto/src/kdf/mod.rs new file mode 100644 index 00000000..6450a288 --- /dev/null +++ b/askar-crypto/src/kdf/mod.rs @@ -0,0 +1,50 @@ +//! Key derivation function traits and implementations + +#[cfg(feature = "alloc")] +use crate::buffer::SecretBytes; +use crate::{buffer::WriteBuffer, error::Error}; + +#[cfg(feature = "argon2")] +#[cfg_attr(docsrs, doc(cfg(feature = "argon2")))] +pub mod argon2; + +pub mod concat; + +pub mod ecdh_1pu; + +pub mod ecdh_es; + +/// Trait for keys supporting Diffie-Helman key exchange +pub trait KeyExchange { + /// Perform a key exchange, writing the result to the provided buffer. + fn write_key_exchange(&self, other: &Rhs, out: &mut dyn WriteBuffer) -> Result<(), Error>; + + #[cfg(feature = "alloc")] + #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))] + /// Perform a key exchange and return a new allocated buffer. 
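+    ///
+    /// A brief usage sketch (illustrative only; assumes a concrete key type
+    /// such as `X25519KeyPair`, available with the `ed25519` feature, that
+    /// implements `KeyExchange`):
+    ///
+    /// ```ignore
+    /// let shared = my_key.key_exchange_bytes(&their_public_key)?;
+    /// ```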
+    fn key_exchange_bytes(&self, other: &Rhs) -> Result<SecretBytes, Error> {
+        let mut buf = SecretBytes::with_capacity(128);
+        self.write_key_exchange(other, &mut buf)?;
+        Ok(buf)
+    }
+}
+
+/// Trait for instantiation from a key exchange
+pub trait FromKeyExchange<Lhs: ?Sized, Rhs: ?Sized>: Sized {
+    /// Derive an instance of this key directly from a supported key exchange
+    fn from_key_exchange(lhs: &Lhs, rhs: &Rhs) -> Result<Self, Error>;
+}
+
+/// Trait implemented by key derivation methods
+pub trait KeyDerivation {
+    /// Derive the raw bytes of a key from this KDF
+    fn derive_key_bytes(&mut self, key_output: &mut [u8]) -> Result<(), Error>;
+}
+
+/// Trait for instantiation from a key derivation
+pub trait FromKeyDerivation {
+    /// Create a new instance of a key from a key derivation
+    fn from_key_derivation<D: KeyDerivation>(derive: D) -> Result<Self, Error>
+    where
+        Self: Sized;
+}
diff --git a/askar-crypto/src/lib.rs b/askar-crypto/src/lib.rs
new file mode 100644
index 00000000..8ed88f09
--- /dev/null
+++ b/askar-crypto/src/lib.rs
@@ -0,0 +1,44 @@
+//! Cryptography primitives and operations for aries-askar.
+
+#![no_std]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![deny(
+    missing_docs,
+    missing_debug_implementations,
+    rust_2018_idioms,
+    unsafe_code
+)]
+
+#[cfg(feature = "alloc")]
+extern crate alloc;
+
+#[cfg(any(test, feature = "std"))]
+#[macro_use]
+extern crate std;
+
+#[cfg(test)]
+#[macro_use]
+extern crate hex_literal;
+
+#[macro_use]
+mod error;
+pub use self::error::{Error, ErrorKind};
+
+// re-export
+pub use aead::generic_array;
+
+pub mod alg;
+
+pub mod buffer;
+
+pub mod encrypt;
+
+pub mod jwk;
+
+pub mod kdf;
+
+pub mod random;
+
+pub mod sign;
+
+pub mod repr;
diff --git a/askar-crypto/src/random.rs b/askar-crypto/src/random.rs
new file mode 100644
index 00000000..efa68b8b
--- /dev/null
+++ b/askar-crypto/src/random.rs
@@ -0,0 +1,75 @@
+//! Support for random number generation
+
+use aead::generic_array::{typenum::Unsigned, GenericArray};
+use chacha20::{
+    cipher::{NewStreamCipher, SyncStreamCipher},
+    ChaCha20,
+};
+use rand::{CryptoRng, RngCore};
+
+#[cfg(feature = "alloc")]
+use crate::buffer::SecretBytes;
+use crate::error::Error;
+
+/// The expected length of a seed for `fill_random_deterministic`
+pub const DETERMINISTIC_SEED_LENGTH: usize = <ChaCha20 as NewStreamCipher>::KeySize::USIZE;
+
+/// Combined trait for CryptoRng and RngCore
+pub trait Rng: CryptoRng + RngCore {}
+
+impl<T: CryptoRng + RngCore> Rng for T {}
+
+/// Perform an operation with a reference to the random number generator
+#[inline(always)]
+pub fn with_rng<O>(f: impl FnOnce(&mut dyn Rng) -> O) -> O {
+    // FIXME may wish to support platforms without 'getrandom' by adding
+    // a method to initialize with a custom RNG (or fill_bytes function)
+    f(&mut ::rand::rngs::OsRng)
+}
+
+/// Fill a mutable slice with random data using the
+/// system random number generator.
+#[inline(always)]
+pub fn fill_random(value: &mut [u8]) {
+    with_rng(|rng| rng.fill_bytes(value));
+}
+
+/// Written to be compatible with randombytes_deterministic in libsodium,
+/// used to generate a deterministic symmetric encryption key
+pub fn fill_random_deterministic(seed: &[u8], output: &mut [u8]) -> Result<(), Error> {
+    if seed.len() != DETERMINISTIC_SEED_LENGTH {
+        return Err(err_msg!(Usage, "Invalid length for seed"));
+    }
+    let mut cipher = ChaCha20::new(
+        GenericArray::from_slice(seed),
+        GenericArray::from_slice(b"LibsodiumDRG"),
+    );
+    cipher.apply_keystream(output);
+    Ok(())
+}
+
+#[cfg(feature = "alloc")]
+#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+/// Create a new `SecretBytes` instance with random data.
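+///
+/// A short usage sketch (illustrative):
+///
+/// ```ignore
+/// // 32 random bytes, e.g. for use as symmetric key material
+/// let key_material = random_secret(32);
+/// ```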
+#[inline(always)]
+pub fn random_secret(len: usize) -> SecretBytes {
+    SecretBytes::new_with(len, fill_random)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::buffer::HexRepr;
+    use std::string::ToString;
+
+    #[test]
+    fn fill_random_det_expected() {
+        let seed = b"testseed000000000000000000000001";
+        let mut output = [0u8; 32];
+        fill_random_deterministic(seed, &mut output).unwrap();
+        assert_eq!(
+            HexRepr(output).to_string(),
+            "b1923a011cd1adbe89552db9862470c29512a8f51d184dfd778bfe7f845390d1"
+        );
+    }
+}
diff --git a/askar-crypto/src/repr.rs b/askar-crypto/src/repr.rs
new file mode 100644
index 00000000..ead44287
--- /dev/null
+++ b/askar-crypto/src/repr.rs
@@ -0,0 +1,170 @@
+//! Traits for exposing key data representations
+
+#[cfg(feature = "alloc")]
+use crate::buffer::SecretBytes;
+use crate::{buffer::WriteBuffer, error::Error, generic_array::ArrayLength};
+
+/// A seed used in key generation
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Seed<'d> {
+    /// A seed byte string with a selected generation method
+    Bytes(&'d [u8], SeedMethod),
+}
+
+impl<'d> From<&'d [u8]> for Seed<'d> {
+    fn from(seed: &'d [u8]) -> Self {
+        Self::Bytes(seed, SeedMethod::Preferred)
+    }
+}
+
+/// Supported deterministic key methods
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum SeedMethod {
+    /// Use the preferred method for the current key algorithm
+    Preferred,
+    /// Generate a BLS key according to bls-signatures-draft-04
+    BlsKeyGenDraft4,
+    /// Random bytes compatible with libsodium's randombytes_buf_deterministic.
+    /// The seed must be 32 bytes in length
+    RandomDet,
+}
+
+/// Key generation operations
+pub trait KeyGen {
+    /// Generate a new random key.
+    fn generate() -> Result<Self, Error>
+    where
+        Self: Sized;
+
+    /// Generate a new deterministic key.
+    fn from_seed(_seed: Seed<'_>) -> Result<Self, Error>
+    where
+        Self: Sized,
+    {
+        return Err(err_msg!(
+            Unsupported,
+            "Key generation from seed not supported"
+        ));
+    }
+}
+
+/// Convert between key instance and key secret bytes
+pub trait KeySecretBytes {
+    /// Create a new key instance from a slice of key secret bytes.
+    fn from_secret_bytes(key: &[u8]) -> Result<Self, Error>
+    where
+        Self: Sized;
+
+    /// Access a temporary slice of the key secret bytes, if any.
+    fn with_secret_bytes<O>(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O;
+}
+
+/// Object-safe trait for exporting key secret bytes
+pub trait ToSecretBytes {
+    /// Write the key secret bytes to a buffer.
+    fn write_secret_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error>;
+
+    #[cfg(feature = "alloc")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+    /// Write the key secret bytes to a new allocated buffer.
+    fn to_secret_bytes(&self) -> Result<SecretBytes, Error> {
+        let mut buf = SecretBytes::with_capacity(128);
+        self.write_secret_bytes(&mut buf)?;
+        Ok(buf)
+    }
+}
+
+impl<K> ToSecretBytes for K
+where
+    K: KeySecretBytes,
+{
+    fn write_secret_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error> {
+        self.with_secret_bytes(|buf| {
+            if let Some(buf) = buf {
+                out.buffer_write(buf)
+            } else {
+                Err(err_msg!(MissingSecretKey))
+            }
+        })
+    }
+}
+
+/// Convert between key instance and key public bytes.
+pub trait KeyPublicBytes {
+    /// Create a new key instance from a slice of public key bytes.
+    fn from_public_bytes(key: &[u8]) -> Result<Self, Error>
+    where
+        Self: Sized;
+
+    /// Access a temporary slice of the key public bytes.
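+    ///
+    /// The callback receives the raw public key bytes without an intermediate
+    /// allocation; use `ToPublicBytes::to_public_bytes` when an owned copy is
+    /// needed. A sketch (illustrative, for any implementing key type):
+    ///
+    /// ```ignore
+    /// let public_len = key.with_public_bytes(|pk| pk.len());
+    /// ```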
+    fn with_public_bytes<O>(&self, f: impl FnOnce(&[u8]) -> O) -> O;
+}
+
+/// Object-safe trait for exporting key public bytes
+pub trait ToPublicBytes {
+    /// Write the key public bytes to a buffer.
+    fn write_public_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error>;
+
+    #[cfg(feature = "alloc")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+    /// Write the key public bytes to a new allocated buffer.
+    fn to_public_bytes(&self) -> Result<SecretBytes, Error> {
+        let mut buf = SecretBytes::with_capacity(128);
+        self.write_public_bytes(&mut buf)?;
+        Ok(buf)
+    }
+}
+
+impl<K> ToPublicBytes for K
+where
+    K: KeyPublicBytes,
+{
+    fn write_public_bytes(&self, out: &mut dyn WriteBuffer) -> Result<(), Error> {
+        self.with_public_bytes(|buf| out.buffer_write(buf))
+    }
+}
+
+/// Convert between keypair instance and keypair (secret and public) bytes
+pub trait KeypairBytes {
+    /// Create a new key instance from a slice of keypair bytes.
+    fn from_keypair_bytes(key: &[u8]) -> Result<Self, Error>
+    where
+        Self: Sized;
+
+    /// Access a temporary slice of the keypair bytes, if any.
+    fn with_keypair_bytes<O>(&self, f: impl FnOnce(Option<&[u8]>) -> O) -> O;
+
+    /// Write the keypair bytes to a buffer.
+    fn to_keypair_bytes_buffer<B: WriteBuffer>(&self, out: &mut B) -> Result<(), Error> {
+        self.with_keypair_bytes(|buf| {
+            if let Some(buf) = buf {
+                out.buffer_write(buf)
+            } else {
+                Err(err_msg!(MissingSecretKey))
+            }
+        })
+    }
+
+    #[cfg(feature = "alloc")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+    /// Write the keypair bytes to a new allocated buffer.
+    fn to_keypair_bytes(&self) -> Result<SecretBytes, Error> {
+        let mut buf = SecretBytes::with_capacity(128);
+        self.to_keypair_bytes_buffer(&mut buf)?;
+        Ok(buf)
+    }
+}
+
+/// For concrete secret key types
+pub trait KeyMeta {
+    /// The size of the key secret bytes
+    type KeySize: ArrayLength<u8>;
+}
+
+/// For concrete secret + public key types
+pub trait KeypairMeta: KeyMeta {
+    /// The size of the key public bytes
+    type PublicKeySize: ArrayLength<u8>;
+    /// The size of the secret bytes and public bytes combined
+    type KeypairSize: ArrayLength<u8>;
+}
diff --git a/askar-crypto/src/sign.rs b/askar-crypto/src/sign.rs
new file mode 100644
index 00000000..abdd6b41
--- /dev/null
+++ b/askar-crypto/src/sign.rs
@@ -0,0 +1,78 @@
+//! Signature traits and parameters
+
+use core::str::FromStr;
+
+#[cfg(feature = "alloc")]
+use crate::buffer::SecretBytes;
+use crate::{alg::normalize_alg, buffer::WriteBuffer, error::Error};
+
+/// Signature creation operations
+pub trait KeySign: KeySigVerify {
+    /// Create a signature of the requested type and write it to the
+    /// provided buffer.
+    fn write_signature(
+        &self,
+        message: &[u8],
+        sig_type: Option<SignatureType>,
+        out: &mut dyn WriteBuffer,
+    ) -> Result<(), Error>;
+
+    #[cfg(feature = "alloc")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
+    /// Create a signature of the requested type and return an allocated
+    /// buffer.
+    fn create_signature(
+        &self,
+        message: &[u8],
+        sig_type: Option<SignatureType>,
+    ) -> Result<SecretBytes, Error> {
+        let mut buf = SecretBytes::with_capacity(128);
+        self.write_signature(message, sig_type, &mut buf)?;
+        Ok(buf)
+    }
+}
+
+/// Signature verification operations
+pub trait KeySigVerify {
+    /// Check the validity of signature over a message with the
+    /// specified signature type.
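+    ///
+    /// Implementations are expected to report whether the signature is valid
+    /// for the given message, returning an error when the requested signature
+    /// type is not supported by the key.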
+ fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option, + ) -> Result; +} + +/// Supported signature types +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum SignatureType { + /// Standard signature output for ed25519 + EdDSA, + /// Elliptic curve DSA using P-256 and SHA-256 + ES256, + /// Elliptic curve DSA using K-256 and SHA-256 + ES256K, +} + +impl FromStr for SignatureType { + type Err = Error; + + fn from_str(s: &str) -> Result { + match normalize_alg(s)? { + a if a == "eddsa" => Ok(Self::EdDSA), + a if a == "es256" => Ok(Self::ES256), + a if a == "es256k" => Ok(Self::ES256K), + _ => Err(err_msg!(Unsupported, "Unknown signature algorithm")), + } + } +} + +impl SignatureType { + /// Get the length of the signature output. + pub const fn signature_length(&self) -> usize { + match self { + Self::EdDSA | Self::ES256 | Self::ES256K => 64, + } + } +} diff --git a/src/any.rs b/src/backend/any.rs similarity index 84% rename from src/any.rs rename to src/backend/any.rs index cb7d98dd..4c440e79 100644 --- a/src/any.rs +++ b/src/backend/any.rs @@ -1,15 +1,18 @@ -use super::error::Result; -use super::future::BoxFuture; -use super::keys::{wrap::WrapKeyMethod, PassKey}; -use super::options::IntoOptions; -use super::store::{Backend, ManageBackend, QueryBackend, Scan, Session, Store}; -use super::types::{Entry, EntryKind, EntryOperation, EntryTag, TagFilter}; +use super::{Backend, ManageBackend, QueryBackend}; +use crate::{ + error::Error, + future::BoxFuture, + protect::{PassKey, StoreKeyMethod}, + storage::{ + Entry, EntryKind, EntryOperation, EntryTag, IntoOptions, Scan, Session, Store, TagFilter, + }, +}; #[cfg(feature = "postgres")] -use super::postgres::PostgresStore; +use super::postgres::{self, PostgresStore}; #[cfg(feature = "sqlite")] -use super::sqlite::SqliteStore; +use super::sqlite::{self, SqliteStore}; /// A generic `Store` implementation for any supported backend pub type AnyStore = Store; @@ -50,7 +53,7 @@ macro_rules! 
with_backend { impl Backend for AnyBackend { type Session = AnyQueryBackend; - fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { + fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { with_backend!(self, store, store.create_profile(name)) } @@ -58,7 +61,7 @@ impl Backend for AnyBackend { with_backend!(self, store, store.get_profile_name()) } - fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { + fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { with_backend!(self, store, store.remove_profile(name)) } @@ -70,7 +73,7 @@ impl Backend for AnyBackend { tag_filter: Option, offset: Option, limit: Option, - ) -> BoxFuture<'_, Result>> { + ) -> BoxFuture<'_, Result, Error>> { with_backend!( self, store, @@ -78,7 +81,7 @@ impl Backend for AnyBackend { ) } - fn session(&self, profile: Option, transaction: bool) -> Result { + fn session(&self, profile: Option, transaction: bool) -> Result { match self { #[cfg(feature = "postgres")] Self::Postgres(store) => { @@ -88,7 +91,6 @@ impl Backend for AnyBackend { #[cfg(feature = "sqlite")] Self::Sqlite(store) => { - // FIXME - avoid double boxed futures by exposing public method let session = store.session(profile, transaction)?; Ok(AnyQueryBackend::SqliteSession(session)) } @@ -99,13 +101,13 @@ impl Backend for AnyBackend { fn rekey_backend( &mut self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, - ) -> BoxFuture<'_, Result<()>> { + ) -> BoxFuture<'_, Result<(), Error>> { with_backend!(self, store, store.rekey_backend(method, pass_key)) } - fn close(&self) -> BoxFuture<'_, Result<()>> { + fn close(&self) -> BoxFuture<'_, Result<(), Error>> { with_backend!(self, store, store.close()) } } @@ -132,7 +134,7 @@ impl QueryBackend for AnyQueryBackend { kind: EntryKind, category: &'q str, tag_filter: Option, - ) -> BoxFuture<'q, Result> { + ) -> BoxFuture<'q, Result> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => session.count(kind, category, tag_filter), @@ -150,7 +152,7 @@ impl QueryBackend for AnyQueryBackend { category: &'q str, name: &'q str, for_update: bool, - ) -> BoxFuture<'q, Result>> { + ) -> BoxFuture<'q, Result, Error>> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => session.fetch(kind, category, name, for_update), @@ -169,7 +171,7 @@ impl QueryBackend for AnyQueryBackend { tag_filter: Option, limit: Option, for_update: bool, - ) -> BoxFuture<'q, Result>> { + ) -> BoxFuture<'q, Result, Error>> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => { @@ -190,7 +192,7 @@ impl QueryBackend for AnyQueryBackend { kind: EntryKind, category: &'q str, tag_filter: Option, - ) -> BoxFuture<'q, Result> { + ) -> BoxFuture<'q, Result> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => session.remove_all(kind, category, tag_filter), @@ -211,7 +213,7 @@ impl QueryBackend for AnyQueryBackend { value: Option<&'q [u8]>, tags: Option<&'q [EntryTag]>, expiry_ms: Option, - ) -> BoxFuture<'q, Result<()>> { + ) -> BoxFuture<'q, Result<(), Error>> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => { @@ -227,7 +229,7 @@ impl QueryBackend for AnyQueryBackend { } } - fn close(self, commit: bool) -> BoxFuture<'static, Result<()>> { + fn close(self, commit: bool) -> BoxFuture<'static, Result<(), Error>> { match self { #[cfg(feature = "postgres")] Self::PostgresSession(session) => Box::pin(session.close(commit)), @@ -245,10 +247,10 @@ impl<'a> 
ManageBackend<'a> for &'a str { fn open_backend( self, - method: Option, + method: Option, pass_key: PassKey<'a>, profile: Option<&'a str>, - ) -> BoxFuture<'a, Result> { + ) -> BoxFuture<'a, Result> { Box::pin(async move { let opts = self.into_options()?; debug!("Open store with options: {:?}", &opts); @@ -256,14 +258,14 @@ impl<'a> ManageBackend<'a> for &'a str { match opts.schema.as_ref() { #[cfg(feature = "postgres")] "postgres" => { - let opts = super::postgres::PostgresStoreOptions::new(opts)?; + let opts = postgres::PostgresStoreOptions::new(opts)?; let mgr = opts.open(method, pass_key, profile).await?; Ok(Store::new(AnyBackend::Postgres(mgr.into_inner()))) } #[cfg(feature = "sqlite")] "sqlite" => { - let opts = super::sqlite::SqliteStoreOptions::new(opts)?; + let opts = sqlite::SqliteStoreOptions::new(opts)?; let mgr = opts.open(method, pass_key, profile).await?; Ok(Store::new(AnyBackend::Sqlite(mgr.into_inner()))) } @@ -275,11 +277,11 @@ impl<'a> ManageBackend<'a> for &'a str { fn provision_backend( self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'a>, profile: Option<&'a str>, recreate: bool, - ) -> BoxFuture<'a, Result> { + ) -> BoxFuture<'a, Result> { Box::pin(async move { let opts = self.into_options()?; debug!("Provision store with options: {:?}", &opts); @@ -287,14 +289,14 @@ impl<'a> ManageBackend<'a> for &'a str { match opts.schema.as_ref() { #[cfg(feature = "postgres")] "postgres" => { - let opts = super::postgres::PostgresStoreOptions::new(opts)?; + let opts = postgres::PostgresStoreOptions::new(opts)?; let mgr = opts.provision(method, pass_key, profile, recreate).await?; Ok(Store::new(AnyBackend::Postgres(mgr.into_inner()))) } #[cfg(feature = "sqlite")] "sqlite" => { - let opts = super::sqlite::SqliteStoreOptions::new(opts)?; + let opts = sqlite::SqliteStoreOptions::new(opts)?; let mgr = opts.provision(method, pass_key, profile, recreate).await?; Ok(Store::new(AnyBackend::Sqlite(mgr.into_inner()))) } @@ -304,7 +306,7 @@ impl<'a> ManageBackend<'a> for &'a str { }) } - fn remove_backend(self) -> BoxFuture<'a, Result> { + fn remove_backend(self) -> BoxFuture<'a, Result> { Box::pin(async move { let opts = self.into_options()?; debug!("Remove store with options: {:?}", &opts); @@ -312,13 +314,13 @@ impl<'a> ManageBackend<'a> for &'a str { match opts.schema.as_ref() { #[cfg(feature = "postgres")] "postgres" => { - let opts = super::postgres::PostgresStoreOptions::new(opts)?; + let opts = postgres::PostgresStoreOptions::new(opts)?; Ok(opts.remove().await?) } #[cfg(feature = "sqlite")] "sqlite" => { - let opts = super::sqlite::SqliteStoreOptions::new(opts)?; + let opts = sqlite::SqliteStoreOptions::new(opts)?; Ok(opts.remove().await?) 
} diff --git a/src/db_utils.rs b/src/backend/db_utils.rs similarity index 84% rename from src/db_utils.rs rename to src/backend/db_utils.rs index c5cf527a..fc7bba50 100644 --- a/src/db_utils.rs +++ b/src/backend/db_utils.rs @@ -7,23 +7,23 @@ use sqlx::{ IntoArguments, Pool, TransactionManager, Type, }; -use crate::EntryTag; - -use super::error::Result; -use super::future::BoxFuture; -use super::keys::{ - store::StoreKey, - wrap::{WrapKey, WrapKeyMethod}, - EntryEncryptor, KeyCache, PassKey, -}; -use super::types::{EncEntryTag, Entry, Expiry, ProfileId, TagFilter}; -use super::wql::{ - sql::TagSqlEncoder, - tags::{tag_query, TagQueryEncoder}, +use crate::{ + error::Error, + future::BoxFuture, + protect::{EntryEncryptor, KeyCache, PassKey, ProfileId, ProfileKey, StoreKey, StoreKeyMethod}, + storage::{ + wql::{ + sql::TagSqlEncoder, + tags::{tag_query, TagQueryEncoder}, + }, + {EncEntryTag, Entry, EntryTag, TagFilter}, + }, }; pub const PAGE_SIZE: usize = 32; +pub type Expiry = chrono::DateTime; + #[derive(Debug)] pub(crate) enum DbSessionState { Active { conn: PoolConnection }, @@ -79,7 +79,7 @@ impl DbSession { } } - pub(crate) fn profile_and_key(&mut self) -> Option<(ProfileId, Arc)> { + pub(crate) fn profile_and_key(&mut self) -> Option<(ProfileId, Arc)> { if let DbSessionKey::Active { profile_id, ref key, @@ -91,7 +91,10 @@ impl DbSession { } } - pub(crate) async fn make_active(&mut self, init_key: I) -> Result> + pub(crate) async fn make_active( + &mut self, + init_key: I, + ) -> Result, Error> where I: for<'a> GetProfileKey<'a, DB>, { @@ -136,7 +139,7 @@ impl DbSession { DbSessionRef::Owned(self) } - pub(crate) async fn close(mut self, commit: bool) -> Result<()> { + pub(crate) async fn close(mut self, commit: bool) -> Result<(), Error> { if self.transaction { if let Some(conn) = self.connection_mut() { if commit { @@ -168,7 +171,7 @@ impl<'q, DB: ExtDatabase> Drop for DbSession { } pub(crate) trait GetProfileKey<'a, DB: Database> { - type Fut: Future)>>; + type Fut: Future), Error>>; fn call_once( self, conn: &'a mut PoolConnection, @@ -180,7 +183,7 @@ pub(crate) trait GetProfileKey<'a, DB: Database> { impl<'a, DB: Database, F, Fut> GetProfileKey<'a, DB> for F where F: FnOnce(&'a mut PoolConnection, Arc, String) -> Fut, - Fut: Future)>> + 'a, + Fut: Future), Error>> + 'a, { type Fut = Fut; fn call_once( @@ -197,7 +200,7 @@ where pub(crate) enum DbSessionKey { Active { profile_id: ProfileId, - key: Arc, + key: Arc, }, Pending { cache: Arc, @@ -209,7 +212,7 @@ pub trait ExtDatabase: Database { fn start_transaction( conn: &mut PoolConnection, _nested: bool, - ) -> BoxFuture<'_, std::result::Result<(), SqlxError>> { + ) -> BoxFuture<'_, Result<(), SqlxError>> { ::TransactionManager::begin(conn) } } @@ -252,7 +255,7 @@ impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { self.inner.connection_mut().unwrap() } - pub async fn commit(mut self) -> Result<()> { + pub async fn commit(mut self) -> Result<(), Error> { if self.txn_depth > 0 && !self.false_txn { let conn = self.connection_mut(); info!("Commit transaction"); @@ -269,7 +272,7 @@ impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { } #[allow(unused)] - pub async fn transaction<'t>(&'t mut self) -> Result> + pub async fn transaction<'t>(&'t mut self) -> Result, Error> where 'q: 't, { @@ -283,7 +286,7 @@ impl<'q, DB: ExtDatabase> DbSessionActive<'q, DB> { }) } - pub async fn as_transaction<'t>(&'t mut self) -> Result> + pub async fn as_transaction<'t>(&'t mut self) -> Result, Error> where 'q: 't, { @@ -317,14 +320,14 @@ impl<'a, DB: 
ExtDatabase> Drop for DbSessionActive<'a, DB> { } pub(crate) trait RunInTransaction<'a, 'q: 'a, DB: ExtDatabase> { - type Fut: Future>; + type Fut: Future>; fn call_once(self, conn: &'a mut DbSessionActive<'q, DB>) -> Self::Fut; } impl<'a, 'q: 'a, DB: ExtDatabase, F, Fut> RunInTransaction<'a, 'q, DB> for F where F: FnOnce(&'a mut DbSessionActive<'q, DB>) -> Fut, - Fut: Future> + 'a, + Fut: Future> + 'a, { type Fut = Fut; fn call_once(self, conn: &'a mut DbSessionActive<'q, DB>) -> Self::Fut { @@ -335,7 +338,7 @@ where pub struct EncScanEntry { pub name: Vec, pub value: Vec, - pub tags: Option>, + pub tags: Vec, } pub struct QueryParams<'q, DB: Database> { @@ -452,7 +455,7 @@ pub fn replace_arg_placeholders( buffer } -pub(crate) fn decode_tags(tags: Vec) -> std::result::Result, ()> { +pub(crate) fn decode_tags(tags: Vec) -> Result, ()> { let mut idx = 0; let mut plaintext; let mut name_start; @@ -498,8 +501,8 @@ pub(crate) fn decode_tags(tags: Vec) -> std::result::Result pub fn decrypt_scan_batch( category: String, enc_rows: Vec, - key: &StoreKey, -) -> Result> { + key: &ProfileKey, +) -> Result, Error> { let mut batch = Vec::with_capacity(enc_rows.len()); for enc_entry in enc_rows { batch.push(decrypt_scan_entry(category.clone(), enc_entry, key)?); @@ -510,21 +513,17 @@ pub fn decrypt_scan_batch( pub fn decrypt_scan_entry( category: String, enc_entry: EncScanEntry, - key: &StoreKey, -) -> Result { + key: &ProfileKey, +) -> Result { let name = key.decrypt_entry_name(enc_entry.name)?; - let value = key.decrypt_entry_value(enc_entry.value)?; - let tags = if let Some(enc_tags) = enc_entry.tags { - Some(key.decrypt_entry_tags( - decode_tags(enc_tags).map_err(|_| err_msg!(Unexpected, "Error decoding tags"))?, - )?) - } else { - None - }; + let value = key.decrypt_entry_value(category.as_bytes(), name.as_bytes(), enc_entry.value)?; + let tags = key.decrypt_entry_tags( + decode_tags(enc_entry.tags).map_err(|_| err_msg!(Unexpected, "Error decoding tags"))?, + )?; Ok(Entry::new(category.to_string(), name, value, tags)) } -pub fn expiry_timestamp(expire_ms: i64) -> Result { +pub fn expiry_timestamp(expire_ms: i64) -> Result { chrono::Utc::now() .checked_add_signed(chrono::Duration::milliseconds(expire_ms)) .ok_or_else(|| err_msg!(Unexpected, "Invalid expiry timestamp")) @@ -532,14 +531,14 @@ pub fn expiry_timestamp(expire_ms: i64) -> Result { pub fn encode_tag_filter( tag_filter: Option, - key: &StoreKey, + key: &ProfileKey, offset: usize, -) -> Result>)>> { +) -> Result>)>, Error> { if let Some(tag_filter) = tag_filter { let tag_query = tag_query(tag_filter.query)?; let mut enc = TagSqlEncoder::new( - |name| Ok(key.encrypt_tag_name(StoreKey::prepare_input(name.as_bytes()))?), - |value| Ok(key.encrypt_tag_value(StoreKey::prepare_input(value.as_bytes()))?), + |name| Ok(key.encrypt_tag_name(ProfileKey::prepare_input(name.as_bytes()))?), + |value| Ok(key.encrypt_tag_value(ProfileKey::prepare_input(value.as_bytes()))?), ); if let Some(filter) = enc.encode_query(&tag_query)? 
{ let filter = replace_arg_placeholders::(&filter, (offset as i64) + 1); @@ -554,29 +553,33 @@ pub fn encode_tag_filter( // convert a slice of tags into a Vec, when ensuring there is // adequate space in the allocations to reuse them during encryption -pub fn prepare_tags(tags: &[EntryTag]) -> Vec { +pub fn prepare_tags(tags: &[EntryTag]) -> Result, Error> { let mut result = Vec::with_capacity(tags.len()); for tag in tags { result.push(match tag { EntryTag::Plaintext(name, value) => EntryTag::Plaintext( unsafe { - String::from_utf8_unchecked(StoreKey::prepare_input(name.as_bytes()).into_vec()) + String::from_utf8_unchecked( + ProfileKey::prepare_input(name.as_bytes()).into_vec(), + ) }, value.clone(), ), EntryTag::Encrypted(name, value) => EntryTag::Encrypted( unsafe { - String::from_utf8_unchecked(StoreKey::prepare_input(name.as_bytes()).into_vec()) + String::from_utf8_unchecked( + ProfileKey::prepare_input(name.as_bytes()).into_vec(), + ) }, unsafe { String::from_utf8_unchecked( - StoreKey::prepare_input(value.as_bytes()).into_vec(), + ProfileKey::prepare_input(value.as_bytes()).into_vec(), ) }, ), }); } - result + Ok(result) } pub fn extend_query<'q, Q: QueryPrepare>( @@ -585,7 +588,7 @@ pub fn extend_query<'q, Q: QueryPrepare>( tag_filter: Option<(String, Vec>)>, offset: Option, limit: Option, -) -> Result +) -> Result where i64: for<'e> Encode<'e, Q::DB> + Type, Vec: for<'e> Encode<'e, Q::DB> + Type, @@ -603,19 +606,25 @@ where } pub fn init_keys<'a>( - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'a>, -) -> Result<(StoreKey, Vec, WrapKey, String)> { - let (wrap_key, wrap_key_ref) = method.resolve(pass_key)?; - let store_key = StoreKey::new()?; - let enc_store_key = encode_store_key(&store_key, &wrap_key)?; - Ok((store_key, enc_store_key, wrap_key, wrap_key_ref.into_uri())) -} - -pub fn encode_store_key(store_key: &StoreKey, wrap_key: &WrapKey) -> Result> { - let enc_store_key = store_key.to_string()?; - let result = wrap_key.wrap_data(enc_store_key.into())?; - Ok(result) +) -> Result<(ProfileKey, Vec, StoreKey, String), Error> { + let (store_key, store_key_ref) = method.resolve(pass_key)?; + let profile_key = ProfileKey::new()?; + let enc_profile_key = encode_profile_key(&profile_key, &store_key)?; + Ok(( + profile_key, + enc_profile_key, + store_key, + store_key_ref.into_uri(), + )) +} + +pub fn encode_profile_key( + profile_key: &ProfileKey, + store_key: &StoreKey, +) -> Result, Error> { + store_key.wrap_data(profile_key.to_bytes()?) } #[inline] diff --git a/src/backend/mod.rs b/src/backend/mod.rs new file mode 100644 index 00000000..537e24ea --- /dev/null +++ b/src/backend/mod.rs @@ -0,0 +1,22 @@ +//! 
Storage backends supported by aries-askar + +#[cfg(feature = "any")] +#[cfg_attr(docsrs, doc(cfg(feature = "any")))] +/// Generic backend (from URI) support +pub mod any; + +#[cfg(any(feature = "postgres", feature = "sqlite"))] +pub(crate) mod db_utils; + +#[cfg(feature = "postgres")] +#[cfg_attr(docsrs, doc(cfg(feature = "postgres")))] +/// Postgres database support +pub mod postgres; + +#[cfg(feature = "sqlite")] +#[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))] +/// Sqlite database support +pub mod sqlite; + +mod types; +pub use self::types::{Backend, ManageBackend, QueryBackend}; diff --git a/src/postgres/mod.rs b/src/backend/postgres/mod.rs similarity index 80% rename from src/postgres/mod.rs rename to src/backend/postgres/mod.rs index 01b76427..8e945af0 100644 --- a/src/postgres/mod.rs +++ b/src/backend/postgres/mod.rs @@ -15,16 +15,21 @@ use sqlx::{ Row, }; -use super::db_utils::{ - decode_tags, decrypt_scan_batch, encode_store_key, encode_tag_filter, expiry_timestamp, - extend_query, prepare_tags, random_profile_name, replace_arg_placeholders, DbSession, - DbSessionActive, DbSessionRef, EncScanEntry, ExtDatabase, QueryParams, QueryPrepare, PAGE_SIZE, +use crate::{ + backend::{ + db_utils::{ + decode_tags, decrypt_scan_batch, encode_profile_key, encode_tag_filter, + expiry_timestamp, extend_query, prepare_tags, random_profile_name, + replace_arg_placeholders, DbSession, DbSessionActive, DbSessionRef, EncScanEntry, + ExtDatabase, QueryParams, QueryPrepare, PAGE_SIZE, + }, + types::{Backend, QueryBackend}, + }, + error::Error, + future::{unblock, BoxFuture}, + protect::{EntryEncryptor, KeyCache, PassKey, ProfileId, ProfileKey, StoreKeyMethod}, + storage::{EncEntryTag, Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}, }; -use super::error::Result; -use super::future::{unblock, BoxFuture}; -use super::keys::{store::StoreKey, wrap::WrapKeyMethod, EntryEncryptor, KeyCache, PassKey}; -use super::store::{Backend, QueryBackend, Scan}; -use super::types::{EncEntryTag, Entry, EntryKind, EntryOperation, EntryTag, ProfileId, TagFilter}; const COUNT_QUERY: &'static str = "SELECT COUNT(*) FROM items i WHERE profile_id = $1 AND kind = $2 AND category = $3 @@ -63,7 +68,7 @@ const TAG_INSERT_QUERY: &'static str = "INSERT INTO items_tags mod provision; pub use provision::PostgresStoreOptions; -#[cfg(feature = "pg_test")] +#[cfg(any(test, feature = "pg_test"))] pub mod test_db; /// A PostgreSQL database store @@ -96,18 +101,18 @@ impl PostgresStore { impl Backend for PostgresStore { type Session = DbSession; - fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { + fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { let name = name.unwrap_or_else(random_profile_name); Box::pin(async move { - let key = StoreKey::new()?; - let enc_key = key.to_string()?; + let key = ProfileKey::new()?; + let enc_key = key.to_bytes()?; let mut conn = self.conn_pool.acquire().await?; if let Some(pid) = sqlx::query_scalar( - "INSERT INTO profiles (name, store_key) VALUES ($1, $2) + "INSERT INTO profiles (name, profile_key) VALUES ($1, $2) ON CONFLICT DO NOTHING RETURNING id", ) .bind(&name) - .bind(enc_key.as_bytes()) + .bind(enc_key.as_ref()) .fetch_optional(&mut conn) .await? 
{ @@ -125,7 +130,7 @@ impl Backend for PostgresStore { self.default_profile.as_str() } - fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { + fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { Box::pin(async move { let mut conn = self.conn_pool.acquire().await?; Ok(sqlx::query("DELETE FROM profiles WHERE name=$1") @@ -139,31 +144,31 @@ impl Backend for PostgresStore { fn rekey_backend( &mut self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, - ) -> BoxFuture<'_, Result<()>> { + ) -> BoxFuture<'_, Result<(), Error>> { let pass_key = pass_key.into_owned(); Box::pin(async move { - let (wrap_key, wrap_key_ref) = unblock(move || method.resolve(pass_key)).await?; - let wrap_key = Arc::new(wrap_key); + let (store_key, store_key_ref) = unblock(move || method.resolve(pass_key)).await?; + let store_key = Arc::new(store_key); let mut txn = self.conn_pool.begin().await?; - let mut rows = sqlx::query("SELECT id, store_key FROM profiles").fetch(&mut txn); + let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(&mut txn); let mut upd_keys = BTreeMap::>::new(); while let Some(row) = rows.next().await { let row = row?; let pid = row.try_get(0)?; let enc_key = row.try_get(1)?; - let store_key = self.key_cache.load_key(enc_key).await?; + let profile_key = self.key_cache.load_key(enc_key).await?; let upd_key = unblock({ - let wrap_key = wrap_key.clone(); - move || encode_store_key(&store_key, &wrap_key) + let store_key = store_key.clone(); + move || encode_profile_key(&profile_key, &store_key) }) .await?; upd_keys.insert(pid, upd_key); } drop(rows); for (pid, key) in upd_keys { - if sqlx::query("UPDATE profiles SET store_key=$1 WHERE id=$2") + if sqlx::query("UPDATE profiles SET profile_key=$1 WHERE id=$2") .bind(key) .bind(pid) .execute(&mut txn) @@ -171,20 +176,20 @@ impl Backend for PostgresStore { .rows_affected() != 1 { - return Err(err_msg!(Backend, "Error updating profile store key")); + return Err(err_msg!(Backend, "Error updating profile key")); } } - if sqlx::query("UPDATE config SET value=$1 WHERE name='wrap_key'") - .bind(wrap_key_ref.into_uri()) + if sqlx::query("UPDATE config SET value=$1 WHERE name='key'") + .bind(store_key_ref.into_uri()) .execute(&mut txn) .await? 
.rows_affected() != 1 { - return Err(err_msg!(Backend, "Error updating wrap key")); + return Err(err_msg!(Backend, "Error updating store key")); } txn.commit().await?; - self.key_cache = Arc::new(KeyCache::new(wrap_key)); + self.key_cache = Arc::new(KeyCache::new(store_key)); Ok(()) }) } @@ -197,7 +202,7 @@ impl Backend for PostgresStore { tag_filter: Option, offset: Option, limit: Option, - ) -> BoxFuture<'_, Result>> { + ) -> BoxFuture<'_, Result, Error>> { Box::pin(async move { let session = self.session(profile, false)?; let mut active = session.owned_ref(); @@ -222,7 +227,7 @@ impl Backend for PostgresStore { }) } - fn session(&self, profile: Option, transaction: bool) -> Result { + fn session(&self, profile: Option, transaction: bool) -> Result { Ok(DbSession::new( self.conn_pool.clone(), self.key_cache.clone(), @@ -231,7 +236,7 @@ impl Backend for PostgresStore { )) } - fn close(&self) -> BoxFuture<'_, Result<()>> { + fn close(&self) -> BoxFuture<'_, Result<(), Error>> { Box::pin(async move { self.conn_pool.close().await; Ok(()) @@ -255,8 +260,8 @@ impl QueryBackend for DbSession { kind: EntryKind, category: &'q str, tag_filter: Option, - ) -> BoxFuture<'q, Result> { - let category = StoreKey::prepare_input(category.as_bytes()); + ) -> BoxFuture<'q, Result> { + let category = ProfileKey::prepare_input(category.as_bytes()); Box::pin(async move { let (profile_id, key) = acquire_key(&mut *self).await?; @@ -266,7 +271,7 @@ impl QueryBackend for DbSession { let (enc_category, tag_filter) = unblock({ let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)?, )) @@ -290,7 +295,7 @@ impl QueryBackend for DbSession { category: &str, name: &str, for_update: bool, - ) -> BoxFuture<'_, Result>> { + ) -> BoxFuture<'_, Result, Error>> { let category = category.to_string(); let name = name.to_string(); @@ -298,10 +303,10 @@ impl QueryBackend for DbSession { let (profile_id, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name) = unblock({ let key = key.clone(); - let category = StoreKey::prepare_input(category.as_bytes()); - let name = StoreKey::prepare_input(name.as_bytes()); + let category = ProfileKey::prepare_input(category.as_bytes()); + let name = ProfileKey::prepare_input(name.as_bytes()); move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, )) @@ -323,19 +328,17 @@ impl QueryBackend for DbSession { { let value = row.try_get(1)?; let tags = row.try_get::, _>(2)?.map(String::into_bytes); - let (value, tags) = unblock(move || { - let value = key.decrypt_entry_value(value)?; + let (category, name, value, tags) = unblock(move || { + let value = key.decrypt_entry_value(category.as_ref(), name.as_ref(), value)?; let tags = if let Some(enc_tags) = tags { - Some( - key.decrypt_entry_tags( - decode_tags(enc_tags) - .map_err(|_| err_msg!(Unexpected, "Error decoding tags"))?, - )?, - ) + key.decrypt_entry_tags( + decode_tags(enc_tags) + .map_err(|_| err_msg!(Unexpected, "Error decoding tags"))?, + )? 
} else { - None + Vec::new() }; - Result::Ok((value, tags)) + Result::<_, Error>::Ok((category, name, value, tags)) }) .await?; Ok(Some(Entry::new(category, name, value, tags))) @@ -352,7 +355,7 @@ impl QueryBackend for DbSession { tag_filter: Option, limit: Option, for_update: bool, - ) -> BoxFuture<'q, Result>> { + ) -> BoxFuture<'q, Result, Error>> { let category = category.to_string(); Box::pin(async move { let for_update = for_update && self.is_transaction(); @@ -387,8 +390,8 @@ impl QueryBackend for DbSession { kind: EntryKind, category: &'q str, tag_filter: Option, - ) -> BoxFuture<'q, Result> { - let category = StoreKey::prepare_input(category.as_bytes()); + ) -> BoxFuture<'q, Result> { + let category = ProfileKey::prepare_input(category.as_bytes()); Box::pin(async move { let (profile_id, key) = acquire_key(&mut *self).await?; @@ -398,7 +401,7 @@ impl QueryBackend for DbSession { let (enc_category, tag_filter) = unblock({ let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)?, )) @@ -432,22 +435,26 @@ impl QueryBackend for DbSession { value: Option<&'q [u8]>, tags: Option<&'q [EntryTag]>, expiry_ms: Option, - ) -> BoxFuture<'q, Result<()>> { - let category = StoreKey::prepare_input(category.as_bytes()); - let name = StoreKey::prepare_input(name.as_bytes()); + ) -> BoxFuture<'q, Result<(), Error>> { + let category = ProfileKey::prepare_input(category.as_bytes()); + let name = ProfileKey::prepare_input(name.as_bytes()); match operation { EntryOperation::Insert => { - let value = StoreKey::prepare_input(value.unwrap()); + let value = ProfileKey::prepare_input(value.unwrap()); let tags = tags.map(prepare_tags); Box::pin(async move { let (_, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name, enc_value, enc_tags) = unblock(move || { - Result::Ok(( + let enc_value = + key.encrypt_entry_value(category.as_ref(), name.as_ref(), value)?; + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, - key.encrypt_entry_value(value)?, - tags.map(|t| key.encrypt_entry_tags(t)).transpose()?, + enc_value, + tags.transpose()? + .map(|t| key.encrypt_entry_tags(t)) + .transpose()?, )) }) .await?; @@ -468,16 +475,20 @@ impl QueryBackend for DbSession { }) } EntryOperation::Replace => { - let value = StoreKey::prepare_input(value.unwrap()); + let value = ProfileKey::prepare_input(value.unwrap()); let tags = tags.map(prepare_tags); Box::pin(async move { let (_, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name, enc_value, enc_tags) = unblock(move || { - Result::Ok(( + let enc_value = + key.encrypt_entry_value(category.as_ref(), name.as_ref(), value)?; + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, - key.encrypt_entry_value(value)?, - tags.map(|t| key.encrypt_entry_tags(t)).transpose()?, + enc_value, + tags.transpose()? 
+ .map(|t| key.encrypt_entry_tags(t)) + .transpose()?, )) }) .await?; @@ -503,7 +514,7 @@ impl QueryBackend for DbSession { EntryOperation::Remove => Box::pin(async move { let (_, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name) = unblock(move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, )) @@ -515,7 +526,7 @@ impl QueryBackend for DbSession { } } - fn close(self, commit: bool) -> BoxFuture<'static, Result<()>> { + fn close(self, commit: bool) -> BoxFuture<'static, Result<(), Error>> { Box::pin(DbSession::close(self, commit)) } } @@ -549,7 +560,9 @@ impl QueryPrepare for PostgresStore { } } -async fn acquire_key(session: &mut DbSession) -> Result<(ProfileId, Arc)> { +async fn acquire_key( + session: &mut DbSession, +) -> Result<(ProfileId, Arc), Error> { if let Some(ret) = session.profile_and_key() { Ok(ret) } else { @@ -560,7 +573,7 @@ async fn acquire_key(session: &mut DbSession) -> Result<(ProfileId, Ar async fn acquire_session<'q>( session: &'q mut DbSession, -) -> Result> { +) -> Result, Error> { session.make_active(&resolve_profile_key).await } @@ -568,11 +581,11 @@ async fn resolve_profile_key( conn: &mut PoolConnection, cache: Arc, profile: String, -) -> Result<(ProfileId, Arc)> { +) -> Result<(ProfileId, Arc), Error> { if let Some((pid, key)) = cache.get_profile(profile.as_str()).await { Ok((pid, key)) } else { - if let Some(row) = sqlx::query("SELECT id, store_key FROM profiles WHERE name=?1") + if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=?1") .bind(profile.as_str()) .fetch_optional(conn) .await? @@ -595,7 +608,7 @@ async fn perform_insert<'q>( enc_value: &[u8], enc_tags: Option>, expiry_ms: Option, -) -> Result<()> { +) -> Result<(), Error> { trace!("Insert entry"); let row_id: i64 = sqlx::query_scalar(INSERT_QUERY) .bind(active.profile_id) @@ -627,7 +640,7 @@ async fn perform_remove<'q>( enc_category: &[u8], enc_name: &[u8], ignore_error: bool, -) -> Result<()> { +) -> Result<(), Error> { trace!("Remove entry"); let done = sqlx::query(DELETE_QUERY) .bind(active.profile_id) @@ -646,24 +659,24 @@ async fn perform_remove<'q>( fn perform_scan<'q>( mut active: DbSessionRef<'q, Postgres>, profile_id: ProfileId, - key: Arc, + key: Arc, kind: EntryKind, category: String, tag_filter: Option, offset: Option, limit: Option, for_update: bool, -) -> impl Stream>> + 'q { +) -> impl Stream, Error>> + 'q { try_stream! { let mut params = QueryParams::new(); params.push(profile_id); params.push(kind as i16); let (enc_category, tag_filter) = unblock({ let key = key.clone(); - let category = StoreKey::prepare_input(category.as_bytes()); + let category = ProfileKey::prepare_input(category.as_bytes()); let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)? )) @@ -679,8 +692,9 @@ fn perform_scan<'q>( let mut acquired = acquire_session(&mut *active).await?; let mut rows = sqlx::query_with(query.as_str(), params).fetch(acquired.connection_mut()); while let Some(row) = rows.try_next().await? 
{ + let tags = row.try_get::, _>(3)?.map(String::into_bytes).unwrap_or_default(); batch.push(EncScanEntry { - name: row.try_get(1)?, value: row.try_get(2)?, tags: row.try_get::, _>(3)?.map(String::into_bytes) + name: row.try_get(1)?, value: row.try_get(2)?, tags }); if batch.len() == PAGE_SIZE { yield batch.split_off(0); @@ -699,7 +713,7 @@ fn perform_scan<'q>( #[cfg(test)] mod tests { use super::*; - use crate::db_utils::replace_arg_placeholders; + use crate::backend::db_utils::replace_arg_placeholders; #[test] fn postgres_simple_and_convert_args_works() { diff --git a/src/postgres/provision.rs b/src/backend/postgres/provision.rs similarity index 85% rename from src/postgres/provision.rs rename to src/backend/postgres/provision.rs index e0732803..bb5cd544 100644 --- a/src/postgres/provision.rs +++ b/src/backend/postgres/provision.rs @@ -7,16 +7,16 @@ use sqlx::{ ConnectOptions, Connection, Error as SqlxError, Executor, Row, Transaction, }; -use crate::db_utils::{init_keys, random_profile_name}; -use crate::error::Result; -use crate::future::{unblock, BoxFuture}; -use crate::keys::{ - wrap::{WrapKeyMethod, WrapKeyReference}, - KeyCache, PassKey, +use crate::{ + backend::{ + db_utils::{init_keys, random_profile_name}, + types::ManageBackend, + }, + error::Error, + future::{unblock, BoxFuture}, + protect::{KeyCache, PassKey, ProfileId, StoreKeyMethod, StoreKeyReference}, + storage::{IntoOptions, Store}, }; -use crate::options::IntoOptions; -use crate::store::{ManageBackend, Store}; -use crate::types::ProfileId; use super::PostgresStore; @@ -40,7 +40,7 @@ pub struct PostgresStoreOptions { impl PostgresStoreOptions { /// Initialize `PostgresStoreOptions` from a generic set of options - pub fn new<'a, O>(options: O) -> Result + pub fn new<'a, O>(options: O) -> Result where O: IntoOptions<'a>, { @@ -110,7 +110,7 @@ impl PostgresStoreOptions { }) } - async fn pool(&self) -> std::result::Result { + async fn pool(&self) -> Result { #[allow(unused_mut)] let mut conn_opts = PgConnectOptions::from_str(self.uri.as_str())?; #[cfg(feature = "log")] @@ -128,7 +128,7 @@ impl PostgresStoreOptions { .await } - pub(crate) async fn create_db_pool(&self) -> Result { + pub(crate) async fn create_db_pool(&self) -> Result { // try connecting normally in case the database exists match self.pool().await { Ok(pool) => Ok(pool), @@ -169,11 +169,11 @@ impl PostgresStoreOptions { /// Provision a Postgres store from this set of configuration options pub async fn provision( self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, profile: Option<&str>, recreate: bool, - ) -> Result> { + ) -> Result, Error> { let conn_pool = self.create_db_pool().await?; let mut txn = conn_pool.begin().await?; @@ -203,7 +203,7 @@ impl PostgresStoreOptions { // no 'config' table, assume empty database } - let (store_key, enc_store_key, wrap_key, wrap_key_ref) = unblock({ + let (profile_key, enc_profile_key, store_key, store_key_ref) = unblock({ let pass_key = pass_key.into_owned(); move || init_keys(method, pass_key) }) @@ -211,9 +211,9 @@ impl PostgresStoreOptions { let default_profile = profile .map(str::to_string) .unwrap_or_else(random_profile_name); - let profile_id = init_db(txn, &default_profile, wrap_key_ref, enc_store_key).await?; - let mut key_cache = KeyCache::new(wrap_key); - key_cache.add_profile_mut(default_profile.clone(), profile_id, store_key); + let profile_id = init_db(txn, &default_profile, store_key_ref, enc_profile_key).await?; + let mut key_cache = KeyCache::new(store_key); + 
key_cache.add_profile_mut(default_profile.clone(), profile_id, profile_key); Ok(Store::new(PostgresStore::new( conn_pool, @@ -227,10 +227,10 @@ impl PostgresStoreOptions { /// Open an existing Postgres store from this set of configuration options pub async fn open( self, - method: Option, + method: Option, pass_key: PassKey<'_>, profile: Option<&str>, - ) -> Result> { + ) -> Result, Error> { let pool = match self.pool().await { Ok(p) => Ok(p), Err(SqlxError::Database(db_err)) if db_err.code() == Some(Cow::Borrowed("3D000")) => { @@ -244,7 +244,7 @@ impl PostgresStoreOptions { } /// Remove an existing Postgres store defined by these configuration options - pub async fn remove(self) -> Result { + pub async fn remove(self) -> Result { let mut admin_conn = PgConnection::connect(self.admin_uri.as_ref()).await?; // any character except NUL is allowed in an identifier. // double quotes must be escaped, but we just disallow those @@ -269,26 +269,26 @@ impl<'a> ManageBackend<'a> for PostgresStoreOptions { fn open_backend( self, - method: Option, + method: Option, pass_key: PassKey<'_>, profile: Option<&'a str>, - ) -> BoxFuture<'a, Result>> { + ) -> BoxFuture<'a, Result, Error>> { let pass_key = pass_key.into_owned(); Box::pin(self.open(method, pass_key, profile)) } fn provision_backend( self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, profile: Option<&'a str>, recreate: bool, - ) -> BoxFuture<'a, Result>> { + ) -> BoxFuture<'a, Result, Error>> { let pass_key = pass_key.into_owned(); Box::pin(self.provision(method, pass_key, profile, recreate)) } - fn remove_backend(self) -> BoxFuture<'a, Result> { + fn remove_backend(self) -> BoxFuture<'a, Result> { Box::pin(self.remove()) } } @@ -296,9 +296,9 @@ impl<'a> ManageBackend<'a> for PostgresStoreOptions { pub(crate) async fn init_db<'t>( mut txn: Transaction<'t, Postgres>, profile_name: &str, - wrap_key_ref: String, - enc_store_key: Vec, -) -> Result { + store_key_ref: String, + enc_profile_key: Vec, +) -> Result { txn.execute( " CREATE TABLE config ( @@ -311,7 +311,7 @@ pub(crate) async fn init_db<'t>( id BIGSERIAL, name TEXT NOT NULL, reference TEXT NULL, - store_key BYTEA NULL, + profile_key BYTEA NULL, PRIMARY KEY(id) ); CREATE UNIQUE INDEX ix_profile_name ON profiles(name); @@ -350,19 +350,19 @@ pub(crate) async fn init_db<'t>( sqlx::query( "INSERT INTO config (name, value) VALUES ('default_profile', $1), - ('version', '1'), - ('wrap_key', $2)", + ('key', $2), + ('version', '1')", ) .persistent(false) .bind(profile_name) - .bind(wrap_key_ref) + .bind(store_key_ref) .execute(&mut txn) .await?; let profile_id = - sqlx::query_scalar("INSERT INTO profiles (name, store_key) VALUES ($1, $2) RETURNING id") + sqlx::query_scalar("INSERT INTO profiles (name, profile_key) VALUES ($1, $2) RETURNING id") .bind(profile_name) - .bind(enc_store_key) + .bind(enc_profile_key) .fetch_one(&mut txn) .await?; @@ -371,12 +371,12 @@ pub(crate) async fn init_db<'t>( Ok(profile_id) } -pub(crate) async fn reset_db(conn: &mut PgConnection) -> Result<()> { +pub(crate) async fn reset_db(conn: &mut PgConnection) -> Result<(), Error> { conn.execute( " DROP TABLE IF EXISTS config, profiles, - store_keys, keys, + profile_keys, keys, items, items_tags; ", ) @@ -386,20 +386,20 @@ pub(crate) async fn reset_db(conn: &mut PgConnection) -> Result<()> { pub(crate) async fn open_db( conn_pool: PgPool, - method: Option, + method: Option, pass_key: PassKey<'_>, profile: Option<&str>, host: String, name: String, -) -> Result> { +) -> Result, Error> { let mut 
conn = conn_pool.acquire().await?; let mut ver_ok = false; let mut default_profile: Option = None; - let mut wrap_key_ref: Option = None; + let mut store_key_ref: Option = None; let config = sqlx::query( r#"SELECT name, value FROM config - WHERE name IN ('default_profile', 'version', 'wrap_key')"#, + WHERE name IN ('default_profile', 'key', 'version')"#, ) .fetch_all(&mut conn) .await?; @@ -408,15 +408,15 @@ pub(crate) async fn open_db( "default_profile" => { default_profile.replace(row.try_get(1)?); } + "key" => { + store_key_ref.replace(row.try_get(1)?); + } "version" => { if row.try_get::<&str, _>(1)? != "1" { return Err(err_msg!(Unsupported, "Unsupported store version")); } ver_ok = true; } - "wrap_key" => { - wrap_key_ref.replace(row.try_get(1)?); - } _ => (), } } @@ -427,11 +427,11 @@ pub(crate) async fn open_db( .map(str::to_string) .or(default_profile) .ok_or_else(|| err_msg!(Unsupported, "Default store profile not found"))?; - let wrap_key = if let Some(wrap_key_ref) = wrap_key_ref { - let wrap_ref = WrapKeyReference::parse_uri(&wrap_key_ref)?; + let store_key = if let Some(store_key_ref) = store_key_ref { + let wrap_ref = StoreKeyReference::parse_uri(&store_key_ref)?; if let Some(method) = method { if !wrap_ref.compare_method(&method) { - return Err(err_msg!(Input, "Store key wrap method mismatch")); + return Err(err_msg!(Input, "Store key method mismatch")); } } unblock({ @@ -440,17 +440,17 @@ pub(crate) async fn open_db( }) .await? } else { - return Err(err_msg!(Unsupported, "Store wrap key not found")); + return Err(err_msg!(Unsupported, "Store key not found")); }; - let mut key_cache = KeyCache::new(wrap_key); + let mut key_cache = KeyCache::new(store_key); - let row = sqlx::query("SELECT id, store_key FROM profiles WHERE name = $1") + let row = sqlx::query("SELECT id, profile_key FROM profiles WHERE name = $1") .bind(&profile) .fetch_one(&mut conn) .await?; let profile_id = row.try_get(0)?; - let store_key = key_cache.load_key(row.try_get(1)?).await?; - key_cache.add_profile_mut(profile.clone(), profile_id, store_key); + let profile_key = key_cache.load_key(row.try_get(1)?).await?; + key_cache.add_profile_mut(profile.clone(), profile_id, profile_key); Ok(Store::new(PostgresStore::new( conn_pool, profile, key_cache, host, name, diff --git a/src/postgres/test_db.rs b/src/backend/postgres/test_db.rs similarity index 67% rename from src/postgres/test_db.rs rename to src/backend/postgres/test_db.rs index 3dd494b6..c719f260 100644 --- a/src/postgres/test_db.rs +++ b/src/backend/postgres/test_db.rs @@ -1,3 +1,5 @@ +//! Store wrapper for running tests against a postgres database + use sqlx::{ postgres::{PgConnection, Postgres}, Connection, Database, TransactionManager, @@ -6,31 +8,33 @@ use std::time::Duration; use super::provision::{init_db, reset_db, PostgresStoreOptions}; use super::PostgresStore; -use crate::db_utils::{init_keys, random_profile_name}; -use crate::error::Result; -use crate::future::{block_on, unblock}; -use crate::keys::{ - wrap::{generate_raw_wrap_key, WrapKeyMethod}, - KeyCache, +use crate::{ + backend::db_utils::{init_keys, random_profile_name}, + error::Error, + future::{block_on, sleep, timeout, unblock}, + protect::{generate_raw_store_key, KeyCache, StoreKeyMethod}, + storage::Store, }; -use crate::store::Store; +#[derive(Debug)] +/// Postgres test database wrapper instance pub struct TestDB { inst: Option>, lock_txn: Option, } impl TestDB { - #[allow(unused)] - pub async fn provision() -> Result { + /// Provision a new instance of the test database. 
+ /// This method blocks until the database lock can be acquired. + pub async fn provision() -> Result { let path = match std::env::var("POSTGRES_URL") { Ok(p) if !p.is_empty() => p, _ => panic!("'POSTGRES_URL' must be defined"), }; - let key = generate_raw_wrap_key(None)?; - let (store_key, enc_store_key, wrap_key, wrap_key_ref) = - unblock(|| init_keys(WrapKeyMethod::RawKey, key)).await?; + let key = generate_raw_store_key(None)?; + let (profile_key, enc_profile_key, store_key, store_key_ref) = + unblock(|| init_keys(StoreKeyMethod::RawKey, key)).await?; let default_profile = random_profile_name(); let opts = PostgresStoreOptions::new(path.as_str())?; @@ -51,7 +55,7 @@ impl TestDB { break lock_txn; } lock_txn.close().await?; - async_std::task::sleep(Duration::from_millis(50)).await; + sleep(Duration::from_millis(50)).await; }; let mut init_txn = conn_pool.begin().await?; @@ -59,10 +63,11 @@ impl TestDB { reset_db(&mut *init_txn).await?; // create tables and add default profile - let profile_id = init_db(init_txn, &default_profile, wrap_key_ref, enc_store_key).await?; + let profile_id = + init_db(init_txn, &default_profile, store_key_ref, enc_profile_key).await?; - let mut key_cache = KeyCache::new(wrap_key); - key_cache.add_profile_mut(default_profile.clone(), profile_id, store_key); + let mut key_cache = KeyCache::new(store_key); + key_cache.add_profile_mut(default_profile.clone(), profile_id, profile_key); let inst = Store::new(PostgresStore::new( conn_pool, default_profile, @@ -92,12 +97,9 @@ impl Drop for TestDB { block_on(lock_txn.close()).expect("Error closing database connection"); } if let Some(inst) = self.inst.take() { - block_on(async_std::future::timeout( - Duration::from_secs(30), - inst.close(), - )) - .expect("Timed out waiting for the pool connection to close") - .expect("Error closing connection pool"); + block_on(timeout(Duration::from_secs(30), inst.close())) + .expect("Timed out waiting for the pool connection to close") + .expect("Error closing connection pool"); } } } diff --git a/src/sqlite/mod.rs b/src/backend/sqlite/mod.rs similarity index 81% rename from src/sqlite/mod.rs rename to src/backend/sqlite/mod.rs index cadd6847..5df97183 100644 --- a/src/sqlite/mod.rs +++ b/src/backend/sqlite/mod.rs @@ -14,16 +14,21 @@ use sqlx::{ Database, Error as SqlxError, Row, TransactionManager, }; -use super::db_utils::{ - decode_tags, decrypt_scan_batch, encode_store_key, encode_tag_filter, expiry_timestamp, - extend_query, prepare_tags, random_profile_name, DbSession, DbSessionActive, DbSessionRef, - EncScanEntry, ExtDatabase, QueryParams, QueryPrepare, PAGE_SIZE, +use crate::{ + backend::{ + db_utils::{ + decode_tags, decrypt_scan_batch, encode_profile_key, encode_tag_filter, + expiry_timestamp, extend_query, prepare_tags, random_profile_name, DbSession, + DbSessionActive, DbSessionRef, EncScanEntry, ExtDatabase, QueryParams, QueryPrepare, + PAGE_SIZE, + }, + types::{Backend, QueryBackend}, + }, + error::Error, + future::{unblock, BoxFuture}, + protect::{EntryEncryptor, KeyCache, PassKey, ProfileId, ProfileKey, StoreKeyMethod}, + storage::{EncEntryTag, Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}, }; -use super::error::Result; -use super::future::{unblock, BoxFuture}; -use super::keys::{store::StoreKey, wrap::WrapKeyMethod, EntryEncryptor, KeyCache, PassKey}; -use super::store::{Backend, QueryBackend, Scan}; -use super::types::{EncEntryTag, Entry, EntryKind, EntryOperation, EntryTag, ProfileId, TagFilter}; mod provision; pub use provision::SqliteStoreOptions; 
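The renamed provisioning entry points above (generate_raw_store_key, StoreKeyMethod) can be exercised end to end against an in-memory SQLite store. The sketch below mirrors the crate's own test usage in this patch; it is not part of the patch, and the crate-internal module paths and the blocking wrapper are assumptions carried over from the imports shown above.

    // Sketch only, not part of this patch: provisioning a throwaway in-memory
    // SQLite store with the renamed key types, modeled on the sqlite tests below.
    use crate::{
        backend::sqlite::SqliteStoreOptions, // assumed path after the module move
        error::Error,
        future::block_on,
        protect::{generate_raw_store_key, StoreKeyMethod},
    };

    fn provision_in_memory() -> Result<(), Error> {
        block_on(async {
            // Replaces generate_raw_wrap_key from the previous release.
            let key = generate_raw_store_key(None)?;
            // Replaces WrapKeyMethod::RawKey; the store key now protects the
            // per-profile keys kept in the profiles.profile_key column.
            let store = SqliteStoreOptions::in_memory()
                .provision(StoreKeyMethod::RawKey, key, None, false)
                .await?;
            store.close().await?;
            Result::<_, Error>::Ok(())
        })
    }
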
@@ -92,16 +97,16 @@ impl QueryPrepare for SqliteStore { impl Backend for SqliteStore { type Session = DbSession; - fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { + fn create_profile(&self, name: Option) -> BoxFuture<'_, Result> { let name = name.unwrap_or_else(random_profile_name); Box::pin(async move { - let key = StoreKey::new()?; - let enc_key = key.to_string()?; + let key = ProfileKey::new()?; + let enc_key = key.to_bytes()?; let mut conn = self.conn_pool.acquire().await?; let done = - sqlx::query("INSERT OR IGNORE INTO profiles (name, store_key) VALUES (?1, ?2)") + sqlx::query("INSERT OR IGNORE INTO profiles (name, profile_key) VALUES (?1, ?2)") .bind(&name) - .bind(enc_key) + .bind(enc_key.as_ref()) .execute(&mut conn) .await?; if done.rows_affected() == 0 { @@ -118,7 +123,7 @@ impl Backend for SqliteStore { self.default_profile.as_str() } - fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { + fn remove_profile(&self, name: String) -> BoxFuture<'_, Result> { Box::pin(async move { let mut conn = self.conn_pool.acquire().await?; Ok(sqlx::query("DELETE FROM profiles WHERE name=?") @@ -132,31 +137,31 @@ impl Backend for SqliteStore { fn rekey_backend( &mut self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, - ) -> BoxFuture<'_, Result<()>> { + ) -> BoxFuture<'_, Result<(), Error>> { let pass_key = pass_key.into_owned(); Box::pin(async move { - let (wrap_key, wrap_key_ref) = unblock(move || method.resolve(pass_key)).await?; - let wrap_key = Arc::new(wrap_key); + let (store_key, store_key_ref) = unblock(move || method.resolve(pass_key)).await?; + let store_key = Arc::new(store_key); let mut txn = self.conn_pool.begin().await?; - let mut rows = sqlx::query("SELECT id, store_key FROM profiles").fetch(&mut txn); + let mut rows = sqlx::query("SELECT id, profile_key FROM profiles").fetch(&mut txn); let mut upd_keys = BTreeMap::>::new(); while let Some(row) = rows.next().await { let row = row?; let pid = row.try_get(0)?; let enc_key = row.try_get(1)?; - let store_key = self.key_cache.load_key(enc_key).await?; + let profile_key = self.key_cache.load_key(enc_key).await?; let upd_key = unblock({ - let wrap_key = wrap_key.clone(); - move || encode_store_key(&store_key, &wrap_key) + let store_key = store_key.clone(); + move || encode_profile_key(&profile_key, &store_key) }) .await?; upd_keys.insert(pid, upd_key); } drop(rows); for (pid, key) in upd_keys { - if sqlx::query("UPDATE profiles SET store_key=?1 WHERE id=?2") + if sqlx::query("UPDATE profiles SET profile_key=?1 WHERE id=?2") .bind(key) .bind(pid) .execute(&mut txn) @@ -164,20 +169,20 @@ impl Backend for SqliteStore { .rows_affected() != 1 { - return Err(err_msg!(Backend, "Error updating profile store key")); + return Err(err_msg!(Backend, "Error updating profile key")); } } - if sqlx::query("UPDATE config SET value=?1 WHERE name='wrap_key'") - .bind(wrap_key_ref.into_uri()) + if sqlx::query("UPDATE config SET value=?1 WHERE name='key'") + .bind(store_key_ref.into_uri()) .execute(&mut txn) .await? 
.rows_affected() != 1 { - return Err(err_msg!(Backend, "Error updating wrap key")); + return Err(err_msg!(Backend, "Error updating store key")); } txn.commit().await?; - self.key_cache = Arc::new(KeyCache::new(wrap_key)); + self.key_cache = Arc::new(KeyCache::new(store_key)); Ok(()) }) } @@ -190,7 +195,7 @@ impl Backend for SqliteStore { tag_filter: Option, offset: Option, limit: Option, - ) -> BoxFuture<'_, Result>> { + ) -> BoxFuture<'_, Result, Error>> { Box::pin(async move { let session = self.session(profile, false)?; let mut active = session.owned_ref(); @@ -214,7 +219,7 @@ impl Backend for SqliteStore { }) } - fn session(&self, profile: Option, transaction: bool) -> Result { + fn session(&self, profile: Option, transaction: bool) -> Result { Ok(DbSession::new( self.conn_pool.clone(), self.key_cache.clone(), @@ -223,7 +228,7 @@ impl Backend for SqliteStore { )) } - fn close(&self) -> BoxFuture<'_, Result<()>> { + fn close(&self) -> BoxFuture<'_, Result<(), Error>> { Box::pin(async move { self.conn_pool.close().await; Ok(()) @@ -237,8 +242,8 @@ impl QueryBackend for DbSession { kind: EntryKind, category: &'q str, tag_filter: Option, - ) -> BoxFuture<'q, Result> { - let category = StoreKey::prepare_input(category.as_bytes()); + ) -> BoxFuture<'q, Result> { + let category = ProfileKey::prepare_input(category.as_bytes()); Box::pin(async move { let (profile_id, key) = acquire_key(&mut *self).await?; @@ -248,7 +253,7 @@ impl QueryBackend for DbSession { let (enc_category, tag_filter) = unblock({ let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)?, )) @@ -272,7 +277,7 @@ impl QueryBackend for DbSession { category: &str, name: &str, _for_update: bool, - ) -> BoxFuture<'_, Result>> { + ) -> BoxFuture<'_, Result, Error>> { let category = category.to_string(); let name = name.to_string(); @@ -280,10 +285,10 @@ impl QueryBackend for DbSession { let (profile_id, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name) = unblock({ let key = key.clone(); - let category = StoreKey::prepare_input(category.as_bytes()); - let name = StoreKey::prepare_input(name.as_bytes()); + let category = ProfileKey::prepare_input(category.as_bytes()); + let name = ProfileKey::prepare_input(name.as_bytes()); move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, )) @@ -301,12 +306,12 @@ impl QueryBackend for DbSession { { let value = row.try_get(1)?; let tags = row.try_get(2)?; - let (value, tags) = unblock(move || { - let value = key.decrypt_entry_value(value)?; + let (category, name, value, tags) = unblock(move || { + let value = key.decrypt_entry_value(category.as_ref(), name.as_ref(), value)?; let enc_tags = decode_tags(tags) .map_err(|_| err_msg!(Unexpected, "Error decoding entry tags"))?; - let tags = Some(key.decrypt_entry_tags(enc_tags)?); - Result::Ok((value, tags)) + let tags = key.decrypt_entry_tags(enc_tags)?; + Result::<_, Error>::Ok((category, name, value, tags)) }) .await?; Ok(Some(Entry::new(category, name, value, tags))) @@ -323,7 +328,7 @@ impl QueryBackend for DbSession { tag_filter: Option, limit: Option, _for_update: bool, - ) -> BoxFuture<'q, Result>> { + ) -> BoxFuture<'q, Result, Error>> { let category = category.to_string(); Box::pin(async move { let mut active = self.borrow_mut(); @@ -356,8 +361,8 @@ impl QueryBackend for DbSession { kind: EntryKind, category: &'q str, 
tag_filter: Option, - ) -> BoxFuture<'q, Result> { - let category = StoreKey::prepare_input(category.as_bytes()); + ) -> BoxFuture<'q, Result> { + let category = ProfileKey::prepare_input(category.as_bytes()); Box::pin(async move { let (profile_id, key) = acquire_key(&mut *self).await?; @@ -367,7 +372,7 @@ impl QueryBackend for DbSession { let (enc_category, tag_filter) = unblock({ let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)?, )) @@ -396,22 +401,26 @@ impl QueryBackend for DbSession { value: Option<&'q [u8]>, tags: Option<&'q [EntryTag]>, expiry_ms: Option, - ) -> BoxFuture<'q, Result<()>> { - let category = StoreKey::prepare_input(category.as_bytes()); - let name = StoreKey::prepare_input(name.as_bytes()); + ) -> BoxFuture<'q, Result<(), Error>> { + let category = ProfileKey::prepare_input(category.as_bytes()); + let name = ProfileKey::prepare_input(name.as_bytes()); match operation { op @ EntryOperation::Insert | op @ EntryOperation::Replace => { - let value = StoreKey::prepare_input(value.unwrap()); + let value = ProfileKey::prepare_input(value.unwrap()); let tags = tags.map(prepare_tags); Box::pin(async move { let (_, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name, enc_value, enc_tags) = unblock(move || { - Result::Ok(( + let enc_value = + key.encrypt_entry_value(category.as_ref(), name.as_ref(), value)?; + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, - key.encrypt_entry_value(value)?, - tags.map(|t| key.encrypt_entry_tags(t)).transpose()?, + enc_value, + tags.transpose()? + .map(|t| key.encrypt_entry_tags(t)) + .transpose()?, )) }) .await?; @@ -438,7 +447,7 @@ impl QueryBackend for DbSession { EntryOperation::Remove => Box::pin(async move { let (_, key) = acquire_key(&mut *self).await?; let (enc_category, enc_name) = unblock(move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, key.encrypt_entry_name(name)?, )) @@ -450,7 +459,7 @@ impl QueryBackend for DbSession { } } - fn close(self, commit: bool) -> BoxFuture<'static, Result<()>> { + fn close(self, commit: bool) -> BoxFuture<'static, Result<(), Error>> { Box::pin(DbSession::close(self, commit)) } } @@ -474,7 +483,9 @@ impl ExtDatabase for Sqlite { } } -async fn acquire_key(session: &mut DbSession) -> Result<(ProfileId, Arc)> { +async fn acquire_key( + session: &mut DbSession, +) -> Result<(ProfileId, Arc), Error> { if let Some(ret) = session.profile_and_key() { Ok(ret) } else { @@ -485,7 +496,7 @@ async fn acquire_key(session: &mut DbSession) -> Result<(ProfileId, Arc< async fn acquire_session<'q>( session: &'q mut DbSession, -) -> Result> { +) -> Result, Error> { session.make_active(&resolve_profile_key).await } @@ -493,11 +504,11 @@ async fn resolve_profile_key( conn: &mut PoolConnection, cache: Arc, profile: String, -) -> Result<(ProfileId, Arc)> { +) -> Result<(ProfileId, Arc), Error> { if let Some((pid, key)) = cache.get_profile(profile.as_str()).await { Ok((pid, key)) } else { - if let Some(row) = sqlx::query("SELECT id, store_key FROM profiles WHERE name=?1") + if let Some(row) = sqlx::query("SELECT id, profile_key FROM profiles WHERE name=?1") .bind(profile.as_str()) .fetch_optional(conn) .await? 
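As the fetch and update paths above show, the per-profile key formerly called StoreKey is now ProfileKey (StoreKey now refers to what was the wrap key), and entry values are encrypted and decrypted with the entry's category and name passed alongside the value, presumably binding the ciphertext to its category and name. The sketch below condenses that flow outside of a database session; it is not part of the patch, the EntryEncryptor trait import and the Vec<u8> buffer types are assumptions, and only the method names, arguments, and call order are taken from the code above.

    // Sketch only, not part of this patch: encrypting one entry with a ProfileKey,
    // following the same call order as the update path above. Concrete return
    // types (Vec<u8>) are an assumption; the method names and arguments are not.
    use crate::{
        error::Error,
        protect::{EntryEncryptor, ProfileKey},
    };

    fn encrypt_entry(
        key: &ProfileKey,
        category: &str,
        name: &str,
        value: &[u8],
    ) -> Result<(Vec<u8>, Vec<u8>, Vec<u8>), Error> {
        // prepare_input copies the inputs into owned buffers; in the real code
        // they are then moved onto a blocking task via unblock.
        let category = ProfileKey::prepare_input(category.as_bytes());
        let name = ProfileKey::prepare_input(name.as_bytes());
        let value = ProfileKey::prepare_input(value);
        // The value is encrypted first, while category and name are still
        // borrowed, so that it is tied to both of them.
        let enc_value = key.encrypt_entry_value(category.as_ref(), name.as_ref(), value)?;
        Ok((
            key.encrypt_entry_category(category)?,
            key.encrypt_entry_name(name)?,
            enc_value,
        ))
    }
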
@@ -520,7 +531,7 @@ async fn perform_insert<'q>( enc_value: &[u8], enc_tags: Option>, expiry_ms: Option, -) -> Result<()> { +) -> Result<(), Error> { trace!("Insert entry"); let done = sqlx::query(INSERT_QUERY) .bind(active.profile_id) @@ -555,7 +566,7 @@ async fn perform_remove<'q>( enc_category: &[u8], enc_name: &[u8], ignore_error: bool, -) -> Result<()> { +) -> Result<(), Error> { trace!("Remove entry"); let done = sqlx::query(DELETE_QUERY) .bind(active.profile_id) @@ -574,23 +585,23 @@ async fn perform_remove<'q>( fn perform_scan<'q>( mut active: DbSessionRef<'q, Sqlite>, profile_id: ProfileId, - key: Arc, + key: Arc, kind: EntryKind, category: String, tag_filter: Option, offset: Option, limit: Option, -) -> impl Stream>> + 'q { +) -> impl Stream, Error>> + 'q { try_stream! { let mut params = QueryParams::new(); params.push(profile_id); params.push(kind as i16); let (enc_category, tag_filter) = unblock({ let key = key.clone(); - let category = StoreKey::prepare_input(category.as_bytes()); + let category = ProfileKey::prepare_input(category.as_bytes()); let params_len = params.len() + 1; // plus category move || { - Result::Ok(( + Result::<_, Error>::Ok(( key.encrypt_entry_category(category)?, encode_tag_filter::(tag_filter, &key, params_len)? )) @@ -624,16 +635,16 @@ fn perform_scan<'q>( #[cfg(test)] mod tests { use super::*; - use crate::db_utils::replace_arg_placeholders; + use crate::backend::db_utils::replace_arg_placeholders; use crate::future::block_on; - use crate::keys::wrap::{generate_raw_wrap_key, WrapKeyMethod}; + use crate::protect::{generate_raw_store_key, StoreKeyMethod}; #[test] fn sqlite_check_expiry_timestamp() { block_on(async { - let key = generate_raw_wrap_key(None)?; + let key = generate_raw_store_key(None)?; let db = SqliteStoreOptions::in_memory() - .provision(WrapKeyMethod::RawKey, key, None, false) + .provision(StoreKeyMethod::RawKey, key, None, false) .await?; let ts = expiry_timestamp(1000).unwrap(); let check = sqlx::query("SELECT datetime('now'), ?1, ?1 > datetime('now')") @@ -646,7 +657,7 @@ mod tests { if !cmp { panic!("now ({}) > expiry timestamp ({})", now, cmp_ts); } - Result::Ok(()) + Result::<_, Error>::Ok(()) }) .unwrap(); } diff --git a/src/sqlite/provision.rs b/src/backend/sqlite/provision.rs similarity index 81% rename from src/sqlite/provision.rs rename to src/backend/sqlite/provision.rs index 80d77568..987ca9ff 100644 --- a/src/sqlite/provision.rs +++ b/src/backend/sqlite/provision.rs @@ -9,15 +9,16 @@ use sqlx::{ }; use super::SqliteStore; -use crate::db_utils::{init_keys, random_profile_name}; -use crate::error::Result; -use crate::future::{unblock, BoxFuture}; -use crate::keys::{ - wrap::{WrapKeyMethod, WrapKeyReference}, - KeyCache, PassKey, +use crate::{ + backend::{ + db_utils::{init_keys, random_profile_name}, + types::ManageBackend, + }, + error::Error, + future::{unblock, BoxFuture}, + protect::{KeyCache, PassKey, StoreKeyMethod, StoreKeyReference}, + storage::{IntoOptions, Options, Store}, }; -use crate::options::{IntoOptions, Options}; -use crate::store::{ManageBackend, Store}; /// Configuration options for Sqlite stores #[derive(Debug)] @@ -29,7 +30,7 @@ pub struct SqliteStoreOptions { impl SqliteStoreOptions { /// Initialize `SqliteStoreOptions` from a generic set of options - pub fn new<'a>(options: impl IntoOptions<'a>) -> Result { + pub fn new<'a>(options: impl IntoOptions<'a>) -> Result { let mut opts = options.into_options()?; let max_connections = if let Some(max_conn) = opts.query.remove("max_connections") { max_conn @@ 
-70,11 +71,11 @@ impl SqliteStoreOptions { /// Provision a new Sqlite store from these configuration options pub async fn provision( self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, profile: Option<&'_ str>, recreate: bool, - ) -> Result> { + ) -> Result, Error> { if recreate && !self.in_memory { try_remove_file(self.path.to_string()).await?; } @@ -116,10 +117,10 @@ impl SqliteStoreOptions { /// Open an existing Sqlite store from this set of configuration options pub async fn open( self, - method: Option, + method: Option, pass_key: PassKey<'_>, profile: Option<&'_ str>, - ) -> Result> { + ) -> Result, Error> { let conn_pool = match self.pool(false).await { Ok(pool) => Ok(pool), Err(SqlxError::Database(db_err)) => { @@ -139,7 +140,7 @@ impl SqliteStoreOptions { } /// Remove the Sqlite store defined by these configuration options - pub async fn remove(self) -> Result { + pub async fn remove(self) -> Result { if self.in_memory { Ok(true) } else { @@ -167,24 +168,24 @@ impl<'a> ManageBackend<'a> for SqliteStoreOptions { fn open_backend( self, - method: Option, + method: Option, pass_key: PassKey<'a>, profile: Option<&'a str>, - ) -> BoxFuture<'a, Result>> { + ) -> BoxFuture<'a, Result, Error>> { Box::pin(self.open(method, pass_key, profile)) } fn provision_backend( self, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'a>, profile: Option<&'a str>, recreate: bool, - ) -> BoxFuture<'a, Result>> { + ) -> BoxFuture<'a, Result, Error>> { Box::pin(self.provision(method, pass_key, profile, recreate)) } - fn remove_backend(self) -> BoxFuture<'a, Result> { + fn remove_backend(self) -> BoxFuture<'a, Result> { Box::pin(self.remove()) } } @@ -192,10 +193,10 @@ impl<'a> ManageBackend<'a> for SqliteStoreOptions { async fn init_db( conn_pool: &SqlitePool, profile_name: &str, - method: WrapKeyMethod, + method: StoreKeyMethod, pass_key: PassKey<'_>, -) -> Result { - let (store_key, enc_store_key, wrap_key, wrap_key_ref) = unblock({ +) -> Result { + let (profile_key, enc_profile_key, store_key, store_key_ref) = unblock({ let pass_key = pass_key.into_owned(); move || init_keys(method, pass_key) }) @@ -214,14 +215,14 @@ async fn init_db( ); INSERT INTO config (name, value) VALUES ("default_profile", ?1), - ("version", "1"), - ("wrap_key", ?2); + ("key", ?2), + ("version", "1"); CREATE TABLE profiles ( id INTEGER NOT NULL, name TEXT NOT NULL, reference TEXT NULL, - store_key BLOB NULL, + profile_key BLOB NULL, PRIMARY KEY(id) ); CREATE UNIQUE INDEX ix_profile_name ON profiles (name); @@ -254,51 +255,45 @@ async fn init_db( CREATE INDEX ix_items_tags_name_enc ON items_tags (name, SUBSTR(value, 1, 12)) WHERE plaintext=0; CREATE INDEX ix_items_tags_name_plain ON items_tags (name, value) WHERE plaintext=1; - CREATE TABLE items_locks ( - id INTEGER NOT NULL, - expiry DATETIME NOT NULL, - PRIMARY KEY (id) - ); - - INSERT INTO profiles (name, store_key) VALUES (?1, ?3); + INSERT INTO profiles (name, profile_key) VALUES (?1, ?3); COMMIT; "#, ) .persistent(false) .bind(profile_name) - .bind(wrap_key_ref) - .bind(enc_store_key) + .bind(store_key_ref) + .bind(enc_profile_key) .execute(&mut conn) .await?; - let mut key_cache = KeyCache::new(wrap_key); + let mut key_cache = KeyCache::new(store_key); let row = sqlx::query("SELECT id FROM profiles WHERE name = ?1") .persistent(false) .bind(profile_name) .fetch_one(&mut conn) .await?; - key_cache.add_profile_mut(profile_name.to_string(), row.try_get(0)?, store_key); + key_cache.add_profile_mut(profile_name.to_string(), 
row.try_get(0)?, profile_key); Ok(key_cache) } async fn open_db( conn_pool: SqlitePool, - method: Option, + method: Option, pass_key: PassKey<'_>, profile: Option<&str>, path: String, -) -> Result> { +) -> Result, Error> { let mut conn = conn_pool.acquire().await?; let mut ver_ok = false; let mut default_profile: Option = None; - let mut wrap_key_ref: Option = None; + let mut store_key_ref: Option = None; let config = sqlx::query( r#"SELECT name, value FROM config - WHERE name IN ("default_profile", "version", "wrap_key")"#, + WHERE name IN ("default_profile", "key", "version")"#, ) .fetch_all(&mut conn) .await?; @@ -307,15 +302,15 @@ async fn open_db( "default_profile" => { default_profile.replace(row.try_get(1)?); } + "key" => { + store_key_ref.replace(row.try_get(1)?); + } "version" => { if row.try_get::<&str, _>(1)? != "1" { return Err(err_msg!(Unsupported, "Unsupported store version")); } ver_ok = true; } - "wrap_key" => { - wrap_key_ref.replace(row.try_get(1)?); - } _ => (), } } @@ -326,11 +321,11 @@ async fn open_db( .map(str::to_string) .or(default_profile) .ok_or_else(|| err_msg!(Unsupported, "Default store profile not found"))?; - let wrap_key = if let Some(wrap_key_ref) = wrap_key_ref { - let wrap_ref = WrapKeyReference::parse_uri(&wrap_key_ref)?; + let store_key = if let Some(store_key_ref) = store_key_ref { + let wrap_ref = StoreKeyReference::parse_uri(&store_key_ref)?; if let Some(method) = method { if !wrap_ref.compare_method(&method) { - return Err(err_msg!(Input, "Store key wrap method mismatch")); + return Err(err_msg!(Input, "Store key method mismatch")); } } unblock({ @@ -339,24 +334,24 @@ async fn open_db( }) .await? } else { - return Err(err_msg!(Unsupported, "Store wrap key not found")); + return Err(err_msg!(Unsupported, "Store key not found")); }; - let mut key_cache = KeyCache::new(wrap_key); + let mut key_cache = KeyCache::new(store_key); - let row = sqlx::query("SELECT id, store_key FROM profiles WHERE name = ?1") + let row = sqlx::query("SELECT id, profile_key FROM profiles WHERE name = ?1") .bind(&profile) .fetch_one(&mut conn) .await?; let profile_id = row.try_get(0)?; - let store_key = key_cache.load_key(row.try_get(1)?).await?; - key_cache.add_profile_mut(profile.clone(), profile_id, store_key); + let profile_key = key_cache.load_key(row.try_get(1)?).await?; + key_cache.add_profile_mut(profile.clone(), profile_id, profile_key); Ok(Store::new(SqliteStore::new( conn_pool, profile, key_cache, path, ))) } -async fn try_remove_file(path: String) -> Result { +async fn try_remove_file(path: String) -> Result { unblock(|| match remove_file(path) { Ok(()) => Ok(true), Err(err) if err.kind() == IoErrorKind::NotFound => Ok(false), diff --git a/src/backend/types.rs b/src/backend/types.rs new file mode 100644 index 00000000..d397a529 --- /dev/null +++ b/src/backend/types.rs @@ -0,0 +1,124 @@ +use crate::{ + error::Error, + future::BoxFuture, + protect::{PassKey, StoreKeyMethod}, + storage::{Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}, +}; + +/// Represents a generic backend implementation +pub trait Backend: Send + Sync { + /// The type of session managed by this backend + type Session: QueryBackend; + + /// Create a new profile + fn create_profile(&self, name: Option) -> BoxFuture<'_, Result>; + + /// Get the name of the active profile + fn get_profile_name(&self) -> &str; + + /// Remove an existing profile + fn remove_profile(&self, name: String) -> BoxFuture<'_, Result>; + + /// Create a [`Scan`] against the store + fn scan( + &self, + profile: 
Option, + kind: EntryKind, + category: String, + tag_filter: Option, + offset: Option, + limit: Option, + ) -> BoxFuture<'_, Result, Error>>; + + /// Create a new session against the store + fn session(&self, profile: Option, transaction: bool) -> Result; + + /// Replace the wrapping key of the store + fn rekey_backend( + &mut self, + method: StoreKeyMethod, + key: PassKey<'_>, + ) -> BoxFuture<'_, Result<(), Error>>; + + /// Close the store instance + fn close(&self) -> BoxFuture<'_, Result<(), Error>>; +} + +/// Create, open, or remove a generic backend implementation +pub trait ManageBackend<'a> { + /// The type of store being managed + type Store; + + /// Open an existing store + fn open_backend( + self, + method: Option, + pass_key: PassKey<'a>, + profile: Option<&'a str>, + ) -> BoxFuture<'a, Result>; + + /// Provision a new store + fn provision_backend( + self, + method: StoreKeyMethod, + pass_key: PassKey<'a>, + profile: Option<&'a str>, + recreate: bool, + ) -> BoxFuture<'a, Result>; + + /// Remove an existing store + fn remove_backend(self) -> BoxFuture<'a, Result>; +} + +/// Query from a generic backend implementation +pub trait QueryBackend: Send { + /// Count the number of matching records in the store + fn count<'q>( + &'q mut self, + kind: EntryKind, + category: &'q str, + tag_filter: Option, + ) -> BoxFuture<'q, Result>; + + /// Fetch a single record from the store by category and name + fn fetch<'q>( + &'q mut self, + kind: EntryKind, + category: &'q str, + name: &'q str, + for_update: bool, + ) -> BoxFuture<'q, Result, Error>>; + + /// Fetch all matching records from the store + fn fetch_all<'q>( + &'q mut self, + kind: EntryKind, + category: &'q str, + tag_filter: Option, + limit: Option, + for_update: bool, + ) -> BoxFuture<'q, Result, Error>>; + + /// Remove all matching records from the store + fn remove_all<'q>( + &'q mut self, + kind: EntryKind, + category: &'q str, + tag_filter: Option, + ) -> BoxFuture<'q, Result>; + + /// Insert or replace a record in the store + fn update<'q>( + &'q mut self, + kind: EntryKind, + operation: EntryOperation, + category: &'q str, + name: &'q str, + value: Option<&'q [u8]>, + tags: Option<&'q [EntryTag]>, + expiry_ms: Option, + ) -> BoxFuture<'q, Result<(), Error>>; + + /// Close the current store session + fn close(self, commit: bool) -> BoxFuture<'static, Result<(), Error>>; +} diff --git a/src/didcomm/mod.rs b/src/didcomm/mod.rs new file mode 100644 index 00000000..3aae65f7 --- /dev/null +++ b/src/didcomm/mod.rs @@ -0,0 +1 @@ +pub mod pack; diff --git a/src/didcomm/pack/mod.rs b/src/didcomm/pack/mod.rs new file mode 100644 index 00000000..ca69237e --- /dev/null +++ b/src/didcomm/pack/mod.rs @@ -0,0 +1,9 @@ +mod alg; +mod nacl_box; +mod types; + +pub use alg::{pack_message, unpack_message}; +pub use types::KeyLookup; + +#[cfg(test)] +pub use types::key_lookup_fn; diff --git a/src/didcomm/pack/types.rs b/src/didcomm/pack/types.rs new file mode 100644 index 00000000..53a02510 --- /dev/null +++ b/src/didcomm/pack/types.rs @@ -0,0 +1,79 @@ +use std::future::Future; + +pub use crate::keys::alg::ed25519::{Ed25519KeyPair as KeyPair, Ed25519PublicKey as PublicKey}; + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub struct JWE { + pub protected: String, + pub iv: String, + pub ciphertext: String, + pub tag: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub struct Recipient { + pub encrypted_key: String, + pub header: Header, +} + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, 
PartialEq)] +pub struct Header { + pub kid: String, + #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub iv: Option, + #[serde(default)] + #[serde(skip_serializing_if = "Option::is_none")] + pub sender: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub struct Protected { + pub enc: String, + pub typ: String, + pub alg: String, + pub recipients: Vec, +} + +/// A trait for custom key lookup implementations used by unpack +pub trait KeyLookup<'f> { + fn find<'a>( + self, + key: &'a Vec, + ) -> std::pin::Pin> + Send + 'a>> + where + 'f: 'a; +} + +type KeyLookupCb<'a> = Box) -> Option<(usize, KeyPair)> + Send + Sync + 'a>; + +pub struct KeyLookupFn<'a> { + cb: KeyLookupCb<'a>, +} + +#[cfg(test)] +/// Create a `KeyLookup` from a callback function +pub fn key_lookup_fn<'a, F>(cb: F) -> KeyLookupFn<'a> +where + F: Fn(&Vec) -> Option<(usize, KeyPair)> + Send + Sync + 'a, +{ + KeyLookupFn { + cb: Box::new(cb) as KeyLookupCb<'a>, + } +} + +impl<'a, 'l, 'r> KeyLookup<'l> for &'r KeyLookupFn<'a> +where + 'a: 'l, + 'r: 'a, +{ + fn find<'f>( + self, + keys: &'f Vec, + ) -> std::pin::Pin> + Send + 'f>> + where + 'l: 'f, + { + Box::pin(async move { (&self.cb)(keys) }) + } +} diff --git a/src/error.rs b/src/error.rs index d0231bbd..9990819b 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,7 +1,7 @@ use std::error::Error as StdError; use std::fmt::{self, Display, Formatter}; -pub type Result = std::result::Result; +use crate::crypto::{Error as CryptoError, ErrorKind as CryptoErrorKind}; /// The possible kinds of error produced by the crate #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -70,19 +70,16 @@ impl Error { } } - pub(crate) fn from_opt_msg>(kind: ErrorKind, msg: Option) -> Self { - Self { - kind, - cause: None, - message: msg.map(Into::into), - } - } - /// Accessor for the error kind pub fn kind(&self) -> ErrorKind { self.kind } + /// Accessor for the error message + pub fn message(&self) -> Option<&str> { + self.message.as_ref().map(String::as_str) + } + pub(crate) fn with_cause>>(mut self, err: T) -> Self { self.cause = Some(err.into()); self @@ -136,21 +133,19 @@ impl From for Error { } } -impl From for Error { - fn from(err: indy_utils::EncryptionError) -> Self { - Error::from_opt_msg(ErrorKind::Encryption, err.context) - } -} - -impl From for Error { - fn from(err: indy_utils::UnexpectedError) -> Self { - Error::from_opt_msg(ErrorKind::Unexpected, err.context) - } -} - -impl From for Error { - fn from(err: indy_utils::ValidationError) -> Self { - Error::from_opt_msg(ErrorKind::Input, err.context) +impl From for Error { + fn from(err: CryptoError) -> Self { + let kind = match err.kind() { + CryptoErrorKind::Encryption => ErrorKind::Encryption, + CryptoErrorKind::ExceededBuffer | CryptoErrorKind::Unexpected => ErrorKind::Unexpected, + CryptoErrorKind::InvalidData + | CryptoErrorKind::InvalidKeyData + | CryptoErrorKind::InvalidNonce + | CryptoErrorKind::MissingSecretKey + | CryptoErrorKind::Usage => ErrorKind::Input, + CryptoErrorKind::Unsupported => ErrorKind::Unsupported, + }; + Error::from_msg(kind, err.message()) } } diff --git a/src/ffi/error.rs b/src/ffi/error.rs index a9e3f665..5c595236 100644 --- a/src/ffi/error.rs +++ b/src/ffi/error.rs @@ -1,4 +1,4 @@ -use crate::error::{Error, ErrorKind, Result}; +use crate::error::{Error, ErrorKind}; use std::os::raw::c_char; use std::sync::RwLock; @@ -38,8 +38,8 @@ impl From for ErrorCode { } } -impl From> for ErrorCode { - fn from(result: Result) -> ErrorCode { +impl From> for ErrorCode { + 
fn from(result: Result) -> ErrorCode { match result { Ok(_) => ErrorCode::Success, Err(err) => ErrorCode::from(err.kind()), diff --git a/src/ffi/handle.rs b/src/ffi/handle.rs new file mode 100644 index 00000000..a0421ba7 --- /dev/null +++ b/src/ffi/handle.rs @@ -0,0 +1,101 @@ +use std::{marker::PhantomData, mem, sync::Arc}; + +use crate::error::Error; + +#[repr(transparent)] +pub struct ArcHandle(usize, PhantomData); + +impl ArcHandle { + pub fn invalid() -> Self { + Self(0, PhantomData) + } + + pub fn create(value: T) -> Self { + let results = Arc::into_raw(Arc::new(value)); + Self(results as usize, PhantomData) + } + + pub fn load(&self) -> Result, Error> { + self.validate()?; + let slf = unsafe { Arc::from_raw(self.0 as *const T) }; + let copy = slf.clone(); + mem::forget(slf); // Arc::increment_strong_count(..) in 1.51 + Ok(copy) + } + + pub fn remove(&self) { + if self.0 != 0 { + unsafe { + // Drop the initial reference. There could be others outstanding. + Arc::from_raw(self.0 as *const T); + } + } + } + + #[inline] + pub fn validate(&self) -> Result<(), Error> { + if self.0 == 0 { + Err(err_msg!("Invalid handle")) + } else { + Ok(()) + } + } +} + +impl std::fmt::Display for ArcHandle { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Handle({:p})", self.0 as *const T) + } +} + +/// Derive a new handle type having an atomically increasing sequence number +#[macro_export] +macro_rules! new_sequence_handle (($newtype:ident, $counter:ident) => ( + static $counter: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0); + + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] + #[repr(transparent)] + pub struct $newtype(pub usize); + + impl $newtype { + #[allow(dead_code)] + pub fn invalid() -> $newtype { + $newtype(0) + } + + #[allow(dead_code)] + pub fn next() -> $newtype { + $newtype($counter.fetch_add(1, std::sync::atomic::Ordering::SeqCst) + 1) + } + } + + impl std::fmt::Display for $newtype { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}({})", stringify!($newtype), self.0) + } + } + + impl std::ops::Deref for $newtype { + type Target = usize; + fn deref(&self) -> &usize { + &self.0 + } + } + + impl PartialEq for $newtype { + fn eq(&self, other: &usize) -> bool { + self.0 == *other + } + } +)); + +#[cfg(test)] +mod tests { + new_sequence_handle!(TestHandle, TEST_HANDLE_CTR); + + #[test] + fn test_handle_seq() { + assert_eq!(TestHandle::next(), 1); + assert_eq!(TestHandle::next(), 2); + } +} diff --git a/src/ffi/key.rs b/src/ffi/key.rs new file mode 100644 index 00000000..c847c697 --- /dev/null +++ b/src/ffi/key.rs @@ -0,0 +1,489 @@ +use std::{os::raw::c_char, str::FromStr}; + +use ffi_support::{rust_string_to_c, ByteBuffer, FfiStr}; + +use super::{handle::ArcHandle, secret::SecretBuffer, ErrorCode}; +use crate::kms::{ + crypto_box, crypto_box_open, crypto_box_random_nonce, crypto_box_seal, crypto_box_seal_open, + derive_key_ecdh_1pu, derive_key_ecdh_es, KeyAlg, LocalKey, +}; + +pub type LocalKeyHandle = ArcHandle; + +#[repr(C)] +pub struct AeadParams { + nonce_length: i32, + tag_length: i32, +} + +#[no_mangle] +pub extern "C" fn askar_key_generate( + alg: FfiStr<'_>, + ephemeral: i8, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! 
{ + trace!("Generate key: {}", alg.as_str()); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let key = LocalKey::generate(alg, ephemeral != 0)?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_from_seed( + alg: FfiStr<'_>, + seed: ByteBuffer, + method: FfiStr<'_>, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("Create key from seed: {}", alg.as_str()); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let key = LocalKey::from_seed(alg, seed.as_slice(), method.as_opt_str())?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_from_jwk(jwk: FfiStr<'_>, out: *mut LocalKeyHandle) -> ErrorCode { + catch_err! { + trace!("Load key from JWK"); + check_useful_c_ptr!(out); + let key = LocalKey::from_jwk(jwk.as_str())?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_from_public_bytes( + alg: FfiStr<'_>, + public: ByteBuffer, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("Load key from public: {}", alg.as_str()); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let key = LocalKey::from_public_bytes(alg, public.as_slice())?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_public_bytes( + handle: LocalKeyHandle, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("Get key public bytes: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let public = key.to_public_bytes()?; + unsafe { *out = SecretBuffer::from_secret(public) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_from_secret_bytes( + alg: FfiStr<'_>, + secret: ByteBuffer, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("Load key from secret: {}", alg.as_str()); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let key = LocalKey::from_secret_bytes(alg, secret.as_slice())?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_secret_bytes( + handle: LocalKeyHandle, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("Get key secret bytes: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let public = key.to_secret_bytes()?; + unsafe { *out = SecretBuffer::from_secret(public) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_convert( + handle: LocalKeyHandle, + alg: FfiStr<'_>, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("Convert key: {} to {}", handle, alg.as_str()); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let key = handle.load()?.convert_key(alg)?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_from_key_exchange( + alg: FfiStr<'_>, + sk_handle: LocalKeyHandle, + pk_handle: LocalKeyHandle, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! 
{ + trace!("Key exchange: {}, {}", sk_handle, pk_handle); + check_useful_c_ptr!(out); + let alg = KeyAlg::from_str(alg.as_str())?; + let sk = sk_handle.load()?; + let pk = pk_handle.load()?; + let key = sk.to_key_exchange(alg, &pk)?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_free(handle: LocalKeyHandle) { + handle.remove(); +} + +#[no_mangle] +pub extern "C" fn askar_key_get_algorithm( + handle: LocalKeyHandle, + out: *mut *const c_char, +) -> ErrorCode { + catch_err! { + trace!("Get key algorithm: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + unsafe { *out = rust_string_to_c(key.algorithm().as_str()) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_ephemeral(handle: LocalKeyHandle, out: *mut i8) -> ErrorCode { + catch_err! { + trace!("Get key ephemeral: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + unsafe { *out = key.ephemeral as i8 }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_jwk_public( + handle: LocalKeyHandle, + alg: FfiStr<'_>, + out: *mut *const c_char, +) -> ErrorCode { + catch_err! { + trace!("Get key JWK public: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let alg = alg.as_opt_str().map(KeyAlg::from_str).transpose()?; + let jwk = key.to_jwk_public(alg)?; + unsafe { *out = rust_string_to_c(jwk) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_jwk_secret( + handle: LocalKeyHandle, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("Get key JWK secret: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let jwk = key.to_jwk_secret()?; + unsafe { *out = SecretBuffer::from_secret(jwk) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_get_jwk_thumbprint( + handle: LocalKeyHandle, + alg: FfiStr<'_>, + out: *mut *const c_char, +) -> ErrorCode { + catch_err! { + trace!("Get key JWK thumbprint: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let alg = alg.as_opt_str().map(KeyAlg::from_str).transpose()?; + let thumb = key.to_jwk_thumbprint(alg)?; + unsafe { *out = rust_string_to_c(thumb) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_aead_random_nonce( + handle: LocalKeyHandle, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("AEAD create nonce: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let nonce = key.aead_random_nonce()?; + unsafe { *out = SecretBuffer::from_secret(nonce) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_aead_get_params( + handle: LocalKeyHandle, + out: *mut AeadParams, +) -> ErrorCode { + catch_err! { + trace!("AEAD get params: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let params = key.aead_params()?; + unsafe { *out = AeadParams { + nonce_length: params.nonce_length as i32, + tag_length: params.tag_length as i32 + } }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_aead_encrypt( + handle: LocalKeyHandle, + message: ByteBuffer, + nonce: ByteBuffer, + aad: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! 
{ + trace!("AEAD encrypt: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let enc = key.aead_encrypt(message.as_slice(), nonce.as_slice(), aad.as_slice())?; + unsafe { *out = SecretBuffer::from_secret(enc) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_aead_decrypt( + handle: LocalKeyHandle, + ciphertext: ByteBuffer, + nonce: ByteBuffer, + aad: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("AEAD decrypt: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let dec = key.aead_decrypt(ciphertext.as_slice(), nonce.as_slice(), aad.as_slice())?; + unsafe { *out = SecretBuffer::from_secret(dec) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_sign_message( + handle: LocalKeyHandle, + message: ByteBuffer, + sig_type: FfiStr<'_>, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("Sign message: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let sig = key.sign_message(message.as_slice(), sig_type.as_opt_str())?; + unsafe { *out = SecretBuffer::from_secret(sig) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_verify_signature( + handle: LocalKeyHandle, + message: ByteBuffer, + signature: ByteBuffer, + sig_type: FfiStr<'_>, + out: *mut i8, +) -> ErrorCode { + catch_err! { + trace!("Verify signature: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let verify = key.verify_signature(message.as_slice(),signature.as_slice(), sig_type.as_opt_str())?; + unsafe { *out = verify as i8 }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_crypto_box_random_nonce(out: *mut SecretBuffer) -> ErrorCode { + catch_err! { + trace!("crypto box random nonce"); + check_useful_c_ptr!(out); + let nonce = crypto_box_random_nonce()?; + unsafe { *out = SecretBuffer::from_secret(&nonce[..]) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_crypto_box( + recip_key: LocalKeyHandle, + sender_key: LocalKeyHandle, + message: ByteBuffer, + nonce: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("crypto box: {}, {}", recip_key, sender_key); + check_useful_c_ptr!(out); + let recip_key = recip_key.load()?; + let sender_key = sender_key.load()?; + let message = crypto_box( + &*recip_key, + &*sender_key, + message.as_slice(), + nonce.as_slice() + )?; + unsafe { *out = SecretBuffer::from_secret(message) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_crypto_box_open( + recip_key: LocalKeyHandle, + sender_key: LocalKeyHandle, + message: ByteBuffer, + nonce: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("crypto box open: {}, {}", recip_key, sender_key); + check_useful_c_ptr!(out); + let recip_key = recip_key.load()?; + let sender_key = sender_key.load()?; + let message = crypto_box_open( + &*recip_key, + &*sender_key, + message.as_slice(), + nonce.as_slice() + )?; + unsafe { *out = SecretBuffer::from_secret(message) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_crypto_box_seal( + handle: LocalKeyHandle, + message: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! 
{ + trace!("crypto box seal: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let enc = crypto_box_seal(&key, message.as_slice())?; + unsafe { *out = SecretBuffer::from_secret(enc) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_crypto_box_seal_open( + handle: LocalKeyHandle, + ciphertext: ByteBuffer, + out: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + trace!("crypto box seal open: {}", handle); + check_useful_c_ptr!(out); + let key = handle.load()?; + let enc = crypto_box_seal_open(&key, ciphertext.as_slice())?; + unsafe { *out = SecretBuffer::from_secret(enc) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_derive_ecdh_es( + alg: FfiStr<'_>, + ephem_key: LocalKeyHandle, + recip_key: LocalKeyHandle, + apu: ByteBuffer, + apv: ByteBuffer, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("ECDH-ES: {}", alg.as_str()); + check_useful_c_ptr!(out); + let ephem_key = ephem_key.load()?; + let recip_key = recip_key.load()?; + let key = derive_key_ecdh_es( + &ephem_key, + &recip_key, + alg.as_str(), + apu.as_slice(), + apv.as_slice(), + )?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_derive_ecdh_1pu( + alg: FfiStr<'_>, + ephem_key: LocalKeyHandle, + sender_key: LocalKeyHandle, + recip_key: LocalKeyHandle, + apu: ByteBuffer, + apv: ByteBuffer, + cc_tag: ByteBuffer, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! { + trace!("ECDH-1PU: {}", alg.as_str()); + check_useful_c_ptr!(out); + let ephem_key = ephem_key.load()?; + let sender_key = sender_key.load()?; + let recip_key = recip_key.load()?; + let key = derive_key_ecdh_1pu( + &ephem_key, + &sender_key, + &recip_key, + alg.as_str(), + apu.as_slice(), + apv.as_slice(), + cc_tag.as_slice(), + )?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} diff --git a/src/ffi/macros.rs b/src/ffi/macros.rs index f9b87915..0f0410e7 100644 --- a/src/ffi/macros.rs +++ b/src/ffi/macros.rs @@ -1,6 +1,6 @@ macro_rules! 
catch_err { ($($e:tt)*) => { - match std::panic::catch_unwind(move || -> $crate::error::Result<_> {$($e)*}) { + match std::panic::catch_unwind(move || -> Result<_, $crate::error::Error> {$($e)*}) { Ok(Ok(a)) => a, Ok(Err(err)) => { // lib error $crate::ffi::error::set_last_error(Some(err)) diff --git a/src/ffi/mod.rs b/src/ffi/mod.rs index 29c5c609..516f152b 100644 --- a/src/ffi/mod.rs +++ b/src/ffi/mod.rs @@ -1,10 +1,7 @@ use std::marker::PhantomData; use std::os::raw::c_char; -use std::ptr; -use std::str::FromStr; -use ffi_support::{rust_string_to_c, ByteBuffer, FfiStr}; -use zeroize::{Zeroize, Zeroizing}; +use ffi_support::rust_string_to_c; #[cfg(feature = "jemalloc")] #[global_allocator] @@ -12,31 +9,26 @@ static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; pub static LIB_VERSION: &str = env!("CARGO_PKG_VERSION"); +#[macro_use] +mod handle; + #[macro_use] mod macros; mod error; - +mod key; mod log; - +mod result_list; +mod secret; mod store; -use self::error::{set_last_error, ErrorCode}; +use self::error::ErrorCode; use crate::error::Error; -use crate::future::{spawn_ok, unblock}; -use crate::keys::{derive_verkey, verify_signature, wrap::generate_raw_wrap_key, KeyAlg}; pub type CallbackId = i64; ffi_support::define_string_destructor!(askar_string_free); -#[no_mangle] -pub extern "C" fn askar_buffer_free(buffer: ByteBuffer) { - ffi_support::abort_on_panic::with_abort_on_panic(|| { - drop(buffer.destroy_into_vec().zeroize()); - }) -} - pub struct EnsureCallback)> { f: F, _pd: PhantomData, @@ -63,95 +55,6 @@ impl)> Drop for EnsureCallback { } } -#[no_mangle] -pub extern "C" fn askar_derive_verkey( - alg: FfiStr<'_>, - seed: ByteBuffer, - cb: Option, - cb_id: CallbackId, -) -> ErrorCode { - catch_err! { - trace!("Derive verkey"); - let alg = alg.as_opt_str().map(|alg| KeyAlg::from_str(alg).unwrap()).ok_or_else(|| err_msg!("Key algorithm not provided"))?; - let seed = Zeroizing::new(seed.as_slice().to_vec()); - let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let cb = EnsureCallback::new(move |result| - match result { - Ok(key) => cb(cb_id, ErrorCode::Success, rust_string_to_c(key)), - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), - } - ); - spawn_ok(async move { - let result = unblock(move || derive_verkey( - alg, seed.as_slice() - )).await; - cb.resolve(result); - }); - Ok(ErrorCode::Success) - } -} - -#[no_mangle] -pub extern "C" fn askar_generate_raw_key( - seed: ByteBuffer, - cb: Option, - cb_id: CallbackId, -) -> ErrorCode { - catch_err! { - trace!("Create raw key"); - let seed = match seed.as_slice() { - s if s.is_empty() => None, - s => Some(Zeroizing::new(s.to_vec())) - }; - let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let cb = EnsureCallback::new(move |result| - match result { - Ok(key) => cb(cb_id, ErrorCode::Success, rust_string_to_c(key)), - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), - } - ); - spawn_ok(async move { - let result = unblock(move || generate_raw_wrap_key( - seed.as_ref().map(|s| s.as_slice()) - ).map(|p| p.to_string())).await; - cb.resolve(result); - }); - Ok(ErrorCode::Success) - } -} - -#[no_mangle] -pub extern "C" fn askar_verify_signature( - signer_vk: FfiStr<'_>, - message: ByteBuffer, - signature: ByteBuffer, - cb: Option, - cb_id: CallbackId, -) -> ErrorCode { - catch_err! 
{ - trace!("Verify signature"); - let signer_vk = signer_vk.into_opt_string().ok_or_else(|| err_msg!("Signer verkey not provided"))?; - let message = message.as_slice().to_vec(); - let signature = signature.as_slice().to_vec(); - let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let cb = EnsureCallback::new(move |result| - match result { - Ok(verify) => cb(cb_id, ErrorCode::Success, verify as i8), - Err(err) => cb(cb_id, set_last_error(Some(err)), 0), - } - ); - spawn_ok(async move { - let result = unblock(move || verify_signature( - &signer_vk, - &message, - &signature - )).await; - cb.resolve(result); - }); - Ok(ErrorCode::Success) - } -} - #[no_mangle] pub extern "C" fn askar_version() -> *mut c_char { rust_string_to_c(LIB_VERSION.to_owned()) diff --git a/src/ffi/result_list.rs b/src/ffi/result_list.rs new file mode 100644 index 00000000..38b15a9c --- /dev/null +++ b/src/ffi/result_list.rs @@ -0,0 +1,248 @@ +use std::{ffi::CString, os::raw::c_char, ptr}; + +use super::{handle::ArcHandle, key::LocalKeyHandle, secret::SecretBuffer, ErrorCode}; +use crate::{ + error::Error, + kms::KeyEntry, + storage::{Entry, EntryTagSet}, +}; + +pub enum FfiResultList { + Single(R), + Rows(Vec), +} + +impl FfiResultList { + pub fn get_row(&self, idx: i32) -> Result<&R, Error> { + if idx >= 0 { + match self { + Self::Single(e) => { + if idx == 0 { + return Ok(e); + } + } + Self::Rows(r) => { + if let Some(e) = r.get(idx as usize) { + return Ok(e); + } + } + } + } + return Err(err_msg!(Input, "Invalid index for result set")); + } + + pub fn len(&self) -> i32 { + match self { + Self::Single(..) => 0, + Self::Rows(r) => r.len() as i32, + } + } +} + +impl From for FfiResultList { + fn from(row: R) -> Self { + Self::Single(row) + } +} + +impl From> for FfiResultList { + fn from(rows: Vec) -> Self { + Self::Rows(rows) + } +} + +pub type EntryListHandle = ArcHandle; + +pub type FfiEntryList = FfiResultList; + +#[no_mangle] +pub extern "C" fn askar_entry_list_count(handle: EntryListHandle, count: *mut i32) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(count); + let results = handle.load()?; + unsafe { *count = results.len() }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_entry_list_get_category( + handle: EntryListHandle, + index: i32, + category: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(category); + let results = handle.load()?; + let entry = results.get_row(index)?; + unsafe { *category = CString::new(entry.category.as_str()).unwrap().into_raw() }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_entry_list_get_name( + handle: EntryListHandle, + index: i32, + name: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(name); + let results = handle.load()?; + let entry = results.get_row(index)?; + unsafe { *name = CString::new(entry.name.as_str()).unwrap().into_raw() }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_entry_list_get_value( + handle: EntryListHandle, + index: i32, + value: *mut SecretBuffer, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(value); + let results = handle.load()?; + let entry = results.get_row(index)?; + unsafe { *value = SecretBuffer::from_secret(entry.value.as_ref()); } + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_entry_list_get_tags( + handle: EntryListHandle, + index: i32, + tags: *mut *const c_char, +) -> ErrorCode { + catch_err! 
{ + check_useful_c_ptr!(tags); + let results = handle.load()?; + let entry = results.get_row(index)?; + if entry.tags.is_empty() { + unsafe { *tags = ptr::null() }; + } else { + let tag_json = serde_json::to_vec(&EntryTagSet::from(entry.tags.as_slice())).unwrap(); + unsafe { *tags = CString::new(tag_json).unwrap().into_raw() }; + } + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_entry_list_free(handle: EntryListHandle) { + handle.remove(); +} + +pub type KeyEntryListHandle = ArcHandle; + +pub type FfiKeyEntryList = FfiResultList; + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_count( + handle: KeyEntryListHandle, + count: *mut i32, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(count); + let results = handle.load()?; + unsafe { *count = results.len() }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_free(handle: KeyEntryListHandle) { + handle.remove(); +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_get_algorithm( + handle: KeyEntryListHandle, + index: i32, + alg: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(alg); + let results = handle.load()?; + let entry = results.get_row(index)?; + if let Some(alg_name) = entry.algorithm() { + unsafe { *alg = CString::new(alg_name).unwrap().into_raw() }; + } else { + unsafe { *alg = ptr::null() }; + } + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_get_name( + handle: KeyEntryListHandle, + index: i32, + name: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(name); + let results = handle.load()?; + let entry = results.get_row(index)?; + unsafe { *name = CString::new(entry.name.as_str()).unwrap().into_raw() }; + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_get_metadata( + handle: KeyEntryListHandle, + index: i32, + metadata: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(metadata); + let results = handle.load()?; + let entry = results.get_row(index)?; + if let Some(m) = entry.metadata() { + unsafe { *metadata = CString::new(m).unwrap().into_raw(); } + } else { + unsafe { *metadata = ptr::null(); } + } + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_get_tags( + handle: KeyEntryListHandle, + index: i32, + tags: *mut *const c_char, +) -> ErrorCode { + catch_err! { + check_useful_c_ptr!(tags); + let results = handle.load()?; + let entry = results.get_row(index)?; + if entry.tags.is_empty() { + unsafe { *tags = ptr::null() }; + } else { + let tag_json = serde_json::to_vec(&EntryTagSet::from(entry.tags.as_slice())).unwrap(); + unsafe { *tags = CString::new(tag_json).unwrap().into_raw() }; + } + Ok(ErrorCode::Success) + } +} + +#[no_mangle] +pub extern "C" fn askar_key_entry_list_load_local( + handle: KeyEntryListHandle, + index: i32, + out: *mut LocalKeyHandle, +) -> ErrorCode { + catch_err! 
{ + trace!("Load key"); + check_useful_c_ptr!(out); + let results = handle.load()?; + let entry = results.get_row(index)?; + let key = entry.load_local_key()?; + unsafe { *out = LocalKeyHandle::create(key) }; + Ok(ErrorCode::Success) + } +} diff --git a/src/ffi/secret.rs b/src/ffi/secret.rs new file mode 100644 index 00000000..e5f1b9e8 --- /dev/null +++ b/src/ffi/secret.rs @@ -0,0 +1,50 @@ +use std::{convert::TryFrom, mem, ptr}; + +use crate::crypto::buffer::SecretBytes; + +#[no_mangle] +pub extern "C" fn askar_buffer_free(buffer: SecretBuffer) { + ffi_support::abort_on_panic::with_abort_on_panic(|| { + drop(buffer.destroy_into_secret()); + }) +} + +// Structure consistent with ffi_support ByteBuffer, but zeroized on drop +#[repr(C)] +pub struct SecretBuffer { + // must be >= 0, signed int was chosen for compatibility + len: i64, + // nullable + data: *mut u8, +} + +impl Default for SecretBuffer { + fn default() -> Self { + Self { + len: 0, + data: ptr::null_mut(), + } + } +} + +impl SecretBuffer { + pub fn from_secret(buffer: impl Into) -> Self { + let mut buf = buffer.into().into_boxed_slice(); + let len = i64::try_from(buf.len()).expect("secret length exceeds i64::MAX"); + let data = buf.as_mut_ptr(); + mem::forget(buf); + Self { len, data } + } + + pub fn destroy_into_secret(self) -> SecretBytes { + if self.data.is_null() { + SecretBytes::default() + } else { + if self.len < 0 { + panic!("found negative length for secret buffer"); + } + let len = self.len as usize; + SecretBytes::from(unsafe { Vec::from_raw_parts(self.data, len, len) }) + } + } +} diff --git a/src/ffi/store.rs b/src/ffi/store.rs index a93bf721..ea9d2e8d 100644 --- a/src/ffi/store.rs +++ b/src/ffi/store.rs @@ -1,32 +1,29 @@ -use std::collections::BTreeMap; -use std::ffi::CString; -use std::mem; -use std::os::raw::c_char; -use std::ptr; -use std::str::FromStr; -use std::sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, -}; +use std::{collections::BTreeMap, os::raw::c_char, ptr, str::FromStr, sync::Arc}; use async_mutex::{Mutex, MutexGuardArc}; use ffi_support::{rust_string_to_c, ByteBuffer, FfiStr}; -use indy_utils::new_handle_type; use once_cell::sync::Lazy; -use zeroize::Zeroize; -use super::error::set_last_error; -use super::{CallbackId, EnsureCallback, ErrorCode}; -use crate::any::{AnySession, AnyStore}; -use crate::error::Result as KvResult; -use crate::future::spawn_ok; -use crate::keys::{wrap::WrapKeyMethod, KeyAlg, KeyCategory, KeyEntry, PassKey}; -use crate::store::{ManageBackend, Scan}; -use crate::types::{Entry, EntryOperation, EntryTagSet, TagFilter}; +use super::{ + error::set_last_error, + key::LocalKeyHandle, + result_list::{EntryListHandle, FfiEntryList, FfiKeyEntryList, KeyEntryListHandle}, + CallbackId, EnsureCallback, ErrorCode, +}; +use crate::{ + backend::{ + any::{AnySession, AnyStore}, + ManageBackend, + }, + error::Error, + future::spawn_ok, + protect::{generate_raw_store_key, PassKey, StoreKeyMethod}, + storage::{Entry, EntryOperation, EntryTagSet, Scan, TagFilter}, +}; -new_handle_type!(StoreHandle, FFI_STORE_COUNTER); -new_handle_type!(SessionHandle, FFI_SESSION_COUNTER); -new_handle_type!(ScanHandle, FFI_SCAN_COUNTER); +new_sequence_handle!(StoreHandle, FFI_STORE_COUNTER); +new_sequence_handle!(SessionHandle, FFI_SESSION_COUNTER); +new_sequence_handle!(ScanHandle, FFI_SCAN_COUNTER); static FFI_STORES: Lazy>>> = Lazy::new(|| Mutex::new(BTreeMap::new())); @@ -43,7 +40,7 @@ impl StoreHandle { handle } - pub async fn load(&self) -> KvResult> { + pub async fn load(&self) -> Result, Error> { 
FFI_STORES .lock() .await @@ -52,7 +49,7 @@ impl StoreHandle { .ok_or_else(|| err_msg!("Invalid store handle")) } - pub async fn remove(&self) -> KvResult> { + pub async fn remove(&self) -> Result, Error> { FFI_STORES .lock() .await @@ -73,7 +70,7 @@ impl SessionHandle { handle } - pub async fn load(&self) -> KvResult> { + pub async fn load(&self) -> Result, Error> { Ok(Mutex::lock_arc( FFI_SESSIONS .lock() @@ -84,7 +81,7 @@ impl SessionHandle { .await) } - pub async fn remove(&self) -> KvResult>> { + pub async fn remove(&self) -> Result>, Error> { FFI_SESSIONS .lock() .await @@ -101,7 +98,7 @@ impl ScanHandle { handle } - pub async fn borrow(&self) -> KvResult> { + pub async fn borrow(&self) -> Result, Error> { FFI_SCANS .lock() .await @@ -111,7 +108,7 @@ impl ScanHandle { .ok_or_else(|| err_msg!(Busy, "Scan handle in use")) } - pub async fn release(&self, value: Scan<'static, Entry>) -> KvResult<()> { + pub async fn release(&self, value: Scan<'static, Entry>) -> Result<(), Error> { FFI_SCANS .lock() .await @@ -121,7 +118,7 @@ impl ScanHandle { Ok(()) } - pub async fn remove(&self) -> KvResult> { + pub async fn remove(&self) -> Result, Error> { FFI_SCANS .lock() .await @@ -131,150 +128,27 @@ impl ScanHandle { } } -#[repr(transparent)] -pub struct EntrySetHandle(u64); - -impl EntrySetHandle { - pub fn invalid() -> Self { - Self(0) - } - - pub fn create(value: FfiEntrySet) -> Self { - let results = Box::into_raw(Box::new(value)); - Self(results as u64) - } - - pub fn enter(&self, f: impl FnOnce(&mut FfiEntrySet) -> T) -> T { - let mut slf = mem::ManuallyDrop::new(unsafe { - Box::from_raw(self.0 as *const FfiEntrySet as *mut FfiEntrySet) - }); - f(&mut *slf) - } - - pub fn remove(&self) { - if self.0 != 0 { - unsafe { - Box::from_raw(self.0 as *const FfiEntrySet as *mut FfiEntrySet); - } - } - } -} - -pub struct FfiEntrySet { - pos: AtomicUsize, - rows: Vec, -} - -impl FfiEntrySet { - pub fn next(&self) -> Option { - let pos = self.pos.fetch_add(1, Ordering::Release); - if pos < self.rows.len() { - Some(self.rows[pos].clone()) - } else { - None - } - } -} - -impl From for FfiEntrySet { - fn from(entry: Entry) -> Self { - Self { - pos: AtomicUsize::default(), - rows: vec![FfiEntry::new(entry)], - } - } -} - -impl From> for FfiEntrySet { - fn from(entries: Vec) -> Self { - Self { - pos: AtomicUsize::default(), - rows: { - let mut acc = Vec::with_capacity(entries.len()); - acc.extend(entries.into_iter().map(FfiEntry::new)); - acc - }, - } - } -} - -impl Drop for FfiEntrySet { - fn drop(&mut self) { - self.rows.drain(..).for_each(FfiEntry::destroy); - } -} - -#[repr(C)] -pub struct FfiEntry { - category: *const c_char, - name: *const c_char, - value: ByteBuffer, - tags: *const c_char, -} - -unsafe impl Send for FfiEntry {} -unsafe impl Sync for FfiEntry {} - -impl Clone for FfiEntry { - fn clone(&self) -> Self { - Self { - category: self.category, - name: self.name, - value: unsafe { ptr::read(&self.value) }, - tags: self.tags, - } - } -} - -impl FfiEntry { - pub fn new(entry: Entry) -> Self { - let Entry { - category, - name, - value, - tags, - } = entry; - let category = CString::new(category).unwrap().into_raw(); - let name = CString::new(name).unwrap().into_raw(); - let value = ByteBuffer::from_vec(value.into_vec()); - let tags = match tags { - Some(tags) => { - let tags = serde_json::to_vec(&EntryTagSet::new(tags)).unwrap(); - CString::new(tags).unwrap().into_raw() - } - None => ptr::null(), +#[no_mangle] +pub extern "C" fn askar_store_generate_raw_key( + seed: ByteBuffer, + out: *mut 
*const c_char, +) -> ErrorCode { + catch_err! { + trace!("Create raw store key"); + let seed = match seed.as_slice() { + s if s.is_empty() => None, + s => Some(s) }; - Self { - category, - name, - value, - tags, - } - } - - pub fn destroy(self) { - unsafe { - CString::from_raw(self.category as *mut c_char); - CString::from_raw(self.name as *mut c_char); - self.value.destroy_into_vec().zeroize(); - if !self.tags.is_null() { - CString::from_raw(self.tags as *mut c_char); - } - } + let key = generate_raw_store_key(seed)?; + unsafe { *out = rust_string_to_c(key.to_string()); } + Ok(ErrorCode::Success) } } -#[repr(C)] -pub struct FfiUnpackResult { - unpacked: ByteBuffer, - recipient: *const c_char, - sender: *const c_char, -} - #[no_mangle] pub extern "C" fn askar_store_provision( spec_uri: FfiStr<'_>, - wrap_key_method: FfiStr<'_>, + key_method: FfiStr<'_>, pass_key: FfiStr<'_>, profile: FfiStr<'_>, recreate: i8, @@ -285,9 +159,9 @@ pub extern "C" fn askar_store_provision( trace!("Provision store"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; let spec_uri = spec_uri.into_opt_string().ok_or_else(|| err_msg!("No provision spec URI provided"))?; - let wrap_key_method = match wrap_key_method.as_opt_str() { - Some(method) => WrapKeyMethod::parse_uri(method)?, - None => WrapKeyMethod::default() + let key_method = match key_method.as_opt_str() { + Some(method) => StoreKeyMethod::parse_uri(method)?, + None => StoreKeyMethod::default() }; let pass_key = PassKey::from(pass_key.as_opt_str()).into_owned(); let profile = profile.into_opt_string(); @@ -303,7 +177,7 @@ pub extern "C" fn askar_store_provision( spawn_ok(async move { let result = async { let store = spec_uri.provision_backend( - wrap_key_method, + key_method, pass_key, profile.as_ref().map(String::as_str), recreate != 0 @@ -319,7 +193,7 @@ pub extern "C" fn askar_store_provision( #[no_mangle] pub extern "C" fn askar_store_open( spec_uri: FfiStr<'_>, - wrap_key_method: FfiStr<'_>, + key_method: FfiStr<'_>, pass_key: FfiStr<'_>, profile: FfiStr<'_>, cb: Option, @@ -329,8 +203,8 @@ pub extern "C" fn askar_store_open( trace!("Open store"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; let spec_uri = spec_uri.into_opt_string().ok_or_else(|| err_msg!("No store URI provided"))?; - let wrap_key_method = match wrap_key_method.as_opt_str() { - Some(method) => Some(WrapKeyMethod::parse_uri(method)?), + let key_method = match key_method.as_opt_str() { + Some(method) => Some(StoreKeyMethod::parse_uri(method)?), None => None }; let pass_key = PassKey::from(pass_key.as_opt_str()).into_owned(); @@ -347,7 +221,7 @@ pub extern "C" fn askar_store_open( spawn_ok(async move { let result = async { let store = spec_uri.open_backend( - wrap_key_method, + key_method, pass_key, profile.as_ref().map(String::as_str) ).await?; @@ -369,7 +243,7 @@ pub extern "C" fn askar_store_remove( trace!("Remove store"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; let spec_uri = spec_uri.into_opt_string().ok_or_else(|| err_msg!("No store URI provided"))?; - let cb = EnsureCallback::new(move |result: KvResult| + let cb = EnsureCallback::new(move |result: Result| match result { Ok(removed) => cb(cb_id, ErrorCode::Success, removed as i8), Err(err) => cb(cb_id, set_last_error(Some(err)), 0), @@ -472,7 +346,7 @@ pub extern "C" fn askar_store_remove_profile( #[no_mangle] pub extern "C" fn askar_store_rekey( handle: StoreHandle, - wrap_key_method: FfiStr<'_>, + key_method: FfiStr<'_>, pass_key: FfiStr<'_>, cb: Option, cb_id: CallbackId, @@ 
-480,9 +354,9 @@ pub extern "C" fn askar_store_rekey( catch_err! { trace!("Re-key store"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let wrap_key_method = match wrap_key_method.as_opt_str() { - Some(method) => WrapKeyMethod::parse_uri(method)?, - None => WrapKeyMethod::default() + let key_method = match key_method.as_opt_str() { + Some(method) => StoreKeyMethod::parse_uri(method)?, + None => StoreKeyMethod::default() }; let pass_key = PassKey::from(pass_key.as_opt_str()).into_owned(); let cb = EnsureCallback::new(move |result| @@ -496,7 +370,7 @@ pub extern "C" fn askar_store_rekey( let store = handle.remove().await?; match Arc::try_unwrap(store) { Ok(mut store) => { - store.rekey(wrap_key_method, pass_key.as_ref()).await?; + store.rekey(key_method, pass_key.as_ref()).await?; handle.replace(Arc::new(store)).await; Ok(()) } @@ -563,7 +437,7 @@ pub extern "C" fn askar_scan_start( let profile = profile.into_opt_string(); let category = category.into_opt_string().ok_or_else(|| err_msg!("Category not provided"))?; let tag_filter = tag_filter.as_opt_str().map(TagFilter::from_str).transpose()?; - let cb = EnsureCallback::new(move |result: KvResult| + let cb = EnsureCallback::new(move |result: Result| match result { Ok(scan_handle) => { info!("Started scan {} on store {}", scan_handle, handle); @@ -587,20 +461,20 @@ pub extern "C" fn askar_scan_start( #[no_mangle] pub extern "C" fn askar_scan_next( handle: ScanHandle, - cb: Option, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { trace!("Scan store next"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let cb = EnsureCallback::new(move |result: KvResult>>| + let cb = EnsureCallback::new(move |result: Result>,Error>| match result { Ok(Some(entries)) => { - let results = EntrySetHandle::create(FfiEntrySet::from(entries)); + let results = EntryListHandle::create(FfiEntryList::from(entries)); cb(cb_id, ErrorCode::Success, results) }, - Ok(None) => cb(cb_id, ErrorCode::Success, EntrySetHandle::invalid()), - Err(err) => cb(cb_id, set_last_error(Some(err)), EntrySetHandle::invalid()), + Ok(None) => cb(cb_id, ErrorCode::Success, EntryListHandle::invalid()), + Err(err) => cb(cb_id, set_last_error(Some(err)), EntryListHandle::invalid()), } ); spawn_ok(async move { @@ -628,32 +502,6 @@ pub extern "C" fn askar_scan_free(handle: ScanHandle) -> ErrorCode { } } -#[no_mangle] -pub extern "C" fn askar_entry_set_next( - handle: EntrySetHandle, - entry: *mut FfiEntry, - found: *mut i8, -) -> ErrorCode { - catch_err! 
{ - check_useful_c_ptr!(entry); - check_useful_c_ptr!(found); - handle.enter(|results| { - if let Some(next) = results.next() { - unsafe { *entry = next }; - unsafe { *found = 1 }; - } else { - unsafe { *found = 0 }; - } - }); - Ok(ErrorCode::Success) - } -} - -#[no_mangle] -pub extern "C" fn askar_entry_set_free(handle: EntrySetHandle) { - handle.remove(); -} - #[no_mangle] pub extern "C" fn askar_session_start( handle: StoreHandle, @@ -666,7 +514,7 @@ pub extern "C" fn askar_session_start( trace!("Session start"); let profile = profile.into_opt_string(); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let cb = EnsureCallback::new(move |result: KvResult| + let cb = EnsureCallback::new(move |result: Result| match result { Ok(sess_handle) => { info!("Started session {} on store {} (txn: {})", sess_handle, handle, as_transaction != 0); @@ -704,7 +552,7 @@ pub extern "C" fn askar_session_count( let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; let category = category.into_opt_string().ok_or_else(|| err_msg!("Category not provided"))?; let tag_filter = tag_filter.as_opt_str().map(TagFilter::from_str).transpose()?; - let cb = EnsureCallback::new(move |result: KvResult| + let cb = EnsureCallback::new(move |result: Result| match result { Ok(count) => cb(cb_id, ErrorCode::Success, count), Err(err) => cb(cb_id, set_last_error(Some(err)), 0), @@ -727,7 +575,7 @@ pub extern "C" fn askar_session_fetch( category: FfiStr<'_>, name: FfiStr<'_>, for_update: i8, - cb: Option, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { @@ -735,14 +583,14 @@ pub extern "C" fn askar_session_fetch( let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; let category = category.into_opt_string().ok_or_else(|| err_msg!("Category not provided"))?; let name = name.into_opt_string().ok_or_else(|| err_msg!("Name not provided"))?; - let cb = EnsureCallback::new(move |result: KvResult>| + let cb = EnsureCallback::new(move |result: Result,Error>| match result { Ok(Some(entry)) => { - let results = Box::into_raw(Box::new(FfiEntrySet::from(entry))); + let results = EntryListHandle::create(FfiEntryList::from(entry)); cb(cb_id, ErrorCode::Success, results) }, - Ok(None) => cb(cb_id, ErrorCode::Success, ptr::null()), - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), + Ok(None) => cb(cb_id, ErrorCode::Success, EntryListHandle::invalid()), + Err(err) => cb(cb_id, set_last_error(Some(err)), EntryListHandle::invalid()), } ); spawn_ok(async move { @@ -763,7 +611,7 @@ pub extern "C" fn askar_session_fetch_all( tag_filter: FfiStr<'_>, limit: i64, for_update: i8, - cb: Option, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { @@ -775,10 +623,10 @@ pub extern "C" fn askar_session_fetch_all( let cb = EnsureCallback::new(move |result| match result { Ok(rows) => { - let results = Box::into_raw(Box::new(FfiEntrySet::from(rows))); + let results = EntryListHandle::create(FfiEntryList::from(rows)); cb(cb_id, ErrorCode::Success, results) } - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), + Err(err) => cb(cb_id, set_last_error(Some(err)), EntryListHandle::invalid()), } ); spawn_ok(async move { @@ -850,9 +698,9 @@ pub extern "C" fn askar_session_update( let value = value.as_slice().to_vec(); let tags = if let Some(tags) = tags.as_opt_str() { Some( - serde_json::from_str::(tags) + serde_json::from_str::>(tags) .map_err(err_map!("Error decoding tags"))? 
- .into_inner(), + .into_vec(), ) } else { None @@ -881,54 +729,56 @@ pub extern "C" fn askar_session_update( } #[no_mangle] -pub extern "C" fn askar_session_create_keypair( +pub extern "C" fn askar_session_insert_key( handle: SessionHandle, - alg: FfiStr<'_>, + key_handle: LocalKeyHandle, + name: FfiStr<'_>, metadata: FfiStr<'_>, tags: FfiStr<'_>, - seed: ByteBuffer, - cb: Option, + expiry_ms: i64, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { - trace!("Create keypair"); + trace!("Insert key"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let alg = alg.as_opt_str().map(|alg| KeyAlg::from_str(alg).unwrap()).ok_or_else(|| err_msg!("Key algorithm not provided"))?; + let key = key_handle.load()?; + let name = name.into_opt_string().ok_or_else(|| err_msg!("No key name provided"))?; let metadata = metadata.into_opt_string(); let tags = if let Some(tags) = tags.as_opt_str() { Some( - serde_json::from_str::(tags) + serde_json::from_str::>(tags) .map_err(err_map!("Error decoding tags"))? - .into_inner(), + .into_vec(), ) } else { None }; - let seed = if seed.as_slice().len() > 0 { - Some(seed.as_slice().to_vec()) - } else { + let expiry_ms = if expiry_ms < 0 { None + } else { + Some(expiry_ms) }; - let cb = EnsureCallback::new(move |result| match result { - Ok(ident) => { - cb(cb_id, ErrorCode::Success, rust_string_to_c(ident)) + Ok(_) => { + cb(cb_id, ErrorCode::Success) } - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), + Err(err) => cb(cb_id, set_last_error(Some(err))), } ); spawn_ok(async move { let result = async { let mut session = handle.load().await?; - let key_entry = session.create_keypair( - alg, + session.insert_key( + name.as_str(), + &key, metadata.as_ref().map(String::as_str), - seed.as_ref().map(Vec::as_ref), tags.as_ref().map(Vec::as_slice), + expiry_ms, ).await?; - Ok(key_entry.ident.clone()) + Ok(()) }.await; cb.resolve(result); }); @@ -937,28 +787,28 @@ pub extern "C" fn askar_session_create_keypair( } #[no_mangle] -pub extern "C" fn askar_session_fetch_keypair( +pub extern "C" fn askar_session_fetch_key( handle: SessionHandle, - ident: FfiStr<'_>, + name: FfiStr<'_>, for_update: i8, - cb: Option, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { - trace!("Fetch keypair"); + trace!("Fetch key"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let ident = ident.into_opt_string().ok_or_else(|| err_msg!("No key ident provided"))?; + let name = name.into_opt_string().ok_or_else(|| err_msg!("No key name provided"))?; let cb = EnsureCallback::new(move |result| match result { Ok(Some(entry)) => { - let results = Box::into_raw(Box::new(FfiEntrySet::from(entry))); + let results = KeyEntryListHandle::create(FfiKeyEntryList::from(entry)); cb(cb_id, ErrorCode::Success, results) } Ok(None) => { - cb(cb_id, ErrorCode::Success, ptr::null()) + cb(cb_id, ErrorCode::Success, KeyEntryListHandle::invalid()) } - Err(err) => cb(cb_id, set_last_error(Some(err)), ptr::null()), + Err(err) => cb(cb_id, set_last_error(Some(err)), KeyEntryListHandle::invalid()), } ); @@ -966,11 +816,10 @@ pub extern "C" fn askar_session_fetch_keypair( let result = async { let mut session = handle.load().await?; let key_entry = session.fetch_key( - KeyCategory::KeyPair, - &ident, + name.as_str(), for_update != 0 ).await?; - Ok(key_entry.map(export_key_entry).transpose()?) 
+ Ok(key_entry) }.await; cb.resolve(result); }); @@ -979,48 +828,45 @@ pub extern "C" fn askar_session_fetch_keypair( } #[no_mangle] -pub extern "C" fn askar_session_update_keypair( +pub extern "C" fn askar_session_fetch_all_keys( handle: SessionHandle, - ident: FfiStr<'_>, - metadata: FfiStr<'_>, - tags: FfiStr<'_>, - cb: Option, + alg: FfiStr<'_>, + thumbprint: FfiStr<'_>, + tag_filter: FfiStr<'_>, + limit: i64, + for_update: i8, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { - trace!("Update keypair"); + trace!("Fetch all keys"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let ident = ident.into_opt_string().ok_or_else(|| err_msg!("No key ident provided"))?; - let metadata = metadata.into_opt_string(); - let tags = if let Some(tags) = tags.as_opt_str() { - Some( - serde_json::from_str::(tags) - .map_err(err_map!("Error decoding tags"))? - .into_inner(), - ) - } else { - None - }; + let alg = alg.into_opt_string(); + let thumbprint = thumbprint.into_opt_string(); + let tag_filter = tag_filter.as_opt_str().map(TagFilter::from_str).transpose()?; + let limit = if limit < 0 { None } else {Some(limit)}; let cb = EnsureCallback::new(move |result| match result { - Ok(_) => { - cb(cb_id, ErrorCode::Success) + Ok(entries) => { + let results = KeyEntryListHandle::create(FfiKeyEntryList::from(entries)); + cb(cb_id, ErrorCode::Success, results) } - Err(err) => cb(cb_id, set_last_error(Some(err))), + Err(err) => cb(cb_id, set_last_error(Some(err)), KeyEntryListHandle::invalid()), } ); spawn_ok(async move { let result = async { let mut session = handle.load().await?; - session.update_key( - KeyCategory::KeyPair, - &ident, - metadata.as_ref().map(String::as_str), - tags.as_ref().map(Vec::as_slice) + let key_entry = session.fetch_all_keys( + alg.as_ref().map(String::as_str), + thumbprint.as_ref().map(String::as_str), + tag_filter, + limit, + for_update != 0 ).await?; - Ok(()) + Ok(key_entry) }.await; cb.resolve(result); }); @@ -1029,90 +875,54 @@ pub extern "C" fn askar_session_update_keypair( } #[no_mangle] -pub extern "C" fn askar_session_sign_message( +pub extern "C" fn askar_session_update_key( handle: SessionHandle, - key_ident: FfiStr<'_>, - message: ByteBuffer, - cb: Option, + name: FfiStr<'_>, + metadata: FfiStr<'_>, + tags: FfiStr<'_>, + expiry_ms: i64, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! { - trace!("Sign message"); + trace!("Update key"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let key_ident = key_ident.into_opt_string().ok_or_else(|| err_msg!("Key identity not provided"))?; - // copy message so the caller can drop it - let message = message.as_slice().to_vec(); - + let name = name.into_opt_string().ok_or_else(|| err_msg!("No key name provided"))?; + let metadata = metadata.into_opt_string(); + let tags = if let Some(tags) = tags.as_opt_str() { + Some( + serde_json::from_str::>(tags) + .map_err(err_map!("Error decoding tags"))? 
+ .into_vec(), + ) + } else { + None + }; + let expiry_ms = if expiry_ms < 0 { + None + } else { + Some(expiry_ms) + }; let cb = EnsureCallback::new(move |result| match result { - Ok(sig) => { - cb(cb_id, ErrorCode::Success, ByteBuffer::from_vec(sig)) + Ok(_) => { + cb(cb_id, ErrorCode::Success) } - Err(err) => cb(cb_id, set_last_error(Some(err)), ByteBuffer::default()), + Err(err) => cb(cb_id, set_last_error(Some(err))), } ); spawn_ok(async move { let result = async { let mut session = handle.load().await?; - let signature = session.sign_message( - &key_ident, - &message, - ).await?; - Ok(signature) - }.await; - cb.resolve(result); - }); - Ok(ErrorCode::Success) - } -} - -#[no_mangle] -pub extern "C" fn askar_session_pack_message( - handle: SessionHandle, - recipient_vks: FfiStr<'_>, - from_key_ident: FfiStr<'_>, - message: ByteBuffer, - cb: Option, - cb_id: CallbackId, -) -> ErrorCode { - catch_err! { - trace!("Pack message"); - let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let mut recips = recipient_vks.as_opt_str().ok_or_else(|| err_msg!("Recipient verkey(s) not provided"))?; - let mut recipient_vks = vec![]; - loop { - if let Some(pos) = recips.find(",") { - recipient_vks.push((&recips[..pos]).to_string()); - recips = &recips[(pos+1)..]; - } else { - if !recips.is_empty() { - recipient_vks.push(recips.to_string()); - } - break; - } - } - let from_key_ident = from_key_ident.into_opt_string(); - let message = message.as_slice().to_vec(); - - let cb = EnsureCallback::new(move |result| - match result { - Ok(packed) => { - cb(cb_id, ErrorCode::Success, ByteBuffer::from_vec(packed)) - } - Err(err) => cb(cb_id, set_last_error(Some(err)), ByteBuffer::default()), - } - ); + session.update_key( + &name, + metadata.as_ref().map(String::as_str), + tags.as_ref().map(Vec::as_slice), + expiry_ms, - spawn_ok(async move { - let result = async { - let mut session = handle.load().await?; - let packed = session.pack_message( - recipient_vks.iter().map(String::as_str), - from_key_ident.as_ref().map(String::as_str), - &message ).await?; - Ok(packed) + Ok(()) }.await; cb.resolve(result); }); @@ -1121,38 +931,32 @@ pub extern "C" fn askar_session_pack_message( } #[no_mangle] -pub extern "C" fn askar_session_unpack_message( +pub extern "C" fn askar_session_remove_key( handle: SessionHandle, - message: ByteBuffer, - cb: Option, + name: FfiStr<'_>, + cb: Option, cb_id: CallbackId, ) -> ErrorCode { catch_err! 
{ - trace!("Unpack message"); + trace!("Remove key"); let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?; - let message = message.as_slice().to_vec(); - - let cb = EnsureCallback::new(move |result: KvResult<(Vec, String, Option)>| - match result { - Ok((unpacked, recipient, sender)) => { - cb(cb_id, ErrorCode::Success, FfiUnpackResult { - unpacked: ByteBuffer::from_vec(unpacked), recipient: rust_string_to_c(recipient), sender: sender.map(rust_string_to_c).unwrap_or(ptr::null_mut())} - ) - } - Err(err) => { - eprintln!("err: {:?}", &err); - cb(cb_id, set_last_error(Some(err)), FfiUnpackResult { unpacked: ByteBuffer::default(), recipient: ptr::null(), sender: ptr::null() }) - } + let name = name.into_opt_string().ok_or_else(|| err_msg!("No key name provided"))?; + let cb = EnsureCallback::new(move |result| + match result { + Ok(_) => { + cb(cb_id, ErrorCode::Success) } - ); + Err(err) => cb(cb_id, set_last_error(Some(err))), + } + ); spawn_ok(async move { let result = async { let mut session = handle.load().await?; - let (unpacked, recipient, sender) = session.unpack_message( - &message + session.remove_key( + &name, ).await?; - Ok((unpacked, recipient.to_string(), sender.map(|s| s.to_string()))) + Ok(()) }.await; cb.resolve(result); }); @@ -1206,11 +1010,3 @@ pub extern "C" fn askar_session_close( Ok(ErrorCode::Success) } } - -fn export_key_entry(key_entry: KeyEntry) -> KvResult { - let (category, name, params, tags) = key_entry.into_parts(); - let value = serde_json::to_string(¶ms) - .map_err(err_map!("Error converting key entry to JSON"))? - .into_bytes(); - Ok(Entry::new(category.to_string(), name, value, tags)) -} diff --git a/src/future.rs b/src/future.rs index e79e07c7..0aace01d 100644 --- a/src/future.rs +++ b/src/future.rs @@ -1,13 +1,15 @@ -use std::future::Future; -use std::pin::Pin; +use std::{future::Future, pin::Pin, time::Duration}; -pub use async_global_executor::block_on; -// use once_cell::sync::Lazy; -// use suspend_exec::ThreadPool; +use once_cell::sync::Lazy; +use tokio::runtime::Runtime; pub type BoxFuture<'a, T> = Pin + Send + 'a>>; -// pub static THREAD_POOL: Lazy = Lazy::new(ThreadPool::default); +static RUNTIME: Lazy = Lazy::new(|| Runtime::new().expect("Error creating tokio runtime")); + +pub fn block_on(f: impl Future) -> R { + RUNTIME.block_on(f) +} #[inline] pub async fn unblock(f: F) -> T @@ -15,11 +17,23 @@ where T: Send + 'static, F: FnOnce() -> T + Send + 'static, { - // THREAD_POOL.run(f).await.unwrap() - blocking::unblock(f).await + RUNTIME + .spawn_blocking(f) + .await + .expect("Error running blocking task") } #[inline] pub fn spawn_ok(fut: impl Future + Send + 'static) { - async_global_executor::spawn(fut).detach(); + RUNTIME.spawn(fut); +} + +pub async fn sleep(dur: Duration) { + let _rt = RUNTIME.enter(); + tokio::time::sleep(dur).await +} + +pub async fn timeout(dur: Duration, f: impl Future) -> Option { + let _rt = RUNTIME.enter(); + tokio::time::timeout(dur, f).await.ok() } diff --git a/src/indy_compat/mod.rs b/src/indy_compat/mod.rs deleted file mode 100644 index ce776fc8..00000000 --- a/src/indy_compat/mod.rs +++ /dev/null @@ -1,201 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::io::{stdout, Write}; - -use futures_lite::stream::StreamExt; -use indy_utils::base58; -use itertools::Itertools; -use serde::Serialize; -use sqlx::{sqlite::SqliteRow as DbRow, Row, SqlitePool as DbPool}; - -use super::{ - error::Result, - keys::kdf::argon2::Level, - keys::store::{decrypt, EncKey, HmacKey, StoreKey}, - types::{Entry, EntryTag}, -}; 
- -const CHUNK_SIZE: usize = 20; - -#[derive(Debug, Serialize)] -struct PrintEntry { - category: String, - name: String, - value: String, - tags: HashMap, -} - -impl PrintEntry { - pub fn new(entry: Entry) -> Self { - let value = String::from_utf8(entry.value.to_vec()).expect("Error parsing value as utf-8"); - let mut tags = HashMap::new(); - if let Some(entry_tags) = entry.tags { - for tag in entry_tags { - match tag { - EntryTag::Encrypted(name, value) => { - tags.insert(name, value); - } - EntryTag::Plaintext(name, value) => { - tags.insert(format!("~{}", name), value); - } - } - } - } - Self { - category: entry.category, - name: entry.name, - value, - tags, - } - } -} - -// test method for dumping the contents of the wallet -pub async fn print_records<'a>(path: &str, password: &str) -> Result<()> { - let pool = DbPool::connect(path).await?; - - let wallet_key = { - let metadata = sqlx::query("SELECT value from metadata") - .fetch_one(&pool) - .await?; - let enc_key = metadata.try_get(0)?; - decode_wallet_key(enc_key, &password)? - }; - - let tag_q = format!( - "SELECT * FROM (SELECT 1 as encrypted, item_id, name, value FROM tags_encrypted - UNION SELECT 0 as encrypted, item_id, name, value FROM tags_plaintext) - WHERE item_id IN ({})", - std::iter::repeat("?") - .take(CHUNK_SIZE) - .intersperse(", ") - .collect::() - ); - - let mut rows = sqlx::query("SELECT id, type, name, value, key FROM items").fetch(&pool); - let mut done = false; - let mut chunk = Vec::with_capacity(CHUNK_SIZE); - let mut ids = Vec::with_capacity(CHUNK_SIZE); - let mut writer = stdout(); - - while !done { - chunk.clear(); - ids.clear(); - let mut tag_query = sqlx::query(&tag_q); - for idx in 0..CHUNK_SIZE { - if let Some(enc_row) = rows.next().await { - let (row_id, row) = decode_row(&wallet_key, enc_row?)?; - chunk.push(row); - ids.push(row_id); - tag_query = tag_query.bind(row_id); - } else { - for _ in idx..CHUNK_SIZE { - tag_query = tag_query.bind(0); - } - done = true; - break; - } - } - - let mut tags = collect_tags(&wallet_key, tag_query.fetch_all(&pool).await?)?; - for (idx, id) in ids.iter().enumerate() { - chunk[idx].tags = tags.remove(id); - } - for entry in chunk.drain(..) { - serde_json::to_writer_pretty(&writer, &PrintEntry::new(entry)).unwrap(); - writer.write(b"\n").unwrap(); - } - } - drop(rows); - - pool.close().await; - Ok(()) -} - -#[inline] -fn get_slice<'a>(row: &'a DbRow, index: usize) -> Result<&'a [u8]> { - row.try_get(index) - .map_err(err_map!(Unexpected, "Error fetching column")) -} - -fn decode_row(key: &StoreKey, row: DbRow) -> Result<(i64, Entry)> { - let value_key_enc = get_slice(&row, 4)?; - let value_key = EncKey::from_slice(decrypt(&key.value_key, value_key_enc)?); - let value = decrypt(&value_key, get_slice(&row, 3)?)?; - - let entry = Entry::new( - decode_utf8(key.decrypt_category(get_slice(&row, 1)?)?)?, - decode_utf8(key.decrypt_name(get_slice(&row, 2)?)?)?, - value, - None, - ); - Ok((row.try_get(0)?, entry)) -} - -fn collect_tags(key: &StoreKey, tags: Vec) -> Result>> { - let mut result = BTreeMap::new(); - for row in tags { - let entry = result.entry(row.try_get(1)?).or_insert_with(Vec::new); - let name = decode_utf8(key.decrypt_tag_name(get_slice(&row, 2)?)?)?; - if row.try_get(0)? 
{ - // encrypted value - let value = decode_utf8(key.decrypt_tag_value(get_slice(&row, 3)?)?)?; - entry.push(EntryTag::Encrypted(name, value)) - } else { - let value = decode_utf8(get_slice(&row, 3)?.to_vec())?; - entry.push(EntryTag::Plaintext(name, value)); - }; - } - Ok(result) -} - -#[derive(Deserialize, Debug)] -struct EncStorageKey { - keys: Vec, - master_key_salt: Vec, -} - -pub fn decode_wallet_key(enc_key: &[u8], password: &str) -> Result { - let key = - serde_json::from_slice::(enc_key).map_err(err_map!("Invalid wallet key"))?; - - let keys = decrypt_key(key, password)?; - let data = rmp_serde::from_slice::<[serde_bytes::ByteBuf; 7]>(keys.as_slice()).unwrap(); - let wallet_key = StoreKey { - category_key: EncKey::from_slice(&data[0]), - name_key: EncKey::from_slice(&data[1]), - value_key: EncKey::from_slice(&data[2]), - item_hmac_key: HmacKey::from_slice(&data[3]), - tag_name_key: EncKey::from_slice(&data[4]), - tag_value_key: EncKey::from_slice(&data[5]), - tags_hmac_key: HmacKey::from_slice(&data[6]), - }; - - Ok(wallet_key) -} - -fn decrypt_key(key: EncStorageKey, password: &str) -> Result> { - // check for a raw key in base58 format - if let Ok(raw_key) = base58::decode(password) { - if raw_key.len() == 32 { - let master_key = EncKey::from_slice(&raw_key); - return Ok(decrypt(&master_key, key.keys.as_slice())?); - } - } - - let salt = &key.master_key_salt[..16]; - - // derive key with libsodium 'moderate' settings - let master_key = Level::Moderate.derive_key(salt, password)?; - if let Ok(keys) = decrypt(&master_key, key.keys.as_slice()) { - Ok(keys) - } else { - // derive key with libsodium 'interactive' settings - let master_key = Level::Interactive.derive_key(salt, password)?; - Ok(decrypt(&master_key, key.keys.as_slice())?) - } -} - -#[inline] -fn decode_utf8(value: Vec) -> Result { - String::from_utf8(value).map_err(err_map!(Encryption)) -} diff --git a/src/keys/encrypt.rs b/src/keys/encrypt.rs deleted file mode 100644 index 612f1a7f..00000000 --- a/src/keys/encrypt.rs +++ /dev/null @@ -1,238 +0,0 @@ -use std::fmt::Debug; - -use serde::{Deserialize, Serialize}; - -use crate::error::Result; -use crate::types::SecretBytes; - -pub trait SymEncryptKey: Clone + Debug + Eq + Sized + Serialize + for<'a> Deserialize<'a> { - const SIZE: usize; - - fn as_bytes(&self) -> &[u8]; - - fn from_slice(bytes: &[u8]) -> Self; - - fn from_seed(seed: &[u8]) -> Result; - - fn random_key() -> Self; -} - -pub trait SymEncryptHashKey: - Clone + Debug + Eq + Sized + Serialize + for<'a> Deserialize<'a> -{ - const SIZE: usize; - - fn random_hash_key() -> Self; -} - -pub trait SymEncrypt: Debug { - type Key: SymEncryptKey; - type HashKey: SymEncryptHashKey; - type Nonce; - - /// Convert a referenced secret value to a secure buffer with sufficient - /// memory for in-place encryption, reusing the same buffer if possible - fn prepare_input(input: &[u8]) -> SecretBytes; - - /// Create a predictable nonce for an input, to allow searching - fn hashed_nonce(input: &SecretBytes, key: &Self::HashKey) -> Result; - - /// Encrypt a secret value and optional random nonce, producing a Vec containing the - /// nonce, ciphertext and tag - fn encrypt( - input: SecretBytes, - enc_key: &Self::Key, - nonce: Option, - ) -> Result>; - - /// Get the expected size of an input value after encryption - fn encrypted_size(input_size: usize) -> usize; - - /// Decrypt a combined encrypted value - fn decrypt(enc: Vec, enc_key: &Self::Key) -> Result; -} - -pub(crate) mod aead { - use std::fmt::{self, Debug, Formatter}; - use 
std::marker::PhantomData; - use std::ptr; - - use chacha20poly1305::{ - aead::{ - generic_array::{ - typenum::{Unsigned, U32}, - ArrayLength, - }, - AeadInPlace, NewAead, - }, - ChaCha20Poly1305, - }; - use hmac::{Hmac, Mac, NewMac}; - use indy_utils::{keys::ArrayKey, random::random_deterministic}; - use sha2::Sha256; - - use super::{Result, SecretBytes, SymEncrypt, SymEncryptHashKey, SymEncryptKey}; - - pub type ChaChaEncrypt = AeadEncrypt; - - const SEED_LENGTH: usize = 32; - - impl + Debug> SymEncryptKey for ArrayKey { - const SIZE: usize = L::USIZE; - - fn as_bytes(&self) -> &[u8] { - &**self - } - - fn from_slice(bytes: &[u8]) -> Self { - ArrayKey::from_slice(bytes) - } - - fn from_seed(seed: &[u8]) -> Result { - if seed.len() != SEED_LENGTH { - return Err(err_msg!(Encryption, "Invalid length for seed")); - } - let input = ArrayKey::from_slice(seed); - let raw_key = ArrayKey::from_slice(&random_deterministic(&input, L::USIZE)); - Ok(ArrayKey::from(raw_key)) - } - - fn random_key() -> Self { - ArrayKey::random() - } - } - - impl + Debug> SymEncryptHashKey for ArrayKey { - const SIZE: usize = L::USIZE; - - fn random_hash_key() -> Self { - ArrayKey::random() - } - } - - #[derive(PartialEq, Eq, PartialOrd, Ord)] - pub struct AeadEncrypt(PhantomData); - - impl AeadEncrypt { - const NONCE_SIZE: usize = E::NonceSize::USIZE; - const TAG_SIZE: usize = E::TagSize::USIZE; - } - - impl Debug for AeadEncrypt { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("AeadEncrypt").finish() - } - } - - impl SymEncrypt for AeadEncrypt - where - E: NewAead + AeadInPlace, - E::KeySize: Debug, - { - type Key = ArrayKey; - type HashKey = ArrayKey; - type Nonce = ArrayKey; - - fn prepare_input(input: &[u8]) -> SecretBytes { - // if we must perform a heap allocation, try to make sure that the - // allocation is large enough to avoid reallocating later (for performance) - let size = input.len() + Self::NONCE_SIZE + Self::TAG_SIZE; - let mut buf = Vec::with_capacity(size); - buf.extend_from_slice(input); - SecretBytes::from(buf) - } - - fn hashed_nonce(input: &SecretBytes, key: &Self::HashKey) -> Result { - let mut nonce_hmac = - Hmac::::new_varkey(&**key).map_err(|e| err_msg!(Encryption, "{}", e))?; - nonce_hmac.update(&*input); - let nonce_long = nonce_hmac.finalize().into_bytes(); - Ok(ArrayKey::::from_slice( - &nonce_long[0..E::NonceSize::USIZE], - )) - } - - fn encrypt( - mut input: SecretBytes, - enc_key: &Self::Key, - nonce: Option, - ) -> Result> { - let nonce = nonce.unwrap_or_else(|| Self::Nonce::random()); - let chacha = E::new(&enc_key); - let mut buf = input.as_buffer(); - // should be a no-op if prepare_input was used - buf.reserve_extra(Self::NONCE_SIZE + Self::TAG_SIZE); - // replace the input data with the ciphertext and tag - chacha - .encrypt_in_place(&*nonce, &[], &mut buf) - .map_err(|e| err_msg!(Encryption, "{}", e))?; - let mut buf = input.into_vec(); - // prepend the nonce to the current (ciphertext + tag) Vec contents. 
- // extra capacity has previously been reserved for this in order to avoid - // reallocation of the Vec buffer - buf.splice(0..0, nonce.as_slice().into_iter().cloned()); - Ok(buf) - } - - #[inline] - fn encrypted_size(input_size: usize) -> usize { - Self::NONCE_SIZE + Self::TAG_SIZE + input_size - } - - fn decrypt(mut enc: Vec, enc_key: &Self::Key) -> Result { - if enc.len() < Self::NONCE_SIZE + Self::TAG_SIZE { - return Err(err_msg!( - Encryption, - "Buffer is too short to represent an encrypted value" - )); - } - let nonce = Self::Nonce::from_slice(&enc[0..Self::NONCE_SIZE]); - let chacha = E::new(&enc_key); - unsafe { - let cipher_len = enc.len() - Self::NONCE_SIZE; - ptr::copy( - enc.as_mut_ptr().add(Self::NONCE_SIZE), - enc.as_mut_ptr(), - cipher_len, - ); - enc.set_len(cipher_len); - } - let mut result = SecretBytes::from(enc); - chacha - .decrypt_in_place(&nonce, &[], &mut result.as_buffer()) - .map_err(|e| err_msg!(Encryption, "Error decrypting record: {}", e))?; - Ok(result) - } - } - - #[cfg(test)] - mod tests { - use super::*; - - #[test] - fn chacha_key_round_trip() { - let input = b"hello"; - let key = ArrayKey::random(); - let enc = ChaChaEncrypt::encrypt(SecretBytes::from(&input[..]), &key, None).unwrap(); - assert_eq!( - enc.len(), - input.len() + ChaChaEncrypt::NONCE_SIZE + ChaChaEncrypt::TAG_SIZE - ); - let dec = ChaChaEncrypt::decrypt(enc, &key).unwrap(); - assert_eq!(dec, &input[..]); - } - - #[test] - fn chacha_encrypt_avoid_realloc() { - let input = ChaChaEncrypt::prepare_input(b"hello"); - let buffer_ptr = input.as_ptr() as usize; - let key = ArrayKey::random(); - let enc = ChaChaEncrypt::encrypt(input, &key, None).unwrap(); - assert_eq!( - enc.as_ptr() as usize, - buffer_ptr, - "Same buffer should be used" - ); - } - } -} diff --git a/src/keys/kdf/argon2.rs b/src/keys/kdf/argon2.rs deleted file mode 100644 index 2bf047e6..00000000 --- a/src/keys/kdf/argon2.rs +++ /dev/null @@ -1,80 +0,0 @@ -use indy_utils::random::random_vec; -use zeroize::Zeroize; - -use crate::error::Result; -use crate::keys::{encrypt::aead::ChaChaEncrypt, store::EncKey}; - -pub const LEVEL_INTERACTIVE: &'static str = "13:int"; -pub const LEVEL_MODERATE: &'static str = "13:mod"; - -pub const HASH_SIZE: usize = 32; -pub const SALT_SIZE: usize = 16; - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum Level { - Interactive, - Moderate, -} - -impl Default for Level { - fn default() -> Self { - Self::Moderate - } -} - -impl Level { - pub fn from_str(level: &str) -> Option { - match level { - "int" | LEVEL_INTERACTIVE => Some(Self::Interactive), - "mod" | LEVEL_MODERATE => Some(Self::Moderate), - "" => Some(Self::default()), - _ => None, - } - } - - pub fn as_str(&self) -> &'static str { - match self { - Self::Interactive => LEVEL_INTERACTIVE, - Self::Moderate => LEVEL_MODERATE, - } - } - - pub fn derive_key(&self, salt: &[u8], password: &str) -> Result> { - let (mem_cost, time_cost) = match self { - Self::Interactive => (32768, 4), - Self::Moderate => (131072, 6), - }; - derive_key(password, salt, mem_cost, time_cost) - } -} - -fn derive_key( - password: &str, - salt: &[u8], - mem_cost: u32, - time_cost: u32, -) -> Result> { - if salt.len() < SALT_SIZE { - return Err(err_msg!(Encryption, "Invalid salt for argon2i hash")); - } - let config = argon2::Config { - variant: argon2::Variant::Argon2i, - version: argon2::Version::Version13, - mem_cost, - time_cost, - lanes: 1, - thread_mode: argon2::ThreadMode::Sequential, - secret: &[], - ad: &[], - hash_length: HASH_SIZE as u32, - }; - let mut 
hashed = argon2::hash_raw(password.as_bytes(), &salt[..SALT_SIZE], &config) - .map_err(|e| err_msg!(Encryption, "Error deriving key: {}", e))?; - let key = EncKey::::from_slice(&hashed); - hashed.zeroize(); - Ok(key) -} - -pub fn generate_salt() -> Vec { - random_vec(SALT_SIZE) -} diff --git a/src/keys/store.rs b/src/keys/store.rs deleted file mode 100644 index 19ab163a..00000000 --- a/src/keys/store.rs +++ /dev/null @@ -1,249 +0,0 @@ -use std::fmt::Debug; - -use serde::{Deserialize, Serialize}; - -use super::encrypt::{aead::ChaChaEncrypt, SymEncrypt, SymEncryptHashKey, SymEncryptKey}; -use crate::error::Result; -use crate::keys::EntryEncryptor; -use crate::types::{EncEntryTag, EntryTag, SecretBytes}; - -pub type EncKey = ::Key; -pub type HashKey = ::HashKey; -pub type StoreKey = StoreKeyImpl; - -/// A store key combining the keys required to encrypt -/// and decrypt storage records -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(bound( - deserialize = "EncKey: for<'a> Deserialize<'a>, HashKey: for<'a> Deserialize<'a>", - serialize = "EncKey: Serialize, HashKey: Serialize" -))] -pub struct StoreKeyImpl { - pub category_key: EncKey, - pub name_key: EncKey, - pub value_key: EncKey, - pub item_hmac_key: HashKey, - pub tag_name_key: EncKey, - pub tag_value_key: EncKey, - pub tags_hmac_key: HashKey, -} - -impl StoreKeyImpl { - pub fn new() -> Result { - Ok(Self { - category_key: E::Key::random_key(), - name_key: E::Key::random_key(), - value_key: E::Key::random_key(), - item_hmac_key: E::HashKey::random_hash_key(), - tag_name_key: E::Key::random_key(), - tag_value_key: E::Key::random_key(), - tags_hmac_key: E::HashKey::random_hash_key(), - }) - } - - pub fn encrypt_tag_name(&self, name: SecretBytes) -> Result> { - encrypt_searchable::(name, &self.tag_name_key, &self.tags_hmac_key) - } - - pub fn encrypt_tag_value(&self, value: SecretBytes) -> Result> { - encrypt_searchable::(value, &self.tag_value_key, &self.tags_hmac_key) - } - - pub fn decrypt_tag_name(&self, enc_tag_name: Vec) -> Result { - E::decrypt(enc_tag_name, &self.tag_name_key) - } - - pub fn decrypt_tag_value(&self, enc_tag_value: Vec) -> Result { - E::decrypt(enc_tag_value, &self.tag_value_key) - } - - pub fn to_string(&self) -> Result { - serde_json::to_string(self).map_err(err_map!(Unexpected, "Error serializing store key")) - } - - pub fn from_slice(input: &[u8]) -> Result { - serde_json::from_slice(input).map_err(err_map!(Unsupported, "Invalid store key")) - } -} - -impl PartialEq for StoreKeyImpl { - fn eq(&self, other: &Self) -> bool { - self.category_key == other.category_key - && self.name_key == other.name_key - && self.value_key == other.value_key - && self.item_hmac_key == other.item_hmac_key - && self.tag_name_key == other.tag_name_key - && self.tag_value_key == other.tag_value_key - && self.tags_hmac_key == other.tags_hmac_key - } -} -impl Eq for StoreKeyImpl {} - -/// Encrypt a value with a predictable nonce, making it searchable -fn encrypt_searchable( - input: SecretBytes, - enc_key: &E::Key, - hmac_key: &E::HashKey, -) -> Result> { - let nonce = E::hashed_nonce(&input, hmac_key)?; - E::encrypt(input, enc_key, Some(nonce)) -} - -impl EntryEncryptor for StoreKeyImpl -where - E: SymEncrypt, -{ - fn prepare_input(input: &[u8]) -> SecretBytes { - E::prepare_input(input) - } - - fn encrypt_entry_category(&self, category: SecretBytes) -> Result> { - encrypt_searchable::(category, &self.category_key, &self.item_hmac_key) - } - - fn encrypt_entry_name(&self, name: SecretBytes) -> Result> { - 
encrypt_searchable::(name, &self.name_key, &self.item_hmac_key) - } - - fn encrypt_entry_value(&self, value: SecretBytes) -> Result> { - let value_key = E::Key::random_key(); - let mut value = E::encrypt(value, &value_key, None)?; - let key_input = E::prepare_input(value_key.as_bytes()); - let mut result = E::encrypt(key_input, &self.value_key, None)?; - result.append(&mut value); - Ok(result) - } - - fn decrypt_entry_category(&self, enc_category: Vec) -> Result { - decode_utf8(E::decrypt(enc_category, &self.category_key)?.into_vec()) - } - - fn decrypt_entry_name(&self, enc_name: Vec) -> Result { - decode_utf8(E::decrypt(enc_name, &self.name_key)?.into_vec()) - } - - fn decrypt_entry_value(&self, mut enc_value: Vec) -> Result { - let enc_key_size = E::encrypted_size(E::Key::SIZE); - if enc_value.len() < enc_key_size + E::encrypted_size(0) { - return Err(err_msg!( - Encryption, - "Buffer is too short to represent an encrypted value", - )); - } - let value = enc_value[enc_key_size..].to_vec(); - enc_value.truncate(enc_key_size); - let value_key = E::Key::from_slice( - E::decrypt(enc_value, &self.value_key)? - .into_vec() - .as_slice(), - ); - E::decrypt(value, &value_key) - } - - fn encrypt_entry_tags(&self, tags: Vec) -> Result> { - tags.into_iter() - .map(|tag| match tag { - EntryTag::Plaintext(name, value) => { - let name = self.encrypt_tag_name(name.into())?; - Ok(EncEntryTag { - name, - value: value.into_bytes(), - plaintext: true, - }) - } - EntryTag::Encrypted(name, value) => { - let name = self.encrypt_tag_name(name.into())?; - let value = self.encrypt_tag_value(value.into())?; - Ok(EncEntryTag { - name, - value, - plaintext: false, - }) - } - }) - .collect() - } - - fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result> { - enc_tags.into_iter().try_fold(vec![], |mut acc, tag| { - let name = decode_utf8(self.decrypt_tag_name(tag.name)?.into_vec())?; - acc.push(if tag.plaintext { - let value = decode_utf8(tag.value)?; - EntryTag::Plaintext(name, value) - } else { - let value = decode_utf8(self.decrypt_tag_value(tag.value)?.into_vec())?; - EntryTag::Encrypted(name, value) - }); - Result::Ok(acc) - }) - } -} - -#[inline] -fn decode_utf8(value: Vec) -> Result { - String::from_utf8(value).map_err(err_map!(Encryption)) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::types::Entry; - - #[test] - fn store_key_round_trip() { - let key = StoreKey::new().unwrap(); - let test_record = Entry::new( - "category", - "name", - "value", - Some(vec![ - EntryTag::Plaintext("plain".to_string(), "tag".to_string()), - EntryTag::Encrypted("enctag".to_string(), "envtagval".to_string()), - ]), - ); - let enc_category = key - .encrypt_entry_category(test_record.category.clone().into()) - .unwrap(); - let enc_name = key - .encrypt_entry_name(test_record.name.clone().into()) - .unwrap(); - let enc_value = key - .encrypt_entry_value(test_record.value.clone().into()) - .unwrap(); - let enc_tags = key - .encrypt_entry_tags(test_record.tags.clone().unwrap()) - .unwrap(); - assert_ne!(test_record.category.as_bytes(), enc_category.as_slice()); - assert_ne!(test_record.name.as_bytes(), enc_name.as_slice()); - assert_ne!(test_record.value, enc_value); - - let cmp_record = Entry::new( - key.decrypt_entry_category(enc_category).unwrap(), - key.decrypt_entry_name(enc_name).unwrap(), - key.decrypt_entry_value(enc_value).unwrap(), - Some(key.decrypt_entry_tags(enc_tags).unwrap()), - ); - assert_eq!(test_record, cmp_record); - } - - #[test] - fn store_key_searchable() { - const NONCE_SIZE: usize = 12; - let input 
= SecretBytes::from(&b"hello"[..]); - let key = EncKey::::random_key(); - let hmac_key = EncKey::::random(); - let enc1 = encrypt_searchable::(input.clone(), &key, &hmac_key).unwrap(); - let enc2 = encrypt_searchable::(input.clone(), &key, &hmac_key).unwrap(); - assert_eq!(&enc1[0..NONCE_SIZE], &enc2[0..NONCE_SIZE]); - let dec = ChaChaEncrypt::decrypt(enc1, &key).unwrap(); - assert_eq!(dec, input); - } - - #[test] - fn store_key_serde() { - let key = StoreKey::new().unwrap(); - let key_json = serde_json::to_string(&key).unwrap(); - let key_cmp = serde_json::from_str(&key_json).unwrap(); - assert_eq!(key, key_cmp); - } -} diff --git a/src/keys/types.rs b/src/keys/types.rs deleted file mode 100644 index 6abd152d..00000000 --- a/src/keys/types.rs +++ /dev/null @@ -1,358 +0,0 @@ -use std::borrow::Cow; -use std::convert::Infallible; -use std::fmt::{self, Debug, Display, Formatter}; -use std::mem::ManuallyDrop; -use std::ops::Deref; -use std::ptr; -use std::str::FromStr; - -use indy_utils::keys::{EncodedVerKey, KeyType as IndyKeyAlg, PrivateKey, VerKey}; -use serde::{Deserialize, Serialize}; -use zeroize::Zeroize; - -use crate::error::Error; -use crate::types::{sorted_tags, EntryTag, SecretBytes}; - -/// Supported key algorithms -#[derive(Clone, Debug, PartialEq, Eq, Zeroize)] -pub enum KeyAlg { - /// curve25519-based signature scheme - ED25519, - /// Unrecognized algorithm - Other(String), -} - -serde_as_str_impl!(KeyAlg); - -impl KeyAlg { - /// Get a reference to a string representing the `KeyAlg` - pub fn as_str(&self) -> &str { - match self { - Self::ED25519 => "ed25519", - Self::Other(other) => other.as_str(), - } - } -} - -impl AsRef for KeyAlg { - fn as_ref(&self) -> &str { - self.as_str() - } -} - -impl FromStr for KeyAlg { - type Err = Infallible; - - fn from_str(s: &str) -> Result { - Ok(match s { - "ed25519" => Self::ED25519, - other => Self::Other(other.to_owned()), - }) - } -} - -impl Display for KeyAlg { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -/// Categories of keys supported by the default KMS -#[derive(Clone, Debug, PartialEq, Eq, Zeroize)] -pub enum KeyCategory { - /// A public key - PublicKey, - /// A combination of a public and private key - KeyPair, - /// An unrecognized key category - Other(String), -} - -impl KeyCategory { - /// Get a reference to a string representing the `KeyCategory` - pub fn as_str(&self) -> &str { - match self { - Self::PublicKey => "public", - Self::KeyPair => "keypair", - Self::Other(other) => other.as_str(), - } - } - - /// Convert the `KeyCategory` into an owned string - pub fn into_string(self) -> String { - match self { - Self::Other(other) => other, - _ => self.as_str().to_owned(), - } - } -} - -serde_as_str_impl!(KeyCategory); - -impl AsRef for KeyCategory { - fn as_ref(&self) -> &str { - self.as_str() - } -} - -impl FromStr for KeyCategory { - type Err = Infallible; - - fn from_str(s: &str) -> Result { - Ok(match s { - "public" => Self::PublicKey, - "keypair" => Self::KeyPair, - other => Self::Other(other.to_owned()), - }) - } -} - -impl Display for KeyCategory { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -/// Parameters defining a stored key -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -pub struct KeyParams { - /// The key algorithm - pub alg: KeyAlg, - - /// Associated key metadata - #[serde(default, rename = "meta", skip_serializing_if = "Option::is_none")] - pub metadata: Option, - - /// An optional external 
reference for the key - #[serde(default, rename = "ref", skip_serializing_if = "Option::is_none")] - pub reference: Option, - #[serde( - default, - rename = "pub", - skip_serializing_if = "Option::is_none", - with = "crate::serde_utils::as_base58" - )] - - /// The associated public key in binary format - pub pub_key: Option>, - - /// The associated private key in binary format - #[serde( - default, - rename = "prv", - skip_serializing_if = "Option::is_none", - with = "crate::serde_utils::as_base58" - )] - pub prv_key: Option, -} - -impl KeyParams { - pub(crate) fn to_vec(&self) -> Result, Error> { - serde_json::to_vec(self) - .map_err(|e| err_msg!(Unexpected, "Error serializing key params: {}", e)) - } - - pub(crate) fn from_slice(params: &[u8]) -> Result { - let result = serde_json::from_slice(params) - .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)); - result - } -} - -impl Drop for KeyParams { - fn drop(&mut self) { - self.zeroize() - } -} - -impl Zeroize for KeyParams { - fn zeroize(&mut self) { - self.prv_key.zeroize(); - } -} - -/// A stored key entry -#[derive(Clone, Debug, Eq)] -pub struct KeyEntry { - /// The category of the key entry (public or public/private pair) - pub category: KeyCategory, - /// The key entry identifier - pub ident: String, - /// The parameters defining the key - pub params: KeyParams, - /// Tags associated with the key entry record - pub tags: Option>, -} - -impl KeyEntry { - pub(crate) fn into_parts(self) -> (KeyCategory, String, KeyParams, Option>) { - let slf = ManuallyDrop::new(self); - unsafe { - ( - ptr::read(&slf.category), - ptr::read(&slf.ident), - ptr::read(&slf.params), - ptr::read(&slf.tags), - ) - } - } - - /// Determine if a key entry refers to a local or external key - pub fn is_local(&self) -> bool { - self.params.reference.is_none() - } - - /// Access the associated public key as an [`EncodedVerKey`] - pub fn encoded_verkey(&self) -> Result { - Ok(self - .verkey()? - .as_base58() - .map_err(err_map!(Unexpected, "Error encoding verkey"))?) 
- } - - /// Access the associated public key as a [`VerKey`] - pub fn verkey(&self) -> Result { - match (&self.params.alg, &self.params.pub_key) { - (KeyAlg::ED25519, Some(pub_key)) => Ok(VerKey::new(pub_key, Some(IndyKeyAlg::ED25519))), - (_, None) => Err(err_msg!(Input, "Undefined public key")), - _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), - } - } - - /// Access the associated private key as a [`PrivateKey`] - pub fn private_key(&self) -> Result { - match (&self.params.alg, &self.params.prv_key) { - (KeyAlg::ED25519, Some(prv_key)) => { - Ok(PrivateKey::new(prv_key, Some(IndyKeyAlg::ED25519))) - } - (_, None) => Err(err_msg!(Input, "Undefined private key")), - _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), - } - } - - pub(crate) fn sorted_tags(&self) -> Option> { - self.tags.as_ref().and_then(sorted_tags) - } -} - -impl PartialEq for KeyEntry { - fn eq(&self, rhs: &Self) -> bool { - self.category == rhs.category - && self.ident == rhs.ident - && self.params == rhs.params - && self.sorted_tags() == rhs.sorted_tags() - } -} - -/// A possibly-empty password or key used to derive a store wrap key -#[derive(Clone)] -pub struct PassKey<'a>(Option>); - -impl PassKey<'_> { - /// Create a scoped reference to the passkey - pub fn as_ref(&self) -> PassKey<'_> { - PassKey(Some(Cow::Borrowed(&**self))) - } - - pub(crate) fn is_none(&self) -> bool { - self.0.is_none() - } - - pub(crate) fn into_owned(self) -> PassKey<'static> { - let mut slf = ManuallyDrop::new(self); - let val = slf.0.take(); - PassKey(match val { - None => None, - Some(Cow::Borrowed(s)) => Some(Cow::Owned(s.to_string())), - Some(Cow::Owned(s)) => Some(Cow::Owned(s)), - }) - } -} - -impl Debug for PassKey<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if cfg!(test) { - f.debug_tuple("PassKey").field(&*self).finish() - } else { - f.debug_tuple("PassKey").field(&"").finish() - } - } -} - -impl Default for PassKey<'_> { - fn default() -> Self { - Self(None) - } -} - -impl Deref for PassKey<'_> { - type Target = str; - - fn deref(&self) -> &str { - match self.0.as_ref() { - None => "", - Some(s) => s.as_ref(), - } - } -} - -impl Drop for PassKey<'_> { - fn drop(&mut self) { - self.zeroize(); - } -} - -impl<'a> From<&'a str> for PassKey<'a> { - fn from(inner: &'a str) -> Self { - Self(Some(Cow::Borrowed(inner))) - } -} - -impl From for PassKey<'_> { - fn from(inner: String) -> Self { - Self(Some(Cow::Owned(inner))) - } -} - -impl<'a> From> for PassKey<'a> { - fn from(inner: Option<&'a str>) -> Self { - Self(inner.map(Cow::Borrowed)) - } -} - -impl<'a, 'b> PartialEq> for PassKey<'a> { - fn eq(&self, other: &PassKey<'b>) -> bool { - &**self == &**other - } -} -impl Eq for PassKey<'_> {} - -impl Zeroize for PassKey<'_> { - fn zeroize(&mut self) { - match self.0.take() { - Some(Cow::Owned(mut s)) => { - s.zeroize(); - } - _ => (), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn key_params_roundtrip() { - let params = KeyParams { - alg: KeyAlg::ED25519, - metadata: Some("meta".to_string()), - reference: None, - pub_key: Some(vec![0, 0, 0, 0]), - prv_key: Some(vec![1, 1, 1, 1].into()), - }; - let enc_params = params.to_vec().unwrap(); - let p2 = KeyParams::from_slice(&enc_params).unwrap(); - assert_eq!(p2, params); - } -} diff --git a/src/kms/entry.rs b/src/kms/entry.rs new file mode 100644 index 00000000..4e1e93d9 --- /dev/null +++ b/src/kms/entry.rs @@ -0,0 +1,136 @@ +use super::key::LocalKey; +use crate::{ + crypto::{alg::AnyKey, buffer::SecretBytes, jwk::FromJwk}, + 
error::Error, + storage::{Entry, EntryTag}, +}; + +/// Parameters defining a stored key +#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] +pub struct KeyParams { + /// Associated key metadata + #[serde(default, rename = "meta", skip_serializing_if = "Option::is_none")] + pub metadata: Option, + + /// An optional external reference for the key + #[serde(default, rename = "ref", skip_serializing_if = "Option::is_none")] + pub reference: Option, + + /// The associated key data (JWK) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub data: Option, +} + +impl KeyParams { + pub(crate) fn to_bytes(&self) -> Result { + serde_cbor::to_vec(self) + .map(SecretBytes::from) + .map_err(|e| err_msg!(Unexpected, "Error serializing key params: {}", e)) + } + + pub(crate) fn from_slice(params: &[u8]) -> Result { + let result = serde_cbor::from_slice(params) + .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)); + result + } +} + +/// A stored key entry +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct KeyEntry { + /// The key entry identifier + pub(crate) name: String, + /// The parameters defining the key + pub(crate) params: KeyParams, + /// Key algorithm + pub(crate) alg: Option, + /// Thumbprints for the key + pub(crate) thumbprints: Vec, + /// Thumbprints for the key + pub(crate) tags: Vec, +} + +impl KeyEntry { + /// Accessor for the key identity + pub fn algorithm(&self) -> Option<&str> { + self.alg.as_ref().map(String::as_ref) + } + + /// Accessor for the stored key metadata + pub fn metadata(&self) -> Option<&str> { + self.params.metadata.as_ref().map(String::as_ref) + } + + /// Accessor for the key identity + pub fn name(&self) -> &str { + self.name.as_str() + } + + /// Determine if a key entry refers to a local or external key + pub fn is_local(&self) -> bool { + self.params.reference.is_none() + } + + pub(crate) fn from_entry(entry: Entry) -> Result { + let params = KeyParams::from_slice(&entry.value)?; + let mut alg = None; + let mut thumbprints = Vec::new(); + let mut tags = entry.tags; + let mut idx = 0; + while idx < tags.len() { + let tag = &mut tags[idx]; + let name = tag.name(); + if name.starts_with("user:") { + tag.update_name(|tag| tag.replace_range(0..5, "")); + idx += 1; + } else if name == "alg" { + alg.replace(tags.remove(idx).into_value()); + } else if name == "thumb" { + thumbprints.push(tags.remove(idx).into_value()); + } else { + // unrecognized tag + tags.remove(idx).into_value(); + } + } + // keep sorted for checking equality + thumbprints.sort(); + tags.sort(); + Ok(Self { + name: entry.name, + params, + alg, + thumbprints, + tags, + }) + } + + /// Create a local key instance from this key storage entry + pub fn load_local_key(&self) -> Result { + if let Some(key_data) = self.params.data.as_ref() { + let inner = Box::::from_jwk_slice(key_data.as_ref())?; + Ok(LocalKey { + inner, + ephemeral: false, + }) + } else { + Err(err_msg!("Missing key data")) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn key_params_roundtrip() { + let params = KeyParams { + metadata: Some("meta".to_string()), + reference: None, + data: Some(SecretBytes::from(vec![0, 0, 0, 0])), + }; + let enc_params = params.to_bytes().unwrap(); + let p2 = KeyParams::from_slice(&enc_params).unwrap(); + assert_eq!(p2, params); + } +} diff --git a/src/kms/envelope.rs b/src/kms/envelope.rs new file mode 100644 index 00000000..b0401920 --- /dev/null +++ b/src/kms/envelope.rs @@ -0,0 +1,113 @@ +use std::str::FromStr; + +use 
super::key::LocalKey; +use crate::{ + crypto::{ + alg::{x25519::X25519KeyPair, KeyAlg}, + buffer::SecretBytes, + encrypt::crypto_box::{ + crypto_box as nacl_box, crypto_box_open as nacl_box_open, + crypto_box_seal as nacl_box_seal, crypto_box_seal_open as nacl_box_seal_open, + CBOX_NONCE_LENGTH, CBOX_TAG_LENGTH, + }, + kdf::{ecdh_1pu::Ecdh1PU, ecdh_es::EcdhEs}, + random::fill_random, + }, + error::Error, +}; + +#[inline] +fn cast_x25519(key: &LocalKey) -> Result<&X25519KeyPair, Error> { + if let Some(kp) = key.inner.downcast_ref::() { + Ok(kp) + } else { + Err(err_msg!(Input, "x25519 keypair required")) + } +} + +/// Generate a new random nonce for crypto_box +pub fn crypto_box_random_nonce() -> Result<[u8; CBOX_NONCE_LENGTH], Error> { + let mut nonce = [0u8; CBOX_NONCE_LENGTH]; + fill_random(&mut nonce); + Ok(nonce) +} + +/// Encrypt a message with crypto_box and a detached nonce +pub fn crypto_box( + recip_x25519: &LocalKey, + sender_x25519: &LocalKey, + message: &[u8], + nonce: &[u8], +) -> Result, Error> { + let recip_pk = cast_x25519(recip_x25519)?; + let sender_sk = cast_x25519(sender_x25519)?; + let mut buffer = SecretBytes::from_slice_reserve(message, CBOX_TAG_LENGTH); + nacl_box(recip_pk, sender_sk, &mut buffer, nonce)?; + Ok(buffer.into_vec()) +} + +/// Decrypt a message with crypto_box and a detached nonce +pub fn crypto_box_open( + recip_x25519: &LocalKey, + sender_x25519: &LocalKey, + message: &[u8], + nonce: &[u8], +) -> Result { + let recip_pk = cast_x25519(recip_x25519)?; + let sender_sk = cast_x25519(sender_x25519)?; + let mut buffer = SecretBytes::from_slice(message); + nacl_box_open(recip_pk, sender_sk, &mut buffer, nonce)?; + Ok(buffer) +} + +/// Perform message encryption equivalent to libsodium's `crypto_box_seal` +pub fn crypto_box_seal(recip_x25519: &LocalKey, message: &[u8]) -> Result, Error> { + let kp = cast_x25519(recip_x25519)?; + let sealed = nacl_box_seal(kp, message)?; + Ok(sealed.into_vec()) +} + +/// Perform message decryption equivalent to libsodium's `crypto_box_seal_open` +pub fn crypto_box_seal_open( + recip_x25519: &LocalKey, + ciphertext: &[u8], +) -> Result { + let kp = cast_x25519(recip_x25519)?; + Ok(nacl_box_seal_open(kp, ciphertext)?) 
+} + +/// Derive an ECDH-1PU shared key for authenticated encryption +pub fn derive_key_ecdh_1pu( + ephem_key: &LocalKey, + sender_key: &LocalKey, + recip_key: &LocalKey, + alg: &str, + apu: &[u8], + apv: &[u8], + cc_tag: &[u8], +) -> Result { + let key_alg = KeyAlg::from_str(alg)?; + let derive = Ecdh1PU::new( + &*ephem_key, + &*sender_key, + &*recip_key, + alg.as_bytes(), + apu, + apv, + cc_tag, + ); + LocalKey::from_key_derivation(key_alg, derive) +} + +/// Derive an ECDH-ES shared key for anonymous encryption +pub fn derive_key_ecdh_es( + ephem_key: &LocalKey, + recip_key: &LocalKey, + alg: &str, + apu: &[u8], + apv: &[u8], +) -> Result { + let key_alg = KeyAlg::from_str(alg)?; + let derive = EcdhEs::new(&*ephem_key, &*recip_key, alg.as_bytes(), apu, apv); + LocalKey::from_key_derivation(key_alg, derive) +} diff --git a/src/kms/key.rs b/src/kms/key.rs new file mode 100644 index 00000000..580b703b --- /dev/null +++ b/src/kms/key.rs @@ -0,0 +1,229 @@ +use std::str::FromStr; + +pub use crate::crypto::{ + alg::KeyAlg, + buffer::{SecretBytes, WriteBuffer}, + encrypt::KeyAeadParams, +}; +use crate::{ + crypto::{ + alg::{AnyKey, AnyKeyCreate, BlsCurves}, + encrypt::KeyAeadInPlace, + jwk::{FromJwk, ToJwk}, + kdf::{KeyDerivation, KeyExchange}, + random::fill_random, + repr::{ToPublicBytes, ToSecretBytes}, + sign::{KeySigVerify, KeySign, SignatureType}, + Error as CryptoError, + }, + error::Error, +}; + +/// A stored key entry +#[derive(Debug)] +pub struct LocalKey { + pub(crate) inner: Box, + pub(crate) ephemeral: bool, +} + +impl LocalKey { + /// Create a new random key or keypair + pub fn generate(alg: KeyAlg, ephemeral: bool) -> Result { + let inner = Box::::generate(alg)?; + Ok(Self { inner, ephemeral }) + } + + /// Create a new deterministic key or keypair + pub fn from_seed(alg: KeyAlg, seed: &[u8], _method: Option<&str>) -> Result { + let inner = Box::::from_seed(alg, seed.into())?; + Ok(Self { + inner, + ephemeral: false, + }) + } + + /// Import a key or keypair from a JWK + pub fn from_jwk(jwk: &str) -> Result { + let inner = Box::::from_jwk(jwk)?; + Ok(Self { + inner, + ephemeral: false, + }) + } + + /// Import a public key from its compact representation + pub fn from_public_bytes(alg: KeyAlg, public: &[u8]) -> Result { + let inner = Box::::from_public_bytes(alg, public)?; + Ok(Self { + inner, + ephemeral: false, + }) + } + + /// Export the raw bytes of the public key + pub fn to_public_bytes(&self) -> Result { + Ok(self.inner.to_public_bytes()?) + } + + /// Import a symmetric key or public-private keypair from its compact representation + pub fn from_secret_bytes(alg: KeyAlg, secret: &[u8]) -> Result { + let inner = Box::::from_secret_bytes(alg, secret)?; + Ok(Self { + inner, + ephemeral: false, + }) + } + + /// Export the raw bytes of the private key + pub fn to_secret_bytes(&self) -> Result { + Ok(self.inner.to_secret_bytes()?) + } + + /// Derive a new key from a Diffie-Hellman exchange between this keypair and a public key + pub fn to_key_exchange(&self, alg: KeyAlg, pk: &LocalKey) -> Result { + let inner = Box::::from_key_exchange(alg, &*self.inner, &*pk.inner)?; + Ok(Self { + inner, + ephemeral: self.ephemeral || pk.ephemeral, + }) + } + + pub(crate) fn from_key_derivation( + alg: KeyAlg, + derive: impl KeyDerivation, + ) -> Result { + let inner = Box::::from_key_derivation(alg, derive)?; + Ok(Self { + inner, + ephemeral: false, + }) + } + + pub(crate) fn encode(&self) -> Result { + Ok(self.inner.to_jwk_secret()?) 
+ } + + /// Accessor for the key algorithm + pub fn algorithm(&self) -> KeyAlg { + self.inner.algorithm() + } + + /// Get the public JWK representation for this key or keypair + pub fn to_jwk_public(&self, alg: Option) -> Result { + Ok(self.inner.to_jwk_public(alg)?) + } + + /// Get the JWK representation for this private key or keypair + pub fn to_jwk_secret(&self) -> Result { + Ok(self.inner.to_jwk_secret()?) + } + + /// Get the JWK thumbprint for this key or keypair + pub fn to_jwk_thumbprint(&self, alg: Option) -> Result { + Ok(self.inner.to_jwk_thumbprint(alg)?) + } + + /// Get the set of indexed JWK thumbprints for this key or keypair + pub fn to_jwk_thumbprints(&self) -> Result, Error> { + if self.inner.algorithm() == KeyAlg::Bls12_381(BlsCurves::G1G2) { + return Ok(vec![ + self.inner + .to_jwk_thumbprint(Some(KeyAlg::Bls12_381(BlsCurves::G1)))?, + self.inner + .to_jwk_thumbprint(Some(KeyAlg::Bls12_381(BlsCurves::G2)))?, + ]); + } else { + Ok(vec![self.inner.to_jwk_thumbprint(None)?]) + } + } + + /// Map this key or keypair to its equivalent for another key algorithm + pub fn convert_key(&self, alg: KeyAlg) -> Result { + let inner = self.inner.convert_key(alg)?; + Ok(Self { + inner, + ephemeral: self.ephemeral, + }) + } + + /// Fetch the AEAD parameter lengths + pub fn aead_params(&self) -> Result { + let params = self.inner.aead_params(); + if params.nonce_length == 0 { + return Err(err_msg!( + Unsupported, + "AEAD is not supported for this key type" + )); + } + Ok(params) + } + + /// Create a new random nonce for AEAD message encryption + pub fn aead_random_nonce(&self) -> Result, Error> { + let nonce_len = self.inner.aead_params().nonce_length; + if nonce_len == 0 { + return Err(err_msg!( + Unsupported, + "Key type does not support AEAD encryption" + )); + } + let buf = SecretBytes::new_with(nonce_len, fill_random); + Ok(buf.into_vec()) + } + + /// Perform AEAD message encryption with this encryption key + pub fn aead_encrypt(&self, message: &[u8], nonce: &[u8], aad: &[u8]) -> Result, Error> { + let params = self.inner.aead_params(); + let mut buf = + SecretBytes::from_slice_reserve(message, params.nonce_length + params.tag_length); + self.inner.encrypt_in_place(&mut buf, nonce, aad)?; + Ok(buf.into_vec()) + } + + /// Perform AEAD message decryption with this encryption key + pub fn aead_decrypt( + &self, + ciphertext: &[u8], + nonce: &[u8], + aad: &[u8], + ) -> Result { + let mut buf = SecretBytes::from_slice(ciphertext); + self.inner.decrypt_in_place(&mut buf, nonce, aad)?; + Ok(buf) + } + + /// Sign a message with this private signing key + pub fn sign_message(&self, message: &[u8], sig_type: Option<&str>) -> Result, Error> { + let mut sig = Vec::new(); + self.inner.write_signature( + message, + sig_type.map(SignatureType::from_str).transpose()?, + &mut sig, + )?; + Ok(sig) + } + + /// Verify a message signature with this private signing key or public verification key + pub fn verify_signature( + &self, + message: &[u8], + signature: &[u8], + sig_type: Option<&str>, + ) -> Result { + Ok(self.inner.verify_signature( + message, + signature, + sig_type.map(SignatureType::from_str).transpose()?, + )?) + } +} + +impl KeyExchange for LocalKey { + fn write_key_exchange( + &self, + other: &LocalKey, + out: &mut dyn WriteBuffer, + ) -> Result<(), CryptoError> { + self.inner.write_key_exchange(&other.inner, out) + } +} diff --git a/src/kms/mod.rs b/src/kms/mod.rs new file mode 100644 index 00000000..099f07df --- /dev/null +++ b/src/kms/mod.rs @@ -0,0 +1,62 @@ +//! 
Support for cryptographic key management and operations + +use std::{ + fmt::{self, Debug, Display, Formatter}, + str::FromStr, +}; + +use zeroize::Zeroize; + +use crate::error::Error; + +mod envelope; +pub use self::envelope::{ + crypto_box, crypto_box_open, crypto_box_random_nonce, crypto_box_seal, crypto_box_seal_open, + derive_key_ecdh_1pu, derive_key_ecdh_es, +}; + +mod entry; +pub use self::entry::{KeyEntry, KeyParams}; + +mod key; +pub use self::key::{KeyAlg, LocalKey}; + +/// Supported categories of KMS entries +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Zeroize)] +pub(crate) enum KmsCategory { + /// A stored key or keypair + CryptoKey, + // future options: Mnemonic, Entropy +} + +impl KmsCategory { + /// Get a reference to a string representing the `KmsCategory` + pub fn as_str(&self) -> &str { + match self { + Self::CryptoKey => "cryptokey", + } + } +} + +impl AsRef for KmsCategory { + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl FromStr for KmsCategory { + type Err = Error; + + fn from_str(s: &str) -> Result { + Ok(match s { + "cryptokey" => Self::CryptoKey, + _ => return Err(err_msg!("Unknown KMS category: {}", s)), + }) + } +} + +impl Display for KmsCategory { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} diff --git a/src/lib.rs b/src/lib.rs index b9fd024a..0d63bbe0 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,12 +1,16 @@ //! Secure storage designed for Hyperledger Aries agents #![cfg_attr(docsrs, feature(doc_cfg))] -#![warn(missing_docs, missing_debug_implementations, rust_2018_idioms)] +#![deny(missing_docs, missing_debug_implementations, rust_2018_idioms)] #[macro_use] mod error; pub use self::error::{Error, ErrorKind}; +#[cfg(test)] +#[macro_use] +extern crate hex_literal; + #[macro_use] mod macros; @@ -17,18 +21,22 @@ extern crate log; #[macro_use] extern crate serde; -#[cfg(any(feature = "postgres", feature = "sqlite"))] -mod db_utils; +pub mod backend; +pub use self::backend::{Backend, ManageBackend}; -#[doc(hidden)] -pub mod future; +#[cfg(feature = "any")] +pub use self::backend::any; -#[cfg(feature = "indy_compat")] -#[cfg_attr(docsrs, doc(cfg(feature = "indy_compat")))] -/// Indy wallet compatibility support -pub mod indy_compat; +#[cfg(feature = "postgres")] +pub use self::backend::postgres; + +#[cfg(feature = "sqlite")] +pub use self::backend::sqlite; -mod options; +pub use askar_crypto as crypto; + +#[doc(hidden)] +pub mod future; #[cfg(feature = "ffi")] #[macro_use] @@ -37,35 +45,10 @@ extern crate serde_json; #[cfg(feature = "ffi")] mod ffi; -#[cfg(feature = "postgres")] -#[cfg_attr(docsrs, doc(cfg(feature = "postgres")))] -/// Postgres database support -pub mod postgres; - -#[macro_use] -pub(crate) mod serde_utils; - -#[cfg(feature = "sqlite")] -#[cfg_attr(docsrs, doc(cfg(feature = "sqlite")))] -/// Sqlite database support -pub mod sqlite; - -#[cfg(feature = "any")] -#[cfg_attr(docsrs, doc(cfg(feature = "any")))] -/// Generic backend (from URI) support -pub mod any; - -mod keys; -pub use self::keys::{ - derive_verkey, verify_signature, - wrap::{generate_raw_wrap_key, WrapKeyMethod}, - KeyAlg, KeyCategory, KeyEntry, KeyParams, PassKey, -}; - -mod store; -pub use self::store::{Backend, ManageBackend, QueryBackend, Scan, Session, Store}; +pub mod kms; -mod types; -pub use self::types::{Entry, EntryOperation, EntryTag, SecretBytes, TagFilter}; +mod protect; +pub use protect::{generate_raw_store_key, PassKey, StoreKeyMethod}; -mod wql; +mod storage; +pub use storage::{Entry, 
EntryTag, Scan, Store, TagFilter}; diff --git a/src/protect/hmac_key.rs b/src/protect/hmac_key.rs new file mode 100644 index 00000000..aa0bc70a --- /dev/null +++ b/src/protect/hmac_key.rs @@ -0,0 +1,154 @@ +use std::{ + fmt::{self, Debug, Formatter}, + marker::PhantomData, +}; + +use hmac::{ + digest::{BlockInput, FixedOutput, Reset, Update}, + Hmac, Mac, NewMac, +}; +use serde::{Deserialize, Serialize}; + +use crate::{ + crypto::{ + self, + buffer::ArrayKey, + generic_array::{typenum::Unsigned, ArrayLength, GenericArray}, + kdf::KeyDerivation, + repr::KeyGen, + }, + error::Error, +}; + +#[derive(Clone, Deserialize, Serialize)] +#[serde( + transparent, + bound( + deserialize = "ArrayKey: for<'a> Deserialize<'a>", + serialize = "ArrayKey: Serialize" + ) +)] +pub struct HmacKey>(ArrayKey, PhantomData); + +impl> HmacKey { + #[allow(dead_code)] + pub fn from_slice(key: &[u8]) -> Result { + if key.len() != L::USIZE { + return Err(err_msg!(Encryption, "invalid length for hmac key")); + } + Ok(Self(ArrayKey::from_slice(key), PhantomData)) + } +} + +impl> AsRef<[u8]> for HmacKey { + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } +} + +impl> AsRef> for HmacKey { + fn as_ref(&self) -> &GenericArray { + self.0.as_ref() + } +} + +impl> Debug for HmacKey { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if cfg!(test) { + f.debug_tuple("HmacKey").field(&*self).finish() + } else { + f.debug_tuple("HmacKey").field(&"").finish() + } + } +} + +impl> PartialEq for HmacKey { + fn eq(&self, other: &Self) -> bool { + self.0.as_ref() == other.0.as_ref() + } +} +impl> Eq for HmacKey {} + +impl> KeyGen for HmacKey { + fn generate() -> Result { + Ok(Self(ArrayKey::random(), PhantomData)) + } +} + +pub trait HmacDerive { + type Hash: BlockInput + Default + Reset + Update + Clone + FixedOutput; + type Key: AsRef<[u8]>; + + fn hmac_deriver<'d>(&'d self, inputs: &'d [&'d [u8]]) + -> HmacDeriver<'d, Self::Hash, Self::Key>; +} + +impl> HmacDerive for HmacKey +where + H: BlockInput + Default + Reset + Update + Clone + FixedOutput, +{ + type Hash = H; + type Key = Self; + + #[inline] + fn hmac_deriver<'d>( + &'d self, + inputs: &'d [&'d [u8]], + ) -> HmacDeriver<'d, Self::Hash, Self::Key> { + HmacDeriver { + key: self, + inputs, + _marker: PhantomData, + } + } +} + +pub struct HmacDeriver<'d, H, K: ?Sized> { + key: &'d K, + inputs: &'d [&'d [u8]], + _marker: PhantomData, +} + +impl KeyDerivation for HmacDeriver<'_, H, K> +where + K: AsRef<[u8]> + ?Sized, + H: BlockInput + Default + Reset + Update + Clone + FixedOutput, +{ + fn derive_key_bytes(&mut self, key_output: &mut [u8]) -> Result<(), crypto::Error> { + if key_output.len() > H::OutputSize::USIZE { + return Err(crypto::Error::from_msg( + crypto::ErrorKind::Encryption, + "invalid length for hmac output", + )); + } + let mut hmac = Hmac::::new_varkey(self.key.as_ref()).map_err(|_| { + crypto::Error::from_msg(crypto::ErrorKind::Encryption, "invalid length for hmac key") + })?; + for msg in self.inputs { + hmac.update(msg); + } + let hash = hmac.finalize().into_bytes(); + key_output.copy_from_slice(&hash[..key_output.len()]); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::crypto::generic_array::typenum::U32; + use sha2::Sha256; + + #[test] + fn hmac_expected() { + let key = HmacKey::::from_slice(&hex!( + "c32ef97a2eed6316ae9b0d3129554358980ee6e0b21b81625229c191a3469f7e" + )) + .unwrap(); + let mut output = [0u8; 12]; + key.hmac_deriver(&[b"test message"]) + .derive_key_bytes(&mut output) + .unwrap(); + assert_eq!(output, 
&hex!("4cecfbf6be721395529be686")[..]); + } +} diff --git a/src/protect/kdf/argon2.rs b/src/protect/kdf/argon2.rs new file mode 100644 index 00000000..060271f6 --- /dev/null +++ b/src/protect/kdf/argon2.rs @@ -0,0 +1,64 @@ +use askar_crypto::kdf::KeyDerivation; + +use crate::{ + crypto::{ + buffer::ArrayKey, + kdf::argon2::{Argon2, Params, PARAMS_INTERACTIVE, PARAMS_MODERATE}, + repr::{KeyMeta, KeySecretBytes}, + }, + error::Error, + protect::store_key::{StoreKey, StoreKeyType}, +}; + +pub use crate::crypto::kdf::argon2::SaltSize; + +pub const LEVEL_INTERACTIVE: &'static str = "13:int"; +pub const LEVEL_MODERATE: &'static str = "13:mod"; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum Level { + Interactive, + Moderate, +} + +impl Default for Level { + fn default() -> Self { + Self::Moderate + } +} + +impl Level { + pub fn from_str(level: &str) -> Option { + match level { + "int" | LEVEL_INTERACTIVE => Some(Self::Interactive), + "mod" | LEVEL_MODERATE => Some(Self::Moderate), + "" => Some(Self::default()), + _ => None, + } + } + + pub fn as_str(&self) -> &'static str { + match self { + Self::Interactive => LEVEL_INTERACTIVE, + Self::Moderate => LEVEL_MODERATE, + } + } + + pub fn generate_salt(&self) -> ArrayKey { + ArrayKey::random() + } + + fn params(&self) -> &Params { + match self { + Self::Interactive => &PARAMS_INTERACTIVE, + Self::Moderate => &PARAMS_MODERATE, + } + } + + pub fn derive_key(&self, password: &[u8], salt: &[u8]) -> Result { + ArrayKey::<::KeySize>::temp(|key| { + Argon2::new(password, salt, *self.params())?.derive_key_bytes(key)?; + Ok(StoreKey::from(StoreKeyType::from_secret_bytes(&*key)?)) + }) + } +} diff --git a/src/keys/kdf/mod.rs b/src/protect/kdf/mod.rs similarity index 68% rename from src/keys/kdf/mod.rs rename to src/protect/kdf/mod.rs index d5cf3649..a04ff2ef 100644 --- a/src/keys/kdf/mod.rs +++ b/src/protect/kdf/mod.rs @@ -1,12 +1,12 @@ -use indy_utils::base58; +use super::store_key::{StoreKey, PREFIX_KDF}; +use crate::{ + crypto::{buffer::ArrayKey, generic_array::ArrayLength}, + error::Error, + storage::Options, +}; -use super::wrap::PREFIX_KDF; -use crate::error::Result; -use crate::keys::wrap::WrapKey; -use crate::options::Options; - -pub mod argon2; -use self::argon2::{generate_salt, Level as Argon2Level, SALT_SIZE}; +mod argon2; +use self::argon2::{Level as Argon2Level, SaltSize as Argon2Salt}; pub const METHOD_ARGON2I: &'static str = "argon2i"; @@ -57,40 +57,34 @@ impl KdfMethod { } } - pub fn derive_new_key(&self, password: &str) -> Result<(WrapKey, String)> { + pub fn derive_new_key(&self, password: &str) -> Result<(StoreKey, String), Error> { match self { Self::Argon2i(level) => { - let salt = generate_salt(); - let key = level.derive_key(&salt, password)?; - let detail = format!("?salt={}", base58::encode(&salt)); + let salt = level.generate_salt(); + let key = level.derive_key(password.as_bytes(), salt.as_ref())?; + let detail = format!("?salt={}", salt.as_hex()); Ok((key.into(), detail)) } } } - pub fn derive_key(&self, password: &str, detail: &str) -> Result { + pub fn derive_key(&self, password: &str, detail: &str) -> Result { match self { Self::Argon2i(level) => { - let salt = parse_salt(detail)?; - let key = level.derive_key(&salt, password)?; + let salt = parse_salt::(detail)?; + let key = level.derive_key(password.as_bytes(), salt.as_ref())?; Ok(key.into()) } } } } -fn parse_salt(detail: &str) -> Result> { +fn parse_salt>(detail: &str) -> Result, Error> { let opts = Options::parse_uri(detail)?; if let Some(salt) = 
opts.query.get("salt") { - if let Ok(salt) = base58::decode(salt) { - if salt.len() >= SALT_SIZE { - Ok(salt) - } else { - Err(err_msg!(Input, "Invalid salt length")) - } - } else { - Err(err_msg!(Input, "Invalid salt")) - } + ArrayKey::::try_new_with(|arr| { + hex::decode_to_slice(salt, arr).map_err(|_| err_msg!(Input, "Invalid salt")) + }) } else { Err(err_msg!(Input, "Missing salt")) } diff --git a/src/keys/mod.rs b/src/protect/mod.rs similarity index 53% rename from src/keys/mod.rs rename to src/protect/mod.rs index 1a10b3fc..71f3f216 100644 --- a/src/keys/mod.rs +++ b/src/protect/mod.rs @@ -1,97 +1,68 @@ -use std::collections::HashMap; -use std::sync::Arc; +use std::{collections::HashMap, sync::Arc}; use async_mutex::Mutex; -use indy_utils::keys::{EncodedVerKey, PrivateKey}; use zeroize::Zeroize; -use super::error::Result; -use super::future::unblock; -use super::types::{EncEntryTag, EntryTag, ProfileId, SecretBytes}; - -use self::store::StoreKey; -use self::wrap::WrapKey; - -pub mod encrypt; - pub mod kdf; -pub mod store; - -mod types; -pub use self::types::{KeyAlg, KeyCategory, KeyEntry, KeyParams, PassKey}; +mod hmac_key; -pub mod wrap; +mod pass_key; +pub use self::pass_key::PassKey; -// #[cfg(target_os = "macos")] -// mod keychain; +mod profile_key; +pub use self::profile_key::ProfileKey; -/// Derive the (public) verification key for a keypair -pub fn derive_verkey(alg: KeyAlg, seed: &[u8]) -> Result { - match alg { - KeyAlg::ED25519 => (), - _ => return Err(err_msg!(Unsupported, "Unsupported key algorithm")), - } +mod store_key; +pub use self::store_key::{generate_raw_store_key, StoreKey, StoreKeyMethod, StoreKeyReference}; - let sk = - PrivateKey::from_seed(seed).map_err(err_map!(Unexpected, "Error generating keypair"))?; - let pk = sk - .public_key() - .map_err(err_map!(Unexpected, "Error generating public key"))? - .as_base58() - .map_err(err_map!(Unexpected, "Error encoding public key"))? - .long_form(); - Ok(pk) -} +use crate::{ + crypto::buffer::SecretBytes, + error::Error, + future::unblock, + storage::{EncEntryTag, EntryTag}, +}; -/// Verify that a message signature is consistent with the signer's key -pub fn verify_signature(signer_vk: &str, data: &[u8], signature: &[u8]) -> Result { - let vk = EncodedVerKey::from_str(&signer_vk).map_err(err_map!("Invalid verkey"))?; - Ok(vk - .decode() - .map_err(err_map!("Unsupported verkey"))? 
- .verify_signature(&data, &signature) - .unwrap_or(false)) -} +pub type ProfileId = i64; #[derive(Debug)] pub struct KeyCache { - profile_info: Mutex)>>, - pub(crate) wrap_key: Arc, + profile_info: Mutex)>>, + pub(crate) store_key: Arc, } impl KeyCache { - pub fn new(wrap_key: impl Into>) -> Self { + pub fn new(store_key: impl Into>) -> Self { Self { profile_info: Mutex::new(HashMap::new()), - wrap_key: wrap_key.into(), + store_key: store_key.into(), } } - pub async fn load_key(&self, ciphertext: Vec) -> Result { - let wrap_key = self.wrap_key.clone(); + pub async fn load_key(&self, ciphertext: Vec) -> Result { + let store_key = self.store_key.clone(); unblock(move || { - let mut data = wrap_key + let mut data = store_key .unwrap_data(ciphertext) - .map_err(err_map!(Encryption, "Error decrypting store key"))?; - let key = StoreKey::from_slice(&data)?; + .map_err(err_map!(Encryption, "Error decrypting profile key"))?; + let key = ProfileKey::from_slice(&data)?; data.zeroize(); Ok(key) }) .await } - pub fn add_profile_mut(&mut self, ident: String, pid: ProfileId, key: StoreKey) { + pub fn add_profile_mut(&mut self, ident: String, pid: ProfileId, key: ProfileKey) { self.profile_info .get_mut() .insert(ident, (pid, Arc::new(key))); } - pub async fn add_profile(&self, ident: String, pid: ProfileId, key: Arc) { + pub async fn add_profile(&self, ident: String, pid: ProfileId, key: Arc) { self.profile_info.lock().await.insert(ident, (pid, key)); } - pub async fn get_profile(&self, name: &str) -> Option<(ProfileId, Arc)> { + pub async fn get_profile(&self, name: &str) -> Option<(ProfileId, Arc)> { self.profile_info.lock().await.get(name).cloned() } } @@ -101,57 +72,77 @@ pub(crate) trait EntryEncryptor { SecretBytes::from(input) } - fn encrypt_entry_category(&self, category: SecretBytes) -> Result>; - fn encrypt_entry_name(&self, name: SecretBytes) -> Result>; - fn encrypt_entry_value(&self, value: SecretBytes) -> Result>; - fn encrypt_entry_tags(&self, tags: Vec) -> Result>; - - fn decrypt_entry_category(&self, enc_category: Vec) -> Result; - fn decrypt_entry_name(&self, enc_name: Vec) -> Result; - fn decrypt_entry_value(&self, enc_value: Vec) -> Result; - fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result>; + fn encrypt_entry_category(&self, category: SecretBytes) -> Result, Error>; + fn encrypt_entry_name(&self, name: SecretBytes) -> Result, Error>; + fn encrypt_entry_value( + &self, + category: &[u8], + name: &[u8], + value: SecretBytes, + ) -> Result, Error>; + fn encrypt_entry_tags(&self, tags: Vec) -> Result, Error>; + + fn decrypt_entry_category(&self, enc_category: Vec) -> Result; + fn decrypt_entry_name(&self, enc_name: Vec) -> Result; + fn decrypt_entry_value( + &self, + category: &[u8], + name: &[u8], + enc_value: Vec, + ) -> Result; + fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result, Error>; } pub struct NullEncryptor; impl EntryEncryptor for NullEncryptor { - fn encrypt_entry_category(&self, category: SecretBytes) -> Result> { + fn encrypt_entry_category(&self, category: SecretBytes) -> Result, Error> { Ok(category.into_vec()) } - fn encrypt_entry_name(&self, name: SecretBytes) -> Result> { + fn encrypt_entry_name(&self, name: SecretBytes) -> Result, Error> { Ok(name.into_vec()) } - fn encrypt_entry_value(&self, value: SecretBytes) -> Result> { + fn encrypt_entry_value( + &self, + _category: &[u8], + _name: &[u8], + value: SecretBytes, + ) -> Result, Error> { Ok(value.into_vec()) } - fn encrypt_entry_tags(&self, tags: Vec) -> Result> { + fn encrypt_entry_tags(&self, tags: 
Vec) -> Result, Error> { Ok(tags .into_iter() .map(|tag| match tag { EntryTag::Encrypted(name, value) => EncEntryTag { - name: name.into_bytes(), - value: value.into_bytes(), + name: name.into_bytes().into(), + value: value.into_bytes().into(), plaintext: false, }, EntryTag::Plaintext(name, value) => EncEntryTag { - name: name.into_bytes(), - value: value.into_bytes(), + name: name.into_bytes().into(), + value: value.into_bytes().into(), plaintext: true, }, }) .collect()) } - fn decrypt_entry_category(&self, enc_category: Vec) -> Result { + fn decrypt_entry_category(&self, enc_category: Vec) -> Result { Ok(String::from_utf8(enc_category).map_err(err_map!(Encryption))?) } - fn decrypt_entry_name(&self, enc_name: Vec) -> Result { + fn decrypt_entry_name(&self, enc_name: Vec) -> Result { Ok(String::from_utf8(enc_name).map_err(err_map!(Encryption))?) } - fn decrypt_entry_value(&self, enc_value: Vec) -> Result { + fn decrypt_entry_value( + &self, + _category: &[u8], + _name: &[u8], + enc_value: Vec, + ) -> Result { Ok(enc_value.into()) } - fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result> { + fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result, Error> { Ok(enc_tags.into_iter().try_fold(vec![], |mut acc, tag| { let name = String::from_utf8(tag.name).map_err(err_map!(Encryption))?; let value = String::from_utf8(tag.value).map_err(err_map!(Encryption))?; @@ -160,7 +151,7 @@ impl EntryEncryptor for NullEncryptor { } else { EntryTag::Encrypted(name, value) }); - Result::Ok(acc) + Result::<_, Error>::Ok(acc) })?) } } diff --git a/src/protect/pass_key.rs b/src/protect/pass_key.rs new file mode 100644 index 00000000..83901d6c --- /dev/null +++ b/src/protect/pass_key.rs @@ -0,0 +1,107 @@ +use zeroize::Zeroize; + +use std::{ + borrow::Cow, + fmt::{self, Debug, Formatter}, + mem::ManuallyDrop, + ops::Deref, +}; + +/// A possibly-empty password or key used to derive a store key +#[derive(Clone)] +pub struct PassKey<'a>(Option>); + +impl PassKey<'_> { + /// Create a scoped reference to the passkey + pub fn as_ref(&self) -> PassKey<'_> { + PassKey(Some(Cow::Borrowed(&**self))) + } + + /// Create an empty passkey + pub fn empty() -> PassKey<'static> { + PassKey(None) + } + + pub(crate) fn is_none(&self) -> bool { + self.0.is_none() + } + + pub(crate) fn into_owned(self) -> PassKey<'static> { + let mut slf = ManuallyDrop::new(self); + let val = slf.0.take(); + PassKey(match val { + None => None, + Some(Cow::Borrowed(s)) => Some(Cow::Owned(s.to_string())), + Some(Cow::Owned(s)) => Some(Cow::Owned(s)), + }) + } +} + +impl Debug for PassKey<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if cfg!(test) { + f.debug_tuple("PassKey").field(&*self).finish() + } else { + f.debug_tuple("PassKey").field(&"").finish() + } + } +} + +impl Default for PassKey<'_> { + fn default() -> Self { + Self(None) + } +} + +impl Deref for PassKey<'_> { + type Target = str; + + fn deref(&self) -> &str { + match self.0.as_ref() { + None => "", + Some(s) => s.as_ref(), + } + } +} + +impl Drop for PassKey<'_> { + fn drop(&mut self) { + self.zeroize(); + } +} + +impl<'a> From<&'a str> for PassKey<'a> { + fn from(inner: &'a str) -> Self { + Self(Some(Cow::Borrowed(inner))) + } +} + +impl From for PassKey<'_> { + fn from(inner: String) -> Self { + Self(Some(Cow::Owned(inner))) + } +} + +impl<'a> From> for PassKey<'a> { + fn from(inner: Option<&'a str>) -> Self { + Self(inner.map(Cow::Borrowed)) + } +} + +impl<'a, 'b> PartialEq> for PassKey<'a> { + fn eq(&self, other: &PassKey<'b>) -> bool { + &**self == &**other + } +} 
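
As a brief usage sketch (not part of the patch itself): the PassKey type introduced in this new src/protect/pass_key.rs file can be exercised roughly as follows, assuming only the public API visible in this hunk (From<&str>, as_ref, empty, Deref, PartialEq) and the aries_askar::PassKey re-export added in the src/lib.rs hunk earlier in this diff; the main harness and the literal passphrase are illustrative only.

use aries_askar::PassKey;

fn main() {
    // Borrow an existing passphrase without copying it.
    let pass = PassKey::from("my pass phrase");
    assert_eq!(&*pass, "my pass phrase"); // Deref<Target = str>

    // A scoped reference can be handed to APIs that take a PassKey by value.
    let scoped = pass.as_ref();
    assert_eq!(scoped, pass);

    // An empty passkey dereferences to the empty string; owned contents
    // are zeroized automatically when the values are dropped.
    assert_eq!(&*PassKey::empty(), "");
}
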
+impl Eq for PassKey<'_> {} + +impl Zeroize for PassKey<'_> { + fn zeroize(&mut self) { + match self.0.take() { + Some(Cow::Owned(mut s)) => { + s.zeroize(); + } + _ => (), + } + } +} diff --git a/src/protect/profile_key.rs b/src/protect/profile_key.rs new file mode 100644 index 00000000..ec19a61d --- /dev/null +++ b/src/protect/profile_key.rs @@ -0,0 +1,320 @@ +use serde::{Deserialize, Serialize}; +use sha2::Sha256; + +use super::hmac_key::{HmacDerive, HmacKey}; +use super::EntryEncryptor; +use crate::{ + crypto::{ + alg::chacha20::{Chacha20Key, C20P}, + buffer::{ArrayKey, ResizeBuffer, SecretBytes, WriteBuffer}, + encrypt::{KeyAeadInPlace, KeyAeadMeta}, + generic_array::typenum::{Unsigned, U32}, + kdf::FromKeyDerivation, + repr::KeyGen, + }, + error::Error, + storage::{EncEntryTag, EntryTag}, +}; + +pub type ProfileKey = ProfileKeyImpl, HmacKey>; + +/// A record combining the keys required to encrypt and decrypt storage entries +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(bound( + deserialize = "Key: for<'a> Deserialize<'a>, HmacKey: for<'a> Deserialize<'a>", + serialize = "Key: Serialize, HmacKey: Serialize" +))] +#[serde(tag = "ver", rename = "1")] +pub struct ProfileKeyImpl { + #[serde(rename = "ick")] + pub category_key: Key, + #[serde(rename = "ink")] + pub name_key: Key, + #[serde(rename = "ihk")] + pub item_hmac_key: HmacKey, + #[serde(rename = "tnk")] + pub tag_name_key: Key, + #[serde(rename = "tvk")] + pub tag_value_key: Key, + #[serde(rename = "thk")] + pub tags_hmac_key: HmacKey, +} + +impl ProfileKeyImpl +where + Key: KeyGen, + HmacKey: KeyGen, +{ + pub fn new() -> Result { + Ok(Self { + category_key: KeyGen::generate()?, + name_key: KeyGen::generate()?, + item_hmac_key: KeyGen::generate()?, + tag_name_key: KeyGen::generate()?, + tag_value_key: KeyGen::generate()?, + tags_hmac_key: KeyGen::generate()?, + }) + } +} + +impl ProfileKeyImpl +where + Key: Serialize + for<'de> Deserialize<'de>, + HmacKey: Serialize + for<'de> Deserialize<'de>, +{ + pub fn to_bytes(&self) -> Result { + serde_cbor::to_vec(self) + .map(SecretBytes::from) + .map_err(err_map!(Unexpected, "Error serializing profile key")) + } + + pub fn from_slice(input: &[u8]) -> Result { + serde_cbor::from_slice(input).map_err(err_map!(Unsupported, "Invalid profile key")) + } +} + +impl ProfileKeyImpl +where + Key: KeyAeadInPlace + KeyAeadMeta + FromKeyDerivation, + HmacKey: HmacDerive, +{ + fn encrypted_size(len: usize) -> usize { + len + Key::NonceSize::USIZE + Key::TagSize::USIZE + } + + /// Encrypt a value with a predictable nonce, making it searchable + fn encrypt_searchable( + mut buffer: SecretBytes, + enc_key: &Key, + hmac_key: &HmacKey, + ) -> Result, Error> { + let nonce = ArrayKey::::from_key_derivation( + hmac_key.hmac_deriver(&[buffer.as_ref()]), + )?; + enc_key.encrypt_in_place(&mut buffer, nonce.as_ref(), &[])?; + buffer.buffer_insert(0, nonce.as_ref())?; + Ok(buffer.into_vec()) + } + + fn encrypt(mut buffer: SecretBytes, enc_key: &Key) -> Result, Error> { + let nonce = ArrayKey::::random(); + enc_key.encrypt_in_place(&mut buffer, nonce.as_ref(), &[])?; + buffer.buffer_insert(0, nonce.as_ref())?; + Ok(buffer.into_vec()) + } + + fn decrypt(ciphertext: Vec, enc_key: &Key) -> Result { + let nonce_len = Key::NonceSize::USIZE; + if ciphertext.len() < nonce_len { + return Err(err_msg!(Encryption, "invalid encrypted value")); + } + let mut buffer = SecretBytes::from(ciphertext); + let nonce = ArrayKey::::from_slice(&buffer.as_ref()[..nonce_len]); + buffer.buffer_remove(0..nonce_len)?; + 
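// decrypt the remaining ciphertext in place, using the nonce recovered from the prefix +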
enc_key.decrypt_in_place(&mut buffer, nonce.as_ref(), &[])?; + Ok(buffer) + } + + #[inline] + fn derive_value_key(&self, category: &[u8], name: &[u8]) -> Result { + Ok(Key::from_key_derivation(self.item_hmac_key.hmac_deriver( + &[ + &(category.len() as u32).to_be_bytes(), + category, + &(name.len() as u32).to_be_bytes(), + name, + ], + ))?) + } + + pub fn encrypt_tag_name(&self, name: SecretBytes) -> Result, Error> { + Self::encrypt_searchable(name, &self.tag_name_key, &self.tags_hmac_key) + } + + pub fn encrypt_tag_value(&self, value: SecretBytes) -> Result, Error> { + Self::encrypt_searchable(value, &self.tag_value_key, &self.tags_hmac_key) + } + + pub fn decrypt_tag_name(&self, enc_tag_name: Vec) -> Result { + Self::decrypt(enc_tag_name, &self.tag_name_key) + } + + pub fn decrypt_tag_value(&self, enc_tag_value: Vec) -> Result { + Self::decrypt(enc_tag_value, &self.tag_value_key) + } +} + +impl PartialEq for ProfileKeyImpl { + fn eq(&self, other: &Self) -> bool { + self.category_key == other.category_key + && self.name_key == other.name_key + && self.item_hmac_key == other.item_hmac_key + && self.tag_name_key == other.tag_name_key + && self.tag_value_key == other.tag_value_key + && self.tags_hmac_key == other.tags_hmac_key + } +} +impl Eq for ProfileKeyImpl {} + +impl EntryEncryptor for ProfileKeyImpl +where + Key: KeyAeadInPlace + KeyAeadMeta + FromKeyDerivation, + HmacKey: HmacDerive, +{ + fn prepare_input(input: &[u8]) -> SecretBytes { + let mut buf = SecretBytes::with_capacity(Self::encrypted_size(input.len())); + buf.buffer_write(input).unwrap(); + buf + } + + fn encrypt_entry_category(&self, category: SecretBytes) -> Result, Error> { + Self::encrypt_searchable(category, &self.category_key, &self.item_hmac_key) + } + + fn encrypt_entry_name(&self, name: SecretBytes) -> Result, Error> { + Self::encrypt_searchable(name, &self.name_key, &self.item_hmac_key) + } + + fn encrypt_entry_value( + &self, + category: &[u8], + name: &[u8], + value: SecretBytes, + ) -> Result, Error> { + let value_key = self.derive_value_key(category, name)?; + Self::encrypt(value, &value_key) + } + + fn decrypt_entry_category(&self, enc_category: Vec) -> Result { + decode_utf8(Self::decrypt(enc_category, &self.category_key)?.into_vec()) + } + + fn decrypt_entry_name(&self, enc_name: Vec) -> Result { + decode_utf8(Self::decrypt(enc_name, &self.name_key)?.into_vec()) + } + + fn decrypt_entry_value( + &self, + category: &[u8], + name: &[u8], + enc_value: Vec, + ) -> Result { + let value_key = self.derive_value_key(category, name)?; + Self::decrypt(enc_value, &value_key) + } + + fn encrypt_entry_tags(&self, tags: Vec) -> Result, Error> { + tags.into_iter() + .map(|tag| match tag { + EntryTag::Plaintext(name, value) => { + let name = self.encrypt_tag_name(name.into())?; + Ok(EncEntryTag { + name, + value: value.into_bytes().into(), + plaintext: true, + }) + } + EntryTag::Encrypted(name, value) => { + let name = self.encrypt_tag_name(name.into())?; + let value = self.encrypt_tag_value(value.into())?; + Ok(EncEntryTag { + name, + value, + plaintext: false, + }) + } + }) + .collect() + } + + fn decrypt_entry_tags(&self, enc_tags: Vec) -> Result, Error> { + enc_tags.into_iter().try_fold(vec![], |mut acc, tag| { + let name = decode_utf8(self.decrypt_tag_name(tag.name)?.into_vec())?; + acc.push(if tag.plaintext { + let value = decode_utf8(tag.value)?; + EntryTag::Plaintext(name, value) + } else { + let value = decode_utf8(self.decrypt_tag_value(tag.value)?.into_vec())?; + EntryTag::Encrypted(name, value) + }); + 
Result::Ok(acc) + }) + } +} + +#[inline(always)] +fn decode_utf8(value: Vec) -> Result { + String::from_utf8(value).map_err(err_map!(Encryption)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::storage::Entry; + + #[test] + fn encrypt_entry_round_trip() { + let key = ProfileKey::new().unwrap(); + let test_record = Entry::new( + "category", + "name", + "value", + vec![ + EntryTag::Plaintext("plain".to_string(), "tag".to_string()), + EntryTag::Encrypted("enctag".to_string(), "envtagval".to_string()), + ], + ); + let enc_category = key + .encrypt_entry_category(test_record.category.clone().into()) + .unwrap(); + let enc_name = key + .encrypt_entry_name(test_record.name.clone().into()) + .unwrap(); + let enc_value = key + .encrypt_entry_value( + test_record.category.as_bytes(), + test_record.name.as_bytes(), + test_record.value.clone().into(), + ) + .unwrap(); + let enc_tags = key.encrypt_entry_tags(test_record.tags.clone()).unwrap(); + assert_ne!(test_record.category.as_bytes(), &enc_category[..]); + assert_ne!(test_record.name.as_bytes(), &enc_name[..]); + assert_ne!(test_record.value, enc_value); + + let cmp_record = Entry::new( + key.decrypt_entry_category(enc_category).unwrap(), + key.decrypt_entry_name(enc_name).unwrap(), + key.decrypt_entry_value( + test_record.category.as_bytes(), + test_record.name.as_bytes(), + enc_value, + ) + .unwrap(), + key.decrypt_entry_tags(enc_tags).unwrap(), + ); + assert_eq!(test_record, cmp_record); + } + + #[test] + fn check_encrypt_searchable() { + let input = SecretBytes::from(&b"hello"[..]); + let key = Chacha20Key::::generate().unwrap(); + let hmac_key = HmacKey::generate().unwrap(); + let enc1 = ProfileKey::encrypt_searchable(input.clone(), &key, &hmac_key).unwrap(); + let enc2 = ProfileKey::encrypt_searchable(input.clone(), &key, &hmac_key).unwrap(); + let enc3 = ProfileKey::encrypt(input.clone(), &key).unwrap(); + assert_eq!(&enc1, &enc2); + assert_ne!(&enc1, &enc3); + let dec = ProfileKey::decrypt(enc1, &key).unwrap(); + assert_eq!(dec, input); + } + + #[test] + fn serialize_round_trip() { + let key = ProfileKey::new().unwrap(); + let key_cbor = serde_cbor::to_vec(&key).unwrap(); + let key_cmp = serde_cbor::from_slice(&key_cbor).unwrap(); + assert_eq!(key, key_cmp); + } +} diff --git a/src/keys/wrap.rs b/src/protect/store_key.rs similarity index 56% rename from src/keys/wrap.rs rename to src/protect/store_key.rs index 52eb2adc..e210755a 100644 --- a/src/keys/wrap.rs +++ b/src/protect/store_key.rs @@ -1,86 +1,106 @@ -use indy_utils::base58; - -use super::encrypt::{aead::ChaChaEncrypt, SymEncrypt, SymEncryptKey}; use super::kdf::KdfMethod; -use super::types::PassKey; -use crate::{error::Result, SecretBytes}; + +use super::pass_key::PassKey; +use crate::{ + crypto::{ + alg::chacha20::{Chacha20Key, C20P}, + buffer::{ArrayKey, ResizeBuffer, SecretBytes}, + encrypt::{KeyAeadInPlace, KeyAeadMeta}, + repr::{KeyGen, KeyMeta, KeySecretBytes}, + }, + error::Error, +}; pub const PREFIX_KDF: &'static str = "kdf"; pub const PREFIX_RAW: &'static str = "raw"; pub const PREFIX_NONE: &'static str = "none"; -pub type WrapKeyAlg = ChaChaEncrypt; -pub type WrapKeyData = ::Key; -pub const WRAP_KEY_SIZE: usize = ::Key::SIZE; +pub type StoreKeyType = Chacha20Key; -/// Create a new raw wrap key for a store -pub fn generate_raw_wrap_key(seed: Option<&[u8]>) -> Result> { +type StoreKeyNonce = ArrayKey<::NonceSize>; + +/// Create a new raw (non-derived) store key +pub fn generate_raw_store_key(seed: Option<&[u8]>) -> Result, Error> { let key = if let Some(seed) = 
seed { - WrapKey::from(WrapKeyData::from_seed(seed)?) + StoreKey::from(StoreKeyType::from_seed(seed.into())?) } else { - WrapKey::from(WrapKeyData::random_key()) + StoreKey::from(StoreKeyType::generate()?) }; - Ok(key.to_opt_string().unwrap().into()) + Ok(key.to_passkey()) } -pub fn parse_raw_key(raw_key: &str) -> Result { - let key = base58::decode(raw_key) - .map_err(|_| err_msg!(Input, "Error parsing raw key as base58 value"))?; - if key.len() != WRAP_KEY_SIZE { - Err(err_msg!(Input, "Incorrect length for encoded raw key")) - } else { - Ok(WrapKey::from(WrapKeyData::from_slice(key))) - } +pub fn parse_raw_store_key(raw_key: &str) -> Result { + ArrayKey::<::KeySize>::temp(|key| { + let key_len = bs58::decode(raw_key) + .into(&mut *key) + .map_err(|_| err_msg!(Input, "Error parsing raw key as base58 value"))?; + if key_len != key.len() { + Err(err_msg!(Input, "Incorrect length for encoded raw key")) + } else { + Ok(StoreKey::from(StoreKeyType::from_secret_bytes(&*key)?)) + } + }) } #[derive(Clone, Debug)] -pub struct WrapKey(pub Option); +pub struct StoreKey(pub Option); -impl WrapKey { +impl StoreKey { pub const fn empty() -> Self { Self(None) } - pub fn random() -> Result { - Ok(Self(Some(WrapKeyData::random()))) + pub fn random() -> Result { + Ok(Self(Some(StoreKeyType::generate()?))) } pub fn is_empty(&self) -> bool { self.0.is_none() } - pub fn prepare_input(&self, input: &[u8]) -> SecretBytes { - WrapKeyAlg::prepare_input(input) - } - - pub fn wrap_data(&self, data: SecretBytes) -> Result> { + pub fn wrap_data(&self, mut data: SecretBytes) -> Result, Error> { match &self.0 { - Some(key) => Ok(WrapKeyAlg::encrypt(data, key, None)?), + Some(key) => { + let nonce = StoreKeyNonce::random(); + key.encrypt_in_place(&mut data, nonce.as_ref(), &[])?; + data.buffer_insert(0, nonce.as_ref())?; + Ok(data.into_vec()) + } None => Ok(data.into_vec()), } } - pub fn unwrap_data(&self, ciphertext: Vec) -> Result { + pub fn unwrap_data(&self, ciphertext: Vec) -> Result { match &self.0 { - Some(key) => Ok(WrapKeyAlg::decrypt(ciphertext, key)?), + Some(key) => { + let nonce = StoreKeyNonce::from_slice(&ciphertext[..StoreKeyNonce::SIZE]); + let mut buffer = SecretBytes::from(ciphertext); + buffer.buffer_remove(0..StoreKeyNonce::SIZE)?; + key.decrypt_in_place(&mut buffer, nonce.as_ref(), &[])?; + Ok(buffer) + } None => Ok(ciphertext.into()), } } - pub fn to_opt_string(&self) -> Option { - self.0.as_ref().map(|key| base58::encode(key.as_slice())) + pub fn to_passkey(&self) -> PassKey<'static> { + if let Some(key) = self.0.as_ref() { + PassKey::from(key.with_secret_bytes(|sk| bs58::encode(sk.unwrap()).into_string())) + } else { + PassKey::empty() + } } } -impl From for WrapKey { - fn from(data: WrapKeyData) -> Self { +impl From for StoreKey { + fn from(data: StoreKeyType) -> Self { Self(Some(data)) } } -/// Supported methods for generating or referencing a new wrap key +/// Supported methods for generating or referencing a new store key #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum WrapKeyMethod { +pub enum StoreKeyMethod { // CreateManagedKey(String), // ExistingManagedKey(String), /// Derive a new wrapping key using a key derivation function @@ -91,8 +111,8 @@ pub enum WrapKeyMethod { Unprotected, } -impl WrapKeyMethod { - pub(crate) fn parse_uri(uri: &str) -> Result { +impl StoreKeyMethod { + pub(crate) fn parse_uri(uri: &str) -> Result { let mut prefix_and_detail = uri.splitn(2, ':'); let prefix = prefix_and_detail.next().unwrap_or_default(); // let detail = 
prefix_and_detail.next().unwrap_or_default(); @@ -103,18 +123,21 @@ impl WrapKeyMethod { None => Err(err_msg!(Unsupported, "Invalid key derivation method")), }, PREFIX_NONE => Ok(Self::Unprotected), - _ => Err(err_msg!(Unsupported, "Invalid wrap key method")), + _ => Err(err_msg!(Unsupported, "Invalid store key method")), } } - pub(crate) fn resolve(&self, pass_key: PassKey<'_>) -> Result<(WrapKey, WrapKeyReference)> { + pub(crate) fn resolve( + &self, + pass_key: PassKey<'_>, + ) -> Result<(StoreKey, StoreKeyReference), Error> { match self { // Self::CreateManagedKey(_mgr_ref) => unimplemented!(), // Self::ExistingManagedKey(String) => unimplemented!(), Self::DeriveKey(method) => { if !pass_key.is_none() { let (key, detail) = method.derive_new_key(&*pass_key)?; - let key_ref = WrapKeyReference::DeriveKey(*method, detail); + let key_ref = StoreKeyReference::DeriveKey(*method, detail); Ok((key, key_ref)) } else { Err(err_msg!(Input, "Key derivation password not provided")) @@ -122,33 +145,33 @@ impl WrapKeyMethod { } Self::RawKey => { let key = if !pass_key.is_empty() { - parse_raw_key(&*pass_key)? + parse_raw_store_key(&*pass_key)? } else { - WrapKey::random()? + StoreKey::random()? }; - Ok((key, WrapKeyReference::RawKey)) + Ok((key, StoreKeyReference::RawKey)) } - Self::Unprotected => Ok((WrapKey::empty(), WrapKeyReference::Unprotected)), + Self::Unprotected => Ok((StoreKey::empty(), StoreKeyReference::Unprotected)), } } } -impl Default for WrapKeyMethod { +impl Default for StoreKeyMethod { fn default() -> Self { Self::DeriveKey(KdfMethod::Argon2i(Default::default())) } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum WrapKeyReference { +pub enum StoreKeyReference { // ManagedKey(String), DeriveKey(KdfMethod, String), RawKey, Unprotected, } -impl WrapKeyReference { - pub fn parse_uri(uri: &str) -> Result { +impl StoreKeyReference { + pub fn parse_uri(uri: &str) -> Result { let mut prefix_and_detail = uri.splitn(2, ':'); let prefix = prefix_and_detail.next().unwrap_or_default(); match prefix { @@ -163,20 +186,20 @@ impl WrapKeyReference { PREFIX_NONE => Ok(Self::Unprotected), _ => Err(err_msg!( Unsupported, - "Invalid wrap key method for reference" + "Invalid store key method for reference" )), } } - pub fn compare_method(&self, method: &WrapKeyMethod) -> bool { + pub fn compare_method(&self, method: &StoreKeyMethod) -> bool { match self { // Self::ManagedKey(_keyref) => matches!(method, WrapKeyMethod::CreateManagedKey(..)), Self::DeriveKey(kdf_method, _detail) => match method { - WrapKeyMethod::DeriveKey(m) if m == kdf_method => true, + StoreKeyMethod::DeriveKey(m) if m == kdf_method => true, _ => false, }, - Self::RawKey => *method == WrapKeyMethod::RawKey, - Self::Unprotected => *method == WrapKeyMethod::Unprotected, + Self::RawKey => *method == StoreKeyMethod::RawKey, + Self::Unprotected => *method == StoreKeyMethod::Unprotected, } } @@ -189,7 +212,7 @@ impl WrapKeyReference { } } - pub fn resolve(&self, pass_key: PassKey<'_>) -> Result { + pub fn resolve(&self, pass_key: PassKey<'_>) -> Result { match self { // Self::ManagedKey(_key_ref) => unimplemented!(), Self::DeriveKey(method, detail) => { @@ -201,12 +224,12 @@ impl WrapKeyReference { } Self::RawKey => { if !pass_key.is_empty() { - parse_raw_key(&*pass_key) + parse_raw_store_key(&*pass_key) } else { Err(err_msg!(Input, "Encoded raw key not provided")) } } - Self::Unprotected => Ok(WrapKey::empty()), + Self::Unprotected => Ok(StoreKey::empty()), } } } @@ -218,12 +241,12 @@ mod tests { #[test] fn protection_method_parse() { 
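// Illustrative sketch (not part of this patch): the derived and raw store-key
// flows shown above, written like the surrounding unit tests so the pub(crate)
// items are in scope. Only behaviour already exercised in this file is assumed.
#[test]
fn store_key_method_flow_sketch() {
    // derived key: "kdf:argon2i" parses to the default derivation method
    let method = StoreKeyMethod::parse_uri("kdf:argon2i").expect("Error parsing method");
    let (derived, derived_ref) = method
        .resolve(PassKey::from("my password"))
        .expect("Error deriving store key");
    assert!(!derived.is_empty());
    assert!(derived_ref.compare_method(&method));

    // raw key: generate once, keep the encoded PassKey, round-trip the reference URI
    let raw_pass = generate_raw_store_key(None).expect("Error generating raw key");
    let (_raw_key, raw_ref) = StoreKeyMethod::RawKey
        .resolve(raw_pass.as_ref())
        .expect("Error resolving raw key");
    let key_uri = raw_ref.into_uri();
    let parsed = StoreKeyReference::parse_uri(&key_uri).expect("Error parsing raw key URI");
    assert_eq!(parsed, StoreKeyReference::RawKey);
}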
- let parse = WrapKeyMethod::parse_uri; - assert_eq!(parse("none"), Ok(WrapKeyMethod::Unprotected)); - assert_eq!(parse("raw"), Ok(WrapKeyMethod::RawKey)); + let parse = StoreKeyMethod::parse_uri; + assert_eq!(parse("none"), Ok(StoreKeyMethod::Unprotected)); + assert_eq!(parse("raw"), Ok(StoreKeyMethod::RawKey)); assert_eq!( parse("kdf:argon2i"), - Ok(WrapKeyMethod::DeriveKey(KdfMethod::Argon2i( + Ok(StoreKeyMethod::DeriveKey(KdfMethod::Argon2i( Default::default() ))) ); @@ -237,14 +260,14 @@ mod tests { fn derived_key_wrap() { let input = b"test data"; let pass = PassKey::from("pass"); - let (key, key_ref) = WrapKeyMethod::DeriveKey(KdfMethod::Argon2i(Default::default())) + let (key, key_ref) = StoreKeyMethod::DeriveKey(KdfMethod::Argon2i(Default::default())) .resolve(pass.as_ref()) .expect("Error deriving new key"); assert!(!key.is_empty()); let wrapped = key - .wrap_data(key.prepare_input(input)) + .wrap_data((&input[..]).into()) .expect("Error wrapping input"); - assert_ne!(wrapped, input); + assert_ne!(wrapped, &input[..]); let unwrapped = key.unwrap_data(wrapped).expect("Error unwrapping data"); assert_eq!(unwrapped, &input[..]); let key_uri = key_ref.into_uri(); @@ -254,52 +277,52 @@ mod tests { #[test] fn derived_key_unwrap_expected() { let input = b"test data"; - let wrapped: &[u8] = &[ - 194, 156, 102, 253, 229, 11, 48, 184, 160, 119, 218, 30, 169, 188, 244, 223, 235, 95, - 171, 234, 18, 5, 9, 115, 174, 208, 232, 37, 31, 32, 250, 216, 32, 92, 253, 45, 236, - ]; + let wrapped = Vec::from(hex!( + "c29c66fde50b30b8a077da1ea9bcf4dfeb5fabea12050973aed0e8251f20fad8205cfd2dec" + )); let pass = PassKey::from("pass"); - let key_ref = WrapKeyReference::parse_uri("kdf:argon2i:13:mod?salt=MR6B1jrReV2JioaizEaRo6") - .expect("Error parsing derived key ref"); + let key_ref = StoreKeyReference::parse_uri( + "kdf:argon2i:13:mod?salt=a553cfb9c558b5c11c78efcfa06f3e29", + ) + .expect("Error parsing derived key ref"); let key = key_ref.resolve(pass).expect("Error deriving existing key"); - let unwrapped = key - .unwrap_data(wrapped.to_vec()) - .expect("Error unwrapping data"); + let unwrapped = key.unwrap_data(wrapped).expect("Error unwrapping data"); assert_eq!(unwrapped, &input[..]); } #[test] fn derived_key_check_bad_password() { - let wrapped: &[u8] = &[ - 194, 156, 102, 253, 229, 11, 48, 184, 160, 119, 218, 30, 169, 188, 244, 223, 235, 95, - 171, 234, 18, 5, 9, 115, 174, 208, 232, 37, 31, 32, 250, 216, 32, 92, 253, 45, 236, - ]; - let key_ref = WrapKeyReference::parse_uri("kdf:argon2i:13:mod?salt=MR6B1jrReV2JioaizEaRo6") - .expect("Error parsing derived key ref"); + let wrapped = Vec::from(hex!( + "c29c66fde50b30b8a077da1ea9bcf4dfeb5fabea12050973aed0e8251f20fad8205cfd2dec" + )); + let key_ref = StoreKeyReference::parse_uri( + "kdf:argon2i:13:mod?salt=a553cfb9c558b5c11c78efcfa06f3e29", + ) + .expect("Error parsing derived key ref"); let check_bad_pass = key_ref .resolve("not my pass".into()) .expect("Error deriving comparison key"); - let unwrapped_err = check_bad_pass.unwrap_data(wrapped.to_vec()); + let unwrapped_err = check_bad_pass.unwrap_data(wrapped); assert_eq!(unwrapped_err.is_err(), true); } #[test] fn raw_key_wrap() { let input = b"test data"; - let raw_key = generate_raw_wrap_key(None).unwrap(); + let raw_key = generate_raw_store_key(None).unwrap(); - let (key, key_ref) = WrapKeyMethod::RawKey + let (key, key_ref) = StoreKeyMethod::RawKey .resolve(raw_key.as_ref()) .expect("Error resolving raw key"); assert_eq!(key.is_empty(), false); let wrapped = key - 
.wrap_data(key.prepare_input(input)) + .wrap_data((&input[..]).into()) .expect("Error wrapping input"); - assert_ne!(wrapped, input); + assert_ne!(wrapped, &input[..]); // round trip the key reference let key_uri = key_ref.into_uri(); - let key_ref = WrapKeyReference::parse_uri(&key_uri).expect("Error parsing raw key URI"); + let key_ref = StoreKeyReference::parse_uri(&key_uri).expect("Error parsing raw key URI"); let key = key_ref.resolve(raw_key).expect("Error resolving raw key"); let unwrapped = key.unwrap_data(wrapped).expect("Error unwrapping data"); @@ -315,19 +338,19 @@ mod tests { #[test] fn unprotected_wrap() { let input = b"test data"; - let (key, key_ref) = WrapKeyMethod::Unprotected + let (key, key_ref) = StoreKeyMethod::Unprotected .resolve(None.into()) .expect("Error resolving unprotected"); assert_eq!(key.is_empty(), true); let wrapped = key - .wrap_data(key.prepare_input(input)) + .wrap_data((&input[..]).into()) .expect("Error wrapping unprotected"); - assert_eq!(wrapped, input); + assert_eq!(wrapped, &input[..]); // round trip the key reference let key_uri = key_ref.into_uri(); let key_ref = - WrapKeyReference::parse_uri(&key_uri).expect("Error parsing unprotected key ref"); + StoreKeyReference::parse_uri(&key_uri).expect("Error parsing unprotected key ref"); let key = key_ref .resolve(None.into()) .expect("Error resolving unprotected key ref"); diff --git a/src/serde_utils.rs b/src/serde_utils.rs deleted file mode 100644 index 18f09781..00000000 --- a/src/serde_utils.rs +++ /dev/null @@ -1,244 +0,0 @@ -use std::fmt::{self, Display}; -use std::marker::PhantomData; -use std::str::FromStr; - -use indy_utils::base58; - -use serde::{de::Visitor, Deserializer, Serializer}; - -use super::types::SecretBytes; - -macro_rules! serde_as_str_impl { - ($t:ident) => { - impl Serialize for $t { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - $crate::serde_utils::as_str::serialize(self, serializer) - } - } - - impl<'de> Deserialize<'de> for $t { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - $crate::serde_utils::as_str::deserialize(deserializer) - } - } - }; -} - -pub mod as_str { - use super::*; - - pub fn deserialize<'de, D, T>(deserializer: D) -> Result - where - D: Deserializer<'de>, - T: FromStr, - T::Err: Display, - { - deserializer.deserialize_str(FromStrVisitor { _pd: PhantomData }) - } - - pub fn serialize(inst: &T, serializer: S) -> Result - where - S: Serializer, - T: AsRef, - { - serializer.serialize_str(inst.as_ref()) - } - - struct FromStrVisitor { - _pd: PhantomData, - } - - impl<'de, T: FromStr> Visitor<'de> for FromStrVisitor - where - T::Err: Display, - { - type Value = T; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a valid string value") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - T::from_str(v).map_err(E::custom) - } - } -} - -// structure borrowed from serde_bytes crate: - -pub mod as_base58 { - use super::*; - - pub fn deserialize<'de, D, T>(deserializer: D) -> Result - where - D: Deserializer<'de>, - T: Deserialize<'de>, - { - Deserialize::deserialize(deserializer) - } - - pub fn serialize(inst: &T, serializer: S) -> Result - where - S: Serializer, - T: Serialize, - { - Serialize::serialize(inst, serializer) - } - - pub trait Serialize { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer; - } - - impl Serialize for Vec { - fn serialize(&self, serializer: S) -> Result - where 
- S: Serializer, - { - serializer.serialize_str(&base58::encode(self)) - } - } - - impl Serialize for SecretBytes { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&base58::encode(self)) - } - } - - impl<'a, T> Serialize for &'a T - where - T: ?Sized + Serialize, - { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - (**self).serialize(serializer) - } - } - - impl Serialize for Option - where - T: Serialize, - { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - struct Wrap(T); - - impl serde::Serialize for Wrap - where - T: Serialize, - { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.0.serialize(serializer) - } - } - - match self { - Some(val) => serializer.serialize_some(&Wrap(val)), - None => serializer.serialize_none(), - } - } - } - - pub trait Deserialize<'de>: Sized { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>; - } - - impl<'de> Deserialize<'de> for Vec { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FromBase58Visitor; - - impl<'de> Visitor<'de> for FromBase58Visitor { - type Value = Vec; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a valid base58 string") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - base58::decode(v).map_err(E::custom) - } - } - - deserializer.deserialize_any(FromBase58Visitor) - } - } - - impl<'de> Deserialize<'de> for SecretBytes { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let result = as Deserialize>::deserialize(deserializer)?; - Ok(Self::from(result)) - } - } - - impl<'de, T> Deserialize<'de> for Option - where - T: Deserialize<'de>, - { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct FromBase58Visitor { - pd: PhantomData, - } - - impl<'de, T> Visitor<'de> for FromBase58Visitor - where - T: Deserialize<'de>, - { - type Value = Option; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("an optional base58 string") - } - - fn visit_none(self) -> Result - where - E: serde::de::Error, - { - Ok(None) - } - - fn visit_some(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::deserialize(deserializer).map(Some) - } - } - - deserializer.deserialize_option(FromBase58Visitor { pd: PhantomData }) - } - } -} diff --git a/src/types.rs b/src/storage/entry.rs similarity index 74% rename from src/types.rs rename to src/storage/entry.rs index f8338541..c5d4629d 100644 --- a/src/types.rs +++ b/src/storage/entry.rs @@ -1,9 +1,11 @@ -use std::fmt::{self, Debug, Formatter}; -use std::mem; -use std::ops::Deref; -use std::str::FromStr; +use std::{ + borrow::Cow, + fmt::{self, Debug, Formatter}, + pin::Pin, + str::FromStr, +}; -use aead::Buffer; +use futures_lite::stream::{Stream, StreamExt}; use serde::{ de::{Error as SerdeError, MapAccess, SeqAccess, Visitor}, ser::SerializeMap, @@ -11,20 +13,16 @@ use serde::{ }; use zeroize::Zeroize; -use super::error::Error; use super::wql; +use crate::{crypto::buffer::SecretBytes, error::Error}; -pub type ProfileId = i64; - -pub type Expiry = chrono::DateTime; - -pub(crate) fn sorted_tags(tags: &Vec) -> Option> { +pub(crate) fn sorted_tags(tags: &Vec) -> Vec<&EntryTag> { if tags.len() > 0 { let mut tags = tags.iter().collect::>(); tags.sort(); - Some(tags) + tags } else { - None + 
Vec::new() } } @@ -41,7 +39,7 @@ pub struct Entry { pub value: SecretBytes, /// Tags associated with the entry record - pub tags: Option>, + pub tags: Vec, } impl Entry { @@ -51,7 +49,7 @@ impl Entry { category: C, name: N, value: V, - tags: Option>, + tags: Vec, ) -> Self { Self { category: category.into(), @@ -61,8 +59,8 @@ impl Entry { } } - pub(crate) fn sorted_tags(&self) -> Option> { - self.tags.as_ref().and_then(sorted_tags) + pub(crate) fn sorted_tags(&self) -> Vec<&EntryTag> { + sorted_tags(&self.tags) } } @@ -77,7 +75,7 @@ impl PartialEq for Entry { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum EntryKind { - Key = 1, + Kms = 1, Item = 2, } @@ -109,12 +107,39 @@ impl EntryTag { } } + pub(crate) fn map_ref(&self, f: impl FnOnce(&str, &str) -> (String, String)) -> Self { + match self { + Self::Encrypted(name, val) => { + let (name, val) = f(name.as_str(), val.as_str()); + Self::Encrypted(name, val) + } + Self::Plaintext(name, val) => { + let (name, val) = f(name.as_str(), val.as_str()); + Self::Plaintext(name, val) + } + } + } + + /// Setter for the tag name + pub(crate) fn update_name(&mut self, f: impl FnOnce(&mut String)) { + match self { + Self::Encrypted(name, _) | Self::Plaintext(name, _) => f(name), + } + } + /// Accessor for the tag value pub fn value(&self) -> &str { match self { Self::Encrypted(_, val) | Self::Plaintext(_, val) => val, } } + + /// Unwrap the tag value + pub(crate) fn into_value(self) -> String { + match self { + Self::Encrypted(_, value) | Self::Plaintext(_, value) => value, + } + } } impl Debug for EntryTag { @@ -134,22 +159,30 @@ impl Debug for EntryTag { } } +/// A wrapper type used for managing (de)serialization of tags #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub(crate) struct EntryTagSet(Vec); +pub(crate) struct EntryTagSet<'e>(Cow<'e, [EntryTag]>); -impl EntryTagSet { +impl EntryTagSet<'_> { #[inline] - pub fn new(tags: Vec) -> Self { - Self(tags) + pub fn into_vec(self) -> Vec { + self.0.into_owned() } +} - #[inline] - pub fn into_inner(self) -> Vec { - self.0 +impl<'e> From<&'e [EntryTag]> for EntryTagSet<'e> { + fn from(tags: &'e [EntryTag]) -> Self { + Self(Cow::Borrowed(tags)) + } +} + +impl From> for EntryTagSet<'static> { + fn from(tags: Vec) -> Self { + Self(Cow::Owned(tags)) } } -impl<'de> Deserialize<'de> for EntryTagSet { +impl<'de> Deserialize<'de> for EntryTagSet<'static> { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, @@ -157,7 +190,7 @@ impl<'de> Deserialize<'de> for EntryTagSet { struct TagSetVisitor; impl<'d> Visitor<'d> for TagSetVisitor { - type Value = EntryTagSet; + type Value = EntryTagSet<'static>; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("an object containing zero or more entry tags") @@ -195,7 +228,7 @@ impl<'de> Deserialize<'de> for EntryTagSet { } } - Ok(EntryTagSet(v)) + Ok(EntryTagSet(Cow::Owned(v))) } } @@ -252,7 +285,7 @@ impl<'de> Deserialize<'de> for EntryTagValues { } } -impl Serialize for EntryTagSet { +impl Serialize for EntryTagSet<'_> { fn serialize(&self, serializer: S) -> Result where S: Serializer, @@ -311,149 +344,6 @@ pub(crate) struct EncEntryTag { pub plaintext: bool, } -struct MaybeStr<'a>(&'a [u8]); - -impl Debug for MaybeStr<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if let Ok(sval) = std::str::from_utf8(self.0) { - write!(f, "{:?}", sval) - } else { - write!(f, "_\"{}\"", hex::encode(self.0)) - } - } -} - -/// A protected byte buffer -#[derive(Clone, Hash, PartialEq, Eq, 
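// Illustrative sketch (not part of this patch): constructing an Entry under the
// new tag representation, where `tags` is a plain Vec<EntryTag> instead of
// Option<Vec<EntryTag>>. Entry and EntryTag are imported as in the integration
// tests; it is assumed that Entry::new is public, as the round-trip test above suggests.
use aries_askar::{Entry, EntryTag};

fn entry_with_tags_sketch() -> Entry {
    Entry::new(
        "my-category",
        "my-name",
        "my-value",
        vec![
            // encrypted tags support equality matching only in tag filters
            EntryTag::Encrypted("enc".to_string(), "secret".to_string()),
            // plaintext tags are addressed with a `~` prefix in WQL queries
            EntryTag::Plaintext("plain".to_string(), "visible".to_string()),
        ],
    )
}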
PartialOrd, Ord, Zeroize)] -pub struct SecretBytes(Vec); - -impl SecretBytes { - pub(crate) fn as_buffer(&mut self) -> SecretBytesMut<'_> { - SecretBytesMut(&mut self.0) - } - - /// Try to convert the buffer value to a string reference - pub fn as_opt_str(&self) -> Option<&str> { - std::str::from_utf8(self.0.as_slice()).ok() - } - - pub(crate) fn into_vec(mut self) -> Vec { - let mut v = vec![]; // note: no heap allocation for empty vec - mem::swap(&mut v, &mut self.0); - mem::forget(self); - v - } -} - -impl Debug for SecretBytes { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if cfg!(test) { - f.debug_tuple("Secret") - .field(&MaybeStr(self.0.as_slice())) - .finish() - } else { - f.write_str("") - } - } -} - -impl AsRef<[u8]> for SecretBytes { - fn as_ref(&self) -> &[u8] { - self.0.as_slice() - } -} - -impl Deref for SecretBytes { - type Target = [u8]; - - fn deref(&self) -> &Self::Target { - self.0.as_slice() - } -} - -impl Drop for SecretBytes { - fn drop(&mut self) { - self.zeroize(); - } -} - -impl From<&[u8]> for SecretBytes { - fn from(inner: &[u8]) -> Self { - Self(inner.to_vec()) - } -} - -impl From<&str> for SecretBytes { - fn from(inner: &str) -> Self { - Self(inner.as_bytes().to_vec()) - } -} - -impl From for SecretBytes { - fn from(inner: String) -> Self { - Self(inner.into_bytes()) - } -} - -impl From> for SecretBytes { - fn from(inner: Vec) -> Self { - Self(inner) - } -} - -impl PartialEq<&[u8]> for SecretBytes { - fn eq(&self, other: &&[u8]) -> bool { - self.0.eq(other) - } -} - -impl PartialEq> for SecretBytes { - fn eq(&self, other: &Vec) -> bool { - self.0.eq(other) - } -} - -pub(crate) struct SecretBytesMut<'m>(&'m mut Vec); - -impl SecretBytesMut<'_> { - /// Obtain a large-enough SecretBytes without creating unsafe copies of - /// the contained data - pub fn reserve_extra(&mut self, extra: usize) { - let len = self.0.len(); - if extra + len > self.0.capacity() { - // allocate a new buffer and copy the secure data over - let mut buf = Vec::with_capacity(extra + len); - buf.extend_from_slice(&self.0[..]); - mem::swap(&mut buf, &mut self.0); - buf.zeroize() - } - } -} - -impl Buffer for SecretBytesMut<'_> { - fn extend_from_slice(&mut self, other: &[u8]) -> Result<(), aead::Error> { - self.reserve_extra(other.len()); - self.0.extend_from_slice(other); - Ok(()) - } - - fn truncate(&mut self, len: usize) { - self.0.truncate(len); - } -} - -impl AsRef<[u8]> for SecretBytesMut<'_> { - fn as_ref(&self) -> &[u8] { - self.0.as_slice() - } -} - -impl AsMut<[u8]> for SecretBytesMut<'_> { - fn as_mut(&mut self) -> &mut [u8] { - self.0.as_mut_slice() - } -} - /// A WQL filter used to restrict record queries #[derive(Clone, Debug, PartialEq, Eq)] #[repr(transparent)] @@ -564,6 +454,12 @@ impl TagFilter { } } +impl From for TagFilter { + fn from(query: wql::Query) -> Self { + Self { query } + } +} + impl FromStr for TagFilter { type Err = Error; @@ -573,13 +469,56 @@ impl FromStr for TagFilter { } } +/// An active record scan of a store backend +pub struct Scan<'s, T> { + stream: Option, Error>> + Send + 's>>>, + page_size: usize, +} + +impl<'s, T> Scan<'s, T> { + pub(crate) fn new(stream: S, page_size: usize) -> Self + where + S: Stream, Error>> + Send + 's, + { + Self { + stream: Some(stream.boxed()), + page_size, + } + } + + /// Fetch the next set of result rows + pub async fn fetch_next(&mut self) -> Result>, Error> { + if let Some(mut s) = self.stream.take() { + match s.try_next().await? 
{ + Some(val) => { + if val.len() == self.page_size { + self.stream.replace(s); + } + Ok(Some(val)) + } + None => Ok(None), + } + } else { + Ok(None) + } + } +} + +impl Debug for Scan<'_, S> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("Scan") + .field("page_size", &self.page_size) + .finish() + } +} + #[cfg(test)] mod tests { use super::*; #[test] fn serialize_tags() { - let tags = EntryTagSet(vec![ + let tags = EntryTagSet::from(vec![ EntryTag::Encrypted("a".to_owned(), "aval".to_owned()), EntryTag::Plaintext("b".to_owned(), "bval".to_owned()), EntryTag::Plaintext("b".to_owned(), "bval-2".to_owned()), diff --git a/src/storage/mod.rs b/src/storage/mod.rs new file mode 100644 index 00000000..f7d00491 --- /dev/null +++ b/src/storage/mod.rs @@ -0,0 +1,11 @@ +mod entry; +pub(crate) use self::entry::{EncEntryTag, EntryTagSet}; +pub use self::entry::{Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}; + +mod options; +pub(crate) use self::options::{IntoOptions, Options}; + +mod store; +pub use self::store::{Session, Store}; + +pub(crate) mod wql; diff --git a/src/options.rs b/src/storage/options.rs similarity index 95% rename from src/options.rs rename to src/storage/options.rs index 609bf082..2f9cd9c3 100644 --- a/src/options.rs +++ b/src/storage/options.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use percent_encoding::{percent_decode_str, utf8_percent_encode, NON_ALPHANUMERIC}; -use super::error::Result; +use crate::error::Error; #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Options<'a> { @@ -17,7 +17,7 @@ pub struct Options<'a> { } impl<'a> Options<'a> { - pub fn parse_uri(uri: &str) -> Result> { + pub fn parse_uri(uri: &str) -> Result, Error> { let mut fragment_and_remain = uri.splitn(2, '#'); let uri = fragment_and_remain.next().unwrap_or_default(); let fragment = percent_decode(fragment_and_remain.next().unwrap_or_default()); @@ -127,17 +127,17 @@ fn percent_encode_into(result: &mut String, s: &str) { } pub trait IntoOptions<'a> { - fn into_options(self) -> Result>; + fn into_options(self) -> Result, Error>; } impl<'a> IntoOptions<'a> for Options<'a> { - fn into_options(self) -> Result> { + fn into_options(self) -> Result, Error> { Ok(self) } } impl<'a> IntoOptions<'a> for &'a str { - fn into_options(self) -> Result> { + fn into_options(self) -> Result, Error> { Options::parse_uri(self) } } diff --git a/src/storage/store.rs b/src/storage/store.rs new file mode 100644 index 00000000..b7331cb3 --- /dev/null +++ b/src/storage/store.rs @@ -0,0 +1,444 @@ +use std::sync::Arc; + +use super::entry::{Entry, EntryKind, EntryOperation, EntryTag, Scan, TagFilter}; +use crate::{ + backend::{Backend, QueryBackend}, + error::Error, + kms::{KeyEntry, KeyParams, KmsCategory, LocalKey}, + protect::{PassKey, StoreKeyMethod}, +}; + +#[derive(Debug)] +/// An instance of an opened store +pub struct Store(B); + +impl Store { + pub(crate) fn new(inner: B) -> Self { + Self(inner) + } + + #[cfg(test)] + #[allow(unused)] + pub(crate) fn inner(&self) -> &B { + &self.0 + } + + pub(crate) fn into_inner(self) -> B { + self.0 + } +} + +impl Store { + /// Get the default profile name used when starting a scan or a session + pub fn get_profile_name(&self) -> &str { + self.0.get_profile_name() + } + + /// Replace the wrapping key on a store + pub async fn rekey( + &mut self, + method: StoreKeyMethod, + pass_key: PassKey<'_>, + ) -> Result<(), Error> { + Ok(self.0.rekey_backend(method, pass_key).await?) 
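// Illustrative sketch (not part of this patch): rotating the store key at runtime,
// following the `rekey_db` test later in this diff. Assumes `Error` is re-exported
// at the crate root alongside the other items used by the test suite.
use aries_askar::{generate_raw_store_key, Backend, Error, Store, StoreKeyMethod};

async fn rotate_store_key<B: Backend>(store: &mut Store<B>) -> Result<(), Error> {
    // generate a fresh raw (non-derived) key and re-wrap the store's keys with it
    let new_key = generate_raw_store_key(None)?;
    store.rekey(StoreKeyMethod::RawKey, new_key.as_ref()).await?;
    // in a real application `new_key` would be persisted so the store can be reopened
    Ok(())
}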
+ } + + /// Create a new profile with the given profile name + pub async fn create_profile(&self, name: Option) -> Result { + Ok(self.0.create_profile(name).await?) + } + + /// Remove an existing profile with the given profile name + pub async fn remove_profile(&self, name: String) -> Result { + Ok(self.0.remove_profile(name).await?) + } + + /// Create a new scan instance against the store + /// + /// The result will keep an open connection to the backend until it is consumed + pub async fn scan( + &self, + profile: Option, + category: String, + tag_filter: Option, + offset: Option, + limit: Option, + ) -> Result, Error> { + Ok(self + .0 + .scan( + profile, + EntryKind::Item, + category, + tag_filter, + offset, + limit, + ) + .await?) + } + + /// Create a new session against the store + pub async fn session(&self, profile: Option) -> Result, Error> { + // FIXME - add 'immediate' flag + Ok(Session::new(self.0.session(profile, false)?)) + } + + /// Create a new transaction session against the store + pub async fn transaction(&self, profile: Option) -> Result, Error> { + Ok(Session::new(self.0.session(profile, true)?)) + } + + /// Close the store instance, waiting for any shutdown procedures to complete. + pub async fn close(self) -> Result<(), Error> { + Ok(self.0.close().await?) + } + + pub(crate) async fn arc_close(self: Arc) -> Result<(), Error> { + Ok(self.0.close().await?) + } +} + +/// An active connection to the store backend +#[derive(Debug)] +pub struct Session(Q); + +impl Session { + pub(crate) fn new(inner: Q) -> Self { + Self(inner) + } +} + +impl Session { + /// Count the number of entries for a given record category + pub async fn count( + &mut self, + category: &str, + tag_filter: Option, + ) -> Result { + Ok(self.0.count(EntryKind::Item, category, tag_filter).await?) + } + + /// Retrieve the current record at `(category, name)`. + /// + /// Specify `for_update` when in a transaction to create an update lock on the + /// associated record, if supported by the store backend + pub async fn fetch( + &mut self, + category: &str, + name: &str, + for_update: bool, + ) -> Result, Error> { + Ok(self + .0 + .fetch(EntryKind::Item, category, name, for_update) + .await?) + } + + /// Retrieve all records matching the given `category` and `tag_filter`. + /// + /// Unlike `Store::scan`, this method may be used within a transaction. It should + /// not be used for very large result sets due to correspondingly large memory + /// requirements + pub async fn fetch_all( + &mut self, + category: &str, + tag_filter: Option, + limit: Option, + for_update: bool, + ) -> Result, Error> { + Ok(self + .0 + .fetch_all(EntryKind::Item, category, tag_filter, limit, for_update) + .await?) + } + + /// Insert a new record into the store + pub async fn insert( + &mut self, + category: &str, + name: &str, + value: &[u8], + tags: Option<&[EntryTag]>, + expiry_ms: Option, + ) -> Result<(), Error> { + Ok(self + .0 + .update( + EntryKind::Item, + EntryOperation::Insert, + category, + name, + Some(value), + tags, + expiry_ms, + ) + .await?) + } + + /// Remove a record from the store + pub async fn remove(&mut self, category: &str, name: &str) -> Result<(), Error> { + Ok(self + .0 + .update( + EntryKind::Item, + EntryOperation::Remove, + category, + name, + None, + None, + None, + ) + .await?) 
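// Illustrative sketch (not part of this patch): a minimal insert/fetch/remove cycle
// through the Session API above, using the public exports the integration tests
// rely on. The category, name, value and tag strings are illustrative only.
use aries_askar::{Backend, Entry, EntryTag, Error, Store};

async fn session_round_trip<B: Backend>(store: &Store<B>) -> Result<Option<Entry>, Error> {
    let mut session = store.session(None).await?;
    session
        .insert(
            "category",
            "name",
            b"value",
            Some(&[EntryTag::Encrypted("tag".to_string(), "value".to_string())]),
            None, // no expiry
        )
        .await?;
    // `for_update` only creates an update lock inside a transaction
    let row = session.fetch("category", "name", false).await?;
    session.remove("category", "name").await?;
    Ok(row)
}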
+ } + + /// Replace the value and tags of a record in the store + pub async fn replace( + &mut self, + category: &str, + name: &str, + value: &[u8], + tags: Option<&[EntryTag]>, + expiry_ms: Option, + ) -> Result<(), Error> { + Ok(self + .0 + .update( + EntryKind::Item, + EntryOperation::Replace, + category, + name, + Some(value), + tags, + expiry_ms, + ) + .await?) + } + + /// Remove all records in the store matching a given `category` and `tag_filter` + pub async fn remove_all( + &mut self, + category: &str, + tag_filter: Option, + ) -> Result { + Ok(self + .0 + .remove_all(EntryKind::Item, category, tag_filter) + .await?) + } + + /// Perform a record update + /// + /// This may correspond to an record insert, replace, or remove depending on + /// the provided `operation` + pub async fn update( + &mut self, + operation: EntryOperation, + category: &str, + name: &str, + value: Option<&[u8]>, + tags: Option<&[EntryTag]>, + expiry_ms: Option, + ) -> Result<(), Error> { + Ok(self + .0 + .update( + EntryKind::Item, + operation, + category, + name, + value, + tags, + expiry_ms, + ) + .await?) + } + + /// Insert a local key instance into the store + pub async fn insert_key( + &mut self, + name: &str, + key: &LocalKey, + metadata: Option<&str>, + tags: Option<&[EntryTag]>, + expiry_ms: Option, + ) -> Result<(), Error> { + let data = key.encode()?; + let params = KeyParams { + metadata: metadata.map(str::to_string), + reference: None, + data: Some(data), + }; + let value = params.to_bytes()?; + let mut ins_tags = Vec::with_capacity(10); + let alg = key.algorithm().as_str(); + if !alg.is_empty() { + ins_tags.push(EntryTag::Encrypted("alg".to_string(), alg.to_string())); + } + let thumbs = key.to_jwk_thumbprints()?; + for thumb in thumbs { + ins_tags.push(EntryTag::Encrypted("thumb".to_string(), thumb)); + } + if let Some(tags) = tags { + for t in tags { + ins_tags.push(t.map_ref(|k, v| (format!("user:{}", k), v.to_string()))); + } + } + self.0 + .update( + EntryKind::Kms, + EntryOperation::Insert, + KmsCategory::CryptoKey.as_str(), + name, + Some(value.as_ref()), + Some(ins_tags.as_slice()), + expiry_ms, + ) + .await?; + Ok(()) + } + + /// Fetch an existing key from the store + /// + /// Specify `for_update` when in a transaction to create an update lock on the + /// associated record, if supported by the store backend + pub async fn fetch_key( + &mut self, + name: &str, + for_update: bool, + ) -> Result, Error> { + Ok( + if let Some(row) = self + .0 + .fetch( + EntryKind::Kms, + KmsCategory::CryptoKey.as_str(), + name, + for_update, + ) + .await? + { + Some(KeyEntry::from_entry(row)?) + } else { + None + }, + ) + } + + /// Retrieve all keys matching the given filters. 
+ pub async fn fetch_all_keys( + &mut self, + algorithm: Option<&str>, + thumbprint: Option<&str>, + tag_filter: Option, + limit: Option, + for_update: bool, + ) -> Result, Error> { + let mut query_parts = Vec::with_capacity(3); + if let Some(query) = tag_filter.map(|f| f.query) { + query_parts.push(TagFilter::from( + query + .map_names(|mut k| { + k.replace_range(0..0, "user:"); + Result::<_, ()>::Ok(k) + }) + .unwrap(), + )); + } + if let Some(algorithm) = algorithm { + query_parts.push(TagFilter::is_eq("alg", algorithm)); + } + if let Some(thumbprint) = thumbprint { + query_parts.push(TagFilter::is_eq("thumb", thumbprint)); + } + let tag_filter = if query_parts.is_empty() { + None + } else { + Some(TagFilter::all_of(query_parts)) + }; + let rows = self + .0 + .fetch_all( + EntryKind::Kms, + KmsCategory::CryptoKey.as_str(), + tag_filter, + limit, + for_update, + ) + .await?; + let mut entries = Vec::with_capacity(rows.len()); + for row in rows { + entries.push(KeyEntry::from_entry(row)?) + } + Ok(entries) + } + + /// Remove an existing key from the store + pub async fn remove_key(&mut self, name: &str) -> Result<(), Error> { + self.0 + .update( + EntryKind::Kms, + EntryOperation::Remove, + KmsCategory::CryptoKey.as_str(), + name, + None, + None, + None, + ) + .await + } + + /// Replace the metadata and tags on an existing key in the store + pub async fn update_key( + &mut self, + name: &str, + metadata: Option<&str>, + tags: Option<&[EntryTag]>, + expiry_ms: Option, + ) -> Result<(), Error> { + let row = self + .0 + .fetch(EntryKind::Kms, KmsCategory::CryptoKey.as_str(), name, true) + .await? + .ok_or_else(|| err_msg!(NotFound, "Key entry not found"))?; + + let mut params = KeyParams::from_slice(&row.value)?; + params.metadata = metadata.map(str::to_string); + let value = params.to_bytes()?; + + let mut upd_tags = Vec::with_capacity(10); + if let Some(tags) = tags { + for t in tags { + upd_tags.push(t.map_ref(|k, v| (format!("user:{}", k), v.to_string()))); + } + } + for t in row.tags { + if !t.name().starts_with("user:") { + upd_tags.push(t); + } + } + + self.0 + .update( + EntryKind::Kms, + EntryOperation::Replace, + KmsCategory::CryptoKey.as_str(), + name, + Some(value.as_ref()), + Some(upd_tags.as_slice()), + expiry_ms, + ) + .await?; + + Ok(()) + } + + /// Commit the pending transaction + pub async fn commit(self) -> Result<(), Error> { + Ok(self.0.close(true).await?) + } + + /// Roll back the pending transaction + pub async fn rollback(self) -> Result<(), Error> { + Ok(self.0.close(false).await?) 
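// Illustrative sketch (not part of this patch): storing a key and finding it again
// via the tags written by `insert_key` ("alg" and "thumb", with user tags stored
// under a "user:" prefix). The `aries_askar::kms::{KeyEntry, LocalKey}` paths are
// an assumption based on the `crate::kms` imports above.
use aries_askar::{
    kms::{KeyEntry, LocalKey},
    Backend, Error, Store,
};

async fn store_and_find_key<B: Backend>(
    store: &Store<B>,
    key: &LocalKey, // created elsewhere through the kms module
) -> Result<Vec<KeyEntry>, Error> {
    let mut session = store.session(None).await?;
    session
        .insert_key("my-key", key, Some("main signing key"), None, None)
        .await?;
    // filter on the "alg" tag recorded by insert_key; the thumbprint filter is left unset
    let found = session
        .fetch_all_keys(Some(key.algorithm().as_str()), None, None, None, false)
        .await?;
    Ok(found)
}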
+ } +} diff --git a/src/storage/wql/mod.rs b/src/storage/wql/mod.rs new file mode 100644 index 00000000..a34d1a49 --- /dev/null +++ b/src/storage/wql/mod.rs @@ -0,0 +1,4 @@ +pub use indy_wql::{AbstractQuery, Query}; + +pub mod sql; +pub mod tags; diff --git a/src/wql/sql.rs b/src/storage/wql/sql.rs similarity index 91% rename from src/wql/sql.rs rename to src/storage/wql/sql.rs index 2fdb3a4c..3d548c52 100644 --- a/src/wql/sql.rs +++ b/src/storage/wql/sql.rs @@ -3,7 +3,7 @@ use std::marker::PhantomData; use itertools::Itertools; use super::tags::{CompareOp, ConjunctionOp, TagName, TagQueryEncoder}; -use crate::error::Result; +use crate::error::Error; pub struct TagSqlEncoder<'e, EN, EV> { pub enc_name: EN, @@ -14,8 +14,8 @@ pub struct TagSqlEncoder<'e, EN, EV> { impl<'e, EN, EV> TagSqlEncoder<'e, EN, EV> where - EN: Fn(&str) -> Result> + 'e, - EV: Fn(&str) -> Result> + 'e, + EN: Fn(&str) -> Result, Error> + 'e, + EV: Fn(&str) -> Result, Error> + 'e, { pub fn new(enc_name: EN, enc_value: EV) -> Self { Self { @@ -29,19 +29,19 @@ where impl<'e, EN, EV> TagQueryEncoder for TagSqlEncoder<'e, EN, EV> where - EN: Fn(&str) -> Result> + 'e, - EV: Fn(&str) -> Result> + 'e, + EN: Fn(&str) -> Result, Error> + 'e, + EV: Fn(&str) -> Result, Error> + 'e, { type Arg = Vec; type Clause = String; - fn encode_name(&mut self, name: &TagName) -> Result { + fn encode_name(&mut self, name: &TagName) -> Result { Ok(match name { TagName::Encrypted(name) | TagName::Plaintext(name) => (&self.enc_name)(name)?, }) } - fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result { + fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result { Ok(if is_plaintext { value.as_bytes().to_vec() } else { @@ -55,7 +55,7 @@ where enc_name: Self::Arg, enc_value: Self::Arg, is_plaintext: bool, - ) -> Result> { + ) -> Result, Error> { let idx = self.arguments.len(); let (op_prefix, match_prefix) = match (is_plaintext, op.as_sql_str_for_prefix()) { (false, Some(pfx_op)) if enc_value.len() > 12 => { @@ -93,10 +93,8 @@ where enc_values: Vec, is_plaintext: bool, negate: bool, - ) -> Result> { - let args_in = std::iter::repeat("$$") - .take(enc_values.len()) - .intersperse(", ") + ) -> Result, Error> { + let args_in = Itertools::intersperse(std::iter::repeat("$$").take(enc_values.len()), ", ") .collect::(); let query = format!( "i.id IN (SELECT item_id FROM items_tags WHERE name = $$ AND value {} ({}) AND plaintext = {})", @@ -114,7 +112,7 @@ where enc_name: Self::Arg, is_plaintext: bool, negate: bool, - ) -> Result> { + ) -> Result, Error> { let query = format!( "i.id {} (SELECT item_id FROM items_tags WHERE name = $$ AND plaintext = {})", if negate { "NOT IN" } else { "IN" }, @@ -128,7 +126,7 @@ where &mut self, op: ConjunctionOp, clauses: Vec, - ) -> Result> { + ) -> Result, Error> { let qc = clauses.len(); if qc == 0 { if op == ConjunctionOp::Or { diff --git a/src/wql/tags.rs b/src/storage/wql/tags.rs similarity index 90% rename from src/wql/tags.rs rename to src/storage/wql/tags.rs index c28c793f..f877f4de 100644 --- a/src/wql/tags.rs +++ b/src/storage/wql/tags.rs @@ -1,13 +1,13 @@ use super::{AbstractQuery, Query}; -use crate::error::Result; +use crate::error::Error; pub type TagQuery = AbstractQuery; -pub fn tag_query(query: Query) -> Result { +pub fn tag_query(query: Query) -> Result { let result = query - .map_names(&mut |k| { + .map_names(|k| { if k.starts_with("~") { - Ok(TagName::Plaintext(k[1..].to_string())) + Result::<_, ()>::Ok(TagName::Plaintext(k[1..].to_string())) } else { 
Ok(TagName::Encrypted(k)) } @@ -17,7 +17,7 @@ pub fn tag_query(query: Query) -> Result { Ok(result) } -pub fn validate_tag_query(_query: &TagQuery) -> Result<()> { +pub fn validate_tag_query(_query: &TagQuery) -> Result<(), Error> { // FIXME only equality comparison supported for encrypted keys Ok(()) } @@ -47,16 +47,16 @@ pub trait TagQueryEncoder { type Arg; type Clause; - fn encode_query(&mut self, query: &TagQuery) -> Result> + fn encode_query(&mut self, query: &TagQuery) -> Result, Error> where Self: Sized, { encode_tag_query(query, self, false) } - fn encode_name(&mut self, name: &TagName) -> Result; + fn encode_name(&mut self, name: &TagName) -> Result; - fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result; + fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result; fn encode_op_clause( &mut self, @@ -64,7 +64,7 @@ pub trait TagQueryEncoder { enc_name: Self::Arg, enc_value: Self::Arg, is_plaintext: bool, - ) -> Result>; + ) -> Result, Error>; fn encode_in_clause( &mut self, @@ -72,20 +72,20 @@ pub trait TagQueryEncoder { enc_values: Vec, is_plaintext: bool, negate: bool, - ) -> Result>; + ) -> Result, Error>; fn encode_exist_clause( &mut self, enc_name: Self::Arg, is_plaintext: bool, negate: bool, - ) -> Result>; + ) -> Result, Error>; fn encode_conj_clause( &mut self, op: ConjunctionOp, clauses: Vec, - ) -> Result>; + ) -> Result, Error>; } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -160,7 +160,7 @@ impl ConjunctionOp { } } -fn encode_tag_query(query: &TagQuery, enc: &mut E, negate: bool) -> Result> +fn encode_tag_query(query: &TagQuery, enc: &mut E, negate: bool) -> Result, Error> where E: TagQueryEncoder, { @@ -202,7 +202,7 @@ fn encode_tag_op( value: &String, enc: &mut E, negate: bool, -) -> Result> +) -> Result, Error> where E: TagQueryEncoder, { @@ -222,7 +222,7 @@ fn encode_tag_in( values: &Vec, enc: &mut E, negate: bool, -) -> Result> +) -> Result, Error> where E: TagQueryEncoder, { @@ -234,12 +234,12 @@ where let enc_values = values .into_iter() .map(|val| enc.encode_value(val, is_plaintext)) - .collect::>>()?; + .collect::, Error>>()?; enc.encode_in_clause(enc_name, enc_values, is_plaintext, negate) } -fn encode_tag_exist(names: &[TagName], enc: &mut E, negate: bool) -> Result> +fn encode_tag_exist(names: &[TagName], enc: &mut E, negate: bool) -> Result, Error> where E: TagQueryEncoder, { @@ -270,7 +270,7 @@ fn encode_tag_conj( subqueries: &Vec, enc: &mut E, negate: bool, -) -> Result> +) -> Result, Error> where E: TagQueryEncoder, { @@ -278,7 +278,7 @@ where let clauses = subqueries .into_iter() .flat_map(|q| encode_tag_query(q, enc, negate).transpose()) - .collect::>>()?; + .collect::, Error>>()?; enc.encode_conj_clause(op, clauses) } @@ -288,7 +288,6 @@ mod tests { use itertools::Itertools; use super::*; - use crate::wql::Query; struct TestEncoder {} @@ -296,11 +295,11 @@ mod tests { type Arg = String; type Clause = String; - fn encode_name(&mut self, name: &TagName) -> Result { + fn encode_name(&mut self, name: &TagName) -> Result { Ok(name.to_string()) } - fn encode_value(&mut self, value: &String, _is_plaintext: bool) -> Result { + fn encode_value(&mut self, value: &String, _is_plaintext: bool) -> Result { Ok(value.clone()) } @@ -310,7 +309,7 @@ mod tests { name: Self::Arg, value: Self::Arg, _is_plaintext: bool, - ) -> Result> { + ) -> Result, Error> { Ok(Some(format!("{} {} {}", name, op.as_sql_str(), value))) } @@ -319,7 +318,7 @@ mod tests { name: Self::Arg, _is_plaintext: bool, negate: bool, - ) -> Result> { + ) -> 
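// Illustrative sketch (not part of this patch): building tag filters against the
// name mapping above, where a leading `~` selects a plaintext tag and any other
// name an encrypted tag (equality matching only). `is_eq` and `all_of` appear
// earlier in this diff; the WQL JSON form accepted by FromStr is an assumption.
use std::str::FromStr;

use aries_askar::{Error, TagFilter};

fn build_filters() -> Result<(TagFilter, TagFilter), Error> {
    // programmatic form: encrypted "enc" tag AND plaintext "plain" tag
    let combined = TagFilter::all_of(vec![
        TagFilter::is_eq("enc", "secret"),
        TagFilter::is_eq("~plain", "visible"),
    ]);
    // assumed equivalent WQL form, parsed from JSON
    let parsed = TagFilter::from_str(r#"{"enc": "secret", "~plain": "visible"}"#)?;
    Ok((combined, parsed))
}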
Result, Error> { let op = if negate { "NOT EXIST" } else { "EXIST" }; Ok(Some(format!("{}({})", op, name))) } @@ -330,13 +329,10 @@ mod tests { values: Vec, _is_plaintext: bool, negate: bool, - ) -> Result> { + ) -> Result, Error> { let op = if negate { "NOT IN" } else { "IN" }; - let value = values - .iter() - .map(|v| v.as_str()) - .intersperse(", ") - .collect::(); + let value = + Itertools::intersperse(values.iter().map(|v| v.as_str()), ", ").collect::(); Ok(Some(format!("{} {} ({})", name, op, value))) } @@ -344,15 +340,13 @@ mod tests { &mut self, op: ConjunctionOp, clauses: Vec, - ) -> Result> { + ) -> Result, Error> { let mut r = String::new(); r.push_str("("); - r.extend( - clauses - .iter() - .map(String::as_str) - .intersperse(op.as_sql_str()), - ); + r.extend(Itertools::intersperse( + clauses.iter().map(String::as_str), + op.as_sql_str(), + )); r.push_str(")"); Ok(Some(r)) } diff --git a/src/store.rs b/src/store.rs deleted file mode 100644 index f62e4c0e..00000000 --- a/src/store.rs +++ /dev/null @@ -1,675 +0,0 @@ -use std::fmt::{self, Debug, Formatter}; -use std::future::Future; -use std::pin::Pin; -use std::str::FromStr; -use std::sync::Arc; - -use futures_lite::stream::{Stream, StreamExt}; -use indy_utils::{ - keys::{EncodedVerKey, KeyType as IndyKeyAlg, PrivateKey}, - pack::{pack_message, unpack_message, KeyLookup}, - Validatable, -}; -use zeroize::Zeroize; - -use super::error::Result; -use super::future::BoxFuture; -use super::keys::{wrap::WrapKeyMethod, KeyAlg, KeyCategory, KeyEntry, KeyParams, PassKey}; -use super::types::{Entry, EntryKind, EntryOperation, EntryTag, TagFilter}; - -/// Represents a generic backend implementation -pub trait Backend: Send + Sync { - /// The type of session managed by this backend - type Session: QueryBackend; - - /// Create a new profile - fn create_profile(&self, name: Option) -> BoxFuture<'_, Result>; - - /// Get the name of the active profile - fn get_profile_name(&self) -> &str; - - /// Remove an existing profile - fn remove_profile(&self, name: String) -> BoxFuture<'_, Result>; - - /// Create a [`Scan`] against the store - fn scan( - &self, - profile: Option, - kind: EntryKind, - category: String, - tag_filter: Option, - offset: Option, - limit: Option, - ) -> BoxFuture<'_, Result>>; - - /// Create a new session against the store - fn session(&self, profile: Option, transaction: bool) -> Result; - - /// Replace the wrapping key of the store - fn rekey_backend( - &mut self, - method: WrapKeyMethod, - key: PassKey<'_>, - ) -> BoxFuture<'_, Result<()>>; - - /// Close the store instance - fn close(&self) -> BoxFuture<'_, Result<()>>; -} - -/// Create, open, or remove a generic backend implementation -pub trait ManageBackend<'a> { - /// The type of store being managed - type Store; - - /// Open an existing store - fn open_backend( - self, - method: Option, - pass_key: PassKey<'a>, - profile: Option<&'a str>, - ) -> BoxFuture<'a, Result>; - - /// Provision a new store - fn provision_backend( - self, - method: WrapKeyMethod, - pass_key: PassKey<'a>, - profile: Option<&'a str>, - recreate: bool, - ) -> BoxFuture<'a, Result>; - - /// Remove an existing store - fn remove_backend(self) -> BoxFuture<'a, Result>; -} - -/// Query from a generic backend implementation -pub trait QueryBackend: Send { - /// Count the number of matching records in the store - fn count<'q>( - &'q mut self, - kind: EntryKind, - category: &'q str, - tag_filter: Option, - ) -> BoxFuture<'q, Result>; - - /// Fetch a single record from the store by category and name - fn 
fetch<'q>( - &'q mut self, - kind: EntryKind, - category: &'q str, - name: &'q str, - for_update: bool, - ) -> BoxFuture<'q, Result>>; - - /// Fetch all matching records from the store - fn fetch_all<'q>( - &'q mut self, - kind: EntryKind, - category: &'q str, - tag_filter: Option, - limit: Option, - for_update: bool, - ) -> BoxFuture<'q, Result>>; - - /// Remove all matching records from the store - fn remove_all<'q>( - &'q mut self, - kind: EntryKind, - category: &'q str, - tag_filter: Option, - ) -> BoxFuture<'q, Result>; - - /// Insert or replace a record in the store - fn update<'q>( - &'q mut self, - kind: EntryKind, - operation: EntryOperation, - category: &'q str, - name: &'q str, - value: Option<&'q [u8]>, - tags: Option<&'q [EntryTag]>, - expiry_ms: Option, - ) -> BoxFuture<'q, Result<()>>; - - /// Close the current store session - fn close(self, commit: bool) -> BoxFuture<'static, Result<()>>; -} - -#[derive(Debug)] -/// An instance of an opened store -pub struct Store(B); - -impl Store { - pub(crate) fn new(inner: B) -> Self { - Self(inner) - } - - #[cfg(test)] - #[allow(unused)] - pub(crate) fn inner(&self) -> &B { - &self.0 - } - - pub(crate) fn into_inner(self) -> B { - self.0 - } -} - -impl Store { - /// Get the default profile name used when starting a scan or a session - pub fn get_profile_name(&self) -> &str { - self.0.get_profile_name() - } - - /// Replace the wrapping key on a store - pub async fn rekey(&mut self, method: WrapKeyMethod, pass_key: PassKey<'_>) -> Result<()> { - Ok(self.0.rekey_backend(method, pass_key).await?) - } - - /// Create a new profile with the given profile name - pub async fn create_profile(&self, name: Option) -> Result { - Ok(self.0.create_profile(name).await?) - } - - /// Remove an existing profile with the given profile name - pub async fn remove_profile(&self, name: String) -> Result { - Ok(self.0.remove_profile(name).await?) - } - - /// Create a new scan instance against the store - /// - /// The result will keep an open connection to the backend until it is consumed - pub async fn scan( - &self, - profile: Option, - category: String, - tag_filter: Option, - offset: Option, - limit: Option, - ) -> Result> { - Ok(self - .0 - .scan( - profile, - EntryKind::Item, - category, - tag_filter, - offset, - limit, - ) - .await?) - } - - /// Create a new session against the store - pub async fn session(&self, profile: Option) -> Result> { - // FIXME - add 'immediate' flag - Ok(Session::new(self.0.session(profile, false)?)) - } - - /// Create a new transaction session against the store - pub async fn transaction(&self, profile: Option) -> Result> { - Ok(Session::new(self.0.session(profile, true)?)) - } - - /// Close the store instance, waiting for any shutdown procedures to complete. - pub async fn close(self) -> Result<()> { - Ok(self.0.close().await?) - } - - pub(crate) async fn arc_close(self: Arc) -> Result<()> { - Ok(self.0.close().await?) - } -} - -/// An active connection to the store backend -#[derive(Debug)] -pub struct Session(Q); - -impl Session { - pub(crate) fn new(inner: Q) -> Self { - Self(inner) - } -} - -impl Session { - /// Count the number of entries for a given record category - pub async fn count(&mut self, category: &str, tag_filter: Option) -> Result { - Ok(self.0.count(EntryKind::Item, category, tag_filter).await?) - } - - /// Retrieve the current record at `(category, name)`. 
- /// - /// Specify `for_update` when in a transaction to create an update lock on the - /// associated record, if supported by the store backend - pub async fn fetch( - &mut self, - category: &str, - name: &str, - for_update: bool, - ) -> Result> { - Ok(self - .0 - .fetch(EntryKind::Item, category, name, for_update) - .await?) - } - - /// Retrieve all records matching the given `category` and `tag_filter`. - /// - /// Unlike `Store::scan`, this method may be used within a transaction. It should - /// not be used for very large result sets due to correspondingly large memory - /// requirements - pub async fn fetch_all( - &mut self, - category: &str, - tag_filter: Option, - limit: Option, - for_update: bool, - ) -> Result> { - Ok(self - .0 - .fetch_all(EntryKind::Item, category, tag_filter, limit, for_update) - .await?) - } - - /// Insert a new record into the store - pub async fn insert( - &mut self, - category: &str, - name: &str, - value: &[u8], - tags: Option<&[EntryTag]>, - expiry_ms: Option, - ) -> Result<()> { - Ok(self - .0 - .update( - EntryKind::Item, - EntryOperation::Insert, - category, - name, - Some(value), - tags, - expiry_ms, - ) - .await?) - } - - /// Remove a record from the store - pub async fn remove(&mut self, category: &str, name: &str) -> Result<()> { - Ok(self - .0 - .update( - EntryKind::Item, - EntryOperation::Remove, - category, - name, - None, - None, - None, - ) - .await?) - } - - /// Replace the value and tags of a record in the store - pub async fn replace( - &mut self, - category: &str, - name: &str, - value: &[u8], - tags: Option<&[EntryTag]>, - expiry_ms: Option, - ) -> Result<()> { - Ok(self - .0 - .update( - EntryKind::Item, - EntryOperation::Replace, - category, - name, - Some(value), - tags, - expiry_ms, - ) - .await?) - } - - /// Remove all records in the store matching a given `category` and `tag_filter` - pub async fn remove_all( - &mut self, - category: &str, - tag_filter: Option, - ) -> Result { - Ok(self - .0 - .remove_all(EntryKind::Item, category, tag_filter) - .await?) - } - - /// Perform a record update - /// - /// This may correspond to an record insert, replace, or remove depending on - /// the provided `operation` - pub async fn update( - &mut self, - operation: EntryOperation, - category: &str, - name: &str, - value: Option<&[u8]>, - tags: Option<&[EntryTag]>, - expiry_ms: Option, - ) -> Result<()> { - Ok(self - .0 - .update( - EntryKind::Item, - operation, - category, - name, - value, - tags, - expiry_ms, - ) - .await?) - } - - /// Create a new keypair in the store - pub async fn create_keypair( - &mut self, - alg: KeyAlg, - metadata: Option<&str>, - seed: Option<&[u8]>, - tags: Option<&[EntryTag]>, - // backend - ) -> Result { - match alg { - KeyAlg::ED25519 => (), - _ => return Err(err_msg!(Unsupported, "Unsupported key algorithm")), - } - - let sk = match seed { - None => PrivateKey::generate(Some(IndyKeyAlg::ED25519)), - Some(s) => PrivateKey::from_seed(s), - } - .map_err(err_map!(Unexpected, "Error generating keypair"))?; - - let pk = sk - .public_key() - .map_err(err_map!(Unexpected, "Error generating public key"))?; - - let category = KeyCategory::KeyPair; - let ident = pk - .as_base58() - .map_err(err_map!(Unexpected, "Error encoding public key"))? 
- .long_form(); - - let params = KeyParams { - alg, - metadata: metadata.map(str::to_string), - reference: None, - pub_key: Some(pk.key_bytes()), - prv_key: Some(sk.key_bytes().into()), - }; - let mut value = params.to_vec()?; - - self.0 - .update( - EntryKind::Key, - EntryOperation::Insert, - category.as_str(), - &ident, - Some(value.as_slice()), - tags.clone(), - None, - ) - .await?; - value.zeroize(); - - Ok(KeyEntry { - category, - ident, - params, - tags: tags.map(|t| t.to_vec()), - }) - } - - // pub async fn import_key(&self, key: KeyEntry) -> Result<()> { - // Ok(()) - // } - - /// Fetch an existing key from the store - /// - /// Specify `for_update` when in a transaction to create an update lock on the - /// associated record, if supported by the store backend - pub async fn fetch_key( - &mut self, - category: KeyCategory, - ident: &str, - for_update: bool, - ) -> Result> { - // normalize ident - let ident = EncodedVerKey::from_str(&ident) - .and_then(|k| k.as_base58()) - .map_err(err_map!("Invalid key"))? - .long_form(); - - Ok( - if let Some(row) = self - .0 - .fetch(EntryKind::Key, category.as_str(), &ident, for_update) - .await? - { - let params = KeyParams::from_slice(&row.value)?; - Some(KeyEntry { - category: KeyCategory::from_str(&row.category).unwrap(), - ident: row.name.clone(), - params, - tags: row.tags.clone(), - }) - } else { - None - }, - ) - } - - /// Remove an existing key from the store - pub async fn remove_key(&mut self, category: KeyCategory, ident: &str) -> Result<()> { - // normalize ident - let ident = EncodedVerKey::from_str(&ident) - .and_then(|k| k.as_base58()) - .map_err(err_map!("Invalid key"))? - .long_form(); - - self.0 - .update( - EntryKind::Key, - EntryOperation::Remove, - category.as_str(), - &ident, - None, - None, - None, - ) - .await - } - - // pub async fn scan_keys( - // &self, - // profile: Option, - // category: String, - // options: EntryFetchOptions, - // tag_filter: Option, - // offset: Option, - // max_rows: Option, - // ) -> Result> { - // unimplemented!(); - // } - - /// Replace the metadata and tags on an existing key in the store - pub async fn update_key( - &mut self, - category: KeyCategory, - ident: &str, - metadata: Option<&str>, - tags: Option<&[EntryTag]>, - ) -> Result<()> { - // normalize ident - let ident = EncodedVerKey::from_str(&ident) - .and_then(|k| k.as_base58()) - .map_err(err_map!("Invalid key"))? - .long_form(); - - let row = self - .0 - .fetch(EntryKind::Key, category.as_str(), &ident, true) - .await? - .ok_or_else(|| err_msg!(NotFound, "Key entry not found"))?; - - let mut params = KeyParams::from_slice(&row.value)?; - params.metadata = metadata.map(str::to_string); - let mut value = params.to_vec()?; - - self.0 - .update( - EntryKind::Key, - EntryOperation::Replace, - category.as_str(), - &ident, - Some(&value), - tags, - None, - ) - .await?; - value.zeroize(); - - Ok(()) - } - - /// Sign a message using an existing keypair in the store identified by `key_ident` - pub async fn sign_message(&mut self, key_ident: &str, data: &[u8]) -> Result> { - if let Some(key) = self - .fetch_key(KeyCategory::KeyPair, key_ident, false) - .await? 
- { - let sk = key.private_key()?; - sk.sign(&data) - .map_err(|e| err_msg!(Unexpected, "Signature error: {}", e)) - } else { - return Err(err_msg!(NotFound, "Unknown key")); - } - } - - /// Pack a message using an existing keypair in the store identified by `key_ident` - /// - /// This uses the `pack` algorithm defined for DIDComm v1 - pub async fn pack_message( - &mut self, - recipient_vks: impl IntoIterator, - from_key_ident: Option<&str>, - data: &[u8], - ) -> Result> { - let sign_key = if let Some(ident) = from_key_ident { - let sk = self - .fetch_key(KeyCategory::KeyPair, ident, false) - .await? - .ok_or_else(|| err_msg!(NotFound, "Unknown sender key"))?; - Some(sk.private_key()?) - } else { - None - }; - let vks = recipient_vks - .into_iter() - .map(|vk| { - let vk = - EncodedVerKey::from_str(&vk).map_err(err_map!("Invalid recipient verkey"))?; - vk.validate()?; - Ok(vk) - }) - .collect::>>()?; - Ok(pack_message(data, vks, sign_key).map_err(err_map!("Error packing message"))?) - } - - /// Unpack a DIDComm v1 message, automatically looking up any associated keypairs - pub async fn unpack_message( - &mut self, - data: &[u8], - ) -> Result<(Vec, EncodedVerKey, Option)> { - match unpack_message(data, self).await { - Ok((message, recip, sender)) => Ok((message, recip, sender)), - Err(err) => Err(err_msg!(Unexpected, "Error unpacking message").with_cause(err)), - } - } - - /// Commit the pending transaction - pub async fn commit(self) -> Result<()> { - Ok(self.0.close(true).await?) - } - - /// Roll back the pending transaction - pub async fn rollback(self) -> Result<()> { - Ok(self.0.close(false).await?) - } -} - -impl<'a, Q: QueryBackend> KeyLookup<'a> for &'a mut Session { - fn find<'f>( - self, - keys: &'f Vec, - ) -> std::pin::Pin> + Send + 'f>> - where - 'a: 'f, - { - Box::pin(async move { - for (idx, key) in keys.into_iter().enumerate() { - if let Ok(Some(key)) = self.fetch_key(KeyCategory::KeyPair, &key.key, false).await { - if let Ok(sk) = key.private_key() { - return Some((idx, sk)); - } - } - } - return None; - }) - } -} - -/// An active record scan of a store backend -pub struct Scan<'s, T> { - stream: Option>> + Send + 's>>>, - page_size: usize, -} - -impl<'s, T> Scan<'s, T> { - pub(crate) fn new(stream: S, page_size: usize) -> Self - where - S: Stream>> + Send + 's, - { - Self { - stream: Some(stream.boxed()), - page_size, - } - } - - /// Fetch the next set of result rows - pub async fn fetch_next(&mut self) -> Result>> { - if let Some(mut s) = self.stream.take() { - match s.try_next().await? { - Some(val) => { - if val.len() == self.page_size { - self.stream.replace(s); - } - Ok(Some(val)) - } - None => Ok(None), - } - } else { - Ok(None) - } - } -} - -impl Debug for Scan<'_, S> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("Scan") - .field("page_size", &self.page_size) - .finish() - } -} diff --git a/src/wql/mod.rs b/src/wql/mod.rs deleted file mode 100644 index 4b007b19..00000000 --- a/src/wql/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub use indy_utils::wql::{AbstractQuery, Query}; - -pub mod sql; -pub mod tags; diff --git a/tests/backends.rs b/tests/backends.rs index 9f43e4f0..b601a8b8 100644 --- a/tests/backends.rs +++ b/tests/backends.rs @@ -105,37 +105,37 @@ macro_rules! 
backend_tests { }) } - #[test] - fn keypair_create_fetch() { - block_on(async { - let db = $init.await; - super::utils::db_keypair_create_fetch(&db).await; - }) - } - - #[test] - fn keypair_sign_verify() { - block_on(async { - let db = $init.await; - super::utils::db_keypair_sign_verify(&db).await; - }) - } - - #[test] - fn keypair_pack_unpack_anon() { - block_on(async { - let db = $init.await; - super::utils::db_keypair_pack_unpack_anon(&db).await; - }) - } - - #[test] - fn keypair_pack_unpack_auth() { - block_on(async { - let db = $init.await; - super::utils::db_keypair_pack_unpack_auth(&db).await; - }) - } + // #[test] + // fn keypair_create_fetch() { + // block_on(async { + // let db = $init.await; + // super::utils::db_keypair_create_fetch(&db).await; + // }) + // } + + // #[test] + // fn keypair_sign_verify() { + // block_on(async { + // let db = $init.await; + // super::utils::db_keypair_sign_verify(&db).await; + // }) + // } + + // #[test] + // fn keypair_pack_unpack_anon() { + // block_on(async { + // let db = $init.await; + // super::utils::db_keypair_pack_unpack_anon(&db).await; + // }) + // } + + // #[test] + // fn keypair_pack_unpack_auth() { + // block_on(async { + // let db = $init.await; + // super::utils::db_keypair_pack_unpack_auth(&db).await; + // }) + // } #[test] fn txn_rollback() { @@ -181,8 +181,8 @@ macro_rules! backend_tests { #[cfg(feature = "sqlite")] mod sqlite { - use aries_askar::sqlite::{SqliteStore, SqliteStoreOptions}; - use aries_askar::{generate_raw_wrap_key, ManageBackend, Store, WrapKeyMethod}; + use aries_askar::backend::sqlite::{SqliteStore, SqliteStoreOptions}; + use aries_askar::{generate_raw_store_key, ManageBackend, Store, StoreKeyMethod}; use std::path::Path; #[test] @@ -195,7 +195,7 @@ mod sqlite { "Oops, should be a unique filename" ); - let key = generate_raw_wrap_key(None).expect("Error creating raw key"); + let key = generate_raw_store_key(None).expect("Error creating raw key"); block_on(async move { assert_eq!( SqliteStoreOptions::new(fname.as_str()) @@ -208,14 +208,14 @@ mod sqlite { let store = SqliteStoreOptions::new(fname.as_str()) .expect("Error initializing sqlite store options") - .provision_backend(WrapKeyMethod::RawKey, key.as_ref(), None, false) + .provision_backend(StoreKeyMethod::RawKey, key.as_ref(), None, false) .await .expect("Error provisioning sqlite store"); assert_eq!(Path::new(&fname).exists(), true); let store2 = SqliteStoreOptions::new(fname.as_str()) .expect("Error initializing sqlite store options") - .open_backend(Some(WrapKeyMethod::RawKey), key.as_ref(), None) + .open_backend(Some(StoreKeyMethod::RawKey), key.as_ref(), None) .await .expect("Error opening sqlite store"); store2.close().await.expect("Error closing sqlite store"); @@ -239,25 +239,25 @@ mod sqlite { fn rekey_db() { env_logger::builder().is_test(true).try_init().unwrap_or(()); let fname = format!("sqlite-test-{}.db", uuid::Uuid::new_v4().to_string()); - let key1 = generate_raw_wrap_key(None).expect("Error creating raw key"); - let key2 = generate_raw_wrap_key(None).expect("Error creating raw key"); + let key1 = generate_raw_store_key(None).expect("Error creating raw key"); + let key2 = generate_raw_store_key(None).expect("Error creating raw key"); assert_ne!(key1, key2); block_on(async move { let mut store = SqliteStoreOptions::new(fname.as_str()) .expect("Error initializing sqlite store options") - .provision_backend(WrapKeyMethod::RawKey, key1.as_ref(), None, false) + .provision_backend(StoreKeyMethod::RawKey, key1.as_ref(), None, false) .await 
.expect("Error provisioning sqlite store"); store - .rekey(WrapKeyMethod::RawKey, key2.as_ref()) + .rekey(StoreKeyMethod::RawKey, key2.as_ref()) .await .expect("Error rekeying database"); SqliteStoreOptions::new(fname.as_str()) .expect("Error initializing sqlite store options") - .open_backend(Some(WrapKeyMethod::RawKey), key2.as_ref(), None) + .open_backend(Some(StoreKeyMethod::RawKey), key2.as_ref(), None) .await .expect("Error opening rekeyed store") .close() @@ -276,9 +276,9 @@ mod sqlite { async fn init_db() -> Store { env_logger::builder().is_test(true).try_init().unwrap_or(()); - let key = generate_raw_wrap_key(None).expect("Error creating raw key"); + let key = generate_raw_store_key(None).expect("Error creating raw key"); SqliteStoreOptions::in_memory() - .provision(WrapKeyMethod::RawKey, key, None, false) + .provision(StoreKeyMethod::RawKey, key, None, false) .await .expect("Error provisioning sqlite store") } @@ -287,12 +287,12 @@ mod sqlite { #[test] fn provision_from_str() { - let key = generate_raw_wrap_key(None).expect("Error creating raw key"); + let key = generate_raw_store_key(None).expect("Error creating raw key"); block_on(async { let db_url = "sqlite://:memory:"; let _db = db_url - .provision_backend(WrapKeyMethod::RawKey, key.as_ref(), None, false) + .provision_backend(StoreKeyMethod::RawKey, key.as_ref(), None, false) .await .expect("Error provisioning store"); }); @@ -300,7 +300,7 @@ mod sqlite { block_on(async { let db_url = "not-sqlite://test-db"; let _db = db_url - .provision_backend(WrapKeyMethod::RawKey, key.as_ref(), None, false) + .provision_backend(StoreKeyMethod::RawKey, key.as_ref(), None, false) .await .expect_err("Expected provision failure"); }); @@ -309,7 +309,7 @@ mod sqlite { #[cfg(feature = "pg_test")] mod postgres { - use aries_askar::postgres::test_db::TestDB; + use aries_askar::backend::postgres::test_db::TestDB; async fn init_db() -> TestDB { env_logger::builder().is_test(true).try_init().unwrap_or(()); diff --git a/tests/faber.agent372766/sqlite.db b/tests/faber.agent372766/sqlite.db deleted file mode 100644 index 76061fd4..00000000 Binary files a/tests/faber.agent372766/sqlite.db and /dev/null differ diff --git a/tests/faber.rs b/tests/faber.rs deleted file mode 100644 index 9162cac6..00000000 --- a/tests/faber.rs +++ /dev/null @@ -1,9 +0,0 @@ -use aries_askar::future::block_on; -use aries_askar::indy_compat::print_records; - -#[test] -fn faber_print_records() { - let db = "tests/faber.agent372766/sqlite.db"; - let key = "Faber.Agent372766"; - block_on(print_records(db, key)).unwrap(); -} diff --git a/tests/utils/mod.rs b/tests/utils/mod.rs index 8c19ba76..7eae053d 100644 --- a/tests/utils/mod.rs +++ b/tests/utils/mod.rs @@ -1,6 +1,4 @@ -use aries_askar::{ - verify_signature, Backend, Entry, EntryTag, ErrorKind, KeyAlg, Store, TagFilter, -}; +use aries_askar::{Backend, Entry, EntryTag, ErrorKind, Store, TagFilter}; const ERR_PROFILE: &'static str = "Error creating profile"; const ERR_SESSION: &'static str = "Error starting session"; @@ -15,12 +13,10 @@ const ERR_REPLACE: &'static str = "Error replacing test row"; const ERR_REMOVE_ALL: &'static str = "Error removing test rows"; const ERR_SCAN: &'static str = "Error starting scan"; const ERR_SCAN_NEXT: &'static str = "Error fetching scan rows"; -const ERR_CREATE_KEYPAIR: &'static str = "Error creating keypair"; -const ERR_FETCH_KEY: &'static str = "Error fetching key"; -const ERR_SIGN: &'static str = "Error signing message"; -const ERR_VERIFY: &'static str = "Error verifying signature"; -const 
ERR_PACK: &'static str = "Error packing message"; -const ERR_UNPACK: &'static str = "Error unpacking message"; +// const ERR_CREATE_KEYPAIR: &'static str = "Error creating keypair"; +// const ERR_FETCH_KEY: &'static str = "Error fetching key"; +// const ERR_SIGN: &'static str = "Error signing message"; +// const ERR_VERIFY: &'static str = "Error verifying signature"; pub async fn db_create_remove_profile(db: &Store) { let profile = db.create_profile(None).await.expect(ERR_PROFILE); @@ -49,10 +45,10 @@ pub async fn db_insert_fetch(db: &Store) { "category", "name", "value", - Some(vec![ + vec![ EntryTag::Encrypted("t1".to_string(), "v1".to_string()), EntryTag::Plaintext("t2".to_string(), "v2".to_string()), - ]), + ], ); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -61,7 +57,7 @@ pub async fn db_insert_fetch(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -83,7 +79,7 @@ pub async fn db_insert_fetch(db: &Store) { } pub async fn db_insert_duplicate(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -91,7 +87,7 @@ pub async fn db_insert_duplicate(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -102,7 +98,7 @@ pub async fn db_insert_duplicate(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -111,7 +107,7 @@ pub async fn db_insert_duplicate(db: &Store) { } pub async fn db_insert_remove(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -119,7 +115,7 @@ pub async fn db_insert_remove(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -138,7 +134,7 @@ pub async fn db_remove_missing(db: &Store) { } pub async fn db_replace_fetch(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -146,7 +142,7 @@ pub async fn db_replace_fetch(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -158,7 +154,7 @@ pub async fn db_replace_fetch(db: &Store) { &replace_row.category, &replace_row.name, &replace_row.value, - replace_row.tags.as_ref().map(|t| t.as_slice()), + Some(replace_row.tags.as_slice()), None, ) .await @@ -173,7 +169,7 @@ pub async fn db_replace_fetch(db: &Store) { } pub async fn db_replace_missing(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -182,7 +178,7 @@ pub async fn db_replace_missing(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -192,7 +188,7 @@ pub async fn 
db_replace_missing(db: &Store) { pub async fn db_count(db: &Store) { let category = "category".to_string(); - let test_rows = vec![Entry::new(&category, "name", "value", None)]; + let test_rows = vec![Entry::new(&category, "name", "value", Vec::new())]; let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -201,7 +197,7 @@ pub async fn db_count(db: &Store) { &upd.category, &upd.name, &upd.value, - upd.tags.as_ref().map(|t| t.as_slice()), + Some(upd.tags.as_slice()), None, ) .await @@ -222,10 +218,10 @@ pub async fn db_count_exist(db: &Store) { "category", "name", "value", - Some(vec![ + vec![ EntryTag::Encrypted("enc".to_string(), "v1".to_string()), EntryTag::Plaintext("plain".to_string(), "v2".to_string()), - ]), + ], ); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -234,7 +230,7 @@ pub async fn db_count_exist(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -362,10 +358,10 @@ pub async fn db_scan(db: &Store) { &category, "name", "value", - Some(vec![ + vec![ EntryTag::Encrypted("t1".to_string(), "v1".to_string()), EntryTag::Plaintext("t2".to_string(), "v2".to_string()), - ]), + ], )]; let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -375,7 +371,7 @@ pub async fn db_scan(db: &Store) { &upd.category, &upd.name, &upd.value, - upd.tags.as_ref().map(|t| t.as_slice()), + Some(upd.tags.as_slice()), None, ) .await @@ -410,28 +406,28 @@ pub async fn db_remove_all(db: &Store) { "category", "item1", "value", - Some(vec![ + vec![ EntryTag::Encrypted("t1".to_string(), "del".to_string()), EntryTag::Plaintext("t2".to_string(), "del".to_string()), - ]), + ], ), Entry::new( "category", "item2", "value", - Some(vec![ + vec![ EntryTag::Encrypted("t1".to_string(), "del".to_string()), EntryTag::Plaintext("t2".to_string(), "del".to_string()), - ]), + ], ), Entry::new( "category", "item3", "value", - Some(vec![ + vec![ EntryTag::Encrypted("t1".to_string(), "keep".to_string()), EntryTag::Plaintext("t2".to_string(), "keep".to_string()), - ]), + ], ), ]; @@ -442,7 +438,7 @@ pub async fn db_remove_all(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -464,108 +460,118 @@ pub async fn db_remove_all(db: &Store) { assert_eq!(removed, 2); } -pub async fn db_keypair_create_fetch(db: &Store) { - let mut conn = db.session(None).await.expect(ERR_SESSION); - - let metadata = "meta".to_owned(); - let key_info = conn - .create_keypair(KeyAlg::ED25519, Some(&metadata), None, None) - .await - .expect(ERR_CREATE_KEYPAIR); - assert_eq!(key_info.params.metadata, Some(metadata)); - - let found = conn - .fetch_key(key_info.category.clone(), &key_info.ident, false) - .await - .expect(ERR_FETCH_KEY); - assert_eq!(Some(key_info), found); -} - -pub async fn db_keypair_sign_verify(db: &Store) { - let mut conn = db.session(None).await.expect(ERR_SESSION); - - let key_info = conn - .create_keypair(KeyAlg::ED25519, None, None, None) - .await - .expect(ERR_CREATE_KEYPAIR); - - let message = b"message".to_vec(); - let sig = conn - .sign_message(&key_info.ident, &message) - .await - .expect(ERR_SIGN); - - assert_eq!( - verify_signature(&key_info.ident, &message, &sig).expect(ERR_VERIFY), - true - ); - - assert_eq!( - verify_signature(&key_info.ident, b"bad input", &sig).expect(ERR_VERIFY), - false - ); - - assert_eq!( - verify_signature(&key_info.ident, &message, b"bad 
sig").expect(ERR_VERIFY), - false - ); - - let err = verify_signature("not a key", &message, &sig).expect_err(ERR_REQ_ERR); - assert_eq!(err.kind(), ErrorKind::Input); -} - -pub async fn db_keypair_pack_unpack_anon(db: &Store) { - let mut conn = db.session(None).await.expect(ERR_SESSION); - - let recip_key = conn - .create_keypair(KeyAlg::ED25519, None, None, None) - .await - .expect(ERR_CREATE_KEYPAIR); - - let msg = b"message".to_vec(); - - let packed = conn - .pack_message(vec![recip_key.ident.as_str()], None, &msg) - .await - .expect(ERR_PACK); - - let (unpacked, p_recip, p_send) = conn.unpack_message(&packed).await.expect(ERR_UNPACK); - assert_eq!(unpacked, msg); - assert_eq!(p_recip, recip_key.encoded_verkey().unwrap()); - assert_eq!(p_send, None); -} - -pub async fn db_keypair_pack_unpack_auth(db: &Store) { - let mut conn = db.session(None).await.expect(ERR_SESSION); - - let sender_key = conn - .create_keypair(KeyAlg::ED25519, None, None, None) - .await - .expect(ERR_CREATE_KEYPAIR); - let recip_key = conn - .create_keypair(KeyAlg::ED25519, None, None, None) - .await - .expect(ERR_CREATE_KEYPAIR); - - let msg = b"message".to_vec(); - - let packed = conn - .pack_message( - vec![recip_key.ident.as_str()], - Some(&sender_key.ident), - &msg, - ) - .await - .expect(ERR_PACK); - - let (unpacked, p_recip, p_send) = conn.unpack_message(&packed).await.expect(ERR_UNPACK); - assert_eq!(unpacked, msg); - assert_eq!(p_recip, recip_key.encoded_verkey().unwrap()); - assert_eq!(p_send, Some(sender_key.encoded_verkey().unwrap())); -} +// pub async fn db_keypair_create_fetch(db: &Store) { +// let mut conn = db.session(None).await.expect(ERR_SESSION); + +// let metadata = "meta".to_owned(); +// let key_info = conn +// .create_keypair(KeyAlg::Ed25519, Some(&metadata), None, None) +// .await +// .expect(ERR_CREATE_KEYPAIR); +// assert_eq!(key_info.params.metadata, Some(metadata)); + +// let found = conn +// .fetch_key(key_info.category.clone(), &key_info.ident, false) +// .await +// .expect(ERR_FETCH_KEY); +// assert_eq!(Some(key_info), found); +// } + +// pub async fn db_keypair_sign_verify(db: &Store) { +// let mut conn = db.session(None).await.expect(ERR_SESSION); + +// let key_info = conn +// .create_keypair(KeyAlg::Ed25519, None, None, None) +// .await +// .expect(ERR_CREATE_KEYPAIR); + +// let message = b"message".to_vec(); +// let sig = conn +// .sign_message(&key_info.ident, &message) +// .await +// .expect(ERR_SIGN); + +// assert_eq!( +// verify_signature(&key_info.ident, &message, &sig).expect(ERR_VERIFY), +// true +// ); + +// assert_eq!( +// verify_signature(&key_info.ident, b"bad input", &sig).expect(ERR_VERIFY), +// false +// ); + +// assert_eq!( +// verify_signature( +// &key_info.ident, +// // [0u8; 64] +// b"xt19s1sp2UZCGhy9rNyb1FtxdKiDGZZPNFnc1KiM9jYYEuHxuwNeFf1oQKsn8zv6yvYBGhXa83288eF4MqN1oDq", +// &sig +// ).expect(ERR_VERIFY), +// false +// ); + +// assert_eq!( +// verify_signature(&key_info.ident, &message, b"bad sig").is_err(), +// true +// ); + +// let err = verify_signature("not a key", &message, &sig).expect_err(ERR_REQ_ERR); +// assert_eq!(err.kind(), ErrorKind::Input); +// } + +// pub async fn db_keypair_pack_unpack_anon(db: &Store) { +// let mut conn = db.session(None).await.expect(ERR_SESSION); + +// let recip_key = conn +// .create_keypair(KeyAlg::Ed25519, None, None, None) +// .await +// .expect(ERR_CREATE_KEYPAIR); + +// let msg = b"message".to_vec(); + +// let packed = conn +// .pack_message(vec![recip_key.ident.as_str()], None, &msg) +// .await +// .expect(ERR_PACK); + 
+// let (unpacked, p_recip, p_send) = conn.unpack_message(&packed).await.expect(ERR_UNPACK); +// assert_eq!(unpacked, msg); +// assert_eq!(p_recip.to_string(), recip_key.ident); +// assert_eq!(p_send, None); +// } + +// pub async fn db_keypair_pack_unpack_auth(db: &Store) { +// let mut conn = db.session(None).await.expect(ERR_SESSION); + +// let sender_key = conn +// .create_keypair(KeyAlg::Ed25519, None, None, None) +// .await +// .expect(ERR_CREATE_KEYPAIR); +// let recip_key = conn +// .create_keypair(KeyAlg::Ed25519, None, None, None) +// .await +// .expect(ERR_CREATE_KEYPAIR); + +// let msg = b"message".to_vec(); + +// let packed = conn +// .pack_message( +// vec![recip_key.ident.as_str()], +// Some(&sender_key.ident), +// &msg, +// ) +// .await +// .expect(ERR_PACK); + +// let (unpacked, p_recip, p_send) = conn.unpack_message(&packed).await.expect(ERR_UNPACK); +// assert_eq!(unpacked, msg); +// assert_eq!(p_recip.to_string(), recip_key.ident); +// assert_eq!(p_send.map(|k| k.to_string()), Some(sender_key.ident)); +// } pub async fn db_txn_rollback(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.transaction(None).await.expect(ERR_TRANSACTION); @@ -573,7 +579,7 @@ pub async fn db_txn_rollback(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -593,7 +599,7 @@ pub async fn db_txn_rollback(db: &Store) { } pub async fn db_txn_drop(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db .transaction(None) @@ -604,7 +610,7 @@ pub async fn db_txn_drop(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -623,7 +629,7 @@ pub async fn db_txn_drop(db: &Store) { // test that session does NOT have transaction rollback behaviour pub async fn db_session_drop(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.session(None).await.expect(ERR_SESSION); @@ -631,7 +637,7 @@ pub async fn db_session_drop(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -649,7 +655,7 @@ pub async fn db_session_drop(db: &Store) { } pub async fn db_txn_commit(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.transaction(None).await.expect(ERR_TRANSACTION); @@ -657,7 +663,7 @@ pub async fn db_txn_commit(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await @@ -675,7 +681,7 @@ pub async fn db_txn_commit(db: &Store) { } pub async fn db_txn_fetch_for_update(db: &Store) { - let test_row = Entry::new("category", "name", "value", None); + let test_row = Entry::new("category", "name", "value", Vec::new()); let mut conn = db.transaction(None).await.expect(ERR_TRANSACTION); @@ -683,7 +689,7 @@ pub async fn db_txn_fetch_for_update(db: &Store) { &test_row.category, &test_row.name, &test_row.value, - test_row.tags.as_ref().map(|t| 
t.as_slice()), + Some(test_row.tags.as_slice()), None, ) .await diff --git a/wrappers/python/aries_askar/__init__.py b/wrappers/python/aries_askar/__init__.py index 111b15f1..9092252e 100644 --- a/wrappers/python/aries_askar/__init__.py +++ b/wrappers/python/aries_askar/__init__.py @@ -1,19 +1,35 @@ """aries-askar Python wrapper library""" -from .bindings import derive_verkey, generate_raw_key, verify_signature, version -from .error import StoreError, StoreErrorCode +from .bindings import generate_raw_key, version +from .error import AskarError, AskarErrorCode +from .key import ( + Key, + crypto_box, + crypto_box_open, + crypto_box_random_nonce, + crypto_box_seal, + crypto_box_seal_open, + derive_key_ecdh_1pu, + derive_key_ecdh_es, +) from .store import Session, Store from .types import Entry, KeyAlg __all__ = ( - "derive_verkey", + "crypto_box", + "crypto_box_open", + "crypto_box_random_nonce", + "crypto_box_seal", + "crypto_box_seal_open", + "derive_key_ecdh_1pu", + "derive_key_ecdh_es", "generate_raw_key", - "verify_signature", "version", + "AskarError", + "AskarErrorCode", "Entry", + "Key", "KeyAlg", "Session", "Store", - "StoreError", - "StoreErrorCode", ) diff --git a/wrappers/python/aries_askar/bindings.py b/wrappers/python/aries_askar/bindings.py index 6af4f018..db1e22c5 100644 --- a/wrappers/python/aries_askar/bindings.py +++ b/wrappers/python/aries_askar/bindings.py @@ -16,14 +16,15 @@ c_int8, c_int32, c_int64, + c_size_t, c_void_p, c_ubyte, ) from ctypes.util import find_library -from typing import Optional, Sequence, Tuple, Union +from typing import Optional, Union -from .error import StoreError, StoreErrorCode -from .types import Entry, EntryOperation, KeyAlg +from .error import AskarError, AskarErrorCode +from .types import EntryOperation, KeyAlg CALLBACKS = {} @@ -38,7 +39,7 @@ MODULE_NAME = __name__.split(".")[0] -class StoreHandle(c_int64): +class StoreHandle(c_size_t): """Index of an active Store instance.""" def __repr__(self) -> str: @@ -57,7 +58,7 @@ def __del__(self): do_call("askar_store_close", self, c_void_p()) -class SessionHandle(c_int64): +class SessionHandle(c_size_t): """Index of an active Session/Transaction instance.""" def __repr__(self) -> str: @@ -80,7 +81,7 @@ def __del__(self): do_call("askar_session_close", self, c_int8(0), c_void_p()) -class ScanHandle(c_int64): +class ScanHandle(c_size_t): """Index of an active Store scan instance.""" def __repr__(self) -> str: @@ -93,38 +94,137 @@ def __del__(self): get_library().askar_scan_free(self) -class EntrySetHandle(c_int64): - """Index of an active EntrySet instance.""" +class EntryListHandle(c_size_t): + """Pointer to an active EntryList instance.""" + + def get_category(self, index: int) -> str: + """Get the entry category.""" + cat = StrBuffer() + do_call( + "askar_entry_list_get_category", + self, + c_int32(index), + byref(cat), + ) + return str(cat) + + def get_name(self, index: int) -> str: + """Get the entry name.""" + name = StrBuffer() + do_call( + "askar_entry_list_get_name", + self, + c_int32(index), + byref(name), + ) + return str(name) + + def get_value(self, index: int) -> memoryview: + """Get the entry value.""" + val = ByteBuffer() + do_call("askar_entry_list_get_value", self, c_int32(index), byref(val)) + return memoryview(val.raw) + + def get_tags(self, index: int) -> dict: + """Get the entry tags.""" + tags = StrBuffer() + do_call( + "askar_entry_list_get_tags", + self, + c_int32(index), + byref(tags), + ) + return json.loads(tags.value) if tags else None def __repr__(self) -> str: - 
"""Format entry set handle as a string.""" + """Format entry list handle as a string.""" return f"{self.__class__.__name__}({self.value})" def __del__(self): """Free the entry set when there are no more references.""" if self: - get_library().askar_entry_set_free(self) + get_library().askar_entry_list_free(self) -class FfiEntry(Structure): - _fields_ = [ - ("category", c_char_p), - ("name", c_char_p), - ("value_len", c_int64), - ("value", c_void_p), - ("tags", c_char_p), - ] +class KeyEntryListHandle(c_size_t): + """Pointer to an active KeyEntryList instance.""" - def decode(self, handle: EntrySetHandle) -> Entry: - value = (c_ubyte * self.value_len).from_address(self.value) - setattr(value, "_ref_", handle) # ensure buffer is not dropped - tags = json.loads(decode_str(self.tags)) if self.tags is not None else None - return Entry( - decode_str(self.category), - decode_str(self.name), - memoryview(value), - tags, + def get_algorithm(self, index: int) -> str: + """Get the key algorithm.""" + name = StrBuffer() + do_call( + "askar_key_entry_list_get_algorithm", + self, + c_int32(index), + byref(name), + ) + return str(name) + + def get_name(self, index: int) -> str: + """Get the key name.""" + name = StrBuffer() + do_call( + "askar_key_entry_list_get_name", + self, + c_int32(index), + byref(name), ) + return str(name) + + def get_metadata(self, index: int) -> str: + """Get for the key metadata.""" + metadata = StrBuffer() + do_call( + "askar_key_entry_list_get_metadata", + self, + c_int32(index), + byref(metadata), + ) + return str(metadata) + + def get_tags(self, index: int) -> dict: + """Get the key tags.""" + tags = StrBuffer() + do_call( + "askar_key_entry_list_get_tags", + self, + c_int32(index), + byref(tags), + ) + return json.loads(tags.value) if tags else None + + def load_key(self, index: int) -> "LocalKeyHandle": + """Load the key instance.""" + handle = LocalKeyHandle() + do_call( + "askar_key_entry_list_load_local", + self, + c_int32(index), + byref(handle), + ) + return handle + + def __repr__(self) -> str: + """Format key entry list handle as a string.""" + return f"{self.__class__.__name__}({self.value})" + + def __del__(self): + """Free the key entry set when there are no more references.""" + if self: + get_library().askar_key_entry_list_free(self) + + +class LocalKeyHandle(c_size_t): + """Pointer to an active LocalKey instance.""" + + def __repr__(self) -> str: + """Format key handle as a string.""" + return f"{self.__class__.__name__}({self.value})" + + def __del__(self): + """Free the key when there are no more references.""" + if self: + get_library().askar_key_free(self) class FfiByteBuffer(Structure): @@ -194,13 +294,21 @@ def __del__(self): get_library().askar_string_free(self) -class lib_unpack_result(Structure): +class AeadParams(Structure): + """A byte buffer allocated by the library.""" + _fields_ = [ - ("unpacked", ByteBuffer), - ("recipient", StrBuffer), - ("sender", StrBuffer), + ("nonce_length", c_int32), + ("tag_length", c_int32), ] + def __repr__(self) -> str: + """Format AEAD params as a string.""" + return ( + f"" + ) + def get_library() -> CDLL: """Return the CDLL instance, loading it if necessary.""" @@ -233,14 +341,14 @@ def _load_library(lib_name: str) -> CDLL: lib_path = find_library(lib_name) if not lib_path: - raise StoreError( - StoreErrorCode.WRAPPER, f"Library not found in path: {lib_path}" + raise AskarError( + AskarErrorCode.WRAPPER, f"Library not found in path: {lib_path}" ) try: return CDLL(lib_path) except OSError as e: - raise StoreError( - 
StoreErrorCode.WRAPPER, f"Error loading library: {lib_path}" + raise AskarError( + AskarErrorCode.WRAPPER, f"Error loading library: {lib_path}" ) from e @@ -371,10 +479,6 @@ def do_call_async( return fut -def decode_str(value: c_char_p) -> str: - return value.decode("utf-8") - - def encode_str(arg: Optional[Union[str, bytes]]) -> c_char_p: """ Encode an optional input argument as a string. @@ -388,7 +492,11 @@ def encode_str(arg: Optional[Union[str, bytes]]) -> c_char_p: return c_char_p(arg) -def encode_bytes(arg: Optional[Union[str, bytes]]) -> FfiByteBuffer: +def encode_bytes( + arg: Optional[Union[str, bytes, ByteBuffer, FfiByteBuffer]] +) -> Union[FfiByteBuffer, ByteBuffer]: + if isinstance(arg, ByteBuffer) or isinstance(arg, FfiByteBuffer): + return arg buf = FfiByteBuffer() if isinstance(arg, memoryview): buf.len = arg.nbytes @@ -407,7 +515,7 @@ def encode_bytes(arg: Optional[Union[str, bytes]]) -> FfiByteBuffer: return buf -def get_current_error(expect: bool = False) -> Optional[StoreError]: +def get_current_error(expect: bool = False) -> Optional[AskarError]: """ Get the error result from the previous failed API method. @@ -422,51 +530,19 @@ def get_current_error(expect: bool = False) -> Optional[StoreError]: LOGGER.warning("JSON decode error for askar_get_current_error") msg = None if msg and "message" in msg and "code" in msg: - return StoreError( - StoreErrorCode(msg["code"]), msg["message"], msg.get("extra") + return AskarError( + AskarErrorCode(msg["code"]), msg["message"], msg.get("extra") ) if not expect: return None - return StoreError(StoreErrorCode.WRAPPER, "Unknown error") - - -async def derive_verkey(key_alg: KeyAlg, seed: Union[str, bytes]) -> str: - """Derive a verification key from a seed.""" - return str( - await do_call_async( - "askar_derive_verkey", - encode_str(key_alg.value), - encode_bytes(seed), - return_type=StrBuffer, - ) - ) + return AskarError(AskarErrorCode.WRAPPER, "Unknown error") -async def generate_raw_key(seed: Union[str, bytes] = None) -> str: +def generate_raw_key(seed: Union[str, bytes] = None) -> str: """Generate a new raw store wrapping key.""" - return str( - await do_call_async( - "askar_generate_raw_key", encode_bytes(seed), return_type=StrBuffer - ) - ) - - -async def verify_signature( - signer_vk: str, - message: Union[str, bytes], - signature: Union[str, bytes], -) -> bool: - """Verify a message signature.""" - return ( - await do_call_async( - "askar_verify_signature", - encode_str(signer_vk), - encode_bytes(message), - encode_bytes(signature), - return_type=c_int8, - ) - != 0 - ) + key = StrBuffer() + do_call("askar_store_generate_raw_key", encode_bytes(seed), byref(key)) + return str(key) def version() -> str: @@ -477,13 +553,13 @@ def version() -> str: async def store_open( - uri: str, wrap_method: str = None, pass_key: str = None, profile: str = None + uri: str, key_method: str = None, pass_key: str = None, profile: str = None ) -> StoreHandle: """Open an existing Store and return the open handle.""" return await do_call_async( "askar_store_open", encode_str(uri), - encode_str(wrap_method and wrap_method.lower()), + encode_str(key_method and key_method.lower()), encode_str(pass_key), encode_str(profile), return_type=StoreHandle, @@ -492,7 +568,7 @@ async def store_open( async def store_provision( uri: str, - wrap_method: str = None, + key_method: str = None, pass_key: str = None, profile: str = None, recreate: bool = False, @@ -501,7 +577,7 @@ async def store_provision( return await do_call_async( "askar_store_provision", 
encode_str(uri), - encode_str(wrap_method and wrap_method.lower()), + encode_str(key_method and key_method.lower()), encode_str(pass_key), encode_str(profile), c_int8(recreate), @@ -547,14 +623,14 @@ async def store_remove_profile(handle: StoreHandle, name: str) -> bool: async def store_rekey( handle: StoreHandle, - wrap_method: str = None, + key_method: str = None, pass_key: str = None, ) -> StoreHandle: - """Replace the wrap key on a Store.""" + """Replace the store key on a Store.""" return await do_call_async( "askar_store_rekey", handle, - encode_str(wrap_method and wrap_method.lower()), + encode_str(key_method and key_method.lower()), encode_str(pass_key), ) @@ -601,7 +677,7 @@ async def session_count( async def session_fetch( handle: SessionHandle, category: str, name: str, for_update: bool = False -) -> EntrySetHandle: +) -> EntryListHandle: """Fetch a row from the Store.""" category = encode_str(category) name = encode_str(name) @@ -611,7 +687,7 @@ async def session_fetch( category, name, c_int8(for_update), - return_type=EntrySetHandle, + return_type=EntryListHandle, ) @@ -621,20 +697,18 @@ async def session_fetch_all( tag_filter: Union[str, dict] = None, limit: int = None, for_update: bool = False, -) -> EntrySetHandle: +) -> EntryListHandle: """Fetch all matching rows in the Store.""" - category = encode_str(category) if isinstance(tag_filter, dict): tag_filter = json.dumps(tag_filter) - tag_filter = encode_str(tag_filter) return await do_call_async( "askar_session_fetch_all", handle, - category, - tag_filter, + encode_str(category), + encode_str(tag_filter), c_int64(limit if limit is not None else -1), c_int8(for_update), - return_type=EntrySetHandle, + return_type=EntryListHandle, ) @@ -644,16 +718,14 @@ async def session_remove_all( tag_filter: Union[str, dict] = None, ) -> int: """Remove all matching rows in the Store.""" - category = encode_str(category) if isinstance(tag_filter, dict): tag_filter = json.dumps(tag_filter) - tag_filter = encode_str(tag_filter) return int( await do_call_async( "askar_session_remove_all", handle, - category, - tag_filter, + encode_str(category), + encode_str(tag_filter), return_type=c_int64, ) ) @@ -682,94 +754,88 @@ async def session_update( ) -async def session_create_keypair( +async def session_insert_key( handle: SessionHandle, - alg: str, + key_handle: LocalKeyHandle, + name: str, metadata: str = None, tags: dict = None, - seed: Union[str, bytes] = None, -) -> str: - return str( - await do_call_async( - "askar_session_create_keypair", - handle, - encode_str(alg), - encode_str(metadata), - encode_str(None if tags is None else json.dumps(tags)), - encode_bytes(seed), - return_type=StrBuffer, - ) + expiry_ms: Optional[int] = None, +): + await do_call_async( + "askar_session_insert_key", + handle, + key_handle, + encode_str(name), + encode_str(metadata), + encode_str(None if tags is None else json.dumps(tags)), + c_int64(-1 if expiry_ms is None else expiry_ms), + return_type=c_void_p, ) -async def session_fetch_keypair( - handle: SessionHandle, ident: str, for_update: bool = False -) -> Optional[EntrySetHandle]: +async def session_fetch_key( + handle: SessionHandle, name: str, for_update: bool = False +) -> Optional[KeyEntryListHandle]: ptr = await do_call_async( - "askar_session_fetch_keypair", + "askar_session_fetch_key", handle, - encode_str(ident), + encode_str(name), c_int8(for_update), return_type=c_void_p, ) if ptr: - return EntrySetHandle(ptr) - - -async def session_update_keypair( - handle: SessionHandle, ident: str, metadata: str = 
None, tags: dict = None -): - await do_call_async( - "askar_session_update_keypair", - handle, - encode_str(ident), - encode_str(metadata), - encode_str(None if tags is None else json.dumps(tags)), - ) + return KeyEntryListHandle(ptr) -async def session_sign_message( +async def session_fetch_all_keys( handle: SessionHandle, - key_ident: str, - message: Union[str, bytes], -) -> ByteBuffer: + alg: Union[str, KeyAlg] = None, + thumbprint: str = None, + tag_filter: Union[str, dict] = None, + limit: int = None, + for_update: bool = False, +) -> EntryListHandle: + """Fetch all matching keys in the Store.""" + if isinstance(alg, KeyAlg): + alg = alg.value + if isinstance(tag_filter, dict): + tag_filter = json.dumps(tag_filter) return await do_call_async( - "askar_session_sign_message", + "askar_session_fetch_all_keys", handle, - encode_str(key_ident), - encode_bytes(message), - return_type=ByteBuffer, + encode_str(alg), + encode_str(thumbprint), + encode_str(tag_filter), + c_int64(limit if limit is not None else -1), + c_int8(for_update), + return_type=KeyEntryListHandle, ) -async def session_pack_message( +async def session_update_key( handle: SessionHandle, - recipient_vks: Sequence[str], - from_key_ident: Optional[str], - message: Union[str, bytes], -) -> ByteBuffer: - recipient_vks = encode_str(",".join(recipient_vks)) - from_key_ident = encode_str(from_key_ident) - message = encode_bytes(message) - return await do_call_async( - "askar_session_pack_message", + name: str, + metadata: str = None, + tags: dict = None, + expiry_ms: Optional[int] = None, +): + await do_call_async( + "askar_session_update_key", handle, - recipient_vks, - from_key_ident, - message, - return_type=ByteBuffer, + encode_str(name), + encode_str(metadata), + encode_str(None if tags is None else json.dumps(tags)), + c_int64(-1 if expiry_ms is None else expiry_ms), ) -async def session_unpack_message( - handle: SessionHandle, - message: Union[str, bytes], -) -> Tuple[ByteBuffer, str, Optional[str]]: - message = encode_bytes(message) - result = await do_call_async( - "askar_session_unpack_message", handle, message, return_type=lib_unpack_result +async def session_remove_key(handle: SessionHandle, name: str): + await do_call_async( + "askar_session_remove_key", + handle, + encode_str(name), ) - return (result.unpacked, str(result.recipient), result.sender.opt_str()) async def scan_start( @@ -796,15 +862,357 @@ async def scan_start( ) -async def scan_next(handle: StoreHandle) -> Optional[EntrySetHandle]: - handle = await do_call_async("askar_scan_next", handle, return_type=EntrySetHandle) +async def scan_next(handle: StoreHandle) -> Optional[EntryListHandle]: + handle = await do_call_async("askar_scan_next", handle, return_type=EntryListHandle) return handle or None -def entry_set_next(handle: EntrySetHandle) -> Optional[Entry]: - ffi_entry = FfiEntry() - found = c_int8(0) - do_call("askar_entry_set_next", handle, byref(ffi_entry), byref(found)) - if found: - return ffi_entry.decode(handle) - return None +def entry_list_count(handle: EntryListHandle) -> int: + len = c_int32() + do_call("askar_entry_list_count", handle, byref(len)) + return len.value + + +def key_entry_list_count(handle: EntryListHandle) -> int: + len = c_int32() + do_call("askar_key_entry_list_count", handle, byref(len)) + return len.value + + +def key_generate(alg: Union[str, KeyAlg], ephemeral: bool = False) -> LocalKeyHandle: + handle = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call("askar_key_generate", encode_str(alg), 
c_int8(ephemeral), byref(handle)) + return handle + + +def key_from_seed( + alg: Union[str, KeyAlg], seed: Union[str, bytes, ByteBuffer], method: str = None +) -> LocalKeyHandle: + handle = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_from_seed", + encode_str(alg), + encode_bytes(seed), + encode_str(method), + byref(handle), + ) + return handle + + +def key_from_public_bytes( + alg: Union[str, KeyAlg], public: Union[bytes, ByteBuffer] +) -> LocalKeyHandle: + handle = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_from_public_bytes", + encode_str(alg), + encode_bytes(public), + byref(handle), + ) + return handle + + +def key_get_public_bytes(handle: LocalKeyHandle) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_get_public_bytes", + handle, + byref(buf), + ) + return buf + + +def key_from_secret_bytes( + alg: Union[str, KeyAlg], secret: Union[bytes, ByteBuffer] +) -> LocalKeyHandle: + handle = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_from_secret_bytes", + encode_str(alg), + encode_bytes(secret), + byref(handle), + ) + return handle + + +def key_get_secret_bytes(handle: LocalKeyHandle) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_get_secret_bytes", + handle, + byref(buf), + ) + return buf + + +def key_from_jwk(jwk: str) -> LocalKeyHandle: + handle = LocalKeyHandle() + do_call("askar_key_from_jwk", encode_str(jwk), byref(handle)) + return handle + + +def key_convert(handle: LocalKeyHandle, alg: Union[str, KeyAlg]) -> LocalKeyHandle: + key = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call("askar_key_convert", handle, encode_str(alg), byref(key)) + return key + + +def key_exchange( + alg: Union[str, KeyAlg], sk_handle: LocalKeyHandle, pk_handle: LocalKeyHandle +) -> LocalKeyHandle: + key = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_from_key_exchange", encode_str(alg), sk_handle, pk_handle, byref(key) + ) + return key + + +def key_get_algorithm(handle: LocalKeyHandle) -> str: + alg = StrBuffer() + do_call("askar_key_get_algorithm", handle, byref(alg)) + return str(alg) + + +def key_get_ephemeral(handle: LocalKeyHandle) -> bool: + eph = c_int8() + do_call("askar_key_get_ephemeral", handle, byref(eph)) + return eph.value != 0 + + +def key_get_jwk_public(handle: LocalKeyHandle, alg: Union[str, KeyAlg] = None) -> str: + jwk = StrBuffer() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call("askar_key_get_jwk_public", handle, encode_str(alg), byref(jwk)) + return str(jwk) + + +def key_get_jwk_secret(handle: LocalKeyHandle) -> ByteBuffer: + sec = ByteBuffer() + do_call("askar_key_get_jwk_public", handle, byref(sec)) + return sec + + +def key_get_jwk_thumbprint( + handle: LocalKeyHandle, alg: Union[str, KeyAlg] = None +) -> str: + thumb = StrBuffer() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call("askar_key_get_jwk_thumbprint", handle, encode_str(alg), byref(thumb)) + return str(thumb) + + +def key_aead_get_params(handle: LocalKeyHandle) -> AeadParams: + params = AeadParams() + do_call("askar_key_aead_get_params", handle, byref(params)) + return params + + +def key_aead_random_nonce(handle: LocalKeyHandle) -> ByteBuffer: + nonce = ByteBuffer() + do_call("askar_key_aead_random_nonce", handle, byref(nonce)) + return nonce + + +def key_aead_encrypt( + handle: LocalKeyHandle, + input: Union[bytes, str, ByteBuffer], + nonce: Union[bytes, ByteBuffer], + aad: 
Union[bytes, ByteBuffer] = None, +) -> ByteBuffer: + enc = ByteBuffer() + do_call( + "askar_key_aead_encrypt", + handle, + encode_bytes(input), + encode_bytes(nonce), + encode_bytes(aad), + byref(enc), + ) + return enc + + +def key_aead_decrypt( + handle: LocalKeyHandle, + input: Union[bytes, ByteBuffer], + nonce: Union[bytes, ByteBuffer], + aad: Union[bytes, ByteBuffer] = None, +) -> ByteBuffer: + dec = ByteBuffer() + do_call( + "askar_key_aead_decrypt", + handle, + encode_bytes(input), + encode_bytes(nonce), + encode_bytes(aad), + byref(dec), + ) + return dec + + +def key_sign_message( + handle: LocalKeyHandle, + message: Union[bytes, str, ByteBuffer], + sig_type: str = None, +) -> ByteBuffer: + sig = ByteBuffer() + do_call( + "askar_key_sign_message", + handle, + encode_bytes(message), + encode_str(sig_type), + byref(sig), + ) + return sig + + +def key_verify_signature( + handle: LocalKeyHandle, + message: Union[bytes, str, ByteBuffer], + signature: Union[bytes, ByteBuffer], + sig_type: str = None, +) -> bool: + verify = c_int8() + do_call( + "askar_key_verify_signature", + handle, + encode_bytes(message), + encode_bytes(signature), + encode_str(sig_type), + byref(verify), + ) + return verify.value != 0 + + +def key_crypto_box_random_nonce() -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_crypto_box_random_nonce", + byref(buf), + ) + return buf + + +def key_crypto_box( + recip_handle: LocalKeyHandle, + sender_handle: LocalKeyHandle, + message: Union[bytes, str, ByteBuffer], + nonce: Union[bytes, ByteBuffer], +) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_crypto_box", + recip_handle, + sender_handle, + encode_bytes(message), + encode_bytes(nonce), + byref(buf), + ) + return buf + + +def key_crypto_box_open( + recip_handle: LocalKeyHandle, + sender_handle: LocalKeyHandle, + message: Union[bytes, str, ByteBuffer], + nonce: Union[bytes, ByteBuffer], +) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_crypto_box_open", + recip_handle, + sender_handle, + encode_bytes(message), + encode_bytes(nonce), + byref(buf), + ) + return buf + + +def key_crypto_box_seal( + handle: LocalKeyHandle, + message: Union[bytes, str, ByteBuffer], +) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_crypto_box_seal", + handle, + encode_bytes(message), + byref(buf), + ) + return buf + + +def key_crypto_box_seal_open( + handle: LocalKeyHandle, + ciphertext: Union[bytes, ByteBuffer], +) -> ByteBuffer: + buf = ByteBuffer() + do_call( + "askar_key_crypto_box_seal_open", + handle, + encode_bytes(ciphertext), + byref(buf), + ) + return buf + + +def key_derive_ecdh_es( + alg: Union[str, KeyAlg], + ephem_key: LocalKeyHandle, + recip_key: LocalKeyHandle, + apu: Union[bytes, str, ByteBuffer], + apv: Union[bytes, str, ByteBuffer], +) -> LocalKeyHandle: + key = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_derive_ecdh_es", + encode_str(alg), + ephem_key, + recip_key, + encode_bytes(apu), + encode_bytes(apv), + byref(key), + ) + return key + + +def key_derive_ecdh_1pu( + alg: Union[str, KeyAlg], + ephem_key: LocalKeyHandle, + sender_key: LocalKeyHandle, + recip_key: LocalKeyHandle, + apu: Union[bytes, str, ByteBuffer], + apv: Union[bytes, str, ByteBuffer], + cc_tag: Optional[Union[bytes, ByteBuffer]], +) -> LocalKeyHandle: + key = LocalKeyHandle() + if isinstance(alg, KeyAlg): + alg = alg.value + do_call( + "askar_key_derive_ecdh_1pu", + encode_str(alg), + ephem_key, + sender_key, + recip_key, + encode_bytes(apu), + encode_bytes(apv), + 
encode_bytes(cc_tag), + byref(key), + ) + return key diff --git a/wrappers/python/aries_askar/error.py b/wrappers/python/aries_askar/error.py index 318d6c09..dd1b4994 100644 --- a/wrappers/python/aries_askar/error.py +++ b/wrappers/python/aries_askar/error.py @@ -3,7 +3,7 @@ from enum import IntEnum -class StoreErrorCode(IntEnum): +class AskarErrorCode(IntEnum): SUCCESS = 0 BACKEND = 1 BUSY = 2 @@ -16,8 +16,8 @@ class StoreErrorCode(IntEnum): WRAPPER = 99 -class StoreError(Exception): - def __init__(self, code: StoreErrorCode, message: str, extra: str = None): +class AskarError(Exception): + def __init__(self, code: AskarErrorCode, message: str, extra: str = None): super().__init__(message) self.code = code self.extra = extra diff --git a/wrappers/python/aries_askar/key.py b/wrappers/python/aries_askar/key.py new file mode 100644 index 00000000..59a01a31 --- /dev/null +++ b/wrappers/python/aries_askar/key.py @@ -0,0 +1,172 @@ +"""Handling of Key instances.""" + +from typing import Union + +from . import bindings + +from .types import KeyAlg + + +class Key: + """An active key or keypair instance.""" + + def __init__(self, handle: bindings.LocalKeyHandle): + """Initialize the Key instance.""" + self._handle = handle + + @classmethod + def generate(cls, alg: Union[str, KeyAlg], *, ephemeral: bool = False) -> "Key": + return cls(bindings.key_generate(alg, ephemeral)) + + @classmethod + def from_seed( + cls, alg: Union[str, KeyAlg], seed: Union[str, bytes], *, method: str = None + ) -> "Key": + return cls(bindings.key_from_seed(alg, seed, method)) + + @classmethod + def from_secret_bytes(cls, alg: Union[str, KeyAlg], secret: bytes) -> "Key": + return cls(bindings.key_from_secret_bytes(alg, secret)) + + @classmethod + def from_public_bytes(cls, alg: Union[str, KeyAlg], public: bytes) -> "Key": + return cls(bindings.key_from_public_bytes(alg, public)) + + @property + def handle(self) -> bindings.LocalKeyHandle: + """Accessor for the key handle.""" + return self._handle + + @property + def algorithm(self) -> KeyAlg: + alg = bindings.key_get_algorithm(self._handle) + return KeyAlg.from_key_alg(alg) + + @property + def ephemeral(self) -> "Key": + return bindings.key_get_ephemeral(self._handle) + + def convert_key(self, alg: Union[str, KeyAlg]) -> "Key": + return self.__class__(bindings.key_convert(self._handle, alg)) + + def key_exchange(self, alg: Union[str, KeyAlg], pk: "Key") -> "Key": + return self.__class__(bindings.key_exchange(alg, self._handle, pk._handle)) + + def get_public_bytes(self) -> bytes: + return bytes(bindings.key_get_public_bytes(self._handle)) + + def get_secret_bytes(self) -> bytes: + return bytes(bindings.key_get_secret_bytes(self._handle)) + + def get_jwk_public(self, alg: Union[str, KeyAlg] = None) -> str: + return bindings.key_get_jwk_public(self._handle, alg) + + def get_jwk_secret(self) -> str: + return bindings.key_get_jwk_secret(self._handle) + + def get_jwk_thumbprint(self, alg: Union[str, KeyAlg] = None) -> str: + return bindings.key_get_jwk_thumbprint(self._handle, alg) + + def aead_params(self) -> bindings.AeadParams: + return bindings.key_aead_get_params(self._handle) + + def aead_random_nonce(self) -> bytes: + return bytes(bindings.key_aead_random_nonce(self._handle)) + + def aead_encrypt( + self, message: Union[str, bytes], nonce: bytes, aad: bytes = None + ) -> bytes: + return bytes(bindings.key_aead_encrypt(self._handle, message, nonce, aad)) + + def aead_decrypt(self, message: bytes, nonce: bytes, aad: bytes = None) -> bytes: + return 
bytes(bindings.key_aead_decrypt(self._handle, message, nonce, aad)) + + def sign_message(self, message: Union[str, bytes], sig_type: str = None) -> bytes: + return bytes(bindings.key_sign_message(self._handle, message, sig_type)) + + def verify_signature( + self, message: Union[str, bytes], signature: bytes, sig_type: str = None + ) -> bool: + return bindings.key_verify_signature(self._handle, message, signature, sig_type) + + def __repr__(self) -> str: + return ( + f"" + ) + + +def crypto_box_random_nonce() -> bytes: + return bytes(bindings.key_crypto_box_random_nonce()) + + +def crypto_box( + recip_key: Key, + sender_key: Key, + message: Union[bytes, str], + nonce: bytes, +) -> bytes: + return bytes( + bindings.key_crypto_box(recip_key._handle, sender_key._handle, message, nonce) + ) + + +def crypto_box_open( + recip_key: Key, + sender_key: Key, + message: Union[bytes, str], + nonce: bytes, +) -> bytes: + return bytes( + bindings.key_crypto_box_open( + recip_key._handle, sender_key._handle, message, nonce + ) + ) + + +def crypto_box_seal( + recip_key: Key, + message: Union[bytes, str], +) -> bytes: + return bytes(bindings.key_crypto_box_seal(recip_key._handle, message)) + + +def crypto_box_seal_open( + recip_key: Key, + ciphertext: bytes, +) -> bytes: + return bytes(bindings.key_crypto_box_seal_open(recip_key._handle, ciphertext)) + + +def derive_key_ecdh_1pu( + alg: str, + ephem_key: Key, + sender_key: Key, + recip_key: Key, + apu: Union[bytes, str], + apv: Union[bytes, str], + cc_tag: bytes = None, +) -> Key: + return Key( + bindings.key_derive_ecdh_1pu( + alg, + ephem_key._handle, + sender_key._handle, + recip_key._handle, + apu, + apv, + cc_tag, + ) + ) + + +def derive_key_ecdh_es( + alg: str, + ephem_key: Key, + recip_key: Key, + apu: Union[bytes, str], + apv: Union[bytes, str], +) -> Key: + return Key( + bindings.key_derive_ecdh_es(alg, ephem_key._handle, recip_key._handle, apu, apv) + ) diff --git a/wrappers/python/aries_askar/store.py b/wrappers/python/aries_askar/store.py index acb0e90c..a2dbf382 100644 --- a/wrappers/python/aries_askar/store.py +++ b/wrappers/python/aries_askar/store.py @@ -2,38 +2,194 @@ import json -from typing import Optional, Sequence, Union +from typing import Optional, Union + +from cached_property import cached_property from . 
import bindings -from .error import StoreError, StoreErrorCode -from .types import Entry, EntryOperation, KeyAlg, KeyEntry +from .bindings import ( + EntryListHandle, + KeyEntryListHandle, + ScanHandle, + SessionHandle, + StoreHandle, +) +from .error import AskarError, AskarErrorCode +from .key import Key +from .types import EntryOperation, KeyAlg + + +class Entry: + """A single result from a store query.""" + def __init__(self, lst: EntryListHandle, pos: int): + """Initialize the EntryHandle.""" + self._list = lst + self._pos = pos -class EntrySet: - """A set of query results.""" + @cached_property + def category(self) -> str: + """Accessor for the entry category.""" + return self._list.get_category(self._pos) + + @cached_property + def name(self) -> str: + """Accessor for the entry name.""" + return self._list.get_name(self._pos) + + @property + def value(self) -> bytes: + """Accessor for the entry value.""" + return bytes(self.raw_value) - def __init__(self, handle: bindings.EntrySetHandle): - """Initialize the EntrySet instance.""" + @cached_property + def raw_value(self) -> memoryview: + """Accessor for the entry raw value.""" + return self._list.get_value(self._pos) + + @property + def value_json(self) -> dict: + """Accessor for the entry value as JSON.""" + return json.loads(self.value) + + @cached_property + def tags(self) -> dict: + """Accessor for the entry tags.""" + return self._list.get_tags(self._pos) + + def __repr__(self) -> str: + """Format entry handle as a string.""" + return ( + f"" + ) + + +class EntryList: + """A list of query results.""" + + def __init__(self, handle: EntryListHandle, len: int = None): + """Initialize the EntryList instance.""" self._handle = handle + self._pos = 0 + if handle: + self._len = bindings.entry_list_count(self._handle) if len is None else len + else: + self._len = 0 @property - def handle(self) -> bindings.EntrySetHandle: - """Accessor for the entry set handle.""" + def handle(self) -> EntryListHandle: + """Accessor for the entry list handle.""" return self._handle + def __getitem__(self, index) -> Entry: + if not isinstance(index, int) or index < 0 or index >= self._len: + return IndexError() + return Entry(self._handle, index) + def __iter__(self): return self def __next__(self): - entry = bindings.entry_set_next(self._handle) - if entry: - # keep reference to self so the buffer isn't dropped - entry.entry_set = self + if self._pos < self._len: + entry = Entry(self._handle, self._pos) + self._pos += 1 return entry else: raise StopIteration + def __len__(self) -> int: + return self._len + + def __repr__(self) -> str: + return f"" + + +class KeyEntry: + """Pointer to one result of a KeyEntryList instance.""" + + def __init__(self, lst: KeyEntryListHandle, pos: int): + """Initialize the KeyEntryHandle.""" + self._list = lst + self._pos = pos + + @cached_property + def algorithm(self) -> str: + """Accessor for the key entry algorithm.""" + return self._list.get_algorithm(self._pos) + + @cached_property + def name(self) -> str: + """Accessor for the key entry name.""" + return self._list.get_name(self._pos) + + @cached_property + def metadata(self) -> str: + """Accessor for the key entry metadata.""" + return self._list.get_metadata(self._pos) + + @cached_property + def key(self) -> Key: + """Accessor for the entry metadata.""" + return Key(self._list.load_key(self._pos)) + + @cached_property + def tags(self) -> dict: + """Accessor for the entry tags.""" + return self._list.get_tags(self._pos) + + def __repr__(self) -> str: + """Format key 
entry handle as a string.""" + return ( + f"" + ) + + +class KeyEntryList: + """A list of key query results.""" + + def __init__(self, handle: KeyEntryListHandle, len: int = None): + """Initialize the KeyEntryList instance.""" + self._handle = handle + self._pos = 0 + if handle: + self._len = ( + bindings.key_entry_list_count(self._handle) if len is None else len + ) + else: + self._len = 0 + + @property + def handle(self) -> KeyEntryListHandle: + """Accessor for the key entry list handle.""" + return self._handle + + def __getitem__(self, index) -> KeyEntry: + if not isinstance(index, int) or index < 0 or index >= self._len: + return IndexError() + return KeyEntry(self._handle, index) + + def __iter__(self): + return self + + def __next__(self): + if self._pos < self._len: + entry = KeyEntry(self._handle, self._pos) + self._pos += 1 + return entry + else: + raise StopIteration + + def __len__(self) -> int: + return self._len + + def __repr__(self) -> str: + return ( + f"" + ) + class Scan: """A scan of the Store.""" @@ -49,11 +205,11 @@ def __init__( ): """Initialize the Scan instance.""" self.params = (store, profile, category, tag_filter, offset, limit) - self._handle: bindings.ScanHandle = None - self._buffer: EntrySet = None + self._handle: ScanHandle = None + self._buffer: EntryList = None @property - def handle(self) -> bindings.ScanHandle: + def handle(self) -> ScanHandle: """Accessor for the scan handle.""" return self._handle @@ -64,22 +220,22 @@ async def __anext__(self): if self._handle is None: (store, profile, category, tag_filter, offset, limit) = self.params if not store.handle: - raise StoreError( - StoreErrorCode.WRAPPER, "Cannot scan from closed store" + raise AskarError( + AskarErrorCode.WRAPPER, "Cannot scan from closed store" ) self._handle = await bindings.scan_start( store.handle, profile, category, tag_filter, offset, limit ) - scan_handle = await bindings.scan_next(self._handle) - self._buffer = EntrySet(scan_handle) if scan_handle else None + list_handle = await bindings.scan_next(self._handle) + self._buffer = EntryList(list_handle) if list_handle else None while True: if not self._buffer: raise StopAsyncIteration row = next(self._buffer, None) if row: return row - scan_handle = await bindings.scan_next(self._handle) - self._buffer = EntrySet(scan_handle) if scan_handle else None + list_handle = await bindings.scan_next(self._handle) + self._buffer = EntryList(list_handle) if list_handle else None def __repr__(self) -> str: return f"" @@ -88,14 +244,14 @@ def __repr__(self) -> str: class Store: """An opened Store instance.""" - def __init__(self, handle: bindings.StoreHandle, uri: str): + def __init__(self, handle: StoreHandle, uri: str): """Initialize the Store instance.""" self._handle = handle self._opener: OpenSession = None self._uri = uri @property - def handle(self) -> bindings.StoreHandle: + def handle(self) -> StoreHandle: """Accessor for the store handle.""" return self._handle @@ -108,7 +264,7 @@ def uri(self) -> str: async def provision( cls, uri: str, - wrap_method: str = None, + key_method: str = None, pass_key: str = None, *, profile: str = None, @@ -116,7 +272,7 @@ async def provision( ) -> "Store": return Store( await bindings.store_provision( - uri, wrap_method, pass_key, profile, recreate + uri, key_method, pass_key, profile, recreate ), uri, ) @@ -125,14 +281,12 @@ async def provision( async def open( cls, uri: str, - wrap_method: str = None, + key_method: str = None, pass_key: str = None, *, profile: str = None, ) -> "Store": - return Store( 
-            await bindings.store_open(uri, wrap_method, pass_key, profile), uri
-        )
+        return Store(await bindings.store_open(uri, key_method, pass_key, profile), uri)

     @classmethod
     async def remove(cls, uri: str) -> bool:
@@ -157,10 +311,10 @@ async def remove_profile(self, name: str) -> bool:

     async def rekey(
         self,
-        wrap_method: str = None,
+        key_method: str = None,
         pass_key: str = None,
     ):
-        await bindings.store_rekey(self._handle, wrap_method, pass_key)
+        await bindings.store_rekey(self._handle, key_method, pass_key)

     def scan(
         self,
@@ -196,7 +350,7 @@ def __repr__(self) -> str:

 class Session:
     """An opened Session instance."""

-    def __init__(self, store: Store, handle: bindings.SessionHandle, is_txn: bool):
+    def __init__(self, store: Store, handle: SessionHandle, is_txn: bool):
         """Initialize the Session instance."""
         self._store = store
         self._handle = handle
@@ -208,7 +362,7 @@ def is_transaction(self) -> bool:
         return self._is_txn

     @property
-    def handle(self) -> bindings.SessionHandle:
+    def handle(self) -> SessionHandle:
         """Accessor for the SessionHandle instance."""
         return self._handle

@@ -219,18 +373,18 @@ def store(self) -> Store:

     async def count(self, category: str, tag_filter: Union[str, dict] = None) -> int:
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot count from closed session")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot count from closed session")
         return await bindings.session_count(self._handle, category, tag_filter)

     async def fetch(
         self, category: str, name: str, *, for_update: bool = False
     ) -> Optional[Entry]:
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot fetch from closed session")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot fetch from closed session")
         result_handle = await bindings.session_fetch(
             self._handle, category, name, for_update
         )
-        return next(EntrySet(result_handle), None) if result_handle else None
+        return next(EntryList(result_handle, 1), None) if result_handle else None

     async def fetch_all(
         self,
@@ -239,14 +393,12 @@ async def fetch_all(
         limit: int = None,
         *,
         for_update: bool = False,
-    ) -> Sequence[Entry]:
+    ) -> EntryList:
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot fetch from closed session")
-        return list(
-            EntrySet(
-                await bindings.session_fetch_all(
-                    self._handle, category, tag_filter, limit, for_update
-                )
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot fetch from closed session")
+        return EntryList(
+            await bindings.session_fetch_all(
+                self._handle, category, tag_filter, limit, for_update
             )
         )

@@ -260,7 +412,7 @@ async def insert(
         value_json=None,
     ):
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot update closed session")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot update closed session")
         if value is None and value_json is not None:
             value = json.dumps(value_json)
         await bindings.session_update(
@@ -277,7 +429,7 @@ async def replace(
         value_json=None,
     ):
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot update closed session")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot update closed session")
         if value is None and value_json is not None:
             value = json.dumps(value_json)
         await bindings.session_update(
@@ -290,7 +442,7 @@ async def remove(
         name: str,
     ):
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot update closed session")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot update closed session")
         await bindings.session_update(
             self._handle, EntryOperation.REMOVE, category, name
         )
@@ -301,106 +453,93 @@ async def remove_all(
         tag_filter: Union[str, dict] = None,
     ) -> int:
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot remove all for closed session"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot remove all for closed session"
             )
         return await bindings.session_remove_all(self._handle, category, tag_filter)

-    async def create_keypair(
+    async def insert_key(
         self,
-        key_alg: KeyAlg,
+        name: str,
+        key: Key,
         *,
         metadata: str = None,
         tags: dict = None,
-        seed: Union[str, bytes] = None,
+        expiry_ms: int = None,
     ) -> str:
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot create keypair with closed session"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot insert key with closed session"
             )
         return str(
-            await bindings.session_create_keypair(
-                self._handle, key_alg.value, metadata, tags, seed
+            await bindings.session_insert_key(
+                self._handle, key._handle, name, metadata, tags, expiry_ms
             )
         )

-    async def fetch_keypair(
-        self, ident: str, *, for_update: bool = False
+    async def fetch_key(
+        self, name: str, *, for_update: bool = False
     ) -> Optional[KeyEntry]:
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot fetch keypair from closed session"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot fetch key from closed session"
             )
-        handle = await bindings.session_fetch_keypair(self._handle, ident, for_update)
-        if handle:
-            entry = next(EntrySet(handle))
-            result = KeyEntry(entry.category, entry.name, entry.value_json, entry.tags)
-            return result
+        result_handle = await bindings.session_fetch_key(self._handle, name, for_update)
+        return next(KeyEntryList(result_handle, 1)) if result_handle else None

-    async def update_keypair(
+    async def fetch_all_keys(
         self,
-        ident: str,
         *,
-        metadata: str = None,
-        tags: dict = None,
-    ):
-        if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot update keypair with closed session"
-            )
-        await bindings.session_update_keypair(self._handle, ident, metadata, tags)
-
-    async def sign_message(self, key_ident: str, message: Union[str, bytes]) -> bytes:
+        alg: Union[str, KeyAlg] = None,
+        thumbprint: str = None,
+        tag_filter: Union[str, dict] = None,
+        limit: int = None,
+        for_update: bool = False,
+    ) -> KeyEntryList:
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot sign message with closed session"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot fetch key from closed session"
             )
-        buf = await bindings.session_sign_message(self._handle, key_ident, message)
-        return bytes(buf)
+        result_handle = await bindings.session_fetch_all_keys(
+            self._handle, alg, thumbprint, tag_filter, limit, for_update
+        )
+        return KeyEntryList(result_handle)

-    async def pack_message(
+    async def update_key(
         self,
-        recipient_vks: Sequence[str],
-        from_key_ident: Optional[str],
-        message: Union[str, bytes],
-    ) -> bytes:
+        name: str,
+        *,
+        metadata: str = None,
+        tags: dict = None,
+        expiry_ms: int = None,
+    ):
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot pack message with closed session"
-            )
-        return bytes(
-            await bindings.session_pack_message(
-                self._handle, recipient_vks, from_key_ident, message
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot update key with closed session"
             )
-        )
+        await bindings.session_update_key(self._handle, name, metadata, tags, expiry_ms)

-    async def unpack_message(
-        self,
-        message: Union[str, bytes],
-    ) -> (bytes, str, Optional[str]):
+    async def remove_key(self, name: str):
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot unpack message with closed session"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot remove key with closed session"
             )
-        (unpacked, recip, sender) = await bindings.session_unpack_message(
-            self._handle, message
-        )
-        return (bytes(unpacked), recip, sender)
+        await bindings.session_remove_key(self._handle, name)

     async def commit(self):
         if not self._is_txn:
-            raise StoreError(StoreErrorCode.WRAPPER, "Session is not a transaction")
+            raise AskarError(AskarErrorCode.WRAPPER, "Session is not a transaction")
         if not self._handle:
-            raise StoreError(StoreErrorCode.WRAPPER, "Cannot commit closed transaction")
+            raise AskarError(AskarErrorCode.WRAPPER, "Cannot commit closed transaction")
         await self._handle.close(commit=True)
         self._handle = None

     async def rollback(self):
         if not self._is_txn:
-            raise StoreError(StoreErrorCode.WRAPPER, "Session is not a transaction")
+            raise AskarError(AskarErrorCode.WRAPPER, "Session is not a transaction")
         if not self._handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot rollback closed transaction"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot rollback closed transaction"
             )
         await self._handle.close(commit=False)
         self._handle = None

@@ -428,11 +567,11 @@ def is_transaction(self) -> bool:

     async def _open(self) -> Session:
         if not self._store.handle:
-            raise StoreError(
-                StoreErrorCode.WRAPPER, "Cannot start session from closed store"
+            raise AskarError(
+                AskarErrorCode.WRAPPER, "Cannot start session from closed store"
             )
         if self._session:
-            raise StoreError(StoreErrorCode.WRAPPER, "Session already opened")
+            raise AskarError(AskarErrorCode.WRAPPER, "Session already opened")
         self._session = Session(
             self._store,
             await bindings.session_start(
diff --git a/wrappers/python/aries_askar/types.py b/wrappers/python/aries_askar/types.py
index 47aed0b3..095fc679 100644
--- a/wrappers/python/aries_askar/types.py
+++ b/wrappers/python/aries_askar/types.py
@@ -1,10 +1,10 @@
 import json as _json

 from enum import Enum
-from typing import Mapping
+from typing import Mapping, Optional, Union


-def _make_binary(value: [str, bytes]) -> bytes:
+def _make_binary(value: Union[str, bytes]) -> bytes:
     if isinstance(value, str):
         return value.encode("utf-8")
     else:
@@ -16,7 +16,7 @@ def __init__(
         self,
         category: str,
         name: str,
-        value: [str, bytes],
+        value: Union[str, bytes],
         tags: Mapping[str, str] = None,
     ) -> "Entry":
         self.category = category
@@ -56,28 +56,29 @@ def __repr__(self) -> str:
         )


-class KeyEntry:
-    def __init__(
-        self,
-        category: str,
-        ident: str,
-        params: dict,
-        tags: Mapping[str, str] = None,
-    ) -> "Entry":
-        self.category = category
-        self.ident = ident
-        self.params = params
-        self.tags = dict(tags) if tags else {}
-
-    def __repr__(self) -> str:
-        return (
-            f"{self.__class__.__name__}(category={repr(self.category)}, "
-            f"ident={repr(self.ident)}, params=.., tags={self.tags})"
-        )
-
-
 class KeyAlg(Enum):
+    A128GCM = "a128gcm"
+    A256GCM = "a256gcm"
+    A128CBC_HS256 = "a128cbchs256"
+    A256CBC_HS512 = "a256cbchs512"
+    BLS12_381_G1 = "bls12381g1"
+    BLS12_381_G2 = "bls12381g2"
+    BLS12_381_G1G2 = "bls12381g1g2"
+    C20P = "c20p"
+    XC20P = "xc20p"
     ED25519 = "ed25519"
+    X25519 = "x25519"
+    K256 = "k256"
+    P256 = "p256"
+
+    @classmethod
+    def from_key_alg(cls, alg: str) -> Optional["KeyAlg"]:
+        """Get KeyAlg instance from the algorithm identifier."""
+        for cmp_alg in KeyAlg:
+            if cmp_alg.value == alg:
+                return cmp_alg
+
+        return None


 class EntryOperation(Enum):
diff --git a/wrappers/python/aries_askar/version.py b/wrappers/python/aries_askar/version.py
index 695c0e2b..59cff6ae 100644
--- a/wrappers/python/aries_askar/version.py
+++ b/wrappers/python/aries_askar/version.py
@@ -1,3 +1,3 @@
 """aries_askar library wrapper version."""

-__version__ = "0.1.3"
+__version__ = "0.2.0-pre.2"
diff --git a/wrappers/python/demo/perf.py b/wrappers/python/demo/perf.py
index fee8ca83..aaa80b4c 100644
--- a/wrappers/python/demo/perf.py
+++ b/wrappers/python/demo/perf.py
@@ -15,7 +15,7 @@
 if len(sys.argv) > 1:
     REPO_URI = sys.argv[1]
     if REPO_URI == "postgres":
-        REPO_URI = "postgres://postgres:pgpass@localhost:5432/askar-test"
+        REPO_URI = "postgres://postgres:mysecretpassword@localhost:5432/askar-test"
 else:
     REPO_URI = "sqlite://:memory:"

@@ -27,7 +27,7 @@ def log(*args):


 async def perf_test():
-    key = await generate_raw_key()
+    key = generate_raw_key()

     store = await Store.provision(REPO_URI, "raw", key, recreate=True)

diff --git a/wrappers/python/demo/test.py b/wrappers/python/demo/test.py
index c7522cd0..4707c560 100644
--- a/wrappers/python/demo/test.py
+++ b/wrappers/python/demo/test.py
@@ -4,19 +4,25 @@
 import sys

 from aries_askar.bindings import (
-    derive_verkey,
     generate_raw_key,
-    verify_signature,
     version,
 )
-from aries_askar import KeyAlg, Store
+from aries_askar import (
+    KeyAlg,
+    Key,
+    Store,
+    crypto_box_seal,
+    crypto_box_seal_open,
+    derive_key_ecdh_es,
+    derive_key_ecdh_1pu,
+)

 logging.basicConfig(level=os.getenv("LOG_LEVEL", "").upper() or None)

 if len(sys.argv) > 1:
     REPO_URI = sys.argv[1]
     if REPO_URI == "postgres":
-        REPO_URI = "postgres://postgres:pgpass@localhost:5432/askar-test"
+        REPO_URI = "postgres://postgres:mysecretpassword@localhost:5432/askar-test"
 else:
     REPO_URI = "sqlite://:memory:"

@@ -27,19 +33,67 @@ def log(*args):
     print(*args, "\n")


-async def basic_test():
+def keys_test():
+
+    key = Key.generate(KeyAlg.ED25519)
+    log("Created key:", key)
+    message = b"test message"
+    sig = key.sign_message(message)
+    log("Signature:", sig)
+    verify = key.verify_signature(message, sig)
+    log("Verify:", verify)
+    x25519_key = key.convert_key(KeyAlg.X25519)
+    log("Converted key:", x25519_key)
+
+    x25519_key_2 = Key.generate(KeyAlg.X25519)
+    kex = x25519_key.key_exchange(KeyAlg.XC20P, x25519_key_2)
+    log("Key exchange:", kex)
+
+    msg = b"test message"
+    sealed = crypto_box_seal(x25519_key, msg)
+    opened = crypto_box_seal_open(x25519_key, sealed)
+    assert msg == opened
+
+    log("Key algorithm:", key.algorithm)
+
+    jwk = key.get_jwk_public()
+    log("JWK:", jwk)
+
+    key = Key.generate(KeyAlg.A128GCM)
+    log("Key algorithm:", key.algorithm)
+
+    data = b"test message"
+    nonce = key.aead_random_nonce()
+    params = key.aead_params()
+    assert params.nonce_length == 12
+    assert params.tag_length == 16
+    enc = key.aead_encrypt(data, nonce, b"aad")
+    dec = key.aead_decrypt(enc, nonce, b"aad")
+    assert data == bytes(dec)
+
+    ephem = Key.generate(KeyAlg.P256, ephemeral=True)
+    alice = Key.generate(KeyAlg.P256)
+    bob = Key.generate(KeyAlg.P256)
+    derived = derive_key_ecdh_1pu("A256GCM", ephem, alice, bob, "Alice", "Bob")
+    log("Derived:", derived.get_jwk_thumbprint())
+    derived = derive_key_ecdh_es("A256GCM", ephem, bob, "Alice", "Bob")
+    log("Derived:", derived.get_jwk_thumbprint())
+
+    key = Key.from_seed(KeyAlg.BLS12_381_G1G2, b"testseed000000000000000000000001")
+    log("BLS key G1:", key.get_jwk_public(KeyAlg.BLS12_381_G1))
+    log("BLS key G2:", key.get_jwk_public(KeyAlg.BLS12_381_G2))
+    log("BLS key G1G2:", key.get_jwk_public())
+
+
+async def store_test():
     if ENCRYPT:
-        key = await generate_raw_key(b"00000000000000000000000000000My1")
+        key = generate_raw_key(b"00000000000000000000000000000My1")
         key_method = "raw"
-        log("Generated raw wallet key:", key)
+        log("Generated raw store key:", key)
     else:
         key = None
         key_method = "none"

-    # Derive a verkey
-    verkey = await derive_verkey(KeyAlg.ED25519, b"testseedtestseedtestseedtestseed")
-    log("Derive verkey:", verkey)
-
     # Provision the store
     store = await Store.provision(REPO_URI, key_method, key, recreate=True)
     log("Provisioned store:", store)
@@ -78,33 +132,32 @@ async def basic_test():
     # test key operations in a new session
     async with store as session:
         # Create a new keypair
-        key_ident = await session.create_keypair(KeyAlg.ED25519, metadata="metadata")
-        log("Created key:", key_ident)
+        keypair = Key.generate(KeyAlg.ED25519)
+        log("Created key:", keypair)
+
+        # Store keypair
+        key_name = "testkey"
+        await session.insert_key(key_name, keypair, metadata="metadata")
+        log("Inserted key")

         # Update keypair
-        await session.update_keypair(key_ident, metadata="updated metadata")
+        await session.update_key(key_name, metadata="updated metadata", tags={"a": "b"})
         log("Updated key")

         # Fetch keypair
-        key = await session.fetch_keypair(key_ident)
-        log("Fetch key:", key, "\nKey params:", key.params)
-
-        # Sign a message
-        signature = await session.sign_message(key_ident, b"my message")
-        log("Signature:", signature)
-
-        # Verify signature
-        verify = await verify_signature(key_ident, b"my message", signature)
-        log("Verify signature:", verify)
-
-        # Pack message
-        packed = await session.pack_message([key_ident], key_ident, b"my message")
-        log("Packed message:", packed)
-
-        # Unpack message
-        unpacked = await session.unpack_message(packed)
-        log("Unpacked message:", unpacked)
+        fetch_key = await session.fetch_key(key_name)
+        log("Fetched key:", fetch_key)
+        thumbprint = keypair.get_jwk_thumbprint()
+        assert fetch_key.key.get_jwk_thumbprint() == thumbprint
+
+        # Fetch with filters
+        keys = await session.fetch_all_keys(
+            alg=KeyAlg.ED25519, thumbprint=thumbprint, tag_filter={"a": "b"}, limit=1
+        )
+        log("Fetched keys:", keys)
+        assert len(keys) == 1
+
     async with store as session:
         # Remove rows by category and (optional) tag filter
         log(
             "Removed entry count:",
@@ -118,7 +171,7 @@ async def basic_test():
     log("Created profile:", profile)
     log("Removed profile:", await store.remove_profile(profile))

-    key2 = await generate_raw_key(b"00000000000000000000000000000My2")
+    key2 = generate_raw_key(b"00000000000000000000000000000My2")
     await store.rekey("raw", key2)
     log("Re-keyed store")

@@ -128,6 +181,7 @@ async def basic_test():

 if __name__ == "__main__":
     log("aries-askar version:", version())
-    asyncio.get_event_loop().run_until_complete(basic_test())
+    keys_test()
+    asyncio.get_event_loop().run_until_complete(store_test())
     log("done")
diff --git a/wrappers/python/setup.py b/wrappers/python/setup.py
index 7ce7cb5a..92b8b9f3 100644
--- a/wrappers/python/setup.py
+++ b/wrappers/python/setup.py
@@ -22,6 +22,7 @@
     long_description_content_type="text/markdown",
     url="https://github.com/hyperledger/aries-askar",
     packages=find_packages(),
+    install_requires=["cached_property~=1.5"],
     include_package_data=True,
     package_data={
         "": [