From ee3dfe6c3e504ee073a42eaa2e4aac17c582aed1 Mon Sep 17 00:00:00 2001 From: Darwin Lo Date: Fri, 11 Nov 2022 11:46:37 -0800 Subject: [PATCH] Use vc-derive-credential to create a derived credential with selectively disclosed claims and a BBS+ signature vc-derive-credential, when combined with --nonce, now verifies a BBS+ derived credential. Check the hashes revealed during BBS+ derived-credential verification against the claims in the derived credential. Turn off canonicalization for now when using BBS+. Rename to_nquads_vec to into_nquads_vec, following the new naming convention. --- did-key/src/lib.rs | 4 +- ssi-json-ld/Cargo.toml | 1 + ssi-json-ld/src/lib.rs | 3 + ssi-json-ld/src/rdf.rs | 11 ++ ssi-jwk/Cargo.toml | 2 + ssi-jwk/src/lib.rs | 20 ++++ ssi-jws/Cargo.toml | 1 + ssi-jws/src/lib.rs | 210 +++++++++++++++++++++++++++++++++++++- ssi-ldp/Cargo.toml | 5 +- ssi-ldp/src/lib.rs | 205 ++++++++++++++++++++++++++++++++++++- ssi-ldp/src/proof.rs | 14 ++- ssi-ldp/src/suites/mod.rs | 25 ++++- ssi-ldp/src/suites/w3c.rs | 4 + ssi-vc/Cargo.toml | 1 + ssi-vc/src/lib.rs | 95 ++++++++++++++++- ssi-zcap-ld/src/lib.rs | 6 +- 16 files changed, 584 insertions(+), 23 deletions(-) diff --git a/did-key/src/lib.rs b/did-key/src/lib.rs index 70113002e..1611d4458 100644 --- a/did-key/src/lib.rs +++ b/did-key/src/lib.rs @@ -197,7 +197,9 @@ impl DIDResolver for DIDKey { && data[1] == DID_KEY_BLS12381_G2_PREFIX[1] { { - if data.len() - 2 != 96 { + // A BBS+ public key may have more than one generator; hence, the length may + // exceed 96 + if data.len() - 2 < 96 { return ( ResolutionMetadata::from_error(ERROR_INVALID_DID), None, diff --git a/ssi-json-ld/Cargo.toml b/ssi-json-ld/Cargo.toml index e30e25518..8237fdd8b 100644 --- a/ssi-json-ld/Cargo.toml +++ b/ssi-json-ld/Cargo.toml @@ -21,6 +21,7 @@ futures = "0.3" lazy_static = "1.4" combination = "0.1" grdf = "0.16.2" +hex = "0.4" ssi-contexts = { version = "0.1.3", path = "../contexts/" } ssi-crypto = { path = "../ssi-crypto", version = "0.1" } diff --git a/ssi-json-ld/src/lib.rs b/ssi-json-ld/src/lib.rs index f03ea245b..4cda28612 100644 --- a/ssi-json-ld/src/lib.rs +++ b/ssi-json-ld/src/lib.rs @@ -487,6 +487,7 @@ where L::Context: Into>, L::ContextError: Send, { + eprintln!("json_to_dataset: enter"); use json_ld::JsonLdProcessor; let options = Options { @@ -500,10 +501,12 @@ where let doc = json_ld::RemoteDocument::new(None, None, json); let mut generator = rdf_types::generator::Blank::new_with_prefix("b".to_string()).with_default_metadata(); + eprintln!("json_to_dataset: 1"); let mut to_rdf = doc .to_rdf_using(&mut generator, loader, options) .await .map_err(Box::new)?; + eprintln!("json_to_dataset: 2"); Ok(to_rdf .cloned_quads() .map(|q| { diff --git a/ssi-json-ld/src/rdf.rs b/ssi-json-ld/src/rdf.rs index b5a0e9024..a1a1ad5b2 100644 --- a/ssi-json-ld/src/rdf.rs +++ b/ssi-json-ld/src/rdf.rs @@ -12,6 +12,7 @@ pub type DataSet = /// See . pub trait IntoNQuads { fn into_nquads(self) -> String; + fn into_nquads_vec(self) -> Vec<String>; } impl IntoNQuads for Q @@ -27,6 +28,16 @@ where lines.dedup(); lines.join("") } + + fn into_nquads_vec(self) -> Vec<String> { + let mut lines = self + .into_iter() + .map(|quad| NQuadsStatement(quad.borrow()).to_string()) + .collect::<Vec<String>>(); + lines.sort(); + lines.dedup(); + lines + } } /// Wrapper to display an RDF Quad as an N-Quads statement.
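The into_nquads_vec helper added above keeps each canonicalized statement as its own String instead of joining them, which is what lets every N-Quad later become a separate BBS+ signature message. A minimal sketch of the relationship between the two trait methods; the generic helper and its bounds below are illustrative and not part of the patch:

    use ssi_json_ld::rdf::IntoNQuads;

    // For any canonicalized set of quads, `into_nquads()` is the concatenation of the
    // lines returned by `into_nquads_vec()`; the vector form keeps one line per statement.
    fn count_statements<Q: IntoNQuads + Clone>(quads: Q) -> usize {
        let joined = quads.clone().into_nquads();  // one String, sorted and deduplicated
        let lines = quads.into_nquads_vec();       // Vec<String>, same lines kept separate
        assert_eq!(joined, lines.concat());
        lines.len()
    }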
diff --git a/ssi-jwk/Cargo.toml b/ssi-jwk/Cargo.toml index ba750751b..c0d4c4926 100644 --- a/ssi-jwk/Cargo.toml +++ b/ssi-jwk/Cargo.toml @@ -33,6 +33,8 @@ tezos = ["blake2b_simd", "secp256k1", "secp256r1", "bs58"] ring = ["dep:ring"] [dependencies] +# todo make bbs optional based on bbs feature +bbs = { version = "0.4" } num-bigint = "0.4" simple_asn1 = "^0.5.2" zeroize = { version = "1.5", features = ["zeroize_derive"] } diff --git a/ssi-jwk/src/lib.rs b/ssi-jwk/src/lib.rs index 7624e2757..046475980 100644 --- a/ssi-jwk/src/lib.rs +++ b/ssi-jwk/src/lib.rs @@ -6,6 +6,7 @@ use std::convert::TryFrom; use std::result::Result; use zeroize::Zeroize; pub mod error; +use bbs::prelude::*; pub use error::Error; #[cfg(feature = "ripemd-160")] @@ -255,6 +256,7 @@ pub enum Algorithm { ESKeccakKR, ESBlake2b, ESBlake2bK, + BLS12381G2, #[doc(hidden)] AleoTestnet1Signature, // Per the specs it should only be `none` but `None` is kept for backwards compatibility @@ -337,6 +339,21 @@ impl JWK { crate::aleo::generate_private_key_jwk().map_err(Error::AleoGeneratePrivateKey) } + //#[cfg(feature = "bbs")] + pub fn generate_bls12381_2020() -> Result<JWK, Error> { + let (pk, sk) = Issuer::new_keys(100).unwrap(); + let pk_bytes = pk.to_bytes_compressed_form(); + let sk_bytes = sk.to_bytes_compressed_form().to_vec(); + + let params = Params::OKP(OctetParams { + curve: "Bls12381G2".to_string(), + public_key: Base64urlUInt(pk_bytes), + private_key: Some(Base64urlUInt(sk_bytes)), + }); + + Ok(JWK::from(params)) + } + pub fn get_algorithm(&self) -> Option<Algorithm> { if let Some(algorithm) = self.algorithm { return Some(algorithm); @@ -348,6 +365,9 @@ impl JWK { Params::OKP(okp_params) if okp_params.curve == "Ed25519" => { return Some(Algorithm::EdDSA); } + Params::OKP(okp_params) if okp_params.curve == "Bls12381G2" => { + return Some(Algorithm::BLS12381G2); + } #[cfg(feature = "aleo")] Params::OKP(okp_params) if okp_params.curve == crate::aleo::OKP_CURVE => { return Some(Algorithm::AleoTestnet1Signature); diff --git a/ssi-jws/Cargo.toml b/ssi-jws/Cargo.toml index 96ac2f759..4ecbdcd9e 100644 --- a/ssi-jws/Cargo.toml +++ b/ssi-jws/Cargo.toml @@ -32,6 +32,7 @@ tezos = ["ssi-jwk/tezos", "secp256k1", "secp256r1", "ed25519"] ring = ["ssi-jwk/ring", "dep:ring", "rand", "blake2"] [dependencies] +bbs = { version = "0.4.1" } thiserror = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" diff --git a/ssi-jws/src/lib.rs b/ssi-jws/src/lib.rs index ffbc67e39..98389fcc6 100644 --- a/ssi-jws/src/lib.rs +++ b/ssi-jws/src/lib.rs @@ -1,10 +1,11 @@ #![cfg_attr(docsrs, feature(doc_auto_cfg))] - // TODO reinstate Error::MissingFeatures ?
pub mod error; +use bbs::prelude::*; pub use error::Error; use serde::{Deserialize, Serialize}; +use ssi_crypto::hashes::sha256::sha256; use ssi_jwk::{Algorithm, Base64urlUInt, Params as JWKParams, JWK}; use std::collections::BTreeMap; use std::convert::TryFrom; @@ -72,6 +73,62 @@ fn base64_encode_json<T: Serialize>(object: &T) -> Result<String, Error> { Ok(base64::encode_config(json, base64::URL_SAFE_NO_PAD)) } +pub fn create_bbs_sig_input(payload: &JWSPayload) -> Vec<SignatureMessage> { + let mut messages: Vec<SignatureMessage> = Vec::new(); + messages.push(SignatureMessage::hash(payload.header.as_bytes())); + messages.push(SignatureMessage::hash(payload.sigopts_digest.as_ref())); + + for i in 0..payload.messages.len() { + let message = payload.messages[i].as_bytes(); + messages.push(SignatureMessage::hash(message)); + } + + let mut num_messages = payload.messages.len() + 2; + while num_messages < 100 { + // todo 100 is hardcoded; use config + messages.push(SignatureMessage::hash(b"")); + num_messages += 1; + } + messages +} + +pub fn sign_bytes_v2( + algorithm: Algorithm, + key: &JWK, + payload: &JWSPayload, +) -> Result<Vec<u8>, Error> { + match &key.params { + JWKParams::OKP(okp) => { + match algorithm { + Algorithm::BLS12381G2 => { + let messages = create_bbs_sig_input(payload); + + let Base64urlUInt(pk_bytes) = &okp.public_key; + let Base64urlUInt(sk_bytes) = okp.private_key.as_ref().unwrap(); + let pk = bbs::prelude::PublicKey::try_from(pk_bytes.as_slice()).unwrap(); + let sk = bbs::prelude::SecretKey::try_from(sk_bytes.as_slice()).unwrap(); + + let signature = Signature::new(messages.as_slice(), &sk, &pk).unwrap(); + return Ok(signature.to_bytes_compressed_form().to_vec()); + } + _ => (), + } + } + _ => (), + } + + let messages_str = payload.messages.join(""); + let messages_hash = sha256(messages_str.as_bytes()); + sign_bytes(algorithm, &messages_hash, key) +} + +pub fn generate_proof_nonce() -> String { + let proof_nonce = Verifier::generate_proof_nonce(); + let proof_nonce_bytes = proof_nonce.to_bytes_compressed_form(); + let proof_nonce_str = base64::encode(proof_nonce_bytes.as_ref()); + return proof_nonce_str; +} + pub fn sign_bytes(algorithm: Algorithm, data: &[u8], key: &JWK) -> Result<Vec<u8>, Error> { let signature = match &key.params { #[cfg(feature = "ring")] @@ -116,7 +173,9 @@ pub fn sign_bytes(algorithm: Algorithm, data: &[u8], key: &JWK) -> Result<Vec<u8>, Error> { use blake2::digest::{consts::U32, Digest}; - if algorithm != Algorithm::EdDSA && algorithm != Algorithm::EdBlake2b { + if algorithm != Algorithm::EdDSA + && algorithm != Algorithm::EdBlake2b + { return Err(Error::UnsupportedAlgorithm); } if okp.curve != *"Ed25519" { @@ -237,6 +296,119 @@ pub fn sign_bytes_b64(algorithm: Algorithm, data: &[u8], key: &JWK) -> Result<String, Error> { +pub fn sign_bytes_b64_v2( + algorithm: Algorithm, + key: &JWK, + payload: &JWSPayload, +) -> Result<String, Error> { + let signature = sign_bytes_v2(algorithm, key, payload)?; + let sig_b64 = base64::encode_config(signature, base64::URL_SAFE_NO_PAD); + Ok(sig_b64) +} + +pub fn verify_payload( + algorithm: Algorithm, + key: &JWK, + payload: &JWSPayload, + signature: &[u8], + disclosed_message_indices: &[usize], + nonce: Option<&String>, +) -> Result<VerificationWarnings, Error> { + // todo ensure algorithm is bbs + let mut warnings = VerificationWarnings::default(); + + match algorithm { + Algorithm::BLS12381G2 => (), + _ => { + return Err(Error::UnsupportedAlgorithm); + } + } + + match &key.params { + JWKParams::OKP(okp) => { + match nonce { + Some(n) => { + let proof = SignatureProof::try_from(signature).unwrap(); // todo error handling + + let Base64urlUInt(pk_bytes) = &okp.public_key; + let issuer_pk = PublicKey::try_from(pk_bytes.as_slice()).unwrap(); + let proof_request
= Verifier::new_proof_request(disclosed_message_indices, &issuer_pk).unwrap(); + let proof_nonce_bytes = base64::decode(n).unwrap(); + assert!(proof_nonce_bytes.len() == 32); + let mut proof_nonce_bytes_sized: [u8; 32] = [0; 32]; + proof_nonce_bytes_sized.clone_from_slice(proof_nonce_bytes.as_slice()); + let proof_nonce = ProofNonce::from(proof_nonce_bytes_sized); + + let result = Verifier::verify_signature_pok(&proof_request, &proof, &proof_nonce); + match result { + Ok(message_hashes) => { + let mut i = 0; + let mut credential_subject_id = ""; + while i < payload.messages.len() { + let m = payload.messages[i].as_str(); + if m.contains("<https://www.w3.org/2018/credentials#credentialSubject>") { + let m_parts: Vec<&str> = m.split(" ").collect(); + credential_subject_id = m_parts[2]; + break; + } + i += 1; + } + assert!(credential_subject_id != "", "credentialSubject node not found"); + + let mut first_claim_found = false; + while i < payload.messages.len() { + let m = payload.messages[i].as_str(); + if m.starts_with(credential_subject_id) { + first_claim_found = true; + break; + } + i += 1; + } + assert!(first_claim_found, "No claims in derived credential"); + + for j in 0..message_hashes.len() { + let revealed_hash = message_hashes[j]; + let target_hash = SignatureMessage::hash(payload.messages[i].as_bytes()); + if revealed_hash != target_hash { + return Err(Error::InvalidSignature); + } + + i += 1; + } + }, + Err(_) => { + return Err(Error::InvalidSignature); + } + } + }, + None => { + if signature.len() != 112 { + return Err(Error::InvalidSignature); + } else { + let mut signature_sized: [u8; 112] = [0; 112]; + signature_sized.clone_from_slice(signature); + let bbs_sig = bbs::prelude::Signature::from(&signature_sized); + + let messages = create_bbs_sig_input(payload); + let Base64urlUInt(pk_bytes) = &okp.public_key; + let pk = bbs::prelude::PublicKey::try_from(pk_bytes.as_slice()).unwrap(); + let result = bbs_sig.verify(messages.as_slice(), &pk).unwrap(); + + if !result { + return Err(Error::InvalidSignature); + } + } + } + } + } + _ => { + return Err(Error::UnsupportedAlgorithm); + } + } + + Ok(warnings) +} + pub fn verify_bytes_warnable( algorithm: Algorithm, data: &[u8], @@ -297,6 +469,7 @@ pub fn verify_bytes_warnable( #[cfg(any(feature = "ring", feature = "ed25519"))] JWKParams::OKP(okp) => { use blake2::digest::{consts::U32, Digest}; + // todo error if BLS12-381 G2 if okp.curve != *"Ed25519" { return Err(ssi_jwk::Error::CurveNotImplemented(okp.curve.to_string()).into()); } @@ -310,7 +483,8 @@ pub fn verify_bytes_warnable( { use ring::signature::UnparsedPublicKey; let verification_algorithm = &ring::signature::ED25519; - let public_key = UnparsedPublicKey::new(verification_algorithm, &okp.public_key.0); + let public_key = + UnparsedPublicKey::new(verification_algorithm, &okp.public_key.0); public_key.verify(&hash, signature)?; } #[cfg(feature = "ed25519")] @@ -531,6 +705,30 @@ pub fn detached_sign_unencoded_payload( Ok(jws) } +pub fn generate_header(algorithm: Algorithm, key: &JWK) -> Result<(Header, String), Error> { + let header = Header { + algorithm, + key_id: key.key_id.clone(), + critical: Some(vec!["b64".to_string()]), + base64urlencode_payload: Some(false), + ..Default::default() + }; + let header_str = base64_encode_json(&header)?; + Ok((header, header_str)) +} + +pub fn detached_sign_unencoded_payload_v2( + algorithm: Algorithm, + payload: &mut JWSPayload, + key: &JWK, +) -> Result<String, Error> { + let (header, header_b64) = generate_header(algorithm, key)?; + payload.header = header_b64; + let sig_b64 =
sign_bytes_b64_v2(header.algorithm, &key, payload)?; + let jws = payload.header.clone() + ".." + &sig_b64; + Ok(jws) +} + pub fn prepare_detached_unencoded_payload( algorithm: Algorithm, payload: &[u8], @@ -610,6 +808,12 @@ pub struct DecodedJWS { pub signature: Vec<u8>, } +pub struct JWSPayload { + pub header: String, + pub messages: Vec<String>, + pub sigopts_digest: [u8; 32], +} + /// Decode JWS parts (JOSE header, payload, and signature) into useful values. /// The payload argument is bytes since it may be unencoded if the b64:false header parameter is used; otherwise it must be a base64url-encoded string. Header and signature are always expected to be base64url-encoded. /// "crit" (critical) header parameters are checked and disallowed if unrecognized/unsupported. diff --git a/ssi-ldp/Cargo.toml b/ssi-ldp/Cargo.toml index 5ff7be8f4..171c12d43 100644 --- a/ssi-ldp/Cargo.toml +++ b/ssi-ldp/Cargo.toml @@ -9,12 +9,13 @@ repository = "https://github.com/spruceid/ssi/" documentation = "https://docs.rs/ssi-ldp/" [features] -default = ["eip", "tezos", "w3c", "solana"] +default = ["eip", "tezos", "w3c", "solana", "bbsbls"] ## enable LDPs from the W3C linked data proof registry: ed25519, secp256k1, rsa2018, secp256r1 w3c = ["secp256k1", "secp256r1", "rsa", "ed25519"] secp256k1 = ["ssi-jws/secp256k1", "k256"] secp256r1 = ["ssi-jws/secp256r1", "p256"] secp384r1 = ["ssi-jws/secp384r1"] +bbsbls = [] ed25519 = ["ssi-jws/ed25519"] rsa = ["ssi-jws/rsa"] ## enable the EIP-defined LDPs: EIP712 @@ -30,6 +31,8 @@ example-http-issuer = [] test = [] [dependencies] +bbs = { version = "0.4.1" } +base64 = "0.12" thiserror = "1.0" async-trait = "0.1" lazy_static = "1.4" diff --git a/ssi-ldp/src/lib.rs b/ssi-ldp/src/lib.rs index 25cb6dd71..f00d3dfee 100644 --- a/ssi-ldp/src/lib.rs +++ b/ssi-ldp/src/lib.rs @@ -7,11 +7,13 @@ use chrono::prelude::*; pub mod proof; use iref::{Iri, IriBuf}; pub use proof::{Check, LinkedDataProofOptions, Proof}; +use ssi_json_ld::rdf::IntoNQuads; use static_iref::iri; pub mod error; pub use error::Error; pub mod context; pub mod soltx; +use bbs::prelude::*; pub use context::Context; #[cfg(feature = "eip")] @@ -27,7 +29,7 @@ use ssi_dids::did_resolve::{resolve_key, DIDResolver}; use ssi_dids::VerificationRelationship as ProofPurpose; use ssi_json_ld::{rdf::DataSet, urdna2015, ContextLoader}; use ssi_jwk::{Algorithm, Base64urlUInt, JWK}; -use ssi_jws::Header; +use ssi_jws::{Header, JWSPayload}; pub mod suites; pub use suites::*; @@ -181,6 +183,7 @@ pub trait ProofSuite { document: &(dyn LinkedDataDocument + Sync), resolver: &dyn DIDResolver, context_loader: &mut ContextLoader, + nonce: Option<&String>, ) -> Result<VerificationWarnings, Error>; } @@ -336,7 +339,9 @@ impl LinkedDataProofs { extra_proof_properties: Option<Map<String, Value>>, ) -> Result<Proof, Error> { let mut options = options.clone(); - ensure_or_pick_verification_relationship(&mut options, document, key, resolver).await?; + + // todo re-enable this + //ensure_or_pick_verification_relationship(&mut options, document, key, resolver).await?; // Use type property if present let suite = if let Some(ref type_) = options.type_ { type_.clone() @@ -396,10 +401,11 @@ impl LinkedDataProofs { document: &(dyn LinkedDataDocument + Sync), resolver: &dyn DIDResolver, context_loader: &mut ContextLoader, + nonce: Option<&String>, ) -> Result<VerificationWarnings, Error> { let suite = &proof.type_; suite - .verify(proof, document, resolver, context_loader) + .verify(proof, document, resolver, context_loader, nonce) .await } } @@ -421,13 +427,156 @@ async fn to_jws_payload( let sigopts_digest = sha256(sigopts_normalized.as_bytes()); let
doc_digest = sha256(doc_normalized.as_bytes()); let data = [ - sigopts_digest.as_ref().to_vec(), doc_digest.as_ref().to_vec(), ] .concat(); Ok(data) } +// todo: refactor, may want to move this to ssi-jws +pub async fn generate_bbs_signature_pok( + document: &(dyn LinkedDataDocument + Sync), + nonce: &str, + proof: &Proof, + did_resolver: &dyn DIDResolver, + selectors: &[String], +) -> Result<Proof, Error> { + let signature_with_header = proof + .jws + .as_ref() + .ok_or(Error::MissingProofSignature)? + .as_str(); + let verification_method = proof + .verification_method + .as_ref() + .ok_or(Error::MissingVerificationMethod)? + .as_str(); + + let key = ssi_dids::did_resolve::resolve_key(verification_method, did_resolver).await?; + + use ssi_jwk::Params as JWKParams; + let pk = match &key.params { + JWKParams::OKP(okp) => { + let Base64urlUInt(pk_bytes) = &okp.public_key; + PublicKey::try_from(pk_bytes.as_slice()).unwrap() + } + _ => unimplemented!(), + }; + + let mut proof_without_jws = proof.clone(); + proof_without_jws.jws = None; + let mut context_loader = ssi_json_ld::ContextLoader::default(); + let payload = to_jws_payload_v2(document, &proof_without_jws, &mut context_loader).await?; + let (header, header_str) = ssi_jws::generate_header(Algorithm::BLS12381G2, &key).unwrap(); + + let start_index = signature_with_header.find("..").unwrap() + 2; // +2 for ..; todo: switch to ok_or + let signature_str = &signature_with_header[start_index..]; + + let signature_byte_vec = base64::decode_config(signature_str, base64::URL_SAFE_NO_PAD).unwrap(); + assert!( + signature_byte_vec.len() == 112, + "Unexpected length for signature byte vector: {}", + signature_byte_vec.len() + ); + let mut signature_bytes: [u8; 112] = [0; 112]; + for i in 0..112 { + signature_bytes[i] = signature_byte_vec[i]; + } + let signature = Signature::from(&signature_bytes); + + let mut proof_messages: Vec<ProofMessage> = Vec::new(); + proof_messages.push(bbs::pm_hidden!(header_str.as_bytes())); + proof_messages.push(bbs::pm_hidden!(payload.sigopts_digest.as_ref())); + + let mut revealed_message_indices = Vec::new(); + for i in 0..payload.messages.len() { + let message_index = i + 2; + + let message_bytes = payload.messages[i].as_bytes(); + + let mut disclose = false; + for j in 0..selectors.len() { + let s = selectors[j].as_str(); + let m = payload.messages[i].as_str(); + let needle = format!("/{}>", s); + if m.contains(needle.as_str()) { + disclose = true; + break; + } + } + + if disclose { + revealed_message_indices.push(message_index); + let pm = bbs::pm_revealed!(message_bytes); + proof_messages.push(pm); + } else { + let pm = bbs::pm_hidden!(message_bytes); + proof_messages.push(pm); + } + } + + let mut num_messages = payload.messages.len() + 2; + while num_messages < 100 { + proof_messages.push(bbs::pm_hidden!(b"")); + num_messages += 1; + } + + let proof_request = + Verifier::new_proof_request(revealed_message_indices.as_slice(), &pk).unwrap(); + let pok = Prover::commit_signature_pok(&proof_request, proof_messages.as_slice(), &signature) + .unwrap(); + + let mut challenge_bytes = Vec::new(); + challenge_bytes.extend_from_slice(pok.to_bytes().as_slice()); + let nonce_bytes = base64::decode(nonce).unwrap(); + challenge_bytes.extend_from_slice(nonce_bytes.as_slice()); + let challenge = ProofChallenge::hash(&challenge_bytes); + + let bbs_proof = Prover::generate_signature_pok(pok, &challenge).unwrap(); + let bbs_proof_bytes = bbs_proof.to_bytes_compressed_form(); + let bbs_proof_str = base64::encode_config(bbs_proof_bytes.as_slice(),
base64::URL_SAFE_NO_PAD); + let proof_str = header_str.clone() + ".." + bbs_proof_str.as_str(); + + let mut proof_with_new_sig = proof.clone(); + proof_with_new_sig.jws = Some(proof_str); // todo: change to proof/proofValue + proof_with_new_sig.disclosed_messages = Some(revealed_message_indices); + Ok(proof_with_new_sig) +} + +async fn to_jws_payload_v2( + document: &(dyn LinkedDataDocument + Sync), + proof: &Proof, + context_loader: &mut ContextLoader, +) -> Result<JWSPayload, Error> { + eprintln!("to_jws_payload_v2: enter..."); + let mut payload = JWSPayload { + header: String::new(), + messages: Vec::new(), + sigopts_digest: [0; 32], + }; + + eprintln!("to_jws_payload_v2: sigopts hash 1"); + let sigopts_dataset = proof + .to_dataset_for_signing(Some(document), context_loader) + .await?; + eprintln!("to_jws_payload_v2: sigopts hash 2"); + let sigopts_normalized = + urdna2015::normalize(sigopts_dataset.quads().map(QuadRef::from)).into_nquads(); + eprintln!("to_jws_payload_v2: sigopts hash 3"); + payload.sigopts_digest = sha256(sigopts_normalized.as_bytes()); + eprintln!("to_jws_payload_v2: begin doc to n-quads 1"); + let doc_dataset = document + .to_dataset_for_signing(None, context_loader) + .await?; + eprintln!("to_jws_payload_v2: begin doc to n-quads 2"); + let doc_normalized = urdna2015::normalize(doc_dataset.quads().map(QuadRef::from)).into_nquads_vec(); + eprintln!("to_jws_payload_v2: begin doc to n-quads 3"); + payload.messages = doc_normalized; + + Ok(payload) +} + +#[allow(clippy::too_many_arguments)] async fn sign( document: &(dyn LinkedDataDocument + Sync), options: &LinkedDataProofOptions, @@ -445,7 +594,7 @@ async fn sign( let proof = Proof::new(type_) .with_options(options) .with_properties(extra_proof_properties); - sign_proof(document, proof, key, algorithm, context_loader).await + sign_proof_v2(document, proof, key, algorithm, context_loader).await } async fn sign_proof( @@ -461,6 +610,19 @@ async fn sign_proof( Ok(proof) } +async fn sign_proof_v2( + document: &(dyn LinkedDataDocument + Sync), + mut proof: Proof, + key: &JWK, + algorithm: Algorithm, + context_loader: &mut ContextLoader, +) -> Result<Proof, Error> { + let mut jws_payload = to_jws_payload_v2(document, &proof, context_loader).await?; + let jws = ssi_jws::detached_sign_unencoded_payload_v2(algorithm, &mut jws_payload, key)?; + proof.jws = Some(jws); + Ok(proof) +} + #[allow(clippy::too_many_arguments)] async fn sign_nojws( document: &(dyn LinkedDataDocument + Sync), options: &LinkedDataProofOptions, @@ -596,6 +758,39 @@ async fn verify_nojws( )?)
} +async fn verify_bbs_proof( + proof: &Proof, + document: &(dyn LinkedDataDocument + Sync), + resolver: &dyn DIDResolver, + context_loader: &mut ContextLoader, + algorithm: Algorithm, + nonce: Option<&String>, +) -> Result<VerificationWarnings, Error> { + let proof_value = proof.jws.as_ref().ok_or(Error::MissingProofSignature)?; + let start_index = proof_value.find("..").unwrap() + 2; + let sig_str = &proof_value[start_index..]; + let sig = base64::decode_config(&sig_str, base64::URL_SAFE_NO_PAD).unwrap(); + + let verification_method = proof + .verification_method + .as_ref() + .ok_or(Error::MissingVerificationMethod)?; + let key = resolve_key(verification_method, resolver).await?; + let mut payload = to_jws_payload_v2(document, &proof, context_loader).await?; + let (_, header_b64) = ssi_jws::generate_header(algorithm, &key)?; + payload.header = header_b64; + + let mut disclosed_message_indices = Vec::new(); + match proof.disclosed_messages.as_ref() { + Some(message_indices) => { + disclosed_message_indices.extend_from_slice(message_indices.as_slice()); + }, + None => (), + }; + + Ok(ssi_jws::verify_payload(algorithm, &key, &payload, sig.as_slice(), disclosed_message_indices.as_slice(), nonce)?) +} + // Check if a linked data document has a given URI in its @context array. fn document_has_context( document: &(dyn LinkedDataDocument + Sync), diff --git a/ssi-ldp/src/proof.rs b/ssi-ldp/src/proof.rs index c2b1780be..55ee29f8c 100644 --- a/ssi-ldp/src/proof.rs +++ b/ssi-ldp/src/proof.rs @@ -59,6 +59,8 @@ pub struct Proof { #[serde(skip_serializing_if = "Option::is_none")] #[serde(flatten)] pub property_set: Option<Map<String, Value>>, + #[serde(default)] + pub disclosed_messages: Option<Vec<usize>>, } impl Proof { @@ -76,6 +78,7 @@ impl Proof { nonce: None, jws: None, property_set: None, + disclosed_messages: None, } } @@ -149,8 +152,9 @@ impl Proof { document: &(dyn LinkedDataDocument + Sync), resolver: &dyn DIDResolver, context_loader: &mut ContextLoader, + nonce: Option<&String>, ) -> VerificationResult { - LinkedDataProofs::verify(self, document, resolver, context_loader) + LinkedDataProofs::verify(self, document, resolver, context_loader, nonce) .await .into() } @@ -171,6 +175,7 @@ impl LinkedDataDocument for Proof { let mut copy = self.clone(); copy.jws = None; copy.proof_value = None; + let json = json_syntax::to_value_with(copy, Default::default).unwrap(); let dataset = json_to_dataset( json, @@ -181,10 +186,9 @@ impl LinkedDataDocument for Proof { .flatten() .as_deref() .map(parse_ld_context) - .transpose()?, + .transpose()? ) .await?; - verify_proof_consistency(self, &dataset)?; Ok(dataset) } @@ -219,6 +223,9 @@ pub struct LinkedDataProofOptions { /// The challenge of the proof. pub challenge: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] + /// The nonce of the proof. + pub nonce: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] /// The domain of the proof.
pub domain: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -243,6 +250,7 @@ impl Default for LinkedDataProofOptions { checks: Some(vec![Check::Proof]), eip712_domain: None, type_: None, + nonce: None, } } } diff --git a/ssi-ldp/src/suites/mod.rs b/ssi-ldp/src/suites/mod.rs index 9438ff675..66cd7f79d 100644 --- a/ssi-ldp/src/suites/mod.rs +++ b/ssi-ldp/src/suites/mod.rs @@ -33,7 +33,7 @@ use w3c::*; use crate::{ prepare, prepare_nojws, sign, sign_nojws, use_eip712sig, use_epsig, verify, verify_nojws, - Error, LinkedDataDocument, LinkedDataProofOptions, Proof, ProofPreparation, ProofSuite, + Error, LinkedDataDocument, LinkedDataProofOptions, Proof, ProofPreparation, ProofSuite, verify_bbs_proof, }; use async_trait::async_trait; @@ -82,6 +82,7 @@ pub enum ProofSuiteType { #[cfg(feature = "test")] #[serde(rename = "ex:AnonCredDerivedCredentialv1")] AnonCredDerivedCredentialv1, + BbsBlsSignatureProof2020, } // #[derive(Debug, Error)] @@ -140,6 +141,7 @@ impl ProofSuiteType { Self::NonJwsProof | Self::AnonCredPresentationProofv1 | Self::AnonCredDerivedCredentialv1 => todo!(), + Self::BbsBlsSignatureProof2020 => SignatureType::JWS, } } @@ -188,6 +190,7 @@ Self::EcdsaSecp256r1Signature2019 => &["https://w3id.org/security#EcdsaSecp256r1 #[cfg(feature = "test")] Self::NonJwsProof | Self::AnonCredPresentationProofv1 | Self::AnonCredDerivedCredentialv1 => todo!(), + Self::BbsBlsSignatureProof2020 => &["https://w3id.org/security#BbsBlsSignatureProof2020"], } } @@ -299,12 +302,14 @@ Self::EcdsaSecp256r1Signature2019 => &["https://w3id.org/security#EcdsaSecp256r1 ))) } } - } + }, + Algorithm::BLS12381G2 => Self::BbsBlsSignatureProof2020, _ => return Err(Error::ProofTypeNotSupported), }) } pub fn is_zkp(&self) -> bool { + // todo probably need to make a change here as well matches!(self, Self::CLSignature2019) } } @@ -510,6 +515,17 @@ impl ProofSuite for ProofSuiteType { Self::NonJwsProof | Self::AnonCredPresentationProofv1 | Self::AnonCredDerivedCredentialv1 => todo!(), + Self::BbsBlsSignatureProof2020 => { + sign( + document, + options, + context_loader, + key, + self.clone(), + Algorithm::BLS12381G2, + extra_proof_properties, + ).await + } } } @@ -702,6 +718,7 @@ impl ProofSuite for ProofSuiteType { .await } Self::CLSignature2019 => todo!(), + Self::BbsBlsSignatureProof2020 => todo!(), #[cfg(feature = "test")] Self::NonJwsProof | Self::AnonCredPresentationProofv1 @@ -715,6 +732,7 @@ impl ProofSuite for ProofSuiteType { document: &(dyn LinkedDataDocument + Sync), resolver: &dyn DIDResolver, context_loader: &mut ContextLoader, + nonce: Option<&String>, ) -> Result { match self { #[cfg(feature = "rsa")] @@ -795,6 +813,9 @@ impl ProofSuite for ProofSuiteType { verify(proof, document, resolver, context_loader).await } Self::CLSignature2019 => todo!(), + Self::BbsBlsSignatureProof2020 => { + verify_bbs_proof(proof, document, resolver, context_loader, Algorithm::BLS12381G2, nonce).await + }, #[cfg(feature = "test")] Self::NonJwsProof | Self::AnonCredPresentationProofv1 diff --git a/ssi-ldp/src/suites/w3c.rs b/ssi-ldp/src/suites/w3c.rs index f2c47d599..3c966823c 100644 --- a/ssi-ldp/src/suites/w3c.rs +++ b/ssi-ldp/src/suites/w3c.rs @@ -134,6 +134,10 @@ impl JsonWebSignature2020 { Algorithm::EdDSA => (), _ => return Err(Error::JWS(ssi_jws::Error::UnsupportedAlgorithm)), }, + "Bls12381G2" => match algorithm { + Algorithm::BLS12381G2 => (), + _ => return Err(Error::JWS(ssi_jws::Error::UnsupportedAlgorithm)), + }, _ => { return Err(Error::UnsupportedCurve); } diff --git a/ssi-vc/Cargo.toml 
b/ssi-vc/Cargo.toml index 51013711d..f4251032b 100644 --- a/ssi-vc/Cargo.toml +++ b/ssi-vc/Cargo.toml @@ -9,6 +9,7 @@ repository = "https://github.com/spruceid/ssi/" documentation = "https://docs.rs/ssi-vc/" [dependencies] +bbs = { version = "0.4.1" } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" async-trait = "0.1" diff --git a/ssi-vc/src/lib.rs b/ssi-vc/src/lib.rs index fee2bdb20..5da77b7da 100644 --- a/ssi-vc/src/lib.rs +++ b/ssi-vc/src/lib.rs @@ -671,6 +671,7 @@ impl Credential { checks, eip712_domain, type_, + nonce } = options; if checks.is_some() { return Err(Error::UnencodableOptionClaim("checks".to_string())); @@ -854,7 +855,7 @@ impl Credential { } // Try verifying each proof until one succeeds for proof in proofs { - let mut result = proof.verify(&vc, resolver, context_loader).await; + let mut result = proof.verify(&vc, resolver, context_loader, None).await; results.append(&mut result); if results.errors.is_empty() { results.checks.push(Check::Proof); @@ -977,6 +978,10 @@ impl Credential { resolver: &dyn DIDResolver, context_loader: &mut ContextLoader, ) -> VerificationResult { + let nonce = match options.as_ref() { + Some(ldp_options) => ldp_options.nonce.clone(), + None => None + }; let checks = options .as_ref() .and_then(|opts| opts.checks.clone()) @@ -991,10 +996,11 @@ impl Credential { return VerificationResult::error("No applicable proof"); // TODO: say why, e.g. expired } + let mut results = VerificationResult::new(); // Try verifying each proof until one succeeds for proof in proofs { - let mut result = proof.verify(self, resolver, context_loader).await; + let mut result = proof.verify(self, resolver, context_loader, nonce.as_ref()).await; results.append(&mut result); if result.errors.is_empty() { results.checks.push(Check::Proof); @@ -1113,6 +1119,7 @@ impl LinkedDataDocument for Credential { context_loader: &mut ContextLoader, ) -> Result { let mut copy = self.clone(); + copy.proof = None; let json = ssi_json_ld::syntax::to_value_with(copy, Default::default).unwrap(); Ok(json_to_dataset( @@ -1124,7 +1131,7 @@ impl LinkedDataDocument for Credential { .flatten() .as_deref() .map(parse_ld_context) - .transpose()?, + .transpose()? ) .await?) 
} @@ -1216,6 +1223,7 @@ impl Presentation { checks, eip712_domain, type_, + nonce, } = options; if checks.is_some() { return Err(Error::UnencodableOptionClaim("checks".to_string())); } @@ -1399,7 +1407,7 @@ impl Presentation { } // Try verifying each proof until one succeeds for proof in proofs { - let mut result = proof.verify(&vp, resolver, context_loader).await; + let mut result = proof.verify(&vp, resolver, context_loader, None).await; if result.errors.is_empty() { result.checks.push(Check::Proof); return (Some(vp), result); } @@ -1572,7 +1580,7 @@ impl Presentation { } // Try verifying each proof until one succeeds for proof in proofs { - let mut result = proof.verify(self, resolver, context_loader).await; + let mut result = proof.verify(self, resolver, context_loader, None).await; if result.errors.is_empty() { result.checks.push(Check::Proof); return result; } @@ -1822,6 +1830,83 @@ fn jwt_matches( true } +fn select_fields(subject: &CredentialSubject, selectors: &[String]) -> Map<String, Value> { + let mut selected = Map::new(); + + match &subject.property_set { + Some(properties) => { + for (k, v) in properties { + for i in 0..selectors.len() { + if k.as_str() == selectors[i].as_str() { + selected.insert(k.clone(), v.clone()); + } + } + } + } + None => (), + } + + selected +} + +pub async fn derive_credential( + document: &Credential, + proof_nonce: &str, + selectors: &[String], + did_resolver: &dyn DIDResolver, +) -> Result<Credential, Error> { + use bbs::prelude::*; + use ssi_jwk::{Base64urlUInt, OctetParams, Params as JWKParams, JWK}; + use ssi_ldp::error::Error; + + let mut derived_credential = document.clone(); + + let proofs = derived_credential.proof.unwrap(); + + let proof = match proofs { + OneOrMany::One(proof) => proof, + OneOrMany::Many(_) => unimplemented!(), // todo: handle multiple proof case + }; + + // the proof is extracted above, before being zeroed out, because it is needed to generate the derived proof + derived_credential.proof = None; + + match &derived_credential.credential_subject { + OneOrMany::One(subject) => { + let selected_fields = select_fields(subject, selectors); + + let mut new_subject = subject.clone(); + new_subject.property_set = Some(selected_fields); + derived_credential.credential_subject = OneOrMany::One(new_subject); + } + OneOrMany::Many(subjects) => { + let mut new_subjects: Vec<CredentialSubject> = Vec::new(); + + for i in 0..subjects.len() { + let selected_fields = select_fields(&subjects[i], selectors); + + let mut new_subject = subjects[i].clone(); + new_subject.property_set = Some(selected_fields); + new_subjects.push(new_subject); + } + + derived_credential.credential_subject = OneOrMany::Many(new_subjects); + } + } + + // can generate a signature POK here + // todo do not hardcode type, may need to be something different because not the same proof type as original credential + // todo may need to add proof options and so on + // todo revealed message indices + // make sure to pass in the original document, which has all the messages + let proof = + ssi_ldp::generate_bbs_signature_pok(document, proof_nonce, &proof, did_resolver, selectors) + .await?; + derived_credential.add_proof(proof); + + Ok(derived_credential) +} + #[cfg(test)] pub(crate) mod tests { use super::*; diff --git a/ssi-zcap-ld/src/lib.rs b/ssi-zcap-ld/src/lib.rs index 613799e7a..5d6bbe8ac 100644 --- a/ssi-zcap-ld/src/lib.rs +++ b/ssi-zcap-ld/src/lib.rs @@ -90,7 +90,7 @@ where match &self.proof { None => VerificationResult::error("No applicable proof"), Some(proof) => { - let mut result =
proof.verify(self, resolver, context_loader, None).await; if proof.proof_purpose != Some(ProofPurpose::CapabilityDelegation) { result.errors.push("Incorrect Proof Purpose".into()); }; @@ -230,7 +230,7 @@ where .flatten() .as_deref() .map(parse_ld_context) - .transpose()?, + .transpose()? ) .await?) } @@ -309,7 +309,7 @@ where match &self.proof { None => VerificationResult::error("No applicable proof"), Some(proof) => { - let mut result = proof.verify(self, resolver, context_loader).await; + let mut result = proof.verify(self, resolver, context_loader, None).await; if proof.proof_purpose != Some(ProofPurpose::CapabilityInvocation) { result.errors.push("Incorrect Proof Purpose".into()); };
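Taken together, the pieces above give the end-to-end flow described in the commit message: derive a credential with selectively disclosed claims from a BBS+-signed credential, then verify it against a nonce. The sketch below strings the new public APIs together; the wrapper function, variable names, and the "givenName" selector are illustrative assumptions, while derive_credential, generate_proof_nonce, the nonce field on LinkedDataProofOptions, and Credential::verify come from this patch:

    use ssi_dids::did_resolve::DIDResolver;
    use ssi_json_ld::ContextLoader;
    use ssi_ldp::LinkedDataProofOptions;
    use ssi_vc::Credential;

    // `signed_vc` is assumed to already carry a BbsBlsSignatureProof2020 proof made
    // with a Bls12381G2 key (e.g. one from JWK::generate_bls12381_2020()).
    async fn derive_and_verify(
        signed_vc: &Credential,
        resolver: &dyn DIDResolver,
    ) -> Result<(), Box<dyn std::error::Error>> {
        // Holder: choose the claims to reveal and a fresh BBS+ proof nonce.
        let selectors = vec!["givenName".to_string()]; // illustrative claim name
        let nonce = ssi_jws::generate_proof_nonce();

        // Build the derived credential; undisclosed claims stay hidden behind the
        // BBS+ signature proof of knowledge carried in its proof.
        let derived = ssi_vc::derive_credential(signed_vc, &nonce, &selectors, resolver).await?;

        // Verifier: pass the same nonce through the new LinkedDataProofOptions::nonce.
        let mut context_loader = ContextLoader::default();
        let result = derived
            .verify(
                Some(LinkedDataProofOptions {
                    nonce: Some(nonce.clone()),
                    ..Default::default()
                }),
                resolver,
                &mut context_loader,
            )
            .await;
        assert!(result.errors.is_empty(), "derived credential failed to verify");
        Ok(())
    }

Because generate_bbs_signature_pok matches a selector against the tail of each claim's predicate IRI (format!("/{}>", s)), the selector should name the property as it appears in the expanded JSON-LD vocabulary.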