From eeca440fa7faf5c6b3c72c225bb18f256e34e0bd Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sun, 1 Jan 2023 09:16:09 -0500 Subject: [PATCH] Offer a multi-DLEq proof which simply merges challenges for n underlying proofs This converts proofs from 2n elements to 1+n. Moves FROST over to it. Additionally, for FROST's binomial nonces, provides a single DLEq proof (2, not 1+2 elements) by proving the discrete log equality of their aggregate (with an appropriate binding factor). This may be split back up depending on later commentary... --- crypto/dkg/src/frost.rs | 2 +- crypto/dleq/src/lib.rs | 112 +++++++++++++++++++ crypto/dleq/src/tests/mod.rs | 70 ++++++++++-- crypto/frost/src/nonce.rs | 175 +++++++++++++++++------------- crypto/frost/src/tests/vectors.rs | 2 +- docs/cryptography/FROST.md | 16 ++- 6 files changed, 291 insertions(+), 86 deletions(-) diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index 2639d2c9..0bc7ac7a 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -36,7 +36,7 @@ type FrostError = DkgError>; #[allow(non_snake_case)] fn challenge(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); - transcript.domain_separate(b"Schnorr Proof of Knowledge"); + transcript.domain_separate(b"schnorr_proof_of_knowledge"); transcript.append_message(b"context", context.as_bytes()); transcript.append_message(b"participant", l.to_le_bytes()); transcript.append_message(b"nonce", R); diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index 360791c4..3fc270bd 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -177,3 +177,115 @@ impl DLEqProof { res } } + +#[cfg(feature = "std")] +#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] +pub struct MultiDLEqProof { + c: G::Scalar, + s: Vec, +} + +#[cfg(feature = "std")] +#[allow(non_snake_case)] +impl MultiDLEqProof { + pub fn prove( + rng: &mut R, + transcript: &mut T, + generators: &[Vec], + scalars: &[Zeroizing], + ) -> MultiDLEqProof + where + G::Scalar: Zeroize, + { + transcript.domain_separate(b"multi-dleq"); + + let mut nonces = vec![]; + for (i, (scalar, generators)) in scalars.iter().zip(generators).enumerate() { + // Delineate between discrete logarithms + transcript.append_message(b"discrete_logarithm", i.to_le_bytes()); + + let nonce = Zeroizing::new(G::Scalar::random(&mut *rng)); + for generator in generators { + DLEqProof::transcript( + transcript, + *generator, + *generator * nonce.deref(), + *generator * scalar.deref(), + ); + } + nonces.push(nonce); + } + + let c = challenge(transcript); + + let mut s = vec![]; + for (scalar, nonce) in scalars.iter().zip(nonces) { + s.push((c * scalar.deref()) + nonce.deref()); + } + + MultiDLEqProof { c, s } + } + + pub fn verify( + &self, + transcript: &mut T, + generators: &[Vec], + points: &[Vec], + ) -> Result<(), DLEqError> { + if points.len() != generators.len() { + Err(DLEqError::InvalidProof)?; + } + if self.s.len() != generators.len() { + Err(DLEqError::InvalidProof)?; + } + + transcript.domain_separate(b"multi-dleq"); + for (i, (generators, points)) in generators.iter().zip(points).enumerate() { + if points.len() != generators.len() { + Err(DLEqError::InvalidProof)?; + } + + transcript.append_message(b"discrete_logarithm", i.to_le_bytes()); + for (generator, point) in generators.iter().zip(points) { + DLEqProof::transcript( + transcript, + *generator, + (*generator * self.s[i]) - (*point * self.c), + *point, + ); + } + } + + if self.c != challenge(transcript) { + 
Err(DLEqError::InvalidProof)?; + } + + Ok(()) + } + + #[cfg(feature = "serialize")] + pub fn write(&self, w: &mut W) -> io::Result<()> { + w.write_all(self.c.to_repr().as_ref())?; + for s in &self.s { + w.write_all(s.to_repr().as_ref())?; + } + Ok(()) + } + + #[cfg(feature = "serialize")] + pub fn read(r: &mut R, discrete_logs: usize) -> io::Result> { + let c = read_scalar(r)?; + let mut s = vec![]; + for _ in 0 .. discrete_logs { + s.push(read_scalar(r)?); + } + Ok(MultiDLEqProof { c, s }) + } + + #[cfg(feature = "serialize")] + pub fn serialize(&self) -> Vec { + let mut res = vec![]; + self.write(&mut res).unwrap(); + res + } +} diff --git a/crypto/dleq/src/tests/mod.rs b/crypto/dleq/src/tests/mod.rs index 336c0a1b..104c2238 100644 --- a/crypto/dleq/src/tests/mod.rs +++ b/crypto/dleq/src/tests/mod.rs @@ -13,16 +13,13 @@ use k256::{Scalar, ProjectivePoint}; use transcript::{Transcript, RecommendedTranscript}; -use crate::DLEqProof; +use crate::{DLEqProof, MultiDLEqProof}; #[cfg(feature = "experimental")] mod cross_group; -#[test] -fn test_dleq() { - let transcript = || RecommendedTranscript::new(b"DLEq Proof Test"); - - let generators = [ +fn generators() -> [k256::ProjectivePoint; 5] { + [ ProjectivePoint::GENERATOR, ProjectivePoint::from_bytes( &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()), @@ -41,7 +38,13 @@ fn test_dleq() { &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into()), ) .unwrap(), - ]; + ] +} + +#[test] +fn test_dleq() { + let generators = generators(); + let transcript = || RecommendedTranscript::new(b"DLEq Proof Test"); for i in 0 .. 5 { let key = Zeroizing::new(Scalar::random(&mut OsRng)); @@ -61,6 +64,9 @@ fn test_dleq() { ) .is_err()); + // All of these following tests should effectively be a different challenge and accordingly + // pointless. They're still nice to have though + // We could edit these tests to always test with at least two generators // Then we don't test proofs with zero/one generator(s) // While those are stupid, and pointless, and potentially point to a failure in the caller, @@ -94,3 +100,53 @@ fn test_dleq() { } } } + +#[test] +fn test_multi_dleq() { + let generators = generators(); + let transcript = || RecommendedTranscript::new(b"MultiDLEq Proof Test"); + + // Test up to 3 keys + for k in 0 ..= 3 { + let mut keys = vec![]; + let mut these_generators = vec![]; + let mut pub_keys = vec![]; + for i in 0 .. k { + let key = Zeroizing::new(Scalar::random(&mut OsRng)); + // For each key, test a variable set of generators + // 0: 0 + // 1: 1, 2 + // 2: 2, 3, 4 + let key_generators = generators[i .. (i + i + 1)].to_vec(); + let mut these_pub_keys = vec![]; + for generator in &key_generators { + these_pub_keys.push(generator * key.deref()); + } + keys.push(key); + these_generators.push(key_generators); + pub_keys.push(these_pub_keys); + } + + let proof = MultiDLEqProof::prove(&mut OsRng, &mut transcript(), &these_generators, &keys); + + proof.verify(&mut transcript(), &these_generators, &pub_keys).unwrap(); + // Different challenge + assert!(proof + .verify(&mut RecommendedTranscript::new(b"different challenge"), &these_generators, &pub_keys) + .is_err()); + + // Test verifying for a different amount of keys fail + if k > 0 { + assert!(proof.verify(&mut transcript(), &these_generators, &pub_keys[.. 
k - 1]).is_err()); + } + + #[cfg(feature = "serialize")] + { + let mut buf = vec![]; + proof.write(&mut buf).unwrap(); + let deserialized = + MultiDLEqProof::::read::<&[u8]>(&mut buf.as_ref(), k).unwrap(); + assert_eq!(proof, deserialized); + } + } +} diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs index 9d8ac586..b937aff9 100644 --- a/crypto/frost/src/nonce.rs +++ b/crypto/frost/src/nonce.rs @@ -23,10 +23,17 @@ use transcript::Transcript; use group::{ff::PrimeField, Group, GroupEncoding}; use multiexp::multiexp_vartime; -use dleq::DLEqProof; +use dleq::MultiDLEqProof; use crate::curve::Curve; +// Transcript used to aggregate binomial nonces for usage within a single DLEq proof. +fn aggregation_transcript(context: &[u8]) -> T { + let mut transcript = T::new(b"FROST DLEq Aggregation v0.5"); + transcript.append_message(b"context", context); + transcript +} + // Every participant proves for their commitments at the start of the protocol // These proofs are verified sequentially, requiring independent transcripts // In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is @@ -37,7 +44,7 @@ use crate::curve::Curve; // constructed). For higher level protocols, the transcript may have contextual info these proofs // will then be bound to fn dleq_transcript(context: &[u8]) -> T { - let mut transcript = T::new(b"FROST_commitments"); + let mut transcript = T::new(b"FROST Commitments DLEq v0.5"); transcript.append_message(b"context", context); transcript } @@ -47,7 +54,7 @@ fn dleq_transcript(context: &[u8]) -> T { #[derive(Clone, Zeroize)] pub(crate) struct Nonce(pub(crate) [Zeroizing; 2]); -// Commitments to a specific generator for this nonce +// Commitments to a specific generator for this binomial nonce #[derive(Copy, Clone, PartialEq, Eq)] pub(crate) struct GeneratorCommitments(pub(crate) [C::G; 2]); impl GeneratorCommitments { @@ -64,13 +71,8 @@ impl GeneratorCommitments { // A single nonce's commitments and relevant proofs #[derive(Clone, PartialEq, Eq)] pub(crate) struct NonceCommitments { - // Called generators as these commitments are indexed by generator + // Called generators as these commitments are indexed by generator later on pub(crate) generators: Vec>, - // DLEq Proofs proving that these commitments are generated using the same scalar pair - // This could be further optimized with a multi-nonce proof, offering just one proof for all - // nonces. 
See https://github.com/serai-dex/serai/issues/38 - // TODO - pub(crate) dleqs: Option<[DLEqProof; 2]>, } impl NonceCommitments { @@ -78,7 +80,6 @@ impl NonceCommitments { rng: &mut R, secret_share: &Zeroizing, generators: &[C::G], - context: &[u8], ) -> (Nonce, NonceCommitments) { let nonce = Nonce::([ C::random_nonce(secret_share, &mut *rng), @@ -93,64 +94,49 @@ impl NonceCommitments { ])); } - let mut dleqs = None; - if generators.len() >= 2 { - let mut dleq = |nonce| { - // Uses an independent transcript as each signer must prove this with their commitments, - // yet they're validated while processing everyone's data sequentially, by the global order - // This avoids needing to clone and fork the transcript around - DLEqProof::prove(&mut *rng, &mut dleq_transcript::(context), generators, nonce) - }; - dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]); - } - - (nonce, NonceCommitments { generators: commitments, dleqs }) + (nonce, NonceCommitments { generators: commitments }) } fn read( reader: &mut R, generators: &[C::G], - context: &[u8], ) -> io::Result> { - let commitments: Vec> = (0 .. generators.len()) - .map(|_| GeneratorCommitments::read(reader)) - .collect::>()?; - - let mut dleqs = None; - if generators.len() >= 2 { - let mut verify = |i| -> io::Result<_> { - let dleq = DLEqProof::read(reader)?; - dleq - .verify( - &mut dleq_transcript::(context), - generators, - &commitments.iter().map(|commitments| commitments.0[i]).collect::>(), - ) - .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?; - Ok(dleq) - }; - dleqs = Some([verify(0)?, verify(1)?]); - } - - Ok(NonceCommitments { generators: commitments, dleqs }) + Ok(NonceCommitments { + generators: (0 .. generators.len()) + .map(|_| GeneratorCommitments::read(reader)) + .collect::>()?, + }) } fn write(&self, writer: &mut W) -> io::Result<()> { for generator in &self.generators { generator.write(writer)?; } - if let Some(dleqs) = &self.dleqs { - dleqs[0].write(writer)?; - dleqs[1].write(writer)?; - } Ok(()) } + + fn transcript(&self, t: &mut T) { + t.domain_separate(b"nonce"); + for commitments in &self.generators { + t.append_message(b"commitment_D", commitments.0[0].to_bytes()); + t.append_message(b"commitment_E", commitments.0[1].to_bytes()); + } + } + + fn aggregation_factor(&self, context: &[u8]) -> C::F { + let mut transcript = aggregation_transcript::(context); + self.transcript(&mut transcript); + ::hash_to_F(b"dleq_aggregation", transcript.challenge(b"binding").as_ref()) + } } #[derive(Clone, PartialEq, Eq)] pub(crate) struct Commitments { // Called nonces as these commitments are indexed by nonce pub(crate) nonces: Vec>, + // DLEq Proof proving that each set of commitments were generated using a single pair of discrete + // logarithms + pub(crate) dleq: Option>, } impl Commitments { @@ -162,53 +148,96 @@ impl Commitments { ) -> (Vec>, Commitments) { let mut nonces = vec![]; let mut commitments = vec![]; + + let mut dleq_generators = vec![]; + let mut dleq_nonces = vec![]; for generators in planned_nonces { - let (nonce, these_commitments) = - NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators, context); + let (nonce, these_commitments): (Nonce, _) = + NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators); + + if generators.len() > 1 { + dleq_generators.push(generators.clone()); + dleq_nonces.push(Zeroizing::new( + (these_commitments.aggregation_factor::(context) * nonce.0[1].deref()) + + nonce.0[0].deref(), + )); + } + nonces.push(nonce); 
commitments.push(these_commitments); } - (nonces, Commitments { nonces: commitments }) + + let dleq = if !dleq_generators.is_empty() { + Some(MultiDLEqProof::prove( + rng, + &mut dleq_transcript::(context), + &dleq_generators, + &dleq_nonces, + )) + } else { + None + }; + + (nonces, Commitments { nonces: commitments, dleq }) } pub(crate) fn transcript(&self, t: &mut T) { + t.domain_separate(b"commitments"); for nonce in &self.nonces { - for commitments in &nonce.generators { - t.append_message(b"commitment_D", commitments.0[0].to_bytes()); - t.append_message(b"commitment_E", commitments.0[1].to_bytes()); - } + nonce.transcript(t); + } - // Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce - // This means it shouldn't be possible for variadic generators to cause conflicts as they're - // committed to as their entire series per-nonce, not as isolates - if let Some(dleqs) = &nonce.dleqs { - let mut transcript_dleq = |label, dleq: &DLEqProof| { - let mut buf = vec![]; - dleq.write(&mut buf).unwrap(); - t.append_message(label, &buf); - }; - transcript_dleq(b"dleq_D", &dleqs[0]); - transcript_dleq(b"dleq_E", &dleqs[1]); - } + // Transcripting the DLEqs implicitly transcripts the exact generators used for the nonces in + // an exact order + // This means it shouldn't be possible for variadic generators to cause conflicts + if let Some(dleq) = &self.dleq { + t.append_message(b"dleq", dleq.serialize()); } } pub(crate) fn read( reader: &mut R, - nonces: &[Vec], + generators: &[Vec], context: &[u8], ) -> io::Result { - Ok(Commitments { - nonces: (0 .. nonces.len()) - .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i], context)) - .collect::>()?, - }) + let nonces = (0 .. generators.len()) + .map(|i| NonceCommitments::read::<_, T>(reader, &generators[i])) + .collect::>, _>>()?; + + let mut dleq_generators = vec![]; + let mut dleq_nonces = vec![]; + for (generators, nonce) in generators.iter().cloned().zip(&nonces) { + if generators.len() > 1 { + let binding = nonce.aggregation_factor::(context); + let mut aggregated = vec![]; + for commitments in &nonce.generators { + aggregated.push(commitments.0[0] + (commitments.0[1] * binding)); + } + dleq_generators.push(generators); + dleq_nonces.push(aggregated); + } + } + + let dleq = if !dleq_generators.is_empty() { + let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?; + dleq + .verify(&mut dleq_transcript::(context), &dleq_generators, &dleq_nonces) + .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?; + Some(dleq) + } else { + None + }; + + Ok(Commitments { nonces, dleq }) } pub(crate) fn write(&self, writer: &mut W) -> io::Result<()> { for nonce in &self.nonces { nonce.write(writer)?; } + if let Some(dleq) = &self.dleq { + dleq.write(writer)?; + } Ok(()) } } diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 01c787c5..c4a26d6f 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -182,8 +182,8 @@ pub fn test_with_vectors>( commitments: Commitments { nonces: vec![NonceCommitments { generators: vec![GeneratorCommitments(these_commitments)], - dleqs: None, }], + dleq: None, }, addendum: (), }, diff --git a/docs/cryptography/FROST.md b/docs/cryptography/FROST.md index f3f89cee..9f6b3378 100644 --- a/docs/cryptography/FROST.md +++ b/docs/cryptography/FROST.md @@ -18,9 +18,12 @@ multiple generators, FROST supports providing a nonce's commitments across multiple generators. 
In order to ensure their correctness, an extended [CP93's Discrete Log Equality Proof](https://chaum.com/wp-content/uploads/2021/12/Wallet_Databases.pdf) is used. The extension is simply to transcript `n` generators, instead of just -two, enabling proving for all of them at once. Since FROST nonces are binomial, -two DLEq proofs are provided, one for each nonce component. In the future, a -modified proof proving for both components simultaneously may be used. +two, enabling proving for all of them at once. + +Since FROST nonces are binomial, every nonce would require two DLEq proofs. To +make this more efficient, we hash their commitments to obtain a binding factor, +before doing a single DLEq proof for `d + be`, similar to how FROST calculates +its nonces (as well as MuSig's key aggregation). As some algorithms require multiple nonces, effectively including multiple Schnorr signatures within one signature, the library also supports providing @@ -29,12 +32,17 @@ multiplied by a per-participant binding factor to ensure the security of FROST. When additional nonces are used, this is actually a per-nonce per-participant binding factor. +When multiple nonces are used, with multiple generators, we use a single DLEq +proof for all nonces, merging their challenges. This provides a proof of `1 + n` +elements instead of `2n`. + Finally, to support additive offset signing schemes (accounts, stealth addresses, randomization), it's possible to specify a scalar offset for keys. The public key signed for is also offset by this value. During the signing process, the offset is explicitly transcripted. Then, the offset is divided by `p`, the amount of participating signers, and each signer adds it to their -post-interpolation key share. +post-interpolation key share. This maintains a leaderless protocol while still +being correct. # Caching
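
A minimal usage sketch of the `MultiDLEqProof` API introduced above, mirroring the calls exercised in `crypto/dleq/src/tests/mod.rs` (prove over per-discrete-log generator sets, verify against the corresponding public points). The transcript label and the derived second generator are illustrative assumptions, not values from the patch.

```rust
use core::ops::Deref;

use zeroize::Zeroizing;
use rand_core::OsRng;

use group::ff::Field;
use k256::{Scalar, ProjectivePoint};

use transcript::{Transcript, RecommendedTranscript};
use dleq::MultiDLEqProof;

fn main() {
  // Two discrete logarithms, each proven over its own set of generators.
  // Real deployments need independently chosen generators; deriving the second from the
  // first here is purely for brevity.
  let transcript = || RecommendedTranscript::new(b"MultiDLEq Usage Sketch");
  let generators = [
    vec![ProjectivePoint::GENERATOR],
    vec![ProjectivePoint::GENERATOR, ProjectivePoint::GENERATOR * Scalar::from(2u64)],
  ];
  let scalars = [
    Zeroizing::new(Scalar::random(&mut OsRng)),
    Zeroizing::new(Scalar::random(&mut OsRng)),
  ];

  // Public points: each generator multiplied by its discrete logarithm.
  let points: Vec<Vec<ProjectivePoint>> = generators
    .iter()
    .zip(&scalars)
    .map(|(gens, scalar)| gens.iter().map(|generator| generator * scalar.deref()).collect())
    .collect();

  // One challenge is shared across both statements, so the proof serializes to 1 + 2
  // scalars instead of the 2 * 2 required by two independent DLEq proofs.
  let proof = MultiDLEqProof::prove(&mut OsRng, &mut transcript(), &generators, &scalars);
  proof.verify(&mut transcript(), &generators, &points).unwrap();
}
```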
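
The binomial-nonce aggregation this patch documents in FROST.md, written out as equations: `d`/`e` are a signer's nonce pair, `D_j`/`E_j` its commitments over generator `G_j`, and `b` the binding factor derived via `aggregation_transcript`. Symbol names here are illustrative.

```latex
\begin{aligned}
b   &= \mathrm{H}(\mathrm{context},\ D_1, E_1, \dots, D_n, E_n) \\
x   &= d + b e \\
X_j &= D_j + b E_j \qquad (1 \le j \le n) \\
\log_{G_1} X_1 &= \dots = \log_{G_n} X_n = x
\end{aligned}
```

The last line is what the single DLEq proof (one challenge plus one response) attests to, matching the "2, not 1+2 elements" noted in the commit message; the verifier-side aggregate `X_j = D_j + b E_j` is exactly what `Commitments::read` computes before calling `MultiDLEqProof::verify`.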