From 18ac80671ff27932c8b7b0ca1a4fea14532ea7a8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 00:37:19 -0500 Subject: [PATCH 01/71] 3.1.1 Document secp256k1/P-256 hash_to_F --- crypto/ciphersuite/src/kp256.rs | 42 +++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index 1de4e652..3b898564 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -34,6 +34,7 @@ macro_rules! kp_curve { } fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { + // Handle dsts which are greater than 255 bytes let mut dst = dst; let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()); if dst.len() > 255 { @@ -42,11 +43,41 @@ macro_rules! kp_curve { // While one of these two libraries does support directly hashing to the Scalar field, the // other doesn't. While that's probably an oversight, this is a universally working method - let mut modulus = [0; 48]; + + // This method is from + // https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html + // Specifically, Section 5 + + // While that draft, overall, is intended for hashing to curves, that necessitates + // detailing how to hash to a finite field. 
The draft comments that its mechanism for + // doing so, which it uses to derive field elements, is also applicable to the scalar field + + // Unfortunately, there's currently no test vectors for this, despite them being requested + // in https://github.com/cfrg/draft-irtf-cfrg-hash-to-curve/issues/343 + + // The hash_to_field function is intended to provide unbiased values + // In order to do so, a wide reduction from an extra k bits is applied, minimizing bias to + // 2^-k + // k is intended to be the bits of security of the suite, which is 128 for secp256k1 and + // P-256 + const K: usize = 128; + + // L is the amount of bytes of material which should be used in the wide reduction + // The 256 is for the bit-length of the primes, rounded up to the nearest byte threshold + // This is a simplification of the formula from the end of section 5 + const L: usize = (256 + K) / 8; // 48 + + // In order to perform this reduction, we need to use 48-byte numbers + // First, convert the modulus to a 48-byte number + // This is done by getting -1 as bytes, parsing it into a U384, and then adding back one + let mut modulus = [0; L]; + // The byte repr of scalars will be 32 big-endian bytes + // Set the lower 32 bytes of our 48-byte array accordingly modulus[16 ..].copy_from_slice(&(Self::F::zero() - Self::F::one()).to_bytes()); let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE); - let mut unreduced = U384::from_be_bytes({ + // The defined P-256 and secp256k1 ciphersuites both use expand_message_xmd + let mut wide = U384::from_be_bytes({ let mut bytes = [0; 48]; ExpandMsgXmd::::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes); bytes @@ -55,9 +86,12 @@ macro_rules! 
kp_curve { .unwrap() .to_be_bytes(); - let mut array = *GenericArray::from_slice(&unreduced[16 ..]); + // Now that this has been reduced back to a 32-byte value, grab the lower 32-bytes + let mut array = *GenericArray::from_slice(&wide[16 ..]); let res = $lib::Scalar::from_repr(array).unwrap(); - unreduced.zeroize(); + + // Zeroize the temp values we can due to the possibility hash_to_F is being used for nonces + wide.zeroize(); array.zeroize(); res } From ac0f5e9b2d7a9ab6a61271742696f748ba4c2806 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 00:52:13 -0500 Subject: [PATCH 02/71] 3.1.2 Remove oversize DST handling for code present in elliptic-curve already Adds a test to ensure that elliptic-curve does in fact handle this properly. --- crypto/ciphersuite/src/kp256.rs | 33 +++++++++++++++++++++------------ 1 file changed, 21 insertions(+), 12 deletions(-) diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index 3b898564..983751fb 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -1,6 +1,6 @@ use zeroize::Zeroize; -use sha2::{Digest, Sha256}; +use sha2::Sha256; use group::ff::{Field, PrimeField}; @@ -34,13 +34,6 @@ macro_rules! kp_curve { } fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F { - // Handle dsts which are greater than 255 bytes - let mut dst = dst; - let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()); - if dst.len() > 255 { - dst = oversize.as_ref(); - } - // While one of these two libraries does support directly hashing to the Scalar field, the // other doesn't. While that's probably an oversight, this is a universally working method @@ -52,9 +45,6 @@ macro_rules! kp_curve { // detailing how to hash to a finite field. 
The draft comments that its mechanism for // doing so, which it uses to derive field elements, is also applicable to the scalar field - // Unfortunately, there's currently no test vectors for this, despite them being requested - // in https://github.com/cfrg/draft-irtf-cfrg-hash-to-curve/issues/343 - // The hash_to_field function is intended to provide unbiased values // In order to do so, a wide reduction from an extra k bits is applied, minimizing bias to // 2^-k @@ -99,6 +89,18 @@ macro_rules! kp_curve { }; } +#[cfg(test)] +fn test_oversize_dst() { + use sha2::Digest; + + // The draft specifies DSTs >255 bytes should be hashed into a 32-byte DST + let oversize_dst = [0x00; 256]; + let actual_dst = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), &oversize_dst].concat()); + // Test the hash_to_F function handles this + // If it didn't, these would return different values + assert_eq!(C::hash_to_F(&oversize_dst, &[]), C::hash_to_F(&actual_dst, &[])); +} + #[cfg(feature = "secp256k1")] kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); #[cfg(feature = "secp256k1")] @@ -106,8 +108,11 @@ kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); fn test_secp256k1() { ff_group_tests::group::test_prime_group_bits::(); - // Ideally, a test vector from hash to field (not FROST) would be here + // Ideally, a test vector from hash_to_field (not FROST) would be here // Unfortunately, the IETF draft only provides vectors for field elements, not scalars + // Vectors have been requested in + // https://github.com/cfrg/draft-irtf-cfrg-hash-to-curve/issues/343 + assert_eq!( Secp256k1::hash_to_F( b"FROST-secp256k1-SHA256-v11nonce", @@ -124,6 +129,8 @@ fn test_secp256k1() { .collect::>(), hex::decode("acc83278035223c1ba464e2d11bfacfc872b2b23e1041cf5f6130da21e4d8068").unwrap() ); + + test_oversize_dst::(); } #[cfg(feature = "p256")] @@ -149,4 +156,6 @@ f4e8cf80aec3f888d997900ac7e3e349944b5a6b47649fc32186d2f1238103c6\ .collect::>(), 
hex::decode("f871dfcf6bcd199342651adc361b92c941cb6a0d8c8c1a3b91d79e2c1bf3722d").unwrap() ); + + test_oversize_dst::(); } From cb4ce5e354c1e36ef0ff2425e82b7fdbeeb971e9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 00:57:41 -0500 Subject: [PATCH 03/71] 3.1.3 Use a checked_add for the modulus in secp256k1/P-256 --- crypto/ciphersuite/src/kp256.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index 983751fb..aa84f8e5 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -6,7 +6,7 @@ use group::ff::{Field, PrimeField}; use elliptic_curve::{ generic_array::GenericArray, - bigint::{Encoding, U384}, + bigint::{CheckedAdd, Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}, }; @@ -64,7 +64,11 @@ macro_rules! kp_curve { // The byte repr of scalars will be 32 big-endian bytes // Set the lower 32 bytes of our 48-byte array accordingly modulus[16 ..].copy_from_slice(&(Self::F::zero() - Self::F::one()).to_bytes()); - let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE); + // Use a checked_add + unwrap since this addition cannot fail (being a 32-byte value with + // 48-bytes of space) + // While a non-panicking saturating_add/wrapping_add could be used, they'd likely be less + // performant + let modulus = U384::from_be_slice(&modulus).checked_add(&U384::ONE).unwrap(); // The defined P-256 and secp256k1 ciphersuites both use expand_message_xmd let mut wide = U384::from_be_bytes({ From 686a5ee36425e119b1e762c88e70811c4755eaec Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 01:03:53 -0500 Subject: [PATCH 04/71] 3.1.4 Further document hash_to_F which may collide --- crypto/ciphersuite/src/dalek.rs | 18 ++++++++++++++++-- crypto/ciphersuite/src/ed448.rs | 7 ++++++- crypto/ciphersuite/src/kp256.rs | 14 ++++++++++++-- 3 files changed, 34 insertions(+), 5 deletions(-) diff --git 
a/crypto/ciphersuite/src/dalek.rs b/crypto/ciphersuite/src/dalek.rs index ee49459f..3c40506d 100644 --- a/crypto/ciphersuite/src/dalek.rs +++ b/crypto/ciphersuite/src/dalek.rs @@ -17,8 +17,6 @@ macro_rules! dalek_curve { ) => { use dalek_ff_group::$Point; - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - pub struct $Ciphersuite; impl Ciphersuite for $Ciphersuite { type F = Scalar; type G = $Point; @@ -37,6 +35,14 @@ macro_rules! dalek_curve { }; } +/// Ciphersuite for Ristretto. +/// +/// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition +/// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as +/// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other. +#[cfg(any(test, feature = "ristretto"))] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct Ristretto; #[cfg(any(test, feature = "ristretto"))] dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto"); #[cfg(any(test, feature = "ristretto"))] @@ -60,6 +66,14 @@ fn test_ristretto() { ); } +/// Ciphersuite for Ed25519. +/// +/// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition +/// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as +/// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other. 
+#[cfg(feature = "ed25519")] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct Ed25519; #[cfg(feature = "ed25519")] dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519"); #[cfg(feature = "ed25519")] diff --git a/crypto/ciphersuite/src/ed448.rs b/crypto/ciphersuite/src/ed448.rs index 30301ec9..e4f1d0a9 100644 --- a/crypto/ciphersuite/src/ed448.rs +++ b/crypto/ciphersuite/src/ed448.rs @@ -11,7 +11,7 @@ use minimal_ed448::{scalar::Scalar, point::Point}; use crate::Ciphersuite; -// Re-define Shake256 as a traditional Digest to meet API expectations +/// Shake256, fixed to a 114-byte output, as used by Ed448. #[derive(Clone, Default)] pub struct Shake256_114(Shake256); impl BlockSizeUser for Shake256_114 { @@ -48,6 +48,11 @@ impl FixedOutput for Shake256_114 { } impl HashMarker for Shake256_114 {} +/// Ciphersuite for Ed448. +/// +/// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition +/// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as +/// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other. #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct Ed448; impl Ciphersuite for Ed448 { diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index aa84f8e5..c90b7214 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -20,8 +20,6 @@ macro_rules! kp_curve { $Ciphersuite: ident, $ID: literal ) => { - #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] - pub struct $Ciphersuite; impl Ciphersuite for $Ciphersuite { type F = $lib::Scalar; type G = $lib::ProjectivePoint; @@ -105,6 +103,12 @@ fn test_oversize_dst() { assert_eq!(C::hash_to_F(&oversize_dst, &[]), C::hash_to_F(&actual_dst, &[])); } +/// Ciphersuite for Secp256k1. +/// +/// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16). 
+#[cfg(feature = "secp256k1")] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct Secp256k1; #[cfg(feature = "secp256k1")] kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); #[cfg(feature = "secp256k1")] @@ -137,6 +141,12 @@ fn test_secp256k1() { test_oversize_dst::(); } +/// Ciphersuite for P-256. +/// +/// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16). +#[cfg(feature = "p256")] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +pub struct P256; #[cfg(feature = "p256")] kp_curve!("p256", p256, P256, b"P-256"); #[cfg(feature = "p256")] From 40a6672547c371c1bc8e624bea1844aeb4dbceb8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:05:47 -0500 Subject: [PATCH 05/71] 3.2.1, 3.2.4, 3.2.5. Documentation and tests --- crypto/dalek-ff-group/src/field.rs | 122 ++++++++++++++++++++++++----- crypto/dalek-ff-group/src/lib.rs | 18 ++++- 2 files changed, 119 insertions(+), 21 deletions(-) diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index 78d6633d..c9aa2a9c 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -13,30 +13,69 @@ use ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; use crate::{constant_time, math, from_uint}; -const MODULUS: U256 = - U256::from_be_hex("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed"); - -const WIDE_MODULUS: U512 = U512::from_be_hex(concat!( - "0000000000000000000000000000000000000000000000000000000000000000", - "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed" -)); +// 2^255 - 19 +// Uses saturating_sub because checked_sub isn't available at compile time +const MODULUS: U256 = U256::from_u8(1).shl_vartime(255).saturating_sub(&U256::from_u8(19)); +const WIDE_MODULUS: U512 = U256::ZERO.concat(&MODULUS); #[derive(Clone, Copy, PartialEq, Eq, Default, Debug)] pub struct FieldElement(U256); -pub const MOD_3_8: FieldElement = - 
FieldElement(MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8))); +/* +The following is a valid const definition of sqrt(-1) yet exceeds the const_eval_limit by 24x. +Accordingly, it'd only be usable on a nightly compiler with the following crate attributes: +#![feature(const_eval_limit)] +#![const_eval_limit = "24000000"] -pub const MOD_5_8: FieldElement = FieldElement(MOD_3_8.0.saturating_sub(&U256::ONE)); +const SQRT_M1: FieldElement = { + // Formula from RFC-8032 (modp_sqrt_m1/sqrt8k5 z) + // 2 ** ((MODULUS - 1) // 4) % MODULUS + let base = U256::from_u8(2); + let exp = MODULUS.saturating_sub(&U256::from_u8(1)).wrapping_div(&U256::from_u8(4)); -pub const EDWARDS_D: FieldElement = FieldElement(U256::from_be_hex( - "52036cee2b6ffe738cc740797779e89800700a4d4141d8ab75eb4dca135978a3", -)); + const fn mul(x: U256, y: U256) -> U256 { + let wide = U256::mul_wide(&x, &y); + let wide = U256::concat(&wide.1, &wide.0); + wide.wrapping_rem(&WIDE_MODULUS).split().1 + } -pub const SQRT_M1: FieldElement = FieldElement(U256::from_be_hex( + // Perform the pow via multiply and square + let mut res = U256::ONE; + // Iterate from highest bit to lowest bit + let mut bit = 255; + loop { + if bit != 255 { + res = mul(res, res); + } + + // Reverse from little endian to big endian + if exp.bit_vartime(bit) == 1 { + res = mul(res, base); + } + + if bit == 0 { + break; + } + bit -= 1; + } + + FieldElement(res) +}; +*/ + +// Use a constant since we can't calculate it at compile-time without a nightly compiler +// Even without const_eval_limit, it'd take ~30s to calculate, which isn't worth it +const SQRT_M1: FieldElement = FieldElement(U256::from_be_hex( "2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0", )); +// Constant useful in calculating square roots (RFC-8032 sqrt8k5's exponent used to calculate y) +const MOD_3_8: FieldElement = + FieldElement(MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8))); + +// Constant useful in 
sqrt_ratio_i (sqrt(u / v)) +const MOD_5_8: FieldElement = FieldElement(MOD_3_8.0.saturating_sub(&U256::ONE)); + fn reduce(x: U512) -> U256 { U256::from_le_slice(&x.reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32]) } @@ -93,6 +132,7 @@ impl Field for FieldElement { CtOption::new(self.pow(NEG_2), !self.is_zero()) } + // RFC-8032 sqrt8k5 fn sqrt(&self) -> CtOption { let tv1 = self.pow(MOD_3_8); let tv2 = tv1 * SQRT_M1; @@ -113,14 +153,20 @@ impl PrimeField for FieldElement { self.0.to_le_bytes() } + // This was set per the specification in the ff crate docs + // The number of leading zero bits in the little-endian bit representation of (modulus - 1) const S: u32 = 2; fn is_odd(&self) -> Choice { self.0.is_odd() } fn multiplicative_generator() -> Self { + // This was calculated with the method from the ff crate docs + // SageMath GF(modulus).primitive_element() 2u64.into() } fn root_of_unity() -> Self { + // This was calculated via the formula from the ff crate docs + // Self::multiplicative_generator() ** ((modulus - 1) >> Self::S) FieldElement(U256::from_be_hex( "2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0", )) @@ -172,27 +218,67 @@ impl FieldElement { res } + /// The square root of u/v, as used for Ed25519 point decoding (RFC 8032 5.1.3) and within + /// Ristretto (5.1 Extracting an Inverse Square Root). + /// + /// The result is only a valid square root if the Choice is true. + /// RFC 8032 simply fails if there isn't a square root, leaving any return value undefined. + /// Ristretto explicitly returns 0 or sqrt((SQRT_M1 * u) / v). 
pub fn sqrt_ratio_i(u: FieldElement, v: FieldElement) -> (Choice, FieldElement) { let i = SQRT_M1; let v3 = v.square() * v; let v7 = v3.square() * v; + // Candidate root let mut r = (u * v3) * (u * v7).pow(MOD_5_8); + // 8032 3.1 let check = v * r.square(); let correct_sign = check.ct_eq(&u); - let flipped_sign = check.ct_eq(&(-u)); - let flipped_sign_i = check.ct_eq(&((-u) * i)); + // 8032 3.2 conditional + let neg_u = -u; + let flipped_sign = check.ct_eq(&neg_u); + // Ristretto Step 5 + let flipped_sign_i = check.ct_eq(&(neg_u * i)); + // 3.2 set r.conditional_assign(&(r * i), flipped_sign | flipped_sign_i); - let r_is_negative = r.is_odd(); - r.conditional_negate(r_is_negative); + // Always return the even root, per Ristretto + // This doesn't break Ed25519 point decoding as that doesn't expect these steps to return a + // specific root + // Ed25519 points include a dedicated sign bit to determine which root to use, so at worst + // this is a pointless inefficiency + r.conditional_negate(r.is_odd()); (correct_sign | flipped_sign, r) } } +#[test] +fn test_wide_modulus() { + let mut wide = [0; 64]; + wide[.. 
32].copy_from_slice(&MODULUS.to_le_bytes()); + assert_eq!(wide, WIDE_MODULUS.to_le_bytes()); +} + +#[test] +fn test_sqrt_m1() { + // Test equivalence against the known constant value + const SQRT_M1_MAGIC: U256 = + U256::from_be_hex("2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0"); + assert_eq!(SQRT_M1.0, SQRT_M1_MAGIC); + + // Also test equivalence against the result of the formula from RFC-8032 (modp_sqrt_m1/sqrt8k5 z) + // 2 ** ((MODULUS - 1) // 4) % MODULUS + assert_eq!( + SQRT_M1, + FieldElement::from(2u8).pow(FieldElement( + (FieldElement::zero() - FieldElement::one()).0.wrapping_div(&U256::from(4u8)) + )) + ); +} + #[test] fn test_field() { ff_group_tests::prime_field::test_prime_field_bits::(); diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index fb66b790..d5e44252 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -177,6 +177,7 @@ constant_time!(Scalar, DScalar); math_neg!(Scalar, Scalar, DScalar::add, DScalar::sub, DScalar::mul); from_uint!(Scalar, DScalar); +// Ed25519 order/scalar modulus const MODULUS: U256 = U256::from_be_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed"); @@ -272,19 +273,25 @@ impl PrimeField for Scalar { self.0.to_bytes() } + // This was set per the specification in the ff crate docs + // The number of leading zero bits in the little-endian bit representation of (modulus - 1) const S: u32 = 2; fn is_odd(&self) -> Choice { choice(self.to_le_bits()[0]) } fn multiplicative_generator() -> Self { + // This was calculated with the method from the ff crate docs + // SageMath GF(modulus).primitive_element() 2u64.into() } fn root_of_unity() -> Self { - const ROOT: [u8; 32] = [ + // This was calculated via the formula from the ff crate docs + // Self::multiplicative_generator() ** ((modulus - 1) >> Self::S) + Scalar::from_repr([ 212, 7, 190, 235, 223, 117, 135, 190, 254, 131, 206, 66, 83, 86, 240, 14, 122, 194, 193, 171, 96, 109, 61, 
125, 231, 129, 121, 224, 16, 115, 74, 9, - ]; - Scalar::from_repr(ROOT).unwrap() + ]) + .unwrap() } } @@ -433,6 +440,11 @@ dalek_group!( RISTRETTO_BASEPOINT_TABLE ); +#[test] +fn test_scalar_modulus() { + assert_eq!(MODULUS.to_le_bytes(), curve25519_dalek::constants::BASEPOINT_ORDER.to_bytes()); +} + #[test] fn test_ed25519_group() { ff_group_tests::group::test_prime_group_bits::(); From 74647b1b5273cad7f9fd2dfe048257164cba8275 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:14:07 -0500 Subject: [PATCH 06/71] 3.2.3 Don't yield identity in Group::random --- crypto/dalek-ff-group/src/lib.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index d5e44252..c60bbb5c 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -358,7 +358,11 @@ macro_rules! dalek_group { bytes[31] |= u8::try_from(rng.next_u32() % 2).unwrap() << 7; let opt = Self::from_bytes(&bytes); if opt.is_some().into() { - return opt.unwrap(); + let opt = opt.unwrap(); + // Ban identity, per the trait specification + if !bool::from(opt.is_identity()) { + return opt; + } } } } From d929a8d96ecd066b8df88b64b9a9e8cc8bf09958 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:27:31 -0500 Subject: [PATCH 07/71] 3.2.2 Use a hash to point for random points in dfg --- Cargo.lock | 1 + crypto/dalek-ff-group/Cargo.toml | 2 ++ crypto/dalek-ff-group/src/lib.rs | 15 ++++++--------- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ba3aa9b2..aa5654bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1517,6 +1517,7 @@ dependencies = [ "ff-group-tests", "group", "rand_core 0.6.4", + "sha2 0.9.9", "subtle", "zeroize", ] diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index aea5f2a9..fa6a1395 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -23,6 +23,8 @@ ff 
= "0.12" group = "0.12" crypto-bigint = "0.4" + +sha2 = "0.9" curve25519-dalek = "3.2" [dev-dependencies] diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index c60bbb5c..09e18c88 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -354,15 +354,12 @@ macro_rules! dalek_group { type Scalar = Scalar; fn random(mut rng: impl RngCore) -> Self { loop { - let mut bytes = field::FieldElement::random(&mut rng).to_repr(); - bytes[31] |= u8::try_from(rng.next_u32() % 2).unwrap() << 7; - let opt = Self::from_bytes(&bytes); - if opt.is_some().into() { - let opt = opt.unwrap(); - // Ban identity, per the trait specification - if !bool::from(opt.is_identity()) { - return opt; - } + let mut bytes = [0; 64]; + rng.fill_bytes(&mut bytes); + let point = $Point($DPoint::hash_from_bytes::(&bytes)); + // Ban identity, per the trait specification + if !bool::from(point.is_identity()) { + return point; } } } From d72c4ca4f764d4aed745e75b5c1a93ad120fd880 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:29:38 -0500 Subject: [PATCH 08/71] 3.3.1 replace try_from with from --- crypto/dkg/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index b858eec6..87c874a8 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -140,9 +140,9 @@ pub fn lagrange(i: u16, included: &[u16]) -> F { continue; } - let share = F::from(u64::try_from(*l).unwrap()); + let share = F::from(u64::from(*l)); num *= share; - denom *= share - F::from(u64::try_from(i).unwrap()); + denom *= share - F::from(u64::from(i)); } // Safe as this will only be 0 if we're part of the above loop From 8bee62609caf6d127aed6a0f338c57f7b473f2f9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:44:20 -0500 Subject: [PATCH 09/71] 3.3.2 Use a static IV and clarify cipher documentation --- crypto/dkg/src/encryption.rs | 18 +++++++++++------- 1 file 
changed, 11 insertions(+), 7 deletions(-) diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index 341f033d..6543f346 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -96,6 +96,8 @@ fn ecdh(private: &Zeroizing, public: C::G) -> Zeroizing(dst: &'static [u8], ecdh: &Zeroizing) -> ChaCha20 { // Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly // TODO: https://github.com/serai-dex/serai/issues/151 @@ -113,18 +115,18 @@ fn cipher(dst: &'static [u8], ecdh: &Zeroizing) -> ChaCha2 key.copy_from_slice(&challenge[.. 32]); zeroize(challenge.as_mut()); - // The RecommendedTranscript isn't vulnerable to length extension attacks, yet if it was, - // it'd make sense to clone it (and fork it) just to hedge against that + // Since the key is single-use, it doesn't matter what we use for the IV + // The isssue is key + IV reuse. If we never reuse the key, we can't have the opportunity to + // reuse a nonce + // Use a static IV in acknowledgement of this let mut iv = Cc20Iv::default(); - let mut challenge = transcript.challenge(b"iv"); - iv.copy_from_slice(&challenge[.. 
12]); - zeroize(challenge.as_mut()); + // The \0 is to satisfy the length requirement (12), not to be null terminated + iv.copy_from_slice(b"DKG IV v0.2\0"); - // Same commentary as the transcript regarding ZAlloc + // ChaCha20 has the same commentary as the transcript regarding ZAlloc // TODO: https://github.com/serai-dex/serai/issues/151 let res = ChaCha20::new(&key, &iv); zeroize(key.as_mut()); - zeroize(iv.as_mut()); res } @@ -144,6 +146,8 @@ fn encrypt( last.as_mut().zeroize(); */ + // Generate a new key for this message, satisfying cipher's requirement of distinct keys per + // message, and enabling revealing this message without revealing any others let key = Zeroizing::new(C::random_nonzero_F(rng)); cipher::(dst, &ecdh::(&key, to)).apply_keystream(msg.as_mut().as_mut()); From 87dea5e455fa20ee0a0ffb97af3d8c0bdb938d79 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 04:56:05 -0500 Subject: [PATCH 10/71] 3.3.3 Add an assert if polynomial is called with 0 This will only be called with 0 if the code fails to do proper screening of its arguments. If such a flaw is present, the DKG lib is critically broken (as this function isn't public). If it was allowed to continue executing, it'd reveal the secret share. 
--- crypto/dkg/src/frost.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index 0bc7ac7a..8884a9b2 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -151,6 +151,7 @@ impl KeyGenMachine { } fn polynomial(coefficients: &[Zeroizing], l: u16) -> Zeroizing { + assert!(l != 0, "attempting to evaluate a polynomial with 0"); let l = F::from(u64::from(l)); let mut share = Zeroizing::new(F::zero()); for (idx, coefficient) in coefficients.iter().rev().enumerate() { From 2d56d24d9c1d4637700f8d6038d5ac6a2e7329b7 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 06:50:45 -0500 Subject: [PATCH 11/71] 3.3.3 (cont) Add a dedicated Participant type --- coins/monero/src/ringct/clsag/multisig.rs | 10 +- coins/monero/src/tests/clsag.rs | 9 +- coins/monero/src/wallet/send/multisig.rs | 12 +- coins/monero/tests/runner.rs | 5 +- crypto/dkg/src/encryption.rs | 34 +++--- crypto/dkg/src/frost.rs | 80 +++++++------ crypto/dkg/src/lib.rs | 132 +++++++++++++--------- crypto/dkg/src/promote.rs | 10 +- crypto/dkg/src/tests/frost.rs | 91 +++++++-------- crypto/dkg/src/tests/mod.rs | 8 +- crypto/frost/src/algorithm.rs | 11 +- crypto/frost/src/lib.rs | 24 ++-- crypto/frost/src/nonce.rs | 12 +- crypto/frost/src/sign.rs | 24 ++-- crypto/frost/src/tests/mod.rs | 31 ++--- crypto/frost/src/tests/vectors.rs | 29 +++-- processor/src/lib.rs | 4 +- processor/src/tests/mod.rs | 19 +++- 18 files changed, 308 insertions(+), 237 deletions(-) diff --git a/coins/monero/src/ringct/clsag/multisig.rs b/coins/monero/src/ringct/clsag/multisig.rs index cec1cbd6..d9f3b5f3 100644 --- a/coins/monero/src/ringct/clsag/multisig.rs +++ b/coins/monero/src/ringct/clsag/multisig.rs @@ -23,7 +23,7 @@ use dleq::DLEqProof; use frost::{ dkg::lagrange, curve::Ed25519, - FrostError, ThresholdKeys, ThresholdView, + Participant, FrostError, ThresholdKeys, ThresholdView, algorithm::{WriteAddendum, Algorithm}, }; @@ -146,8 +146,8 @@ pub(crate) fn 
add_key_image_share( image: &mut EdwardsPoint, generator: EdwardsPoint, offset: Scalar, - included: &[u16], - participant: u16, + included: &[Participant], + participant: Participant, share: EdwardsPoint, ) { if image.is_identity() { @@ -203,7 +203,7 @@ impl Algorithm for ClsagMultisig { fn process_addendum( &mut self, view: &ThresholdView, - l: u16, + l: Participant, addendum: ClsagAddendum, ) -> Result<(), FrostError> { if self.image.is_identity() { @@ -212,7 +212,7 @@ impl Algorithm for ClsagMultisig { self.transcript.append_message(b"mask", self.mask().to_bytes()); } - self.transcript.append_message(b"participant", l.to_be_bytes()); + self.transcript.append_message(b"participant", l.to_bytes()); addendum .dleq diff --git a/coins/monero/src/tests/clsag.rs b/coins/monero/src/tests/clsag.rs index 3becb414..273124e8 100644 --- a/coins/monero/src/tests/clsag.rs +++ b/coins/monero/src/tests/clsag.rs @@ -24,7 +24,10 @@ use crate::{ use crate::ringct::clsag::{ClsagDetails, ClsagMultisig}; #[cfg(feature = "multisig")] -use frost::tests::{key_gen, algorithm_machines, sign}; +use frost::{ + Participant, + tests::{key_gen, algorithm_machines, sign}, +}; const RING_LEN: u64 = 11; const AMOUNT: u64 = 1337; @@ -93,7 +96,7 @@ fn clsag_multisig() { mask = random_scalar(&mut OsRng); amount = OsRng.next_u64(); } else { - dest = keys[&1].group_key().0; + dest = keys[&Participant::new(1).unwrap()].group_key().0; mask = randomness; amount = AMOUNT; } @@ -103,7 +106,7 @@ fn clsag_multisig() { let mask_sum = random_scalar(&mut OsRng); let algorithm = ClsagMultisig::new( RecommendedTranscript::new(b"Monero Serai CLSAG Test"), - keys[&1].group_key().0, + keys[&Participant::new(1).unwrap()].group_key().0, Arc::new(RwLock::new(Some(ClsagDetails::new( ClsagInput::new( Commitment::new(randomness, AMOUNT), diff --git a/coins/monero/src/wallet/send/multisig.rs b/coins/monero/src/wallet/send/multisig.rs index 7dcb6f13..e8509029 100644 --- a/coins/monero/src/wallet/send/multisig.rs +++ 
b/coins/monero/src/wallet/send/multisig.rs @@ -14,7 +14,7 @@ use dalek_ff_group as dfg; use transcript::{Transcript, RecommendedTranscript}; use frost::{ curve::Ed25519, - FrostError, ThresholdKeys, + Participant, FrostError, ThresholdKeys, sign::{ Writable, Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine, @@ -35,7 +35,7 @@ use crate::{ /// FROST signing machine to produce a signed transaction. pub struct TransactionMachine { signable: SignableTransaction, - i: u16, + i: Participant, transcript: RecommendedTranscript, decoys: Vec, @@ -48,7 +48,7 @@ pub struct TransactionMachine { pub struct TransactionSignMachine { signable: SignableTransaction, - i: u16, + i: Participant, transcript: RecommendedTranscript, decoys: Vec, @@ -236,7 +236,7 @@ impl SignMachine for TransactionSignMachine { fn sign( mut self, - mut commitments: HashMap, + mut commitments: HashMap, msg: &[u8], ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> { if !msg.is_empty() { @@ -263,7 +263,7 @@ impl SignMachine for TransactionSignMachine { // While each CLSAG will do this as they need to for security, they have their own // transcripts cloned from this TX's initial premise's transcript. 
For our TX // transcript to have the CLSAG data for entropy, it'll have to be added ourselves here - self.transcript.append_message(b"participant", (*l).to_be_bytes()); + self.transcript.append_message(b"participant", (*l).to_bytes()); let preprocess = if *l == self.i { self.our_preprocess[c].clone() @@ -389,7 +389,7 @@ impl SignatureMachine for TransactionSignatureMachine { fn complete( mut self, - shares: HashMap, + shares: HashMap, ) -> Result { let mut tx = self.tx; match tx.rct_signatures.prunable { diff --git a/coins/monero/tests/runner.rs b/coins/monero/tests/runner.rs index fe9239d7..149374ba 100644 --- a/coins/monero/tests/runner.rs +++ b/coins/monero/tests/runner.rs @@ -131,6 +131,7 @@ macro_rules! test { #[cfg(feature = "multisig")] use frost::{ curve::Ed25519, + Participant, tests::{THRESHOLD, key_gen}, }; @@ -165,7 +166,7 @@ macro_rules! test { #[cfg(not(feature = "multisig"))] panic!("Multisig branch called without the multisig feature"); #[cfg(feature = "multisig")] - keys[&1].group_key().0 + keys[&Participant::new(1).unwrap()].group_key().0 }; let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng))); @@ -211,7 +212,7 @@ macro_rules! 
test { #[cfg(feature = "multisig")] { let mut machines = HashMap::new(); - for i in 1 ..= THRESHOLD { + for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) { machines.insert( i, tx diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index 6543f346..4bcb7802 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -26,7 +26,7 @@ use multiexp::BatchVerifier; use schnorr::SchnorrSignature; use dleq::DLEqProof; -use crate::ThresholdParams; +use crate::{Participant, ThresholdParams}; pub trait ReadWrite: Sized { fn read(reader: &mut R, params: ThresholdParams) -> io::Result; @@ -133,7 +133,7 @@ fn cipher(dst: &'static [u8], ecdh: &Zeroizing) -> ChaCha2 fn encrypt( rng: &mut R, dst: &'static [u8], - from: u16, + from: Participant, to: C::G, mut msg: Zeroizing, ) -> EncryptedMessage { @@ -192,7 +192,7 @@ impl EncryptedMessage { } #[cfg(test)] - pub(crate) fn invalidate_msg(&mut self, rng: &mut R, from: u16) { + pub(crate) fn invalidate_msg(&mut self, rng: &mut R, from: Participant) { // Invalidate the message by specifying a new key/Schnorr PoP // This will cause all initial checks to pass, yet a decrypt to gibberish let key = Zeroizing::new(C::random_nonzero_F(rng)); @@ -213,7 +213,7 @@ impl EncryptedMessage { &mut self, rng: &mut R, dst: &'static [u8], - from: u16, + from: Participant, to: C::G, ) { use group::ff::PrimeField; @@ -237,7 +237,7 @@ impl EncryptedMessage { &mut self, rng: &mut R, dst: &'static [u8], - from: u16, + from: Participant, to: C::G, ) { use group::ff::PrimeField; @@ -292,12 +292,12 @@ impl EncryptionKeyProof { // This doesn't need to take the msg. It just doesn't hurt as an extra layer. // This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no // root of trust other than their existence in the assumed-to-exist external authenticated channel. 
-fn pop_challenge(nonce: C::G, key: C::G, sender: u16, msg: &[u8]) -> C::F { +fn pop_challenge(nonce: C::G, key: C::G, sender: Participant, msg: &[u8]) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2"); transcript.append_message(b"nonce", nonce.to_bytes()); transcript.append_message(b"key", key.to_bytes()); // This is sufficient to prevent the attack this is meant to stop - transcript.append_message(b"sender", sender.to_le_bytes()); + transcript.append_message(b"sender", sender.to_bytes()); // This, as written above, doesn't hurt transcript.append_message(b"message", msg); // While this is a PoK and a PoP, it's called a PoP here since the important part is its owner @@ -322,10 +322,10 @@ pub(crate) enum DecryptionError { #[derive(Clone)] pub(crate) struct Encryption { dst: &'static [u8], - i: u16, + i: Participant, enc_key: Zeroizing, enc_pub_key: C::G, - enc_keys: HashMap, + enc_keys: HashMap, } impl Zeroize for Encryption { @@ -339,7 +339,11 @@ impl Zeroize for Encryption { } impl Encryption { - pub(crate) fn new(dst: &'static [u8], i: u16, rng: &mut R) -> Self { + pub(crate) fn new( + dst: &'static [u8], + i: Participant, + rng: &mut R, + ) -> Self { let enc_key = Zeroizing::new(C::random_nonzero_F(rng)); Self { dst, @@ -356,7 +360,7 @@ impl Encryption { pub(crate) fn register( &mut self, - participant: u16, + participant: Participant, msg: EncryptionKeyMessage, ) -> M { if self.enc_keys.contains_key(&participant) { @@ -369,7 +373,7 @@ impl Encryption { pub(crate) fn encrypt( &self, rng: &mut R, - participant: u16, + participant: Participant, msg: Zeroizing, ) -> EncryptedMessage { encrypt(rng, self.dst, self.i, self.enc_keys[&participant], msg) @@ -382,7 +386,7 @@ impl Encryption { // Uses a distinct batch ID so if this batch verifier is reused, we know its the PoP aspect // which failed, and therefore to use None for the blame batch_id: I, - from: u16, + from: Participant, mut msg: EncryptedMessage, ) -> 
(Zeroizing, EncryptionKeyProof) { msg.pop.batch_verify( @@ -413,8 +417,8 @@ impl Encryption { // Returns None if the key was wrong. pub(crate) fn decrypt_with_proof( &self, - from: u16, - decryptor: u16, + from: Participant, + decryptor: Participant, mut msg: EncryptedMessage, // There's no encryption key proof if the accusation is of an invalid signature proof: Option>, diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index 8884a9b2..b8b9b449 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -24,7 +24,7 @@ use multiexp::{multiexp_vartime, BatchVerifier}; use schnorr::SchnorrSignature; use crate::{ - DkgError, ThresholdParams, ThresholdCore, validate_map, + Participant, DkgError, ThresholdParams, ThresholdCore, validate_map, encryption::{ ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, EncryptionKeyProof, DecryptionError, @@ -34,11 +34,11 @@ use crate::{ type FrostError = DkgError>; #[allow(non_snake_case)] -fn challenge(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F { +fn challenge(context: &str, l: Participant, R: &[u8], Am: &[u8]) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2"); transcript.domain_separate(b"schnorr_proof_of_knowledge"); transcript.append_message(b"context", context.as_bytes()); - transcript.append_message(b"participant", l.to_le_bytes()); + transcript.append_message(b"participant", l.to_bytes()); transcript.append_message(b"nonce", R); transcript.append_message(b"commitments", Am); C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr")) @@ -150,9 +150,13 @@ impl KeyGenMachine { } } -fn polynomial(coefficients: &[Zeroizing], l: u16) -> Zeroizing { - assert!(l != 0, "attempting to evaluate a polynomial with 0"); - let l = F::from(u64::from(l)); +fn polynomial( + coefficients: &[Zeroizing], + l: Participant, +) -> Zeroizing { + let l = F::from(u64::from(u16::from(l))); + // This should never be reached since Participant is explicitly 
non-zero + assert!(l != F::zero(), "zero participant passed to polynomial"); let mut share = Zeroizing::new(F::zero()); for (idx, coefficient) in coefficients.iter().rev().enumerate() { *share += coefficient.deref(); @@ -231,11 +235,15 @@ impl SecretShareMachine { fn verify_r1( &mut self, rng: &mut R, - mut commitments: HashMap>>, - ) -> Result>, FrostError> { - validate_map(&commitments, &(1 ..= self.params.n()).collect::>(), self.params.i())?; + mut commitments: HashMap>>, + ) -> Result>, FrostError> { + validate_map( + &commitments, + &(1 ..= self.params.n()).map(Participant).collect::>(), + self.params.i(), + )?; - let mut batch = BatchVerifier::::new(commitments.len()); + let mut batch = BatchVerifier::::new(commitments.len()); let mut commitments = commitments .drain() .map(|(l, msg)| { @@ -269,14 +277,16 @@ impl SecretShareMachine { pub fn generate_secret_shares( mut self, rng: &mut R, - commitments: HashMap>>, - ) -> Result<(KeyMachine, HashMap>>), FrostError> - { + commitments: HashMap>>, + ) -> Result< + (KeyMachine, HashMap>>), + FrostError, + > { let commitments = self.verify_r1(&mut *rng, commitments)?; // Step 1: Generate secret shares for all other parties let mut res = HashMap::new(); - for l in 1 ..= self.params.n() { + for l in (1 ..= self.params.n()).map(Participant) { // Don't insert our own shares to the byte buffer which is meant to be sent around // An app developer could accidentally send it. 
Best to keep this black boxed if l == self.params.i() { @@ -308,7 +318,7 @@ impl SecretShareMachine { pub struct KeyMachine { params: ThresholdParams, secret: Zeroizing, - commitments: HashMap>, + commitments: HashMap>, encryption: Encryption, } @@ -326,8 +336,8 @@ impl Zeroize for KeyMachine { // Calculate the exponent for a given participant and apply it to a series of commitments // Initially used with the actual commitments to verify the secret share, later used with // stripes to generate the verification shares -fn exponential(i: u16, values: &[C::G]) -> Vec<(C::F, C::G)> { - let i = C::F::from(i.into()); +fn exponential(i: Participant, values: &[C::G]) -> Vec<(C::F, C::G)> { + let i = C::F::from(u16::from(i).into()); let mut res = Vec::with_capacity(values.len()); (0 .. values.len()).fold(C::F::one(), |exp, l| { res.push((exp, values[l])); @@ -337,7 +347,7 @@ fn exponential(i: u16, values: &[C::G]) -> Vec<(C::F, C::G)> { } fn share_verification_statements( - target: u16, + target: Participant, commitments: &[C::G], mut share: Zeroizing, ) -> Vec<(C::F, C::G)> { @@ -359,8 +369,8 @@ fn share_verification_statements( #[derive(Clone, Copy, Hash, Debug, Zeroize)] enum BatchId { - Decryption(u16), - Share(u16), + Decryption(Participant), + Share(Participant), } impl KeyMachine { @@ -370,9 +380,13 @@ impl KeyMachine { pub fn calculate_share( mut self, rng: &mut R, - mut shares: HashMap>>, + mut shares: HashMap>>, ) -> Result, FrostError> { - validate_map(&shares, &(1 ..= self.params.n()).collect::>(), self.params.i())?; + validate_map( + &shares, + &(1 ..= self.params.n()).map(Participant).collect::>(), + self.params.i(), + )?; let mut batch = BatchVerifier::new(shares.len()); let mut blames = HashMap::new(); @@ -412,7 +426,7 @@ impl KeyMachine { // Calculate each user's verification share let mut verification_shares = HashMap::new(); - for i in 1 ..= self.params.n() { + for i in (1 ..= self.params.n()).map(Participant) { verification_shares.insert( i, if i == 
self.params.i() { @@ -438,7 +452,7 @@ impl KeyMachine { } pub struct BlameMachine { - commitments: HashMap>, + commitments: HashMap>, encryption: Encryption, result: ThresholdCore, } @@ -469,11 +483,11 @@ impl BlameMachine { fn blame_internal( &self, - sender: u16, - recipient: u16, + sender: Participant, + recipient: Participant, msg: EncryptedMessage>, proof: Option>, - ) -> u16 { + ) -> Participant { let share_bytes = match self.encryption.decrypt_with_proof(sender, recipient, msg, proof) { Ok(share_bytes) => share_bytes, // If there's an invalid signature, the sender did not send a properly formed message @@ -518,11 +532,11 @@ impl BlameMachine { /// order to prevent multiple instances of blame over a single incident. pub fn blame( self, - sender: u16, - recipient: u16, + sender: Participant, + recipient: Participant, msg: EncryptedMessage>, proof: Option>, - ) -> (AdditionalBlameMachine, u16) { + ) -> (AdditionalBlameMachine, Participant) { let faulty = self.blame_internal(sender, recipient, msg, proof); (AdditionalBlameMachine(self), faulty) } @@ -543,11 +557,11 @@ impl AdditionalBlameMachine { /// over a single incident. pub fn blame( self, - sender: u16, - recipient: u16, + sender: Participant, + recipient: Participant, msg: EncryptedMessage>, proof: Option>, - ) -> u16 { + ) -> Participant { self.0.blame_internal(sender, recipient, msg, proof) } } diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 87c874a8..e513665d 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -7,7 +7,7 @@ //! provided. use core::{ - fmt::{Debug, Formatter}, + fmt::{self, Debug}, ops::Deref, }; use std::{io::Read, sync::Arc, collections::HashMap}; @@ -36,29 +36,59 @@ pub mod promote; #[cfg(any(test, feature = "tests"))] pub mod tests; +/// The ID of a participant, defined as a non-zero u16. 
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)] +pub struct Participant(pub(crate) u16); +impl Participant { + pub fn new(i: u16) -> Option { + if i == 0 { + None + } else { + Some(Participant(i)) + } + } + + #[allow(clippy::wrong_self_convention)] + pub fn to_bytes(&self) -> [u8; 2] { + self.0.to_le_bytes() + } +} + +impl From for u16 { + fn from(participant: Participant) -> u16 { + participant.0 + } +} + +impl fmt::Display for Participant { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.0) + } +} + /// Various errors possible during key generation/signing. #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum DkgError { - #[error("a parameter was 0 (required {0}, participants {1})")] + #[error("a parameter was 0 (threshold {0}, participants {1})")] ZeroParameter(u16, u16), #[error("invalid amount of required participants (max {1}, got {0})")] InvalidRequiredQuantity(u16, u16), - #[error("invalid participant index (0 < index <= {0}, yet index is {1})")] - InvalidParticipantIndex(u16, u16), + #[error("invalid participant (0 < participant <= {0}, yet participant is {1})")] + InvalidParticipant(u16, Participant), #[error("invalid signing set")] InvalidSigningSet, #[error("invalid participant quantity (expected {0}, got {1})")] InvalidParticipantQuantity(usize, usize), - #[error("duplicated participant index ({0})")] - DuplicatedIndex(u16), + #[error("duplicated participant ({0})")] + DuplicatedParticipant(Participant), #[error("missing participant {0}")] - MissingParticipant(u16), + MissingParticipant(Participant), #[error("invalid proof of knowledge (participant {0})")] - InvalidProofOfKnowledge(u16), + InvalidProofOfKnowledge(Participant), #[error("invalid share (participant {participant}, blame {blame})")] - InvalidShare { participant: u16, blame: Option }, + InvalidShare { participant: Participant, blame: Option }, #[error("internal error ({0})")] InternalError(&'static str), @@ -66,9 +96,9 @@ pub 
enum DkgError { // Validate a map of values to have the expected included participants pub(crate) fn validate_map( - map: &HashMap, - included: &[u16], - ours: u16, + map: &HashMap, + included: &[Participant], + ours: Participant, ) -> Result<(), DkgError> { if (map.len() + 1) != included.len() { Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?; @@ -77,7 +107,7 @@ pub(crate) fn validate_map( for included in included { if *included == ours { if map.contains_key(included) { - Err(DkgError::DuplicatedIndex(*included))?; + Err(DkgError::DuplicatedParticipant(*included))?; } continue; } @@ -99,11 +129,11 @@ pub struct ThresholdParams { /// Amount of participants. n: u16, /// Index of the participant being acted for. - i: u16, + i: Participant, } impl ThresholdParams { - pub fn new(t: u16, n: u16, i: u16) -> Result> { + pub fn new(t: u16, n: u16, i: Participant) -> Result> { if (t == 0) || (n == 0) { Err(DkgError::ZeroParameter(t, n))?; } @@ -113,8 +143,8 @@ impl ThresholdParams { if t > n { Err(DkgError::InvalidRequiredQuantity(t, n))?; } - if (i == 0) || (i > n) { - Err(DkgError::InvalidParticipantIndex(n, i))?; + if u16::from(i) > n { + Err(DkgError::InvalidParticipant(n, i))?; } Ok(ThresholdParams { t, n, i }) @@ -126,13 +156,15 @@ impl ThresholdParams { pub fn n(&self) -> u16 { self.n } - pub fn i(&self) -> u16 { + pub fn i(&self) -> Participant { self.i } } /// Calculate the lagrange coefficient for a signing set. 
-pub fn lagrange(i: u16, included: &[u16]) -> F { +pub fn lagrange(i: Participant, included: &[Participant]) -> F { + let i_f = F::from(u64::from(u16::from(i))); + let mut num = F::one(); let mut denom = F::one(); for l in included { @@ -140,9 +172,9 @@ pub fn lagrange(i: u16, included: &[u16]) -> F { continue; } - let share = F::from(u64::from(*l)); + let share = F::from(u64::from(u16::from(*l))); num *= share; - denom *= share - F::from(u64::from(i)); + denom *= share - i_f; } // Safe as this will only be 0 if we're part of the above loop @@ -162,11 +194,11 @@ pub struct ThresholdCore { /// Group key. group_key: C::G, /// Verification shares. - verification_shares: HashMap, + verification_shares: HashMap, } -impl Debug for ThresholdCore { - fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { +impl fmt::Debug for ThresholdCore { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt .debug_struct("ThresholdCore") .field("params", &self.params) @@ -191,16 +223,9 @@ impl ThresholdCore { pub(crate) fn new( params: ThresholdParams, secret_share: Zeroizing, - verification_shares: HashMap, + verification_shares: HashMap, ) -> ThresholdCore { - debug_assert!(validate_map::<_, ()>( - &verification_shares, - &(0 ..= params.n).collect::>(), - 0 - ) - .is_ok()); - - let t = (1 ..= params.t).collect::>(); + let t = (1 ..= params.t).map(Participant).collect::>(); ThresholdCore { params, secret_share, @@ -220,19 +245,19 @@ impl ThresholdCore { self.group_key } - pub(crate) fn verification_shares(&self) -> HashMap { + pub(crate) fn verification_shares(&self) -> HashMap { self.verification_shares.clone() } pub fn serialize(&self) -> Vec { let mut serialized = vec![]; - serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes()); + serialized.extend(u32::try_from(C::ID.len()).unwrap().to_le_bytes()); serialized.extend(C::ID); - serialized.extend(self.params.t.to_be_bytes()); - serialized.extend(self.params.n.to_be_bytes()); - 
serialized.extend(self.params.i.to_be_bytes()); + serialized.extend(self.params.t.to_le_bytes()); + serialized.extend(self.params.n.to_le_bytes()); + serialized.extend(self.params.i.to_bytes()); serialized.extend(self.secret_share.to_repr().as_ref()); - for l in 1 ..= self.params.n { + for l in (1 ..= self.params.n).map(Participant) { serialized.extend(self.verification_shares[&l].to_bytes().as_ref()); } serialized @@ -245,7 +270,7 @@ impl ThresholdCore { let mut id_len = [0; 4]; reader.read_exact(&mut id_len).map_err(|_| missing.clone())?; - if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len { + if u32::try_from(C::ID.len()).unwrap().to_le_bytes() != id_len { Err(different.clone())?; } @@ -262,9 +287,14 @@ impl ThresholdCore { reader .read_exact(&mut value) .map_err(|_| DkgError::InternalError("missing participant quantities"))?; - Ok(u16::from_be_bytes(value)) + Ok(u16::from_le_bytes(value)) }; - (read_u16()?, read_u16()?, read_u16()?) + ( + read_u16()?, + read_u16()?, + Participant::new(read_u16()?) + .ok_or(DkgError::InternalError("invalid participant index"))?, + ) }; let secret_share = Zeroizing::new( @@ -272,7 +302,7 @@ impl ThresholdCore { ); let mut verification_shares = HashMap::new(); - for l in 1 ..= n { + for l in (1 ..= n).map(Participant) { verification_shares.insert( l, ::read_G(reader) @@ -306,10 +336,10 @@ pub struct ThresholdKeys { pub struct ThresholdView { offset: C::F, group_key: C::G, - included: Vec, + included: Vec, secret_share: Zeroizing, - original_verification_shares: HashMap, - verification_shares: HashMap, + original_verification_shares: HashMap, + verification_shares: HashMap, } impl Zeroize for ThresholdView { @@ -363,7 +393,7 @@ impl ThresholdKeys { } /// Returns all participants' verification shares without any offsetting. 
- pub(crate) fn verification_shares(&self) -> HashMap { + pub(crate) fn verification_shares(&self) -> HashMap { self.core.verification_shares() } @@ -371,7 +401,7 @@ impl ThresholdKeys { self.core.serialize() } - pub fn view(&self, included: &[u16]) -> Result, DkgError<()>> { + pub fn view(&self, included: &[Participant]) -> Result, DkgError<()>> { if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len()) { Err(DkgError::InvalidSigningSet)?; @@ -409,7 +439,7 @@ impl ThresholdView { self.group_key } - pub fn included(&self) -> &[u16] { + pub fn included(&self) -> &[Participant] { &self.included } @@ -417,11 +447,11 @@ impl ThresholdView { &self.secret_share } - pub fn original_verification_share(&self, l: u16) -> C::G { + pub fn original_verification_share(&self, l: Participant) -> C::G { self.original_verification_shares[&l] } - pub fn verification_share(&self, l: u16) -> C::G { + pub fn verification_share(&self, l: Participant) -> C::G { self.verification_shares[&l] } } diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/src/promote.rs index 6645cb04..1c4c223a 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -14,7 +14,7 @@ use ciphersuite::Ciphersuite; use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; -use crate::{DkgError, ThresholdCore, ThresholdKeys, validate_map}; +use crate::{Participant, DkgError, ThresholdCore, ThresholdKeys, validate_map}; /// Promote a set of keys to another Ciphersuite definition. 
pub trait CiphersuitePromote { @@ -27,10 +27,10 @@ pub trait CiphersuitePromote { fn promote(self) -> ThresholdKeys; } -fn transcript(key: G, i: u16) -> RecommendedTranscript { +fn transcript(key: G, i: Participant) -> RecommendedTranscript { let mut transcript = RecommendedTranscript::new(b"DKG Generator Promotion v0.2"); transcript.append_message(b"group_key", key.to_bytes()); - transcript.append_message(b"participant", i.to_be_bytes()); + transcript.append_message(b"participant", i.to_bytes()); transcript } @@ -97,10 +97,10 @@ where /// Complete promotion by taking in the proofs from all other participants. pub fn complete( self, - proofs: &HashMap>, + proofs: &HashMap>, ) -> Result, DkgError<()>> { let params = self.base.params(); - validate_map(proofs, &(1 ..= params.n).collect::>(), params.i)?; + validate_map(proofs, &(1 ..= params.n).map(Participant).collect::>(), params.i)?; let original_shares = self.base.verification_shares(); diff --git a/crypto/dkg/src/tests/frost.rs b/crypto/dkg/src/tests/frost.rs index 34579758..ad8327c1 100644 --- a/crypto/dkg/src/tests/frost.rs +++ b/crypto/dkg/src/tests/frost.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use rand_core::{RngCore, CryptoRng}; use crate::{ - Ciphersuite, ThresholdParams, ThresholdCore, + Ciphersuite, Participant, ThresholdParams, ThresholdCore, frost::{KeyGenMachine, SecretShare, KeyMachine}, encryption::{EncryptionKeyMessage, EncryptedMessage}, tests::{THRESHOLD, PARTICIPANTS, clone_without}, @@ -11,31 +11,30 @@ use crate::{ // Needed so rustfmt doesn't fail to format on line length issues type FrostEncryptedMessage = EncryptedMessage::F>>; -type FrostSecretShares = HashMap>; +type FrostSecretShares = HashMap>; // Commit, then return enc key and shares #[allow(clippy::type_complexity)] fn commit_enc_keys_and_shares( rng: &mut R, -) -> (HashMap>, HashMap, HashMap>) { +) -> ( + HashMap>, + HashMap, + HashMap>, +) { let mut machines = HashMap::new(); let mut commitments = HashMap::new(); let mut 
enc_keys = HashMap::new(); - for i in 1 ..= PARTICIPANTS { - let machine = KeyGenMachine::::new( - ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(), - "DKG Test Key Generation".to_string(), - ); + for i in (1 ..= PARTICIPANTS).map(Participant) { + let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); + let machine = KeyGenMachine::::new(params, "DKG Test Key Generation".to_string()); let (machine, these_commitments) = machine.generate_coefficients(rng); machines.insert(i, machine); commitments.insert( i, - EncryptionKeyMessage::read::<&[u8]>( - &mut these_commitments.serialize().as_ref(), - ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 }, - ) - .unwrap(), + EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params) + .unwrap(), ); enc_keys.insert(i, commitments[&i].enc_key()); } @@ -53,7 +52,8 @@ fn commit_enc_keys_and_shares( l, EncryptedMessage::read::<&[u8]>( &mut share.serialize().as_ref(), - ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 }, + // Only t/n actually matters, so hardcode i to 1 here + ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: Participant(1) }, ) .unwrap(), ) @@ -68,8 +68,8 @@ fn commit_enc_keys_and_shares( } fn generate_secret_shares( - shares: &HashMap>, - recipient: u16, + shares: &HashMap>, + recipient: Participant, ) -> FrostSecretShares { let mut our_secret_shares = HashMap::new(); for (i, shares) in shares { @@ -84,7 +84,7 @@ fn generate_secret_shares( /// Fully perform the FROST key generation algorithm. 
pub fn frost_gen( rng: &mut R, -) -> HashMap> { +) -> HashMap> { let (mut machines, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng); let mut verification_shares = None; @@ -122,16 +122,19 @@ mod literal { use super::*; + const ONE: Participant = Participant(1); + const TWO: Participant = Participant(2); + fn test_blame( machines: Vec>, msg: FrostEncryptedMessage, blame: Option>, ) { for machine in machines { - let (additional, blamed) = machine.blame(1, 2, msg.clone(), blame.clone()); - assert_eq!(blamed, 1); + let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone()); + assert_eq!(blamed, ONE); // Verify additional blame also works - assert_eq!(additional.blame(1, 2, msg.clone(), blame.clone()), 1); + assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE); } } @@ -142,7 +145,7 @@ mod literal { commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); // Mutate the PoP of the encrypted message from 1 to 2 - secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_pop(); + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop(); let mut blame = None; let machines = machines @@ -150,8 +153,8 @@ mod literal { .filter_map(|(i, machine)| { let our_secret_shares = generate_secret_shares(&secret_shares, i); let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == 2 { - assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: 1, blame: None })); + if i == TWO { + assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: ONE, blame: None })); // Explicitly declare we have a blame object, which happens to be None since invalid PoP // is self-explainable blame = Some(None); @@ -162,7 +165,7 @@ mod literal { }) .collect::>(); - test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap()); + test_blame(machines, secret_shares[&ONE][&TWO].clone(), blame.unwrap()); } #[test] @@ -176,7 +179,7 @@ mod literal { // We then malleate 1's blame proof, so 1 
ends up malicious // Doesn't simply invalidate the PoP as that won't have a blame statement // By mutating the encrypted data, we do ensure a blame statement is created - secret_shares.get_mut(&2).unwrap().get_mut(&1).unwrap().invalidate_msg(&mut OsRng, 2); + secret_shares.get_mut(&TWO).unwrap().get_mut(&ONE).unwrap().invalidate_msg(&mut OsRng, TWO); let mut blame = None; let machines = machines @@ -184,9 +187,9 @@ mod literal { .filter_map(|(i, machine)| { let our_secret_shares = generate_secret_shares(&secret_shares, i); let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == 1 { + if i == ONE { blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: 2, blame: Some(blame) }) => Some(blame), + Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), _ => panic!(), }); None @@ -197,7 +200,7 @@ mod literal { .collect::>(); blame.as_mut().unwrap().as_mut().unwrap().invalidate_key(); - test_blame(machines, secret_shares[&2][&1].clone(), blame.unwrap()); + test_blame(machines, secret_shares[&TWO][&ONE].clone(), blame.unwrap()); } // This should be largely equivalent to the prior test @@ -206,7 +209,7 @@ mod literal { let (mut machines, _, mut secret_shares) = commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - secret_shares.get_mut(&2).unwrap().get_mut(&1).unwrap().invalidate_msg(&mut OsRng, 2); + secret_shares.get_mut(&TWO).unwrap().get_mut(&ONE).unwrap().invalidate_msg(&mut OsRng, TWO); let mut blame = None; let machines = machines @@ -214,9 +217,9 @@ mod literal { .filter_map(|(i, machine)| { let our_secret_shares = generate_secret_shares(&secret_shares, i); let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == 1 { + if i == ONE { blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: 2, blame: Some(blame) }) => Some(blame), + Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame), _ => panic!(), }); None 
@@ -227,7 +230,7 @@ mod literal { .collect::>(); blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq(); - test_blame(machines, secret_shares[&2][&1].clone(), blame.unwrap()); + test_blame(machines, secret_shares[&TWO][&ONE].clone(), blame.unwrap()); } #[test] @@ -235,11 +238,11 @@ mod literal { let (mut machines, enc_keys, mut secret_shares) = commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_share_serialization( + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( &mut OsRng, b"FROST", - 1, - enc_keys[&2], + ONE, + enc_keys[&TWO], ); let mut blame = None; @@ -248,9 +251,9 @@ mod literal { .filter_map(|(i, machine)| { let our_secret_shares = generate_secret_shares(&secret_shares, i); let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == 2 { + if i == TWO { blame = Some(match machine.err() { - Some(DkgError::InvalidShare { participant: 1, blame: Some(blame) }) => Some(blame), + Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), _ => panic!(), }); None @@ -260,7 +263,7 @@ mod literal { }) .collect::>(); - test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap()); + test_blame(machines, secret_shares[&ONE][&TWO].clone(), blame.unwrap()); } #[test] @@ -268,11 +271,11 @@ mod literal { let (mut machines, enc_keys, mut secret_shares) = commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_share_value( + secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( &mut OsRng, b"FROST", - 1, - enc_keys[&2], + ONE, + enc_keys[&TWO], ); let mut blame = None; @@ -281,9 +284,9 @@ mod literal { .filter_map(|(i, machine)| { let our_secret_shares = generate_secret_shares(&secret_shares, i); let machine = machine.calculate_share(&mut OsRng, our_secret_shares); - if i == 2 { + if i == TWO { blame = 
Some(match machine.err() { - Some(DkgError::InvalidShare { participant: 1, blame: Some(blame) }) => Some(blame), + Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame), _ => panic!(), }); None @@ -293,6 +296,6 @@ mod literal { }) .collect::>(); - test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap()); + test_blame(machines, secret_shares[&ONE][&TWO].clone(), blame.unwrap()); } } diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs index 89957315..81e8a9ab 100644 --- a/crypto/dkg/src/tests/mod.rs +++ b/crypto/dkg/src/tests/mod.rs @@ -7,7 +7,7 @@ use group::ff::Field; use ciphersuite::Ciphersuite; -use crate::{ThresholdCore, ThresholdKeys, lagrange}; +use crate::{Participant, ThresholdCore, ThresholdKeys, lagrange}; /// FROST generation test. pub mod frost; @@ -33,7 +33,7 @@ pub fn clone_without( } /// Recover the secret from a collection of keys. -pub fn recover_key(keys: &HashMap>) -> C::F { +pub fn recover_key(keys: &HashMap>) -> C::F { let first = keys.values().next().expect("no keys provided"); assert!(keys.len() >= first.params().t().into(), "not enough keys provided"); let included = keys.keys().cloned().collect::>(); @@ -48,7 +48,7 @@ pub fn recover_key(keys: &HashMap>) -> C:: /// Generate threshold keys for tests. 
pub fn key_gen( rng: &mut R, -) -> HashMap> { +) -> HashMap> { let res = frost_gen(rng) .drain() .map(|(i, core)| { @@ -59,7 +59,7 @@ pub fn key_gen( (i, ThresholdKeys::new(core)) }) .collect(); - assert_eq!(C::generator() * recover_key(&res), res[&1].group_key()); + assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key()); res } diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index ad0dff5d..e0679523 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -6,7 +6,7 @@ use rand_core::{RngCore, CryptoRng}; use transcript::Transcript; -use crate::{Curve, FrostError, ThresholdKeys, ThresholdView}; +use crate::{Curve, Participant, FrostError, ThresholdKeys, ThresholdView}; pub use schnorr::SchnorrSignature; /// Write an addendum to a writer. @@ -55,7 +55,7 @@ pub trait Algorithm: Clone { fn process_addendum( &mut self, params: &ThresholdView, - l: u16, + l: Participant, reader: Self::Addendum, ) -> Result<(), FrostError>; @@ -161,7 +161,12 @@ impl> Algorithm for Schnorr { Ok(()) } - fn process_addendum(&mut self, _: &ThresholdView, _: u16, _: ()) -> Result<(), FrostError> { + fn process_addendum( + &mut self, + _: &ThresholdView, + _: Participant, + _: (), + ) -> Result<(), FrostError> { Ok(()) } diff --git a/crypto/frost/src/lib.rs b/crypto/frost/src/lib.rs index 9648e05d..a1d44030 100644 --- a/crypto/frost/src/lib.rs +++ b/crypto/frost/src/lib.rs @@ -19,7 +19,7 @@ use std::collections::HashMap; use thiserror::Error; /// Distributed key generation protocol. -pub use dkg::{self, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView}; +pub use dkg::{self, Participant, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView}; /// Curve trait and provided curves/HRAMs, forming various ciphersuites. pub mod curve; @@ -38,21 +38,21 @@ pub mod tests; /// Various errors possible during signing. 
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)] pub enum FrostError { - #[error("invalid participant index (0 < index <= {0}, yet index is {1})")] - InvalidParticipantIndex(u16, u16), + #[error("invalid participant (0 < participant <= {0}, yet participant is {1})")] + InvalidParticipant(u16, Participant), #[error("invalid signing set ({0})")] InvalidSigningSet(&'static str), #[error("invalid participant quantity (expected {0}, got {1})")] InvalidParticipantQuantity(usize, usize), - #[error("duplicated participant index ({0})")] - DuplicatedIndex(u16), + #[error("duplicated participant ({0})")] + DuplicatedParticipant(Participant), #[error("missing participant {0}")] - MissingParticipant(u16), + MissingParticipant(Participant), #[error("invalid preprocess (participant {0})")] - InvalidPreprocess(u16), + InvalidPreprocess(Participant), #[error("invalid share (participant {0})")] - InvalidShare(u16), + InvalidShare(Participant), #[error("internal error ({0})")] InternalError(&'static str), @@ -60,9 +60,9 @@ pub enum FrostError { // Validate a map of values to have the expected included participants pub fn validate_map( - map: &HashMap, - included: &[u16], - ours: u16, + map: &HashMap, + included: &[Participant], + ours: Participant, ) -> Result<(), FrostError> { if (map.len() + 1) != included.len() { Err(FrostError::InvalidParticipantQuantity(included.len(), map.len() + 1))?; @@ -71,7 +71,7 @@ pub fn validate_map( for included in included { if *included == ours { if map.contains_key(included) { - Err(FrostError::DuplicatedIndex(*included))?; + Err(FrostError::DuplicatedParticipant(*included))?; } continue; } diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs index b937aff9..71f5fa47 100644 --- a/crypto/frost/src/nonce.rs +++ b/crypto/frost/src/nonce.rs @@ -25,7 +25,7 @@ use multiexp::multiexp_vartime; use dleq::MultiDLEqProof; -use crate::curve::Curve; +use crate::{curve::Curve, Participant}; // Transcript used to aggregate binomial nonces for 
usage within a single DLEq proof. fn aggregation_transcript(context: &[u8]) -> T { @@ -247,17 +247,17 @@ pub(crate) struct IndividualBinding { binding_factors: Option>, } -pub(crate) struct BindingFactor(pub(crate) HashMap>); +pub(crate) struct BindingFactor(pub(crate) HashMap>); impl BindingFactor { - pub(crate) fn insert(&mut self, i: u16, commitments: Commitments) { + pub(crate) fn insert(&mut self, i: Participant, commitments: Commitments) { self.0.insert(i, IndividualBinding { commitments, binding_factors: None }); } pub(crate) fn calculate_binding_factors(&mut self, transcript: &mut T) { for (l, binding) in self.0.iter_mut() { let mut transcript = transcript.clone(); - transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr()); + transcript.append_message(b"participant", C::F::from(u64::from(u16::from(*l))).to_repr()); // It *should* be perfectly fine to reuse a binding factor for multiple nonces // This generates a binding factor per nonce just to ensure it never comes up as a question binding.binding_factors = Some( @@ -268,12 +268,12 @@ impl BindingFactor { } } - pub(crate) fn binding_factors(&self, i: u16) -> &[C::F] { + pub(crate) fn binding_factors(&self, i: Participant) -> &[C::F] { self.0[&i].binding_factors.as_ref().unwrap() } // Get the bound nonces for a specific party - pub(crate) fn bound(&self, l: u16) -> Vec> { + pub(crate) fn bound(&self, l: Participant) -> Vec> { let mut res = vec![]; for (i, (nonce, rho)) in self.0[&l].commitments.nonces.iter().zip(self.binding_factors(l).iter()).enumerate() diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 183ddd6b..4c7a15ec 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -19,7 +19,7 @@ use multiexp::BatchVerifier; use crate::{ curve::Curve, - FrostError, ThresholdParams, ThresholdKeys, ThresholdView, + Participant, FrostError, ThresholdParams, ThresholdKeys, ThresholdView, algorithm::{WriteAddendum, Addendum, Algorithm}, validate_map, }; @@ 
-239,7 +239,7 @@ pub trait SignMachine: Sized { /// become the signing set for this session. fn sign( self, - commitments: HashMap, + commitments: HashMap, msg: &[u8], ) -> Result<(Self::SignatureMachine, Self::SignatureShare), FrostError>; } @@ -291,7 +291,7 @@ impl> SignMachine for AlgorithmSignMachi fn sign( mut self, - mut preprocesses: HashMap>, + mut preprocesses: HashMap>, msg: &[u8], ) -> Result<(Self::SignatureMachine, SignatureShare), FrostError> { let multisig_params = self.params.multisig_params(); @@ -307,18 +307,14 @@ impl> SignMachine for AlgorithmSignMachi if included.len() < usize::from(multisig_params.t()) { Err(FrostError::InvalidSigningSet("not enough signers"))?; } - // Invalid index - if included[0] == 0 { - Err(FrostError::InvalidParticipantIndex(included[0], multisig_params.n()))?; - } // OOB index - if included[included.len() - 1] > multisig_params.n() { - Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], multisig_params.n()))?; + if u16::from(included[included.len() - 1]) > multisig_params.n() { + Err(FrostError::InvalidParticipant(multisig_params.n(), included[included.len() - 1]))?; } // Same signer included multiple times for i in 0 .. 
(included.len() - 1) { if included[i] == included[i + 1] { - Err(FrostError::DuplicatedIndex(included[i]))?; + Err(FrostError::DuplicatedParticipant(included[i]))?; } } @@ -332,7 +328,7 @@ impl> SignMachine for AlgorithmSignMachi let nonces = self.params.algorithm.nonces(); #[allow(non_snake_case)] - let mut B = BindingFactor(HashMap::::with_capacity(included.len())); + let mut B = BindingFactor(HashMap::::with_capacity(included.len())); { // Parse the preprocesses for l in &included { @@ -341,7 +337,7 @@ impl> SignMachine for AlgorithmSignMachi .params .algorithm .transcript() - .append_message(b"participant", C::F::from(u64::from(*l)).to_repr()); + .append_message(b"participant", C::F::from(u64::from(u16::from(*l))).to_repr()); } if *l == self.params.keys.params().i() { @@ -449,7 +445,7 @@ pub trait SignatureMachine { /// Complete signing. /// Takes in everyone elses' shares. Returns the signature. - fn complete(self, shares: HashMap) -> Result; + fn complete(self, shares: HashMap) -> Result; } /// Final step of the state machine for the signing process. 
@@ -472,7 +468,7 @@ impl> SignatureMachine for AlgorithmSign fn complete( self, - mut shares: HashMap>, + mut shares: HashMap>, ) -> Result { let params = self.params.multisig_params(); validate_map(&shares, self.view.included(), params.i())?; diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index 9c6c965f..690be9d5 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -5,7 +5,7 @@ use rand_core::{RngCore, CryptoRng}; pub use dkg::tests::{key_gen, recover_key}; use crate::{ - Curve, ThresholdKeys, + Curve, Participant, ThresholdKeys, algorithm::Algorithm, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; @@ -36,11 +36,14 @@ pub fn clone_without( pub fn algorithm_machines>( rng: &mut R, algorithm: A, - keys: &HashMap>, -) -> HashMap> { + keys: &HashMap>, +) -> HashMap> { let mut included = vec![]; - while included.len() < usize::from(keys[&1].params().t()) { - let n = u16::try_from((rng.next_u64() % u64::try_from(keys.len()).unwrap()) + 1).unwrap(); + while included.len() < usize::from(keys[&Participant::new(1).unwrap()].params().t()) { + let n = Participant::new( + u16::try_from((rng.next_u64() % u64::try_from(keys.len()).unwrap()) + 1).unwrap(), + ) + .unwrap(); if included.contains(&n) { continue; } @@ -64,15 +67,15 @@ pub fn algorithm_machines>( pub(crate) fn commit_and_shares< R: RngCore + CryptoRng, M: PreprocessMachine, - F: FnMut(&mut R, &mut HashMap), + F: FnMut(&mut R, &mut HashMap), >( rng: &mut R, - mut machines: HashMap, + mut machines: HashMap, mut cache: F, msg: &[u8], ) -> ( - HashMap>::SignatureMachine>, - HashMap>::SignatureShare>, + HashMap>::SignatureMachine>, + HashMap>::SignatureShare>, ) { let mut commitments = HashMap::new(); let mut machines = machines @@ -110,10 +113,10 @@ pub(crate) fn commit_and_shares< fn sign_internal< R: RngCore + CryptoRng, M: PreprocessMachine, - F: FnMut(&mut R, &mut HashMap), + F: FnMut(&mut R, &mut HashMap), >( rng: &mut 
R, - machines: HashMap, + machines: HashMap, cache: F, msg: &[u8], ) -> M::Signature { @@ -135,7 +138,7 @@ fn sign_internal< /// caching. pub fn sign_without_caching( rng: &mut R, - machines: HashMap, + machines: HashMap, msg: &[u8], ) -> M::Signature { sign_internal(rng, machines, |_, _| {}, msg) @@ -146,8 +149,8 @@ pub fn sign_without_caching( pub fn sign( rng: &mut R, params: >::Params, - mut keys: HashMap>::Keys>, - machines: HashMap, + mut keys: HashMap>::Keys>, + machines: HashMap, msg: &[u8], ) -> M::Signature { sign_internal( diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index c4a26d6f..771a3199 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -13,7 +13,7 @@ use dkg::tests::key_gen; use crate::{ curve::Curve, - ThresholdCore, ThresholdKeys, FrostError, + Participant, ThresholdCore, ThresholdKeys, FrostError, algorithm::{Schnorr, Hram}, sign::{ Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, SignMachine, @@ -30,7 +30,7 @@ pub struct Vectors { pub shares: Vec, pub msg: String, - pub included: Vec, + pub included: Vec, pub nonces: Vec<[String; 2]>, pub sig_shares: Vec, @@ -58,8 +58,11 @@ impl From for Vectors { included: to_str(&value["round_one_outputs"]["participant_list"]) .split(',') .map(u16::from_str) - .collect::>() - .unwrap(), + .collect::, _>>() + .unwrap() + .iter() + .map(|i| Participant::new(*i).unwrap()) + .collect(), nonces: value["round_one_outputs"]["participants"] .as_object() .unwrap() @@ -80,7 +83,7 @@ impl From for Vectors { } // Load these vectors into ThresholdKeys using a custom serialization it'll deserialize -fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap> { +fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap> { let shares = vectors .shares .iter() @@ -92,11 +95,11 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap(vectors: &Vectors) -> HashMap::deserialize::<&[u8]>(&mut 
serialized.as_ref()).unwrap(); assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(usize::from(these_keys.params().n()), shares.len()); - assert_eq!(these_keys.params().i(), i); + let participant = Participant::new(i).unwrap(); + assert_eq!(these_keys.params().i(), participant); assert_eq!(these_keys.secret_share().deref(), &shares[usize::from(i - 1)]); assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key); - keys.insert(i, ThresholdKeys::new(these_keys)); + keys.insert(participant, ThresholdKeys::new(these_keys)); } keys @@ -124,7 +128,8 @@ pub fn test_with_vectors>( let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); const MSG: &[u8] = b"Hello, World!"; let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); - assert!(sig.verify(keys[&1].group_key(), H::hram(&sig.R, &keys[&1].group_key(), MSG))); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); + assert!(sig.verify(group_key, H::hram(&sig.R, &group_key, MSG))); } // Test blame on an invalid Schnorr signature share diff --git a/processor/src/lib.rs b/processor/src/lib.rs index 24d8c0d2..462b682e 100644 --- a/processor/src/lib.rs +++ b/processor/src/lib.rs @@ -3,7 +3,7 @@ use std::{marker::Send, collections::HashMap}; use async_trait::async_trait; use thiserror::Error; -use frost::{curve::Ciphersuite, FrostError}; +use frost::{curve::Ciphersuite, Participant, FrostError}; mod coin; use coin::{CoinError, Coin}; @@ -18,7 +18,7 @@ pub enum NetworkError {} #[async_trait] pub trait Network: Send { - async fn round(&mut self, data: Vec) -> Result>, NetworkError>; + async fn round(&mut self, data: Vec) -> Result>, NetworkError>; } #[derive(Clone, Error, Debug)] diff --git a/processor/src/tests/mod.rs b/processor/src/tests/mod.rs index 577fb205..36e50f74 100644 --- a/processor/src/tests/mod.rs +++ b/processor/src/tests/mod.rs @@ -7,6 +7,8 @@ use async_trait::async_trait; use rand_core::OsRng; +use frost::Participant; + 
use crate::{ NetworkError, Network, coin::{Coin, Monero}, @@ -15,11 +17,11 @@ use crate::{ #[derive(Clone)] struct LocalNetwork { - i: u16, + i: Participant, size: u16, round: usize, #[allow(clippy::type_complexity)] - rounds: Arc>>>>, + rounds: Arc>>>>, } impl LocalNetwork { @@ -27,7 +29,12 @@ impl LocalNetwork { let rounds = Arc::new(RwLock::new(vec![])); let mut res = vec![]; for i in 1 ..= size { - res.push(LocalNetwork { i, size, round: 0, rounds: rounds.clone() }); + res.push(LocalNetwork { + i: Participant::new(i).unwrap(), + size, + round: 0, + rounds: rounds.clone(), + }); } res } @@ -35,7 +42,7 @@ impl LocalNetwork { #[async_trait] impl Network for LocalNetwork { - async fn round(&mut self, data: Vec) -> Result>, NetworkError> { + async fn round(&mut self, data: Vec) -> Result>, NetworkError> { { let mut rounds = self.rounds.write().unwrap(); if rounds.len() == self.round { @@ -64,14 +71,14 @@ async fn test_send(coin: C, fee: C::Fee) { let latest = coin.get_latest_block_number().await.unwrap(); let mut keys = frost::tests::key_gen::<_, C::Curve>(&mut OsRng); - let threshold = keys[&1].params().t(); + let threshold = keys[&Participant::new(1).unwrap()].params().t(); let mut networks = LocalNetwork::new(threshold); let mut wallets = vec![]; for i in 1 ..= threshold { let mut wallet = Wallet::new(MemCoinDb::new(), coin.clone()); wallet.acknowledge_block(0, latest); - wallet.add_keys(&WalletKeys::new(keys.remove(&i).unwrap(), 0)); + wallet.add_keys(&WalletKeys::new(keys.remove(&Participant::new(i).unwrap()).unwrap(), 0)); wallets.push(wallet); } From 4d6a0bbd7d8a2c08a839d8ca9099a04c0e1a4500 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 07:19:55 -0500 Subject: [PATCH 12/71] 3.3.4 Use FROST context throughout Encryption --- crypto/dkg/src/encryption.rs | 77 +++++++++++++++++++++-------------- crypto/dkg/src/frost.rs | 2 +- crypto/dkg/src/tests/frost.rs | 22 +++++++--- 3 files changed, 64 insertions(+), 37 deletions(-) diff --git 
a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index 4bcb7802..e4aab0dd 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -98,11 +98,13 @@ fn ecdh(private: &Zeroizing, public: C::G) -> Zeroizing(dst: &'static [u8], ecdh: &Zeroizing) -> ChaCha20 { +fn cipher(context: &str, ecdh: &Zeroizing) -> ChaCha20 { // Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly // TODO: https://github.com/serai-dex/serai/issues/151 let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0.2"); - transcript.domain_separate(dst); + transcript.append_message(b"context", context.as_bytes()); + + transcript.domain_separate(b"encryption_key"); let mut ecdh = ecdh.to_bytes(); transcript.append_message(b"shared_key", ecdh.as_ref()); @@ -132,7 +134,7 @@ fn cipher(dst: &'static [u8], ecdh: &Zeroizing) -> ChaCha2 fn encrypt( rng: &mut R, - dst: &'static [u8], + context: &str, from: Participant, to: C::G, mut msg: Zeroizing, @@ -149,7 +151,7 @@ fn encrypt( // Generate a new key for this message, satisfying cipher's requirement of distinct keys per // message, and enabling revealing this message without revealing any others let key = Zeroizing::new(C::random_nonzero_F(rng)); - cipher::(dst, &ecdh::(&key, to)).apply_keystream(msg.as_mut().as_mut()); + cipher::(context, &ecdh::(&key, to)).apply_keystream(msg.as_mut().as_mut()); let pub_key = C::generator() * key.deref(); let nonce = Zeroizing::new(C::random_nonzero_F(rng)); @@ -159,7 +161,7 @@ fn encrypt( pop: SchnorrSignature::sign( &key, nonce, - pop_challenge::(pub_nonce, pub_key, from, msg.deref().as_ref()), + pop_challenge::(context, pub_nonce, pub_key, from, msg.deref().as_ref()), ), msg, } @@ -192,7 +194,12 @@ impl EncryptedMessage { } #[cfg(test)] - pub(crate) fn invalidate_msg(&mut self, rng: &mut R, from: Participant) { + pub(crate) fn invalidate_msg( + &mut self, + rng: &mut R, + context: &str, + from: Participant, + ) { // Invalidate the message by 
specifying a new key/Schnorr PoP // This will cause all initial checks to pass, yet a decrypt to gibberish let key = Zeroizing::new(C::random_nonzero_F(rng)); @@ -203,7 +210,7 @@ impl EncryptedMessage { self.pop = SchnorrSignature::sign( &key, nonce, - pop_challenge::(pub_nonce, pub_key, from, self.msg.deref().as_ref()), + pop_challenge::(context, pub_nonce, pub_key, from, self.msg.deref().as_ref()), ); } @@ -212,7 +219,7 @@ impl EncryptedMessage { pub(crate) fn invalidate_share_serialization( &mut self, rng: &mut R, - dst: &'static [u8], + context: &str, from: Participant, to: C::G, ) { @@ -228,7 +235,7 @@ impl EncryptedMessage { assert!(!bool::from(C::F::from_repr(repr).is_some())); self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref()); - *self = encrypt(rng, dst, from, to, self.msg.clone()); + *self = encrypt(rng, context, from, to, self.msg.clone()); } // Assumes the encrypted message is a secret share. @@ -236,7 +243,7 @@ impl EncryptedMessage { pub(crate) fn invalidate_share_value( &mut self, rng: &mut R, - dst: &'static [u8], + context: &str, from: Participant, to: C::G, ) { @@ -245,7 +252,7 @@ impl EncryptedMessage { // Assumes the share isn't randomly 1 let repr = C::F::one().to_repr(); self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref()); - *self = encrypt(rng, dst, from, to, self.msg.clone()); + *self = encrypt(rng, context, from, to, self.msg.clone()); } } @@ -292,8 +299,18 @@ impl EncryptionKeyProof { // This doesn't need to take the msg. It just doesn't hurt as an extra layer. // This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no // root of trust other than their existence in the assumed-to-exist external authenticated channel. 
-fn pop_challenge(nonce: C::G, key: C::G, sender: Participant, msg: &[u8]) -> C::F { +fn pop_challenge( + context: &str, + nonce: C::G, + key: C::G, + sender: Participant, + msg: &[u8], +) -> C::F { let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2"); + transcript.append_message(b"context", context.as_bytes()); + + transcript.domain_separate(b"proof_of_possession"); + transcript.append_message(b"nonce", nonce.to_bytes()); transcript.append_message(b"key", key.to_bytes()); // This is sufficient to prevent the attack this is meant to stop @@ -306,8 +323,10 @@ fn pop_challenge(nonce: C::G, key: C::G, sender: Participant, ms C::hash_to_F(b"DKG-encryption-proof_of_possession", &transcript.challenge(b"schnorr")) } -fn encryption_key_transcript() -> RecommendedTranscript { - RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2") +fn encryption_key_transcript(context: &str) -> RecommendedTranscript { + let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2"); + transcript.append_message(b"context", context.as_bytes()); + transcript } #[derive(Clone, Copy, PartialEq, Eq, Debug, Error)] @@ -321,7 +340,7 @@ pub(crate) enum DecryptionError { // A simple box for managing encryption. 
#[derive(Clone)] pub(crate) struct Encryption { - dst: &'static [u8], + context: String, i: Participant, enc_key: Zeroizing, enc_pub_key: C::G, @@ -339,14 +358,10 @@ impl Zeroize for Encryption { } impl Encryption { - pub(crate) fn new( - dst: &'static [u8], - i: Participant, - rng: &mut R, - ) -> Self { + pub(crate) fn new(context: String, i: Participant, rng: &mut R) -> Self { let enc_key = Zeroizing::new(C::random_nonzero_F(rng)); Self { - dst, + context, i, enc_pub_key: C::generator() * enc_key.deref(), enc_key, @@ -376,7 +391,7 @@ impl Encryption { participant: Participant, msg: Zeroizing, ) -> EncryptedMessage { - encrypt(rng, self.dst, self.i, self.enc_keys[&participant], msg) + encrypt(rng, &self.context, self.i, self.enc_keys[&participant], msg) } pub(crate) fn decrypt( @@ -394,18 +409,18 @@ impl Encryption { batch, batch_id, msg.key, - pop_challenge::(msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), + pop_challenge::(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), ); let key = ecdh::(&self.enc_key, msg.key); - cipher::(self.dst, &key).apply_keystream(msg.msg.as_mut().as_mut()); + cipher::(&self.context, &key).apply_keystream(msg.msg.as_mut().as_mut()); ( msg.msg, EncryptionKeyProof { key, dleq: DLEqProof::prove( rng, - &mut encryption_key_transcript(), + &mut encryption_key_transcript(&self.context), &[C::generator(), msg.key], &self.enc_key, ), @@ -423,10 +438,10 @@ impl Encryption { // There's no encryption key proof if the accusation is of an invalid signature proof: Option>, ) -> Result, DecryptionError> { - if !msg - .pop - .verify(msg.key, pop_challenge::(msg.pop.R, msg.key, from, msg.msg.deref().as_ref())) - { + if !msg.pop.verify( + msg.key, + pop_challenge::(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()), + ) { Err(DecryptionError::InvalidSignature)?; } @@ -435,13 +450,13 @@ impl Encryption { proof .dleq .verify( - &mut encryption_key_transcript(), + &mut encryption_key_transcript(&self.context), 
&[C::generator(), msg.key], &[self.enc_keys[&decryptor], *proof.key], ) .map_err(|_| DecryptionError::InvalidProof)?; - cipher::(self.dst, &proof.key).apply_keystream(msg.msg.as_mut().as_mut()); + cipher::(&self.context, &proof.key).apply_keystream(msg.msg.as_mut().as_mut()); Ok(msg.msg) } else { Err(DecryptionError::InvalidProof) diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index b8b9b449..054d7bd9 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -132,7 +132,7 @@ impl KeyGenMachine { ); // Additionally create an encryption mechanism to protect the secret shares - let encryption = Encryption::new(b"FROST", self.params.i, rng); + let encryption = Encryption::new(self.context.clone(), self.params.i, rng); // Step 4: Broadcast let msg = diff --git a/crypto/dkg/src/tests/frost.rs b/crypto/dkg/src/tests/frost.rs index ad8327c1..39b2f658 100644 --- a/crypto/dkg/src/tests/frost.rs +++ b/crypto/dkg/src/tests/frost.rs @@ -13,6 +13,8 @@ use crate::{ type FrostEncryptedMessage = EncryptedMessage::F>>; type FrostSecretShares = HashMap>; +const CONTEXT: &str = "DKG Test Key Generation"; + // Commit, then return enc key and shares #[allow(clippy::type_complexity)] fn commit_enc_keys_and_shares( @@ -27,7 +29,7 @@ fn commit_enc_keys_and_shares( let mut enc_keys = HashMap::new(); for i in (1 ..= PARTICIPANTS).map(Participant) { let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(); - let machine = KeyGenMachine::::new(params, "DKG Test Key Generation".to_string()); + let machine = KeyGenMachine::::new(params, CONTEXT.to_string()); let (machine, these_commitments) = machine.generate_coefficients(rng); machines.insert(i, machine); @@ -179,7 +181,12 @@ mod literal { // We then malleate 1's blame proof, so 1 ends up malicious // Doesn't simply invalidate the PoP as that won't have a blame statement // By mutating the encrypted data, we do ensure a blame statement is created - 
secret_shares.get_mut(&TWO).unwrap().get_mut(&ONE).unwrap().invalidate_msg(&mut OsRng, TWO); + secret_shares + .get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); let mut blame = None; let machines = machines @@ -209,7 +216,12 @@ mod literal { let (mut machines, _, mut secret_shares) = commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng); - secret_shares.get_mut(&TWO).unwrap().get_mut(&ONE).unwrap().invalidate_msg(&mut OsRng, TWO); + secret_shares + .get_mut(&TWO) + .unwrap() + .get_mut(&ONE) + .unwrap() + .invalidate_msg(&mut OsRng, CONTEXT, TWO); let mut blame = None; let machines = machines @@ -240,7 +252,7 @@ mod literal { secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization( &mut OsRng, - b"FROST", + CONTEXT, ONE, enc_keys[&TWO], ); @@ -273,7 +285,7 @@ mod literal { secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value( &mut OsRng, - b"FROST", + CONTEXT, ONE, enc_keys[&TWO], ); From 1a6497f37af9ecb0ded83bedac1eae6ead7a4770 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 23 Feb 2023 07:21:47 -0500 Subject: [PATCH 13/71] 3.3.5 Clarify GeneratorPromotion is only for generators, not curves --- crypto/dkg/src/promote.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/src/promote.rs index 1c4c223a..9399224d 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -61,9 +61,10 @@ impl GeneratorProof { } } -/// Promote a set of keys from one curve to another, where the elliptic curve is the same. +/// Promote a set of keys from one generator to another, where the elliptic curve is the same. /// Since the Ciphersuite trait additionally specifies a generator, this provides an O(n) way to -/// update the generator used with keys. The key generation protocol itself is exponential. +/// update the generator used with keys. 
This outperforms the key generation protocol which is +// exponential. pub struct GeneratorPromotion { base: ThresholdKeys, proof: GeneratorProof, @@ -74,7 +75,7 @@ impl GeneratorPromotion where C2: Ciphersuite, { - /// Begin promoting keys from one curve to another. Returns a proof this share was properly + /// Begin promoting keys from one generator to another. Returns a proof this share was properly /// promoted. pub fn promote( rng: &mut R, From 6104d606becf133a69f7a790b5b6462c65f6fd4a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 24 Feb 2023 04:37:20 -0500 Subject: [PATCH 14/71] 3.4.2 Document the DLEq lib --- crypto/dleq/src/lib.rs | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index 3fc270bd..f224dd88 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -21,6 +21,7 @@ pub mod cross_group; #[cfg(test)] mod tests; +// Produce a non-biased challenge from the transcript in the specified field pub(crate) fn challenge(transcript: &mut T) -> F { // From here, there are three ways to get a scalar under the ff/group API // 1: Scalar::random(ChaCha20Rng::from_seed(self.transcript.rng_seed(b"challenge"))) @@ -80,6 +81,7 @@ pub(crate) fn challenge(transcript: &mut T) -> F { challenge } +// Helper function to read a scalar #[cfg(feature = "serialize")] fn read_scalar(r: &mut R) -> io::Result { let mut repr = F::Repr::default(); @@ -91,11 +93,13 @@ fn read_scalar(r: &mut R) -> io::Result { Ok(scalar.unwrap()) } -#[derive(Debug)] +/// Error for DLEq proofs. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum DLEqError { InvalidProof, } +/// A proof that points have the same discrete logarithm across generators. 
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct DLEqProof { c: G::Scalar, @@ -110,6 +114,8 @@ impl DLEqProof { transcript.append_message(b"point", point.to_bytes()); } + /// Prove that the points created by `scalar * G`, for each specified generator, share a discrete + /// logarithm. pub fn prove( rng: &mut R, transcript: &mut T, @@ -134,6 +140,7 @@ impl DLEqProof { DLEqProof { c, s } } + /// Verify the specified points share a discrete logarithm across the specified generators. pub fn verify( &self, transcript: &mut T, @@ -159,17 +166,20 @@ impl DLEqProof { Ok(()) } + /// Write a DLEq proof to something implementing Write. #[cfg(feature = "serialize")] pub fn write(&self, w: &mut W) -> io::Result<()> { w.write_all(self.c.to_repr().as_ref())?; w.write_all(self.s.to_repr().as_ref()) } + /// Read a DLEq proof from something implementing Read. #[cfg(feature = "serialize")] pub fn read(r: &mut R) -> io::Result> { Ok(DLEqProof { c: read_scalar(r)?, s: read_scalar(r)? }) } + /// Serialize a DLEq proof to a Vec. #[cfg(feature = "serialize")] pub fn serialize(&self) -> Vec { let mut res = vec![]; @@ -178,6 +188,9 @@ impl DLEqProof { } } +/// A proof that multiple series of points each have a single discrete logarithm across generators. +/// This is effectively n distinct DLEq proofs, one for each discrete logarithm and its points +/// across some generators, yet with a smaller overall proof size. #[cfg(feature = "std")] #[derive(Clone, PartialEq, Eq, Debug, Zeroize)] pub struct MultiDLEqProof { @@ -188,6 +201,8 @@ pub struct MultiDLEqProof { #[cfg(feature = "std")] #[allow(non_snake_case)] impl MultiDLEqProof { + /// Prove for each scalar that the series of points created by multiplying it against its + /// matching generators share a discrete logarithm. 
pub fn prove( rng: &mut R, transcript: &mut T, @@ -226,6 +241,8 @@ impl MultiDLEqProof { MultiDLEqProof { c, s } } + /// Verify each series of points share a discrete logarithm against their matching series of + /// generators. pub fn verify( &self, transcript: &mut T, @@ -263,6 +280,7 @@ impl MultiDLEqProof { Ok(()) } + /// Write a multi-DLEq proof to something implementing Write. #[cfg(feature = "serialize")] pub fn write(&self, w: &mut W) -> io::Result<()> { w.write_all(self.c.to_repr().as_ref())?; @@ -272,6 +290,7 @@ impl MultiDLEqProof { Ok(()) } + /// Read a multi-DLEq proof from something implementing Read. #[cfg(feature = "serialize")] pub fn read(r: &mut R, discrete_logs: usize) -> io::Result> { let c = read_scalar(r)?; @@ -282,6 +301,7 @@ impl MultiDLEqProof { Ok(MultiDLEqProof { c, s }) } + /// Serialize a multi-DLEq proof to a Vec. #[cfg(feature = "serialize")] pub fn serialize(&self) -> Vec { let mut res = vec![]; From 65376e93e5d077d90348d531b911ee22ded411b3 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 24 Feb 2023 05:11:01 -0500 Subject: [PATCH 15/71] 3.4.3 Merge the nonce calculation from DLEqProof and MultiDLEqProof into a single function 3.4.3 actually describes getting rid of DLEqProof for a thin wrapper around MultiDLEqProof. That can't be done due to DLEqProof not requiring the std features, enabling Vecs, which MultiDLEqProof relies on. Merging the verification statement does simplify the code a bit. While merging the proof could also be, it has much less value due to the simplicity of proving (nonce * G, scalar * G). --- crypto/dleq/src/lib.rs | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index f224dd88..1936d5aa 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -140,6 +140,21 @@ impl DLEqProof { DLEqProof { c, s } } + // Transcript a specific generator/nonce/point (G/R/A), as used when verifying a proof. 
+ // This takes in the generator/point, and then the challenge and solution to calculate the nonce. + fn verify_statement( + transcript: &mut T, + generator: G, + point: G, + c: G::Scalar, + s: G::Scalar, + ) { + // s = r + ca + // sG - cA = R + // R, A + Self::transcript(transcript, generator, (generator * s) - (point * c), point); + } + /// Verify the specified points share a discrete logarithm across the specified generators. pub fn verify( &self, @@ -153,10 +168,7 @@ impl DLEqProof { transcript.domain_separate(b"dleq"); for (generator, point) in generators.iter().zip(points) { - // s = r + ca - // sG - cA = R - // R, A - Self::transcript(transcript, *generator, (*generator * self.s) - (*point * self.c), *point); + Self::verify_statement(transcript, *generator, *point, self.c, self.s); } if self.c != challenge(transcript) { @@ -212,7 +224,7 @@ impl MultiDLEqProof { where G::Scalar: Zeroize, { - transcript.domain_separate(b"multi-dleq"); + transcript.domain_separate(b"multi_dleq"); let mut nonces = vec![]; for (i, (scalar, generators)) in scalars.iter().zip(generators).enumerate() { @@ -256,7 +268,7 @@ impl MultiDLEqProof { Err(DLEqError::InvalidProof)?; } - transcript.domain_separate(b"multi-dleq"); + transcript.domain_separate(b"multi_dleq"); for (i, (generators, points)) in generators.iter().zip(points).enumerate() { if points.len() != generators.len() { Err(DLEqError::InvalidProof)?; @@ -264,12 +276,7 @@ impl MultiDLEqProof { transcript.append_message(b"discrete_logarithm", i.to_le_bytes()); for (generator, point) in generators.iter().zip(points) { - DLEqProof::transcript( - transcript, - *generator, - (*generator * self.s[i]) - (*point * self.c), - *point, - ); + DLEqProof::verify_statement(transcript, *generator, *point, self.c, self.s[i]); } } From 32c18cac843b47d62f9946d053a59322f6e1fd9f Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 24 Feb 2023 05:28:31 -0500 Subject: [PATCH 16/71] 3.5.1 Document presence of k256/p256 --- 
crypto/ff-group-tests/src/group.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crypto/ff-group-tests/src/group.rs b/crypto/ff-group-tests/src/group.rs index 7dd6a213..ebf8cdec 100644 --- a/crypto/ff-group-tests/src/group.rs +++ b/crypto/ff-group-tests/src/group.rs @@ -157,12 +157,20 @@ where test_prime_group::(); } +// Run these tests against k256/p256 +// This ensures that these tests are well formed and won't error for valid implementations, +// assuming the validity of k256/p256 +// While k256 and p256 may be malformed in a way which coincides with a faulty test, this is +// considered unlikely +// The other option, not running against any libraries, would leave faulty tests completely +// undetected + #[test] -fn test_k256_group_encoding() { +fn test_k256() { test_prime_group_bits::(); } #[test] -fn test_p256_group_encoding() { +fn test_p256() { test_prime_group_bits::(); } From 93f7afec8badb0e943e92dde7fd76ee40c474228 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 24 Feb 2023 06:03:56 -0500 Subject: [PATCH 17/71] 3.5.2 Add more tests to ff-group-tests The audit recommends checking failure cases for from_bytes, from_bytes_unechecked, and from_repr. This isn't feasible. from_bytes is allowed to have non-canonical values. [0xff; 32] may accordingly be a valid point for non-SEC1-encoded curves. from_bytes_unchecked doesn't have a defined failure mode, and by name, unchecked, shouldn't necessarily fail. The audit acknowledges the tests should test for whatever result is 'appropriate', yet any result which isn't a failure on a valid element is appropriate. from_repr must be canonical, yet for a binary field of 2^n where n % 8 == 0, a [0xff; n / 8] repr would be valid. 
--- Cargo.lock | 1 + crypto/ciphersuite/README.md | 4 +- crypto/ciphersuite/src/dalek.rs | 4 +- crypto/ciphersuite/src/kp256.rs | 4 +- crypto/dalek-ff-group/src/field.rs | 2 +- crypto/dalek-ff-group/src/lib.rs | 4 +- crypto/ed448/src/field.rs | 2 +- crypto/ed448/src/point.rs | 1 + crypto/ed448/src/scalar.rs | 2 +- crypto/ff-group-tests/Cargo.toml | 1 + crypto/ff-group-tests/src/field.rs | 23 +++++++++++- crypto/ff-group-tests/src/group.rs | 47 +++++++++++++++++++----- crypto/ff-group-tests/src/prime_field.rs | 25 +++++++++++-- 13 files changed, 95 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aa5654bb..b740c7f0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2342,6 +2342,7 @@ dependencies = [ "group", "k256", "p256", + "rand_core 0.6.4", ] [[package]] diff --git a/crypto/ciphersuite/README.md b/crypto/ciphersuite/README.md index 2e8e460e..3cde68a4 100644 --- a/crypto/ciphersuite/README.md +++ b/crypto/ciphersuite/README.md @@ -27,8 +27,8 @@ The domain-separation tag is naively prefixed to the message. ### Ed448 Ed448 is offered via [minimal-ed448](https://crates.io/crates/minimal-ed448), an -explicitly not recommended, unaudited Ed448 implementation, limited to its -prime-order subgroup. +explicitly not recommended, unaudited, incomplete Ed448 implementation, limited +to its prime-order subgroup. Its `hash_to_F` is the wide reduction of SHAKE256, with a 114-byte output, as used in [RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). 
The diff --git a/crypto/ciphersuite/src/dalek.rs b/crypto/ciphersuite/src/dalek.rs index 3c40506d..2a6fe708 100644 --- a/crypto/ciphersuite/src/dalek.rs +++ b/crypto/ciphersuite/src/dalek.rs @@ -48,7 +48,7 @@ dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto"); #[cfg(any(test, feature = "ristretto"))] #[test] fn test_ristretto() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, RistrettoPoint>(&mut rand_core::OsRng); assert_eq!( Ristretto::hash_to_F( @@ -79,7 +79,7 @@ dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519"); #[cfg(feature = "ed25519")] #[test] fn test_ed25519() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, EdwardsPoint>(&mut rand_core::OsRng); // Ideally, a test vector from RFC-8032 (not FROST) would be here // Unfortunately, the IETF draft doesn't provide any vectors for the derived challenges diff --git a/crypto/ciphersuite/src/kp256.rs b/crypto/ciphersuite/src/kp256.rs index c90b7214..34e37f12 100644 --- a/crypto/ciphersuite/src/kp256.rs +++ b/crypto/ciphersuite/src/kp256.rs @@ -114,7 +114,7 @@ kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1"); #[cfg(feature = "secp256k1")] #[test] fn test_secp256k1() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, k256::ProjectivePoint>(&mut rand_core::OsRng); // Ideally, a test vector from hash_to_field (not FROST) would be here // Unfortunately, the IETF draft only provides vectors for field elements, not scalars @@ -152,7 +152,7 @@ kp_curve!("p256", p256, P256, b"P-256"); #[cfg(feature = "p256")] #[test] fn test_p256() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, p256::ProjectivePoint>(&mut rand_core::OsRng); assert_eq!( P256::hash_to_F( diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index c9aa2a9c..694de783 100644 
--- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -281,5 +281,5 @@ fn test_sqrt_m1() { #[test] fn test_field() { - ff_group_tests::prime_field::test_prime_field_bits::(); + ff_group_tests::prime_field::test_prime_field_bits::<_, FieldElement>(&mut rand_core::OsRng); } diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 09e18c88..1d2e7083 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -448,10 +448,10 @@ fn test_scalar_modulus() { #[test] fn test_ed25519_group() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, EdwardsPoint>(&mut rand_core::OsRng); } #[test] fn test_ristretto_group() { - ff_group_tests::group::test_prime_group_bits::(); + ff_group_tests::group::test_prime_group_bits::<_, RistrettoPoint>(&mut rand_core::OsRng); } diff --git a/crypto/ed448/src/field.rs b/crypto/ed448/src/field.rs index 69e4ecc9..cbabb62f 100644 --- a/crypto/ed448/src/field.rs +++ b/crypto/ed448/src/field.rs @@ -32,5 +32,5 @@ field!(FieldElement, MODULUS, WIDE_MODULUS, 448); #[test] fn test_field() { // TODO: Move to test_prime_field_bits once the impl is finished - ff_group_tests::prime_field::test_prime_field::(); + ff_group_tests::prime_field::test_prime_field::<_, FieldElement>(&mut rand_core::OsRng); } diff --git a/crypto/ed448/src/point.rs b/crypto/ed448/src/point.rs index 505abbdc..2cce0a37 100644 --- a/crypto/ed448/src/point.rs +++ b/crypto/ed448/src/point.rs @@ -323,6 +323,7 @@ fn test_group() { test_sub::(); test_mul::(); test_order::(); + test_random::<_, Point>(&mut rand_core::OsRng); test_encoding::(); } diff --git a/crypto/ed448/src/scalar.rs b/crypto/ed448/src/scalar.rs index 406ab8df..f2ce816d 100644 --- a/crypto/ed448/src/scalar.rs +++ b/crypto/ed448/src/scalar.rs @@ -35,5 +35,5 @@ impl Scalar { #[test] fn test_scalar_field() { // TODO: Move to test_prime_field_bits once the impl is finished - 
ff_group_tests::prime_field::test_prime_field::(); + ff_group_tests::prime_field::test_prime_field::<_, Scalar>(&mut rand_core::OsRng); } diff --git a/crypto/ff-group-tests/Cargo.toml b/crypto/ff-group-tests/Cargo.toml index 2c2897da..94fb8099 100644 --- a/crypto/ff-group-tests/Cargo.toml +++ b/crypto/ff-group-tests/Cargo.toml @@ -13,6 +13,7 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] +rand_core = "0.6" group = "0.12" [dev-dependencies] diff --git a/crypto/ff-group-tests/src/field.rs b/crypto/ff-group-tests/src/field.rs index 71544785..eeadd4c0 100644 --- a/crypto/ff-group-tests/src/field.rs +++ b/crypto/ff-group-tests/src/field.rs @@ -1,3 +1,4 @@ +use rand_core::RngCore; use group::ff::Field; /// Perform basic tests on equality. @@ -106,8 +107,27 @@ pub fn test_cube() { assert_eq!(two.cube(), two * two * two, "2^3 != 8"); } +/// Test random. +pub fn test_random(rng: &mut R) { + let a = F::random(&mut *rng); + + // Run up to 128 times so small fields, which may occasionally return the same element twice, + // are statistically unlikely to fail + // Field of order 1 will always fail this test due to lack of distinct elements to sample + // from + let mut pass = false; + for _ in 0 .. 128 { + let b = F::random(&mut *rng); + // This test passes if a distinct element is returned at least once + if b != a { + pass = true; + } + } + assert!(pass, "random always returned the same value"); +} + /// Run all tests on fields implementing Field. 
-pub fn test_field() { +pub fn test_field(rng: &mut R) { test_eq::(); test_conditional_select::(); test_add::(); @@ -119,4 +139,5 @@ pub fn test_field() { test_sqrt::(); test_is_zero::(); test_cube::(); + test_random::(rng); } diff --git a/crypto/ff-group-tests/src/group.rs b/crypto/ff-group-tests/src/group.rs index ebf8cdec..59b51376 100644 --- a/crypto/ff-group-tests/src/group.rs +++ b/crypto/ff-group-tests/src/group.rs @@ -1,3 +1,4 @@ +use rand_core::RngCore; use group::{ ff::{Field, PrimeFieldBits}, Group, @@ -69,6 +70,11 @@ pub fn test_sum() { G::generator().double(), "[generator, generator].sum() != two" ); + assert_eq!( + [G::generator().double(), G::generator()].iter().sum::(), + G::generator().double() + G::generator(), + "[generator.double(), generator].sum() != three" + ); } /// Test negation. @@ -107,9 +113,31 @@ pub fn test_order() { assert_eq!(minus_one + G::generator(), G::identity(), "((modulus - 1) * G) + G wasn't identity"); } +/// Test random. +pub fn test_random(rng: &mut R) { + let a = G::random(&mut *rng); + assert!(!bool::from(a.is_identity()), "random returned identity"); + + // Run up to 128 times so small groups, which may occasionally return the same element twice, + // are statistically unlikely to fail + // Groups of order <= 2 will always fail this test due to lack of distinct elements to sample + // from + let mut pass = false; + for _ in 0 .. 128 { + let b = G::random(&mut *rng); + assert!(!bool::from(b.is_identity()), "random returned identity"); + + // This test passes if a distinct element is returned at least once + if b != a { + pass = true; + } + } + assert!(pass, "random always returned the same value"); +} + /// Run all tests on groups implementing Group. 
-pub fn test_group() { - test_prime_field::(); +pub fn test_group(rng: &mut R) { + test_prime_field::(rng); test_eq::(); test_identity::(); @@ -121,6 +149,7 @@ pub fn test_group() { test_sub::(); test_mul::(); test_order::(); + test_random::(rng); } /// Test encoding and decoding of group elements. @@ -142,19 +171,19 @@ pub fn test_encoding() { } /// Run all tests on groups implementing PrimeGroup (Group + GroupEncoding). -pub fn test_prime_group() { - test_group::(); +pub fn test_prime_group(rng: &mut R) { + test_group::(rng); test_encoding::(); } /// Run all tests offered by this crate on the group. -pub fn test_prime_group_bits() +pub fn test_prime_group_bits(rng: &mut R) where G::Scalar: PrimeFieldBits, { - test_prime_field_bits::(); - test_prime_group::(); + test_prime_field_bits::(rng); + test_prime_group::(rng); } // Run these tests against k256/p256 @@ -167,10 +196,10 @@ where #[test] fn test_k256() { - test_prime_group_bits::(); + test_prime_group_bits::<_, k256::ProjectivePoint>(&mut rand_core::OsRng); } #[test] fn test_p256() { - test_prime_group_bits::(); + test_prime_group_bits::<_, p256::ProjectivePoint>(&mut rand_core::OsRng); } diff --git a/crypto/ff-group-tests/src/prime_field.rs b/crypto/ff-group-tests/src/prime_field.rs index a40e37e1..46e8486b 100644 --- a/crypto/ff-group-tests/src/prime_field.rs +++ b/crypto/ff-group-tests/src/prime_field.rs @@ -1,3 +1,4 @@ +use rand_core::RngCore; use group::ff::{PrimeField, PrimeFieldBits}; use crate::field::test_field; @@ -29,6 +30,16 @@ pub fn test_is_odd() { assert_eq!(F::one().is_odd().unwrap_u8(), 1, "1 was even"); assert_eq!(F::one().is_even().unwrap_u8(), 0, "1 wasn't odd"); + // Make sure an odd value added to an odd value is even + let two = F::one().double(); + assert_eq!(two.is_odd().unwrap_u8(), 0, "2 was odd"); + assert_eq!(two.is_even().unwrap_u8(), 1, "2 wasn't even"); + + // Make sure an even value added to an even value is even + let four = two.double(); + 
assert_eq!(four.is_odd().unwrap_u8(), 0, "4 was odd"); + assert_eq!(four.is_even().unwrap_u8(), 1, "4 wasn't even"); + let neg_one = -F::one(); assert_eq!(neg_one.is_odd().unwrap_u8(), 0, "-1 was odd"); assert_eq!(neg_one.is_even().unwrap_u8(), 1, "-1 wasn't even"); @@ -49,6 +60,11 @@ pub fn test_encoding() { F::from_repr_vartime(repr).unwrap(), "{msg} couldn't be encoded and decoded", ); + assert_eq!( + bytes.as_ref(), + F::from_repr(repr).unwrap().to_repr().as_ref(), + "canonical encoding decoded produced distinct encoding" + ); }; test(F::zero(), "0"); test(F::one(), "1"); @@ -57,8 +73,8 @@ pub fn test_encoding() { } /// Run all tests on fields implementing PrimeField. -pub fn test_prime_field() { - test_field::(); +pub fn test_prime_field(rng: &mut R) { + test_field::(rng); test_zero::(); test_one::(); @@ -265,6 +281,7 @@ pub fn test_root_of_unity() { } bit = bit.double(); } + assert!(bool::from(t.is_odd()), "t wasn't odd"); assert_eq!(pow(F::multiplicative_generator(), t), F::root_of_unity(), "incorrect root of unity"); assert_eq!( @@ -275,8 +292,8 @@ pub fn test_root_of_unity() { } /// Run all tests on fields implementing PrimeFieldBits. 
-pub fn test_prime_field_bits() { - test_prime_field::(); +pub fn test_prime_field_bits(rng: &mut R) { + test_prime_field::(rng); test_to_le_bits::(); test_char_le_bits::(); From 969a5d94f2516245a77245cf815f3351cd991b02 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 24 Feb 2023 06:16:22 -0500 Subject: [PATCH 18/71] 3.6.1 Document rejection of zero nonces --- crypto/frost/src/curve/mod.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crypto/frost/src/curve/mod.rs b/crypto/frost/src/curve/mod.rs index c1d95bb0..14ceb91d 100644 --- a/crypto/frost/src/curve/mod.rs +++ b/crypto/frost/src/curve/mod.rs @@ -77,6 +77,12 @@ pub trait Curve: Ciphersuite { let mut repr = secret.to_repr(); + // Perform rejection sampling until we reach a non-zero nonce + // While the IETF spec doesn't explicitly require this, generating a zero nonce will produce + // commitments which will be rejected for being zero (and if they were used, leak the secret + // share) + // Rejection sampling here will prevent an honest participant from ever generating 'malicious' + // values and ensure safety let mut res; while { seed.extend(repr.as_ref()); From c1435a20455b0469526f534354aa3f0ac7209d83 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 28 Feb 2023 00:00:29 -0500 Subject: [PATCH 19/71] 3.4.a Panic if generators.len() != scalars.len() for MultiDLEqProof --- crypto/dleq/src/lib.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index 1936d5aa..71b9cb4b 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -215,6 +215,7 @@ pub struct MultiDLEqProof { impl MultiDLEqProof { /// Prove for each scalar that the series of points created by multiplying it against its /// matching generators share a discrete logarithm. + /// This function panics if `generators.len() != scalars.len()`. 
pub fn prove( rng: &mut R, transcript: &mut T, @@ -224,6 +225,12 @@ impl MultiDLEqProof { where G::Scalar: Zeroize, { + assert_eq!( + generators.len(), + scalars.len(), + "amount of series of generators doesn't match the amount of scalars" + ); + transcript.domain_separate(b"multi_dleq"); let mut nonces = vec![]; From 7a054660496e0dbd7d7adc6e6bc8cf0891e1c83a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 28 Feb 2023 02:16:32 -0500 Subject: [PATCH 20/71] 3.6.2 Test nonce generation There's two ways which this could be tested. 1) Preprocess not taking in an arbitrary RNG item, yet the relevant bytes This would be an unsafe level of refactoring, in my opinion. 2) Test random_nonce and test the passed in RNG eventually ends up at random_nonce. This takes the latter route, both verifying random_nonce meets the vectors and that the FROST machine calls random_nonce properly. --- crypto/ciphersuite/src/lib.rs | 4 +- crypto/frost/src/curve/mod.rs | 5 +- crypto/frost/src/tests/vectors.rs | 268 +++++++++++++++++++++--------- 3 files changed, 192 insertions(+), 85 deletions(-) diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 099f38b8..076da606 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -90,9 +90,7 @@ pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize { // ff mandates this is canonical let res = Option::::from(Self::F::from_repr(encoding)) .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar")); - for b in encoding.as_mut() { - b.zeroize(); - } + encoding.as_mut().zeroize(); res } diff --git a/crypto/frost/src/curve/mod.rs b/crypto/frost/src/curve/mod.rs index 14ceb91d..f4e705d2 100644 --- a/crypto/frost/src/curve/mod.rs +++ b/crypto/frost/src/curve/mod.rs @@ -92,10 +92,7 @@ pub trait Curve: Ciphersuite { seed = Zeroizing::new(vec![0; 32]); rng.fill_bytes(&mut seed); } - - for i in repr.as_mut() { - i.zeroize(); - } + repr.as_mut().zeroize(); res } diff --git 
a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 771a3199..d19f1163 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -5,7 +5,9 @@ use std::collections::HashMap; use std::str::FromStr; use zeroize::Zeroizing; -use rand_core::{RngCore, CryptoRng}; + +use rand_core::{RngCore, CryptoRng, SeedableRng}; +use rand_chacha::ChaCha20Rng; use group::{ff::PrimeField, GroupEncoding}; @@ -16,8 +18,8 @@ use crate::{ Participant, ThresholdCore, ThresholdKeys, FrostError, algorithm::{Schnorr, Hram}, sign::{ - Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, SignMachine, - SignatureMachine, AlgorithmMachine, + Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, + PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, }, tests::{clone_without, recover_key, algorithm_machines, commit_and_shares, sign}, }; @@ -31,6 +33,8 @@ pub struct Vectors { pub msg: String, pub included: Vec, + + pub nonce_randomness: Vec<[String; 2]>, pub nonces: Vec<[String; 2]>, pub sig_shares: Vec, @@ -63,6 +67,15 @@ impl From for Vectors { .iter() .map(|i| Participant::new(*i).unwrap()) .collect(), + + nonce_randomness: value["round_one_outputs"]["participants"] + .as_object() + .unwrap() + .values() + .map(|value| { + [to_str(&value["hiding_nonce_randomness"]), to_str(&value["binding_nonce_randomness"])] + }) + .collect(), nonces: value["round_one_outputs"]["participants"] .as_object() .unwrap() @@ -155,87 +168,186 @@ pub fn test_with_vectors>( // Test against the vectors let keys = vectors_to_multisig_keys::(&vectors); - let group_key = - ::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap(); - let secret = - C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap(); - assert_eq!(C::generator() * secret, group_key); - assert_eq!(recover_key(&keys), secret); + { + let group_key = + ::read_G::<&[u8]>(&mut 
hex::decode(&vectors.group_key).unwrap().as_ref()) + .unwrap(); + let secret = + C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap(); + assert_eq!(C::generator() * secret, group_key); + assert_eq!(recover_key(&keys), secret); - let mut machines = vec![]; - for i in &vectors.included { - machines.push((i, AlgorithmMachine::new(Schnorr::::new(), keys[i].clone()).unwrap())); + let mut machines = vec![]; + for i in &vectors.included { + machines.push((i, AlgorithmMachine::new(Schnorr::::new(), keys[i].clone()).unwrap())); + } + + let mut commitments = HashMap::new(); + let mut machines = machines + .drain(..) + .enumerate() + .map(|(c, (i, machine))| { + let nonce = |i| { + Zeroizing::new( + C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][i]).unwrap().as_ref()).unwrap(), + ) + }; + let nonces = [nonce(0), nonce(1)]; + let these_commitments = + [C::generator() * nonces[0].deref(), C::generator() * nonces[1].deref()]; + let machine = machine.unsafe_override_preprocess( + vec![Nonce(nonces)], + Preprocess { + commitments: Commitments { + nonces: vec![NonceCommitments { + generators: vec![GeneratorCommitments(these_commitments)], + }], + dleq: None, + }, + addendum: (), + }, + ); + + commitments.insert( + *i, + machine + .read_preprocess::<&[u8]>( + &mut [ + these_commitments[0].to_bytes().as_ref(), + these_commitments[1].to_bytes().as_ref(), + ] + .concat() + .as_ref(), + ) + .unwrap(), + ); + (i, machine) + }) + .collect::>(); + + let mut shares = HashMap::new(); + let mut machines = machines + .drain(..) 
+ .enumerate() + .map(|(c, (i, machine))| { + let (machine, share) = machine + .sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()) + .unwrap(); + + let share = { + let mut buf = vec![]; + share.write(&mut buf).unwrap(); + buf + }; + assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap()); + + shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap()); + (i, machine) + }) + .collect::>(); + + for (i, machine) in machines.drain() { + let sig = machine.complete(clone_without(&shares, i)).unwrap(); + let mut serialized = sig.R.to_bytes().as_ref().to_vec(); + serialized.extend(sig.s.to_repr().as_ref()); + assert_eq!(hex::encode(serialized), vectors.sig); + } } - let mut commitments = HashMap::new(); - let mut c = 0; - let mut machines = machines - .drain(..) - .map(|(i, machine)| { - let nonce = |i| { + // The above code didn't test the nonce generation due to the infeasibility of doing so against + // the current codebase + + // A transparent RNG which has a fixed output + struct TransparentRng(Option<[u8; 32]>); + impl RngCore for TransparentRng { + fn next_u32(&mut self) -> u32 { + unimplemented!() + } + fn next_u64(&mut self) -> u64 { + unimplemented!() + } + fn fill_bytes(&mut self, dest: &mut [u8]) { + dest.copy_from_slice(&self.0.take().unwrap()) + } + fn try_fill_bytes(&mut self, _: &mut [u8]) -> Result<(), rand_core::Error> { + unimplemented!() + } + } + // CryptoRng requires the output not reveal any info about any other outputs + // Since this only will produce one output, this is actually met, even though it'd be fine to + // fake it as this is a test + impl CryptoRng for TransparentRng {} + + // Test C::random_nonce matches the expected vectors + for (i, l) in vectors.included.iter().enumerate() { + let l = usize::from(u16::from(*l)); + + // Shares are a zero-indexed array of all participants, hence l - 1 + let share = Zeroizing::new( + C::read_F::<&[u8]>(&mut hex::decode(&vectors.shares[l - 
1]).unwrap().as_ref()).unwrap(), + ); + for nonce in 0 .. 2 { + // Nonces are only present for participating signers, hence i + assert_eq!( + C::random_nonce( + &share, + &mut TransparentRng(Some( + hex::decode(&vectors.nonce_randomness[i][nonce]).unwrap().try_into().unwrap() + )) + ), Zeroizing::new( - C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][i]).unwrap().as_ref()).unwrap(), + C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[i][nonce]).unwrap().as_ref()) + .unwrap() ) - }; - let nonces = [nonce(0), nonce(1)]; - c += 1; - let these_commitments = - [C::generator() * nonces[0].deref(), C::generator() * nonces[1].deref()]; - let machine = machine.unsafe_override_preprocess( - vec![Nonce(nonces)], - Preprocess { - commitments: Commitments { - nonces: vec![NonceCommitments { - generators: vec![GeneratorCommitments(these_commitments)], - }], - dleq: None, - }, - addendum: (), - }, + ); + } + } + + // This doesn't verify C::random_nonce is called correctly, where the code should call it with + // the output from a ChaCha20 stream + // Create a known ChaCha20 stream to verify it ends up at random_nonce properly + + { + let mut chacha_seed = [0; 32]; + rng.fill_bytes(&mut chacha_seed); + let mut ours = ChaCha20Rng::from_seed(chacha_seed); + let frosts = ours.clone(); + + // The machines should geenerate a seed, and then use that seed in a ChaCha20 RNG for nonces + let mut preprocess_seed = [0; 32]; + ours.fill_bytes(&mut preprocess_seed); + let mut ours = ChaCha20Rng::from_seed(preprocess_seed); + + // Get the randomness which will be used + let mut randomness = ([0; 32], [0; 32]); + ours.fill_bytes(&mut randomness.0); + ours.fill_bytes(&mut randomness.1); + + // Create the machines + let mut machines = vec![]; + for i in &vectors.included { + machines.push((i, AlgorithmMachine::new(Schnorr::::new(), keys[i].clone()).unwrap())); + } + + for (i, machine) in machines.drain(..) 
{ + let (_, preprocess) = machine.preprocess(&mut frosts.clone()); + + // Calculate the expected nonces + let mut expected = (C::generator() * + C::random_nonce(keys[i].secret_share(), &mut TransparentRng(Some(randomness.0))).deref()) + .to_bytes() + .as_ref() + .to_vec(); + expected.extend( + (C::generator() * + C::random_nonce(keys[i].secret_share(), &mut TransparentRng(Some(randomness.1))) + .deref()) + .to_bytes() + .as_ref(), ); - commitments.insert( - *i, - machine - .read_preprocess::<&[u8]>( - &mut [ - these_commitments[0].to_bytes().as_ref(), - these_commitments[1].to_bytes().as_ref(), - ] - .concat() - .as_ref(), - ) - .unwrap(), - ); - (i, machine) - }) - .collect::>(); - - let mut shares = HashMap::new(); - c = 0; - let mut machines = machines - .drain(..) - .map(|(i, machine)| { - let (machine, share) = - machine.sign(clone_without(&commitments, i), &hex::decode(&vectors.msg).unwrap()).unwrap(); - - let share = { - let mut buf = vec![]; - share.write(&mut buf).unwrap(); - buf - }; - assert_eq!(share, hex::decode(&vectors.sig_shares[c]).unwrap()); - c += 1; - - shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap()); - (i, machine) - }) - .collect::>(); - - for (i, machine) in machines.drain() { - let sig = machine.complete(clone_without(&shares, i)).unwrap(); - let mut serialized = sig.R.to_bytes().as_ref().to_vec(); - serialized.extend(sig.s.to_repr().as_ref()); - assert_eq!(hex::encode(serialized), vectors.sig); + // Ensure they match + assert_eq!(preprocess.serialize(), expected); + } } } From 39b3452da1719696495613b928652942225e1de9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 28 Feb 2023 21:57:18 -0500 Subject: [PATCH 21/71] 3.6.3 Check commitment encoding Also extends tests of 3.6.2 and so on. 
--- crypto/frost/src/tests/vectors.rs | 104 +++++++++++++++++++++--------- 1 file changed, 74 insertions(+), 30 deletions(-) diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index d19f1163..7a670f1a 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -16,9 +16,9 @@ use dkg::tests::key_gen; use crate::{ curve::Curve, Participant, ThresholdCore, ThresholdKeys, FrostError, - algorithm::{Schnorr, Hram}, + algorithm::{IetfTranscript, Schnorr, Hram}, sign::{ - Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, + Writable, Nonce, GeneratorCommitments, NonceCommitments, Commitments, Preprocess, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, }, tests::{clone_without, recover_key, algorithm_machines, commit_and_shares, sign}, @@ -36,6 +36,7 @@ pub struct Vectors { pub nonce_randomness: Vec<[String; 2]>, pub nonces: Vec<[String; 2]>, + pub commitments: Vec<[String; 2]>, pub sig_shares: Vec, @@ -82,6 +83,14 @@ impl From for Vectors { .values() .map(|value| [to_str(&value["hiding_nonce"]), to_str(&value["binding_nonce"])]) .collect(), + commitments: value["round_one_outputs"]["participants"] + .as_object() + .unwrap() + .values() + .map(|value| { + [to_str(&value["hiding_nonce_commitment"]), to_str(&value["binding_nonce_commitment"])] + }) + .collect(), sig_shares: value["round_two_outputs"]["participants"] .as_object() @@ -195,18 +204,33 @@ pub fn test_with_vectors>( let nonces = [nonce(0), nonce(1)]; let these_commitments = [C::generator() * nonces[0].deref(), C::generator() * nonces[1].deref()]; - let machine = machine.unsafe_override_preprocess( - vec![Nonce(nonces)], - Preprocess { - commitments: Commitments { - nonces: vec![NonceCommitments { - generators: vec![GeneratorCommitments(these_commitments)], - }], - dleq: None, - }, - addendum: (), - }, + + assert_eq!( + these_commitments[0].to_bytes().as_ref(), + 
hex::decode(&vectors.commitments[c][0]).unwrap() ); + assert_eq!( + these_commitments[1].to_bytes().as_ref(), + hex::decode(&vectors.commitments[c][1]).unwrap() + ); + + let preprocess = Preprocess { + commitments: Commitments { + nonces: vec![NonceCommitments { + generators: vec![GeneratorCommitments(these_commitments)], + }], + dleq: None, + }, + addendum: (), + }; + // FROST doesn't specify how to serialize these together, yet this is sane + // (and the simplest option) + assert_eq!( + preprocess.serialize(), + hex::decode(vectors.commitments[c][0].clone() + &vectors.commitments[c][1]).unwrap() + ); + + let machine = machine.unsafe_override_preprocess(vec![Nonce(nonces)], preprocess); commitments.insert( *i, @@ -258,7 +282,7 @@ pub fn test_with_vectors>( // the current codebase // A transparent RNG which has a fixed output - struct TransparentRng(Option<[u8; 32]>); + struct TransparentRng(Vec<[u8; 32]>); impl RngCore for TransparentRng { fn next_u32(&mut self) -> u32 { unimplemented!() @@ -267,7 +291,7 @@ pub fn test_with_vectors>( unimplemented!() } fn fill_bytes(&mut self, dest: &mut [u8]) { - dest.copy_from_slice(&self.0.take().unwrap()) + dest.copy_from_slice(&self.0.remove(0)) } fn try_fill_bytes(&mut self, _: &mut [u8]) -> Result<(), rand_core::Error> { unimplemented!() @@ -286,21 +310,41 @@ pub fn test_with_vectors>( let share = Zeroizing::new( C::read_F::<&[u8]>(&mut hex::decode(&vectors.shares[l - 1]).unwrap().as_ref()).unwrap(), ); - for nonce in 0 .. 
2 { + + let randomness = vectors.nonce_randomness[i] + .iter() + .map(|randomness| hex::decode(randomness).unwrap().try_into().unwrap()) + .collect::>(); + + let nonces = vectors.nonces[i] + .iter() + .map(|nonce| { + Zeroizing::new(C::read_F::<&[u8]>(&mut hex::decode(nonce).unwrap().as_ref()).unwrap()) + }) + .collect::>(); + + for (randomness, nonce) in randomness.iter().zip(&nonces) { // Nonces are only present for participating signers, hence i - assert_eq!( - C::random_nonce( - &share, - &mut TransparentRng(Some( - hex::decode(&vectors.nonce_randomness[i][nonce]).unwrap().try_into().unwrap() - )) - ), - Zeroizing::new( - C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[i][nonce]).unwrap().as_ref()) - .unwrap() - ) - ); + assert_eq!(C::random_nonce(&share, &mut TransparentRng(vec![*randomness])), *nonce); } + + // Also test it at the Commitments level + let (generated_nonces, commitments) = Commitments::::new::<_, IetfTranscript>( + &mut TransparentRng(randomness), + &share, + &[vec![C::generator()]], + &[], + ); + + assert_eq!(generated_nonces.len(), 1); + assert_eq!(generated_nonces[0].0, [nonces[0].clone(), nonces[1].clone()]); + + let mut commitments_bytes = vec![]; + commitments.write(&mut commitments_bytes).unwrap(); + assert_eq!( + commitments_bytes, + hex::decode(vectors.commitments[i][0].clone() + &vectors.commitments[i][1]).unwrap() + ); } // This doesn't verify C::random_nonce is called correctly, where the code should call it with @@ -334,13 +378,13 @@ pub fn test_with_vectors>( // Calculate the expected nonces let mut expected = (C::generator() * - C::random_nonce(keys[i].secret_share(), &mut TransparentRng(Some(randomness.0))).deref()) + C::random_nonce(keys[i].secret_share(), &mut TransparentRng(vec![randomness.0])).deref()) .to_bytes() .as_ref() .to_vec(); expected.extend( (C::generator() * - C::random_nonce(keys[i].secret_share(), &mut TransparentRng(Some(randomness.1))) + C::random_nonce(keys[i].secret_share(), &mut 
TransparentRng(vec![randomness.1])) .deref()) .to_bytes() .as_ref(), From 6a15b21949e7ca7a177d7c06a7ae65ddda2429c1 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 28 Feb 2023 23:14:59 -0500 Subject: [PATCH 22/71] 3.6.4 Document inclusion of Ed448 HRAM vector --- crypto/frost/src/tests/literal/ed448.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/crypto/frost/src/tests/literal/ed448.rs b/crypto/frost/src/tests/literal/ed448.rs index dc5bf3ac..79b7679b 100644 --- a/crypto/frost/src/tests/literal/ed448.rs +++ b/crypto/frost/src/tests/literal/ed448.rs @@ -9,6 +9,14 @@ use crate::{ tests::vectors::{Vectors, test_with_vectors}, }; +// This is a vector from RFC 8032 to sanity check the HRAM is properly implemented +// The RFC 8032 Ed448 HRAM is much more complex than the other HRAMs, hence why it's helpful to +// have additional testing for it +// Additionally, FROST, despite being supposed to use the RFC 8032 HRAMs, originally applied +// Ed25519's HRAM to both Ed25519 and Ed448 +// This test was useful when proposing the corrections to the spec to demonstrate the correctness +// the new algorithm/vectors +// While we could test all Ed448 vectors here, this is sufficient for sanity #[test] fn ed448_8032_vector() { let context = hex::decode("666f6f").unwrap(); From 62b3036cbd3d0fc9d024fde5b377567db6b90d53 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 28 Feb 2023 23:23:22 -0500 Subject: [PATCH 23/71] 3.6.5 Document origin of vectors --- crypto/frost/src/tests/vectors.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 7a670f1a..8441d364 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -43,6 +43,9 @@ pub struct Vectors { pub sig: String, } +// Vectors are expected to be formatted per the IETF proof of concept +// The included vectors are direcly from +// 
https://github.com/cfrg/draft-irtf-cfrg-frost/tree/draft-irtf-cfrg-frost-11/poc #[cfg(test)] impl From for Vectors { fn from(value: serde_json::Value) -> Vectors { From 5a3406bb5f483dd84efc2eea34ab6092ca13bf10 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 1 Mar 2023 00:35:37 -0500 Subject: [PATCH 24/71] 3.6.6 Further document nonces This was already a largely documented file. While the terminology is potentially ambiguous, there's not a clearer path perceived at this time. --- crypto/frost/src/algorithm.rs | 2 +- crypto/frost/src/nonce.rs | 8 ++++++-- crypto/frost/src/sign.rs | 4 ++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index e0679523..16f0bfa6 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -38,7 +38,7 @@ pub trait Algorithm: Clone { fn transcript(&mut self) -> &mut Self::Transcript; /// Obtain the list of nonces to generate, as specified by the generators to create commitments - /// against per-nonce + /// against per-nonce. fn nonces(&self) -> Vec>; /// Generate an addendum to FROST"s preprocessing stage. 
diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs index 71f5fa47..c4c5f361 100644 --- a/crypto/frost/src/nonce.rs +++ b/crypto/frost/src/nonce.rs @@ -3,10 +3,11 @@ // Then there is a signature (a modified Chaum Pedersen proof) using multiple nonces at once // // Accordingly, in order for this library to be robust, it supports generating an arbitrary amount -// of nonces, each against an arbitrary list of basepoints +// of nonces, each against an arbitrary list of generators // // Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b) -// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity +// When representations across multiple generators are provided, a DLEq proof is also provided to +// confirm their integrity use core::ops::Deref; use std::{ @@ -72,6 +73,7 @@ impl GeneratorCommitments { #[derive(Clone, PartialEq, Eq)] pub(crate) struct NonceCommitments { // Called generators as these commitments are indexed by generator later on + // So to get the commitments for the first generator, it'd be commitments.generators[0] pub(crate) generators: Vec>, } @@ -130,9 +132,11 @@ impl NonceCommitments { } } +/// Commitments for all the nonces across all their generators. #[derive(Clone, PartialEq, Eq)] pub(crate) struct Commitments { // Called nonces as these commitments are indexed by nonce + // So to get the commitments for the first nonce, it'd be commitments.nonces[0] pub(crate) nonces: Vec>, // DLEq Proof proving that each set of commitments were generated using a single pair of discrete // logarithms diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 4c7a15ec..b869e610 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -221,8 +221,8 @@ pub trait SignMachine: Sized { /// security as your private key share. fn cache(self) -> CachedPreprocess; - /// Create a sign machine from a cached preprocess. 
After this, the preprocess should be fully - /// deleted, as it must never be reused. It is + /// Create a sign machine from a cached preprocess. After this, the preprocess must be deleted so + /// it's never reused. Any reuse would cause the signer to leak their secret share. fn from_cache( params: Self::Params, keys: Self::Keys, From a42a84e1e8adb83451d70fe9d9e66cd47aba85da Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 1 Mar 2023 00:42:01 -0500 Subject: [PATCH 25/71] 3.6.7 Seal IetfTranscript --- crypto/frost/src/algorithm.rs | 43 +++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index 16f0bfa6..f676879b 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -87,32 +87,37 @@ pub trait Algorithm: Clone { ) -> Result, ()>; } -/// IETF-compliant transcript. This is incredibly naive and should not be used within larger -/// protocols. -#[derive(Clone, Debug)] -pub struct IetfTranscript(Vec); -impl Transcript for IetfTranscript { - type Challenge = Vec; +mod sealed { + pub use super::*; - fn new(_: &'static [u8]) -> IetfTranscript { - IetfTranscript(vec![]) - } + /// IETF-compliant transcript. This is incredibly naive and should not be used within larger + /// protocols. 
+ #[derive(Clone, Debug)] + pub struct IetfTranscript(pub(crate) Vec); + impl Transcript for IetfTranscript { + type Challenge = Vec; - fn domain_separate(&mut self, _: &[u8]) {} + fn new(_: &'static [u8]) -> IetfTranscript { + IetfTranscript(vec![]) + } - fn append_message>(&mut self, _: &'static [u8], message: M) { - self.0.extend(message.as_ref()); - } + fn domain_separate(&mut self, _: &[u8]) {} - fn challenge(&mut self, _: &'static [u8]) -> Vec { - self.0.clone() - } + fn append_message>(&mut self, _: &'static [u8], message: M) { + self.0.extend(message.as_ref()); + } - // FROST won't use this and this shouldn't be used outside of FROST - fn rng_seed(&mut self, _: &'static [u8]) -> [u8; 32] { - unimplemented!() + fn challenge(&mut self, _: &'static [u8]) -> Vec { + self.0.clone() + } + + // FROST won't use this and this shouldn't be used outside of FROST + fn rng_seed(&mut self, _: &'static [u8]) -> [u8; 32] { + unimplemented!() + } } } +pub(crate) use sealed::IetfTranscript; /// HRAm usable by the included Schnorr signature algorithm to generate challenges. pub trait Hram: Clone { From c6284b85a4c3ccb91e336f30cda9b11d0b6bea9f Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 1 Mar 2023 01:06:13 -0500 Subject: [PATCH 26/71] 3.6.8 Simplify offset splitting This wasn't done prior to be 'leaderless', as now the participant with the lowest ID has an extra step, yet this is still trivial. There's also notable performance benefits to not taking the previous dividing approach, which performed an exp. 
--- crypto/dkg/src/lib.rs | 36 ++++++++++++++++++++---------------- crypto/frost/src/sign.rs | 2 +- docs/cryptography/FROST.md | 6 ++---- 3 files changed, 23 insertions(+), 21 deletions(-) diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index e513665d..ce0ce400 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -401,31 +401,35 @@ impl ThresholdKeys { self.core.serialize() } - pub fn view(&self, included: &[Participant]) -> Result, DkgError<()>> { + pub fn view(&self, mut included: Vec) -> Result, DkgError<()>> { if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len()) { Err(DkgError::InvalidSigningSet)?; } + included.sort(); - let offset_share = self.offset.unwrap_or_else(C::F::zero) * - C::F::from(included.len().try_into().unwrap()).invert().unwrap(); - let offset_verification_share = C::generator() * offset_share; + let mut secret_share = + Zeroizing::new(lagrange::(self.params().i, &included) * self.secret_share().deref()); + + let mut verification_shares = self.verification_shares(); + for (i, share) in verification_shares.iter_mut() { + *share *= lagrange::(*i, &included); + } + + // The offset is included by adding it to the participant with the lowest ID + let offset = self.offset.unwrap_or_else(C::F::zero); + if included[0] == self.params().i() { + *secret_share += offset; + } + *verification_shares.get_mut(&included[0]).unwrap() += C::generator() * offset; Ok(ThresholdView { - offset: self.offset.unwrap_or_else(C::F::zero), + offset, group_key: self.group_key(), - secret_share: Zeroizing::new( - (lagrange::(self.params().i, included) * self.secret_share().deref()) + offset_share, - ), + secret_share, original_verification_shares: self.verification_shares(), - verification_shares: self - .verification_shares() - .iter() - .map(|(l, share)| { - (*l, (*share * lagrange::(*l, included)) + offset_verification_share) - }) - .collect(), - included: included.to_vec(), + verification_shares, + 
included, }) } } diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index b869e610..9634b161 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -318,7 +318,7 @@ impl> SignMachine for AlgorithmSignMachi } } - let view = self.params.keys.view(&included).unwrap(); + let view = self.params.keys.view(included.clone()).unwrap(); validate_map(&preprocesses, &included, multisig_params.i())?; { diff --git a/docs/cryptography/FROST.md b/docs/cryptography/FROST.md index 9f6b3378..464052e2 100644 --- a/docs/cryptography/FROST.md +++ b/docs/cryptography/FROST.md @@ -39,10 +39,8 @@ elements instead of `2n`. Finally, to support additive offset signing schemes (accounts, stealth addresses, randomization), it's possible to specify a scalar offset for keys. The public key signed for is also offset by this value. During the signing -process, the offset is explicitly transcripted. Then, the offset is divided by -`p`, the amount of participating signers, and each signer adds it to their -post-interpolation key share. This maintains a leaderless protocol while still -being correct. +process, the offset is explicitly transcripted. Then, the offset is added to the +participant with the lowest ID. # Caching From 2fd5cd8161426182b4d9ea481d65d9d6ff995dbf Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 1 Mar 2023 08:02:45 -0500 Subject: [PATCH 27/71] 3.6.9 Add several tests to the FROST library Offset signing is now tested. Multi-nonce algorithms are now tested. Multi-generator nonce algorithms are now tested. More fault cases are now tested as well. 
--- crypto/dkg/src/lib.rs | 1 + crypto/frost/src/algorithm.rs | 2 +- crypto/frost/src/tests/mod.rs | 105 +++++++++++-- crypto/frost/src/tests/nonces.rs | 236 ++++++++++++++++++++++++++++++ crypto/frost/src/tests/vectors.rs | 39 +---- 5 files changed, 336 insertions(+), 47 deletions(-) create mode 100644 crypto/frost/src/tests/nonces.rs diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index ce0ce400..4c63104f 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -365,6 +365,7 @@ impl ThresholdKeys { /// Offset the keys by a given scalar to allow for account and privacy schemes. /// This offset is ephemeral and will not be included when these keys are serialized. /// Keys offset multiple times will form a new offset of their sum. + #[must_use] pub fn offset(&self, offset: C::F) -> ThresholdKeys { let mut res = self.clone(); // Carry any existing offset diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index f676879b..ca6108dc 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -6,7 +6,7 @@ use rand_core::{RngCore, CryptoRng}; use transcript::Transcript; -use crate::{Curve, Participant, FrostError, ThresholdKeys, ThresholdView}; +use crate::{Participant, ThresholdKeys, ThresholdView, Curve, FrostError}; pub use schnorr::SchnorrSignature; /// Write an addendum to a writer. diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index 690be9d5..a0d2e8fe 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -5,11 +5,15 @@ use rand_core::{RngCore, CryptoRng}; pub use dkg::tests::{key_gen, recover_key}; use crate::{ - Curve, Participant, ThresholdKeys, - algorithm::Algorithm, + Curve, Participant, ThresholdKeys, FrostError, + algorithm::{Algorithm, Hram, Schnorr}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; +/// Tests for the nonce handling code. 
+pub mod nonces; +use nonces::{test_multi_nonce, test_invalid_commitment, test_invalid_dleq_proof}; + /// Vectorized test suite to ensure consistency. pub mod vectors; @@ -62,9 +66,8 @@ pub fn algorithm_machines>( .collect() } -// Run the commit step and generate signature shares -#[allow(clippy::type_complexity)] -pub(crate) fn commit_and_shares< +// Run the preprocess step +pub(crate) fn preprocess< R: RngCore + CryptoRng, M: PreprocessMachine, F: FnMut(&mut R, &mut HashMap), @@ -72,11 +75,7 @@ pub(crate) fn commit_and_shares< rng: &mut R, mut machines: HashMap, mut cache: F, - msg: &[u8], -) -> ( - HashMap>::SignatureMachine>, - HashMap>::SignatureShare>, -) { +) -> (HashMap, HashMap) { let mut commitments = HashMap::new(); let mut machines = machines .drain() @@ -93,6 +92,26 @@ pub(crate) fn commit_and_shares< cache(rng, &mut machines); + (machines, commitments) +} + +// Run the preprocess and generate signature shares +#[allow(clippy::type_complexity)] +pub(crate) fn preprocess_and_shares< + R: RngCore + CryptoRng, + M: PreprocessMachine, + F: FnMut(&mut R, &mut HashMap), +>( + rng: &mut R, + machines: HashMap, + cache: F, + msg: &[u8], +) -> ( + HashMap>::SignatureMachine>, + HashMap>::SignatureShare>, +) { + let (mut machines, commitments) = preprocess(rng, machines, cache); + let mut shares = HashMap::new(); let machines = machines .drain() @@ -120,7 +139,7 @@ fn sign_internal< cache: F, msg: &[u8], ) -> M::Signature { - let (mut machines, shares) = commit_and_shares(rng, machines, cache, msg); + let (mut machines, shares) = preprocess_and_shares(rng, machines, cache, msg); let mut signature = None; for (i, machine) in machines.drain() { @@ -172,3 +191,67 @@ pub fn sign( msg, ) } + +/// Test a basic Schnorr signature. 
+pub fn test_schnorr>(rng: &mut R) { + const MSG: &[u8] = b"Hello, World!"; + + let keys = key_gen(&mut *rng); + let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); + let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); + assert!(sig.verify(group_key, H::hram(&sig.R, &group_key, MSG))); +} + +// Test an offset Schnorr signature. +pub fn test_offset_schnorr>(rng: &mut R) { + const MSG: &[u8] = b"Hello, World!"; + + let mut keys = key_gen(&mut *rng); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); + + let offset = C::F::from(5); + let offset_key = group_key + (C::generator() * offset); + for (_, keys) in keys.iter_mut() { + *keys = keys.offset(offset); + assert_eq!(keys.group_key(), offset_key); + } + + let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); + let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); + assert!(sig.verify(offset_key, H::hram(&sig.R, &group_key, MSG))); +} + +// Test blame for an invalid Schnorr signature share. +pub fn test_schnorr_blame>(rng: &mut R) { + const MSG: &[u8] = b"Hello, World!"; + + let keys = key_gen(&mut *rng); + let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); + + let (mut machines, shares) = preprocess_and_shares(&mut *rng, machines, |_, _| {}, MSG); + + for (i, machine) in machines.drain() { + let mut shares = clone_without(&shares, &i); + + // Select a random participant to give an invalid share + let participants = shares.keys().collect::>(); + let faulty = *participants + [usize::try_from(rng.next_u64() % u64::try_from(participants.len()).unwrap()).unwrap()]; + shares.get_mut(&faulty).unwrap().invalidate(); + + assert_eq!(machine.complete(shares).err(), Some(FrostError::InvalidShare(faulty))); + } +} + +// Run a variety of tests against a ciphersuite. 
+pub fn test_ciphersuite>(rng: &mut R) { + test_schnorr::(rng); + test_offset_schnorr::(rng); + test_schnorr_blame::(rng); + + test_multi_nonce::(rng); + test_invalid_commitment::(rng); + test_invalid_dleq_proof::(rng); +} diff --git a/crypto/frost/src/tests/nonces.rs b/crypto/frost/src/tests/nonces.rs new file mode 100644 index 00000000..134d4a28 --- /dev/null +++ b/crypto/frost/src/tests/nonces.rs @@ -0,0 +1,236 @@ +use std::io::{self, Read}; + +use zeroize::Zeroizing; + +use rand_core::{RngCore, CryptoRng, SeedableRng}; +use rand_chacha::ChaCha20Rng; + +use transcript::{Transcript, RecommendedTranscript}; + +use group::{ff::Field, Group, GroupEncoding}; + +use dleq::MultiDLEqProof; +pub use dkg::tests::{key_gen, recover_key}; + +use crate::{ + Curve, Participant, ThresholdView, ThresholdKeys, FrostError, + algorithm::Algorithm, + sign::{Writable, SignMachine}, + tests::{algorithm_machines, preprocess, sign}, +}; + +#[derive(Clone)] +struct MultiNonce { + transcript: RecommendedTranscript, + nonces: Option>>, +} + +impl MultiNonce { + fn new() -> MultiNonce { + MultiNonce { + transcript: RecommendedTranscript::new(b"FROST MultiNonce Algorithm Test"), + nonces: None, + } + } +} + +fn nonces() -> Vec> { + vec![ + vec![C::generator(), C::generator().double()], + vec![C::generator(), C::generator() * C::F::from(3), C::generator() * C::F::from(4)], + ] +} + +fn verify_nonces(nonces: &[Vec]) { + assert_eq!(nonces.len(), 2); + + // Each nonce should be a series of commitments, over some generators, which share a discrete log + // Since they share a discrete log, their only distinction should be the generator + // Above, the generators were created with a known relationship + // Accordingly, we can check here that relationship holds to make sure these commitments are well + // formed + assert_eq!(nonces[0].len(), 2); + assert_eq!(nonces[0][0].double(), nonces[0][1]); + + assert_eq!(nonces[1].len(), 3); + assert_eq!(nonces[1][0] * C::F::from(3), nonces[1][1]); + 
assert_eq!(nonces[1][0] * C::F::from(4), nonces[1][2]); + + assert!(nonces[0][0] != nonces[1][0]); +} + +impl Algorithm for MultiNonce { + type Transcript = RecommendedTranscript; + type Addendum = (); + type Signature = (); + + fn transcript(&mut self) -> &mut Self::Transcript { + &mut self.transcript + } + + fn nonces(&self) -> Vec> { + nonces::() + } + + fn preprocess_addendum(&mut self, _: &mut R, _: &ThresholdKeys) {} + + fn read_addendum(&self, _: &mut R) -> io::Result { + Ok(()) + } + + fn process_addendum( + &mut self, + _: &ThresholdView, + _: Participant, + _: (), + ) -> Result<(), FrostError> { + Ok(()) + } + + fn sign_share( + &mut self, + _: &ThresholdView, + nonce_sums: &[Vec], + nonces: Vec>, + _: &[u8], + ) -> C::F { + // Verify the nonce sums are as expected + verify_nonces::(nonce_sums); + + // Verify we actually have two nonces and that they're distinct + assert_eq!(nonces.len(), 2); + assert!(nonces[0] != nonces[1]); + + // Save the nonce sums for later so we can check they're consistent with the call to verify + assert!(self.nonces.is_none()); + self.nonces = Some(nonce_sums.to_vec()); + + // Sum the nonces so we can later check they actually have a relationship to nonce_sums + let mut res = C::F::zero(); + + // Weight each nonce + // This is probably overkill, since their unweighted forms would practically still require + // some level of crafting to pass a naive sum via malleability, yet this makes it more robust + for nonce in nonce_sums { + self.transcript.domain_separate(b"nonce"); + for commitment in nonce { + self.transcript.append_message(b"commitment", commitment.to_bytes()); + } + } + let mut rng = ChaCha20Rng::from_seed(self.transcript.clone().rng_seed(b"weight")); + + for nonce in nonces { + res += *nonce * C::F::random(&mut rng); + } + res + } + + #[must_use] + fn verify(&self, _: C::G, nonces: &[Vec], sum: C::F) -> Option { + verify_nonces::(nonces); + assert_eq!(&self.nonces.clone().unwrap(), nonces); + + // Make sure the nonce 
sums actually relate to the nonces + let mut res = C::G::identity(); + let mut rng = ChaCha20Rng::from_seed(self.transcript.clone().rng_seed(b"weight")); + for nonce in nonces { + res += nonce[0] * C::F::random(&mut rng); + } + assert_eq!(res, C::generator() * sum); + + Some(()) + } + + fn verify_share(&self, _: C::G, _: &[Vec], _: C::F) -> Result, ()> { + panic!("share verification triggered"); + } +} + +/// Test a multi-nonce, multi-generator algorithm. +// Specifically verifies this library can: +// 1) Generate multiple nonces +// 2) Provide the group nonces (nonce_sums) across multiple generators, still with the same +// discrete log +// 3) Provide algorithms with nonces which match the group nonces +pub fn test_multi_nonce(rng: &mut R) { + let keys = key_gen::(&mut *rng); + let machines = algorithm_machines(&mut *rng, MultiNonce::::new(), &keys); + sign(&mut *rng, MultiNonce::::new(), keys.clone(), machines, &[]); +} + +/// Test malleating a commitment for a nonce across generators causes the preprocess to error. 
+pub fn test_invalid_commitment(rng: &mut R) { + let keys = key_gen::(&mut *rng); + let machines = algorithm_machines(&mut *rng, MultiNonce::::new(), &keys); + let (machines, mut preprocesses) = preprocess(&mut *rng, machines, |_, _| {}); + + // Select a random participant to give an invalid commitment + let participants = preprocesses.keys().collect::>(); + let faulty = *participants + [usize::try_from(rng.next_u64() % u64::try_from(participants.len()).unwrap()).unwrap()]; + + // Grab their preprocess + let mut preprocess = preprocesses.remove(&faulty).unwrap(); + + // Mutate one of the commitments + let nonce = + preprocess.commitments.nonces.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2).unwrap(); + let generators_len = nonce.generators.len(); + *nonce + .generators + .get_mut(usize::try_from(rng.next_u64()).unwrap() % generators_len) + .unwrap() + .0 + .get_mut(usize::try_from(rng.next_u64()).unwrap() % 2) + .unwrap() = C::G::random(&mut *rng); + + // The commitments are validated at time of deserialization (read_preprocess) + // Accordingly, serialize it and read it again to make sure that errors + assert!(machines + .iter() + .next() + .unwrap() + .1 + .read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref()) + .is_err()); +} + +/// Test malleating the DLEq proof for a preprocess causes it to error. 
+pub fn test_invalid_dleq_proof(rng: &mut R) { + let keys = key_gen::(&mut *rng); + let machines = algorithm_machines(&mut *rng, MultiNonce::::new(), &keys); + let (machines, mut preprocesses) = preprocess(&mut *rng, machines, |_, _| {}); + + // Select a random participant to give an invalid DLEq proof + let participants = preprocesses.keys().collect::>(); + let faulty = *participants + [usize::try_from(rng.next_u64() % u64::try_from(participants.len()).unwrap()).unwrap()]; + + // Invalidate it by replacing it with a completely different proof + let dlogs = [Zeroizing::new(C::F::random(&mut *rng)), Zeroizing::new(C::F::random(&mut *rng))]; + let mut preprocess = preprocesses.remove(&faulty).unwrap(); + preprocess.commitments.dleq = Some(MultiDLEqProof::prove( + &mut *rng, + &mut RecommendedTranscript::new(b"Invalid DLEq Proof"), + &nonces::(), + &dlogs, + )); + + assert!(machines + .iter() + .next() + .unwrap() + .1 + .read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref()) + .is_err()); + + // Also test None for a proof will cause an error + preprocess.commitments.dleq = None; + assert!(machines + .iter() + .next() + .unwrap() + .1 + .read_preprocess::<&[u8]>(&mut preprocess.serialize().as_ref()) + .is_err()); +} diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 8441d364..c7aaf0ac 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -11,17 +11,15 @@ use rand_chacha::ChaCha20Rng; use group::{ff::PrimeField, GroupEncoding}; -use dkg::tests::key_gen; - use crate::{ curve::Curve, - Participant, ThresholdCore, ThresholdKeys, FrostError, - algorithm::{IetfTranscript, Schnorr, Hram}, + Participant, ThresholdCore, ThresholdKeys, + algorithm::{IetfTranscript, Hram, Schnorr}, sign::{ Writable, Nonce, GeneratorCommitments, NonceCommitments, Commitments, Preprocess, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, }, - tests::{clone_without, recover_key, algorithm_machines, 
commit_and_shares, sign}, + tests::{clone_without, recover_key, test_ciphersuite}, }; pub struct Vectors { @@ -147,36 +145,7 @@ pub fn test_with_vectors>( rng: &mut R, vectors: Vectors, ) { - // Test a basic Schnorr signature - { - let keys = key_gen(&mut *rng); - let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); - const MSG: &[u8] = b"Hello, World!"; - let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); - let group_key = keys[&Participant::new(1).unwrap()].group_key(); - assert!(sig.verify(group_key, H::hram(&sig.R, &group_key, MSG))); - } - - // Test blame on an invalid Schnorr signature share - { - let keys = key_gen(&mut *rng); - let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); - const MSG: &[u8] = b"Hello, World!"; - - let (mut machines, mut shares) = commit_and_shares(&mut *rng, machines, |_, _| {}, MSG); - let faulty = *shares.keys().next().unwrap(); - shares.get_mut(&faulty).unwrap().invalidate(); - - for (i, machine) in machines.drain() { - if i == faulty { - continue; - } - assert_eq!( - machine.complete(clone_without(&shares, &i)).err(), - Some(FrostError::InvalidShare(faulty)) - ); - } - } + test_ciphersuite::(rng); // Test against the vectors let keys = vectors_to_multisig_keys::(&vectors); From 15d6be16783064a3e94780e1fd79861534c6aef8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 01:13:07 -0500 Subject: [PATCH 28/71] 3.7.1 Deduplicate flattening/zeroize code While the prior intent was to avoid zeroizing for vartime verification, which is assumed to not have any private data, this simplifies the code and promotes safety. 
--- crypto/multiexp/src/batch.rs | 59 +++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 28 deletions(-) diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 3fd07733..5584ba69 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -1,16 +1,32 @@ use rand_core::{RngCore, CryptoRng}; -use zeroize::Zeroize; +use zeroize::{Zeroize, Zeroizing}; use ff::{Field, PrimeFieldBits}; use group::Group; use crate::{multiexp, multiexp_vartime}; +// Flatten the contained statements to a single Vec. +// Wrapped in Zeroizing in case any of the included statements contain private values. +#[allow(clippy::type_complexity)] +fn flat( + slice: &[(Id, Vec<(G::Scalar, G)>)], +) -> Zeroizing> +where + ::Scalar: PrimeFieldBits + Zeroize, +{ + Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::>()) +} + /// A batch verifier intended to verify a series of statements are each equivalent to zero. #[allow(clippy::type_complexity)] #[derive(Clone, Zeroize)] -pub struct BatchVerifier(Vec<(Id, Vec<(G::Scalar, G)>)>); +pub struct BatchVerifier( + Zeroizing)>>, +) +where + ::Scalar: PrimeFieldBits + Zeroize; impl BatchVerifier where @@ -19,7 +35,7 @@ where /// Create a new batch verifier, expected to verify the following amount of statements. /// This is a size hint and is not required to be accurate. pub fn new(capacity: usize) -> BatchVerifier { - BatchVerifier(Vec::with_capacity(capacity)) + BatchVerifier(Zeroizing::new(Vec::with_capacity(capacity))) } /// Queue a statement for batch verification. @@ -71,31 +87,20 @@ where } {} weight }; + self.0.push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); } /// Perform batch verification, returning a boolean of if the statements equaled zero. 
#[must_use] - pub fn verify_core(&self) -> bool { - let mut flat = self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::>(); - let res = multiexp(&flat).is_identity().into(); - flat.zeroize(); - res - } - - /// Perform batch verification, zeroizing the statements verified. - pub fn verify(mut self) -> bool { - let res = self.verify_core(); - self.zeroize(); - res + pub fn verify(&self) -> bool { + multiexp(&flat(&self.0)).is_identity().into() } /// Perform batch verification in variable time. #[must_use] pub fn verify_vartime(&self) -> bool { - multiexp_vartime(&self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::>()) - .is_identity() - .into() + multiexp_vartime(&flat(&self.0)).is_identity().into() } /// Perform a binary search to identify which statement does not equal 0, returning None if all @@ -106,12 +111,8 @@ where let mut slice = self.0.as_slice(); while slice.len() > 1 { let split = slice.len() / 2; - if multiexp_vartime( - &slice[.. split].iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::>(), - ) - .is_identity() - .into() - { + + if multiexp_vartime(&flat(&slice[.. split])).is_identity().into() { slice = &slice[split ..]; } else { slice = &slice[.. split]; @@ -126,10 +127,12 @@ where /// Perform constant time batch verification, and if verification fails, identify one faulty /// statement in variable time. 
- pub fn verify_with_vartime_blame(mut self) -> Result<(), Id> { - let res = if self.verify_core() { Ok(()) } else { Err(self.blame_vartime().unwrap()) }; - self.zeroize(); - res + pub fn verify_with_vartime_blame(&self) -> Result<(), Id> { + if self.verify() { + Ok(()) + } else { + Err(self.blame_vartime().unwrap()) + } } /// Perform variable time batch verification, and if verification fails, identify one faulty From 93d5f419171e401f9969259284715bab4441de9e Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 01:55:08 -0500 Subject: [PATCH 29/71] 3.7.2 Randomize which side odd elements end up on during blame --- crypto/multiexp/src/batch.rs | 45 +++++++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 5584ba69..24384596 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -22,11 +22,13 @@ where /// A batch verifier intended to verify a series of statements are each equivalent to zero. #[allow(clippy::type_complexity)] #[derive(Clone, Zeroize)] -pub struct BatchVerifier( - Zeroizing)>>, -) +pub struct BatchVerifier where - ::Scalar: PrimeFieldBits + Zeroize; + ::Scalar: PrimeFieldBits + Zeroize, +{ + split: u64, + statements: Zeroizing)>>, +} impl BatchVerifier where @@ -35,7 +37,7 @@ where /// Create a new batch verifier, expected to verify the following amount of statements. /// This is a size hint and is not required to be accurate. pub fn new(capacity: usize) -> BatchVerifier { - BatchVerifier(Zeroizing::new(Vec::with_capacity(capacity))) + BatchVerifier { split: 0, statements: Zeroizing::new(Vec::with_capacity(capacity)) } } /// Queue a statement for batch verification. 
@@ -45,8 +47,15 @@ where id: Id, pairs: I, ) { + // If this is the first time we're queueing a value, grab a u64 (AKA 64 bits) to determine + // which side to handle odd splits with during blame (preventing malicious actors from gaming + // the system by maximizing the round length) + if self.statements.len() == 0 { + self.split = rng.next_u64(); + } + // Define a unique scalar factor for this set of variables so individual items can't overlap - let u = if self.0.is_empty() { + let u = if self.statements.is_empty() { G::Scalar::one() } else { let mut weight; @@ -88,19 +97,21 @@ where weight }; - self.0.push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); + self + .statements + .push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); } /// Perform batch verification, returning a boolean of if the statements equaled zero. #[must_use] pub fn verify(&self) -> bool { - multiexp(&flat(&self.0)).is_identity().into() + multiexp(&flat(&self.statements)).is_identity().into() } /// Perform batch verification in variable time. #[must_use] pub fn verify_vartime(&self) -> bool { - multiexp_vartime(&flat(&self.0)).is_identity().into() + multiexp_vartime(&flat(&self.statements)).is_identity().into() } /// Perform a binary search to identify which statement does not equal 0, returning None if all @@ -108,9 +119,21 @@ where /// multiple are invalid. 
// A constant time variant may be beneficial for robust protocols pub fn blame_vartime(&self) -> Option { - let mut slice = self.0.as_slice(); + let mut slice = self.statements.as_slice(); + let mut split_side = self.split; + while slice.len() > 1 { - let split = slice.len() / 2; + let mut split = slice.len() / 2; + // If there's an odd number of elements, this can be gamed to increase the amount of rounds + // For [0, 1, 2], where 2 is invalid, this will take three rounds + // ([0], [1, 2]), then ([1], [2]), before just 2 + // If 1 and 2 were valid, this would've only taken 2 rounds to complete + // To prevent this from being gamed, if there's an odd number of elements, randomize which + // side the split occurs on + if slice.len() % 2 == 1 { + split += usize::try_from(split_side & 1).unwrap(); + split_side >>= 1; + } if multiexp_vartime(&flat(&slice[.. split])).is_identity().into() { slice = &slice[split ..]; From 8661111fc6b9228e15276a566994a9fa7a6e39f6 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 03:58:48 -0500 Subject: [PATCH 30/71] 3.7.3 Add multiexp tests --- crypto/multiexp/src/batch.rs | 8 +++ crypto/multiexp/src/lib.rs | 1 + crypto/multiexp/src/tests/batch.rs | 94 ++++++++++++++++++++++++++++++ crypto/multiexp/src/tests/mod.rs | 44 +++++++++++++- 4 files changed, 144 insertions(+), 3 deletions(-) create mode 100644 crypto/multiexp/src/tests/batch.rs diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 24384596..945c8bcb 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -130,6 +130,14 @@ where // If 1 and 2 were valid, this would've only taken 2 rounds to complete // To prevent this from being gamed, if there's an odd number of elements, randomize which // side the split occurs on + + // This does risk breaking determinism + // The concern is if the select split point causes different paths to be taken when multiple + // invalid elements exist + // While the split point may move an 
element from the right to the left, always choosing the + // left side (if it's invalid) means this will still always return the left-most, + // invalid element + if slice.len() % 2 == 1 { split += usize::try_from(split_side & 1).unwrap(); split_side >>= 1; diff --git a/crypto/multiexp/src/lib.rs b/crypto/multiexp/src/lib.rs index d0a4542a..321c5b87 100644 --- a/crypto/multiexp/src/lib.rs +++ b/crypto/multiexp/src/lib.rs @@ -169,6 +169,7 @@ where match algorithm(pairs.len()) { Algorithm::Null => Group::identity(), Algorithm::Single => pairs[0].1 * pairs[0].0, + // These functions panic if called without any pairs Algorithm::Straus(window) => straus(pairs, window), Algorithm::Pippenger(window) => pippenger(pairs, window), } diff --git a/crypto/multiexp/src/tests/batch.rs b/crypto/multiexp/src/tests/batch.rs new file mode 100644 index 00000000..02331a7b --- /dev/null +++ b/crypto/multiexp/src/tests/batch.rs @@ -0,0 +1,94 @@ +use rand_core::OsRng; + +use zeroize::Zeroize; + +use rand_core::RngCore; + +use ff::{Field, PrimeFieldBits}; +use group::Group; + +use crate::BatchVerifier; + +pub(crate) fn test_batch() +where + G::Scalar: PrimeFieldBits + Zeroize, +{ + let valid = |batch: BatchVerifier<_, G>| { + assert!(batch.verify()); + assert!(batch.verify_vartime()); + assert_eq!(batch.blame_vartime(), None); + assert_eq!(batch.verify_with_vartime_blame(), Ok(())); + assert_eq!(batch.verify_vartime_with_vartime_blame(), Ok(())); + }; + + let invalid = |batch: BatchVerifier<_, G>, id| { + assert!(!batch.verify()); + assert!(!batch.verify_vartime()); + assert_eq!(batch.blame_vartime(), Some(id)); + assert_eq!(batch.verify_with_vartime_blame(), Err(id)); + assert_eq!(batch.verify_vartime_with_vartime_blame(), Err(id)); + }; + + // Test an empty batch + let batch = BatchVerifier::new(0); + valid(batch); + + // Test a batch with one set of statements + let valid_statements = + vec![(-G::Scalar::one(), G::generator()), (G::Scalar::one(), G::generator())]; + let mut batch = 
BatchVerifier::new(1); + batch.queue(&mut OsRng, 0, valid_statements.clone()); + valid(batch); + + // Test a batch with an invalid set of statements fails properly + let invalid_statements = vec![(-G::Scalar::one(), G::generator())]; + let mut batch = BatchVerifier::new(1); + batch.queue(&mut OsRng, 0, invalid_statements.clone()); + invalid(batch, 0); + + // Test blame can properly identify faulty participants + // Run with 17 statements, rotating which one is faulty + for i in 0 .. 17 { + let mut batch = BatchVerifier::new(17); + for j in 0 .. 17 { + batch.queue( + &mut OsRng, + j, + if i == j { invalid_statements.clone() } else { valid_statements.clone() }, + ); + } + invalid(batch, i); + } + + // Test blame always identifies the left-most invalid statement + for i in 1 .. 32 { + for j in 1 .. i { + let mut batch = BatchVerifier::new(j); + let mut leftmost = None; + + // Create j statements + for k in 0 .. j { + batch.queue( + &mut OsRng, + k, + // The usage of i / 10 makes this less likely to add invalid elements, and increases + // the space between them + // For high i values, yet low j values, this will make it likely that random elements + // are at/near the end + if ((OsRng.next_u64() % u64::try_from(1 + (i / 4)).unwrap()) == 0) || + (leftmost.is_none() && (k == (j - 1))) + { + if leftmost.is_none() { + leftmost = Some(k); + } + invalid_statements.clone() + } else { + valid_statements.clone() + }, + ); + } + + invalid(batch, leftmost.unwrap()); + } + } +} diff --git a/crypto/multiexp/src/tests/mod.rs b/crypto/multiexp/src/tests/mod.rs index b587d356..550da1c0 100644 --- a/crypto/multiexp/src/tests/mod.rs +++ b/crypto/multiexp/src/tests/mod.rs @@ -10,7 +10,12 @@ use group::Group; use k256::ProjectivePoint; use dalek_ff_group::EdwardsPoint; -use crate::{straus, pippenger, multiexp, multiexp_vartime}; +use crate::{straus, straus_vartime, pippenger, pippenger_vartime, multiexp, multiexp_vartime}; + +#[cfg(feature = "batch")] +mod batch; +#[cfg(feature = 
"batch")] +use batch::test_batch; #[allow(dead_code)] fn benchmark_internal(straus_bool: bool) @@ -85,26 +90,59 @@ fn test_multiexp() where G::Scalar: PrimeFieldBits + Zeroize, { + let test = |pairs: &[_], sum| { + // These should automatically determine the best algorithm + assert_eq!(multiexp(pairs), sum); + assert_eq!(multiexp_vartime(pairs), sum); + + // Also explicitly test straus/pippenger for each bit size + if !pairs.is_empty() { + for window in 1 .. 8 { + assert_eq!(straus(pairs, window), sum); + assert_eq!(straus_vartime(pairs, window), sum); + assert_eq!(pippenger(pairs, window), sum); + assert_eq!(pippenger_vartime(pairs, window), sum); + } + } + }; + + // Test an empty multiexp is identity + test(&[], G::identity()); + + // Test an multiexp of identity/zero elements is identity + test(&[(G::Scalar::zero(), G::generator())], G::identity()); + test(&[(G::Scalar::one(), G::identity())], G::identity()); + + // Test a variety of multiexp sizes let mut pairs = Vec::with_capacity(1000); let mut sum = G::identity(); for _ in 0 .. 10 { + // Test a multiexp of a single item + // On successive loop iterations, this will test a multiexp with an odd number of pairs + pairs.push((G::Scalar::random(&mut OsRng), G::generator() * G::Scalar::random(&mut OsRng))); + sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0; + test(&pairs, sum); + for _ in 0 .. 
100 { pairs.push((G::Scalar::random(&mut OsRng), G::generator() * G::Scalar::random(&mut OsRng))); sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0; } - assert_eq!(multiexp(&pairs), sum); - assert_eq!(multiexp_vartime(&pairs), sum); + test(&pairs, sum); } } #[test] fn test_secp256k1() { test_multiexp::(); + #[cfg(feature = "batch")] + test_batch::(); } #[test] fn test_ed25519() { test_multiexp::(); + #[cfg(feature = "batch")] + test_batch::(); } #[ignore] From 408494f8dec50e8b6670b29c86db11a3da5225bb Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 03:59:49 -0500 Subject: [PATCH 31/71] 3.7.4 Always zeroize multiexp data This was handled as part of handling 3.7.1 From e5329b42e6122fb6f2ebe5c97e94107daa34f798 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 05:49:39 -0500 Subject: [PATCH 32/71] 3.7.5 Further document multiexp functions --- crypto/multiexp/src/lib.rs | 19 +++---------------- crypto/multiexp/src/pippenger.rs | 2 ++ crypto/multiexp/src/straus.rs | 19 ++++++++++++++++++- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/crypto/multiexp/src/lib.rs b/crypto/multiexp/src/lib.rs index 321c5b87..df4da3e3 100644 --- a/crypto/multiexp/src/lib.rs +++ b/crypto/multiexp/src/lib.rs @@ -19,6 +19,8 @@ pub use batch::BatchVerifier; #[cfg(test)] mod tests; +// Convert scalars to `window`-sized bit groups, as needed to index a table +// This algorithm works for `window <= 8` pub(crate) fn prep_bits(pairs: &[(G::Scalar, G)], window: u8) -> Vec> where G::Scalar: PrimeFieldBits, @@ -31,10 +33,9 @@ where let mut bits = pair.0.to_le_bits(); groupings.push(vec![0; (bits.len() + (w_usize - 1)) / w_usize]); - #[allow(unused_assignments)] for (i, mut raw_bit) in bits.iter_mut().enumerate() { let mut bit = u8::from(*raw_bit); - *raw_bit = false; + (*raw_bit).zeroize(); groupings[p][i / w_usize] |= bit << (i % w_usize); bit.zeroize(); @@ -44,20 +45,6 @@ where groupings } -pub(crate) fn prep_tables(pairs: &[(G::Scalar, G)], 
window: u8) -> Vec> { - let mut tables = Vec::with_capacity(pairs.len()); - for pair in pairs { - let p = tables.len(); - tables.push(vec![G::identity(); 2_usize.pow(window.into())]); - let mut accum = G::identity(); - for i in 1 .. tables[p].len() { - accum += pair.1; - tables[p][i] = accum; - } - } - tables -} - #[derive(Clone, Copy, PartialEq, Eq, Debug)] enum Algorithm { Null, diff --git a/crypto/multiexp/src/pippenger.rs b/crypto/multiexp/src/pippenger.rs index 79d945cb..aca872d0 100644 --- a/crypto/multiexp/src/pippenger.rs +++ b/crypto/multiexp/src/pippenger.rs @@ -5,6 +5,8 @@ use group::Group; use crate::prep_bits; +// Pippenger's algorithm for multiexponentation, as published in the SIAM Journal on Computing +// DOI: 10.1137/0209022 pub(crate) fn pippenger(pairs: &[(G::Scalar, G)], window: u8) -> G where G::Scalar: PrimeFieldBits, diff --git a/crypto/multiexp/src/straus.rs b/crypto/multiexp/src/straus.rs index a670cc74..283d7948 100644 --- a/crypto/multiexp/src/straus.rs +++ b/crypto/multiexp/src/straus.rs @@ -3,8 +3,25 @@ use zeroize::Zeroize; use ff::PrimeFieldBits; use group::Group; -use crate::{prep_bits, prep_tables}; +use crate::prep_bits; +// Create tables for every included point of size 2^window +fn prep_tables(pairs: &[(G::Scalar, G)], window: u8) -> Vec> { + let mut tables = Vec::with_capacity(pairs.len()); + for pair in pairs { + let p = tables.len(); + tables.push(vec![G::identity(); 2_usize.pow(window.into())]); + let mut accum = G::identity(); + for i in 1 .. 
tables[p].len() { + accum += pair.1; + tables[p][i] = accum; + } + } + tables +} + +// Straus's algorithm for multiexponentation, as published in The American Mathematical Monthly +// DOI: 10.2307/2310929 pub(crate) fn straus(pairs: &[(G::Scalar, G)], window: u8) -> G where G::Scalar: PrimeFieldBits + Zeroize, From 1d2ebdca62148a690d7e7a8c90daf2838f0267ab Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 06:12:02 -0500 Subject: [PATCH 33/71] 3.7.6, 3.7.7 Optimize multiexp implementations --- crypto/multiexp/src/pippenger.rs | 30 +++++++++++++++++++++++------- crypto/multiexp/src/straus.rs | 18 ++++++++++++------ 2 files changed, 35 insertions(+), 13 deletions(-) diff --git a/crypto/multiexp/src/pippenger.rs b/crypto/multiexp/src/pippenger.rs index aca872d0..e182d51e 100644 --- a/crypto/multiexp/src/pippenger.rs +++ b/crypto/multiexp/src/pippenger.rs @@ -15,8 +15,10 @@ where let mut res = G::identity(); for n in (0 .. bits[0].len()).rev() { - for _ in 0 .. window { - res = res.double(); + if n != (bits[0].len() - 1) { + for _ in 0 .. window { + res = res.double(); + } } let mut buckets = vec![G::identity(); 2_usize.pow(window.into())]; @@ -49,18 +51,32 @@ where } } - let mut buckets = vec![G::identity(); 2_usize.pow(window.into())]; + // Use None to represent identity since is_none is likely faster than is_identity + let mut buckets = vec![None; 2_usize.pow(window.into())]; for p in 0 .. bits.len() { let nibble = usize::from(bits[p][n]); if nibble != 0 { - buckets[nibble] += pairs[p].1; + if let Some(bucket) = buckets[nibble].as_mut() { + *bucket += pairs[p].1; + } else { + buckets[nibble] = Some(pairs[p].1); + } } } - let mut intermediate_sum = G::identity(); + let mut intermediate_sum = None; for b in (1 .. 
buckets.len()).rev() { - intermediate_sum += buckets[b]; - res += intermediate_sum; + if let Some(bucket) = buckets[b].as_ref() { + if let Some(intermediate_sum) = intermediate_sum.as_mut() { + *intermediate_sum += bucket; + } else { + intermediate_sum = Some(*bucket); + } + } + + if let Some(intermediate_sum) = intermediate_sum.as_ref() { + res += intermediate_sum; + } } } diff --git a/crypto/multiexp/src/straus.rs b/crypto/multiexp/src/straus.rs index 283d7948..e074d9f1 100644 --- a/crypto/multiexp/src/straus.rs +++ b/crypto/multiexp/src/straus.rs @@ -31,8 +31,10 @@ where let mut res = G::identity(); for b in (0 .. groupings[0].len()).rev() { - for _ in 0 .. window { - res = res.double(); + if b != (groupings[0].len() - 1) { + for _ in 0 .. window { + res = res.double(); + } } for s in 0 .. tables.len() { @@ -51,20 +53,24 @@ where let groupings = prep_bits(pairs, window); let tables = prep_tables(pairs, window); - let mut res = G::identity(); + let mut res: Option = None; for b in (0 .. groupings[0].len()).rev() { if b != (groupings[0].len() - 1) { for _ in 0 .. window { - res = res.double(); + res = res.map(|res| res.double()); } } for s in 0 .. 
tables.len() { if groupings[s][b] != 0 { - res += tables[s][usize::from(groupings[s][b])]; + if let Some(res) = res.as_mut() { + *res += tables[s][usize::from(groupings[s][b])]; + } else { + res = Some(tables[s][usize::from(groupings[s][b])]); + } } } } - res + res.unwrap_or_else(G::identity) } From 35043d28893c0e64529f16a368f859fedf82c452 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 08:45:09 -0500 Subject: [PATCH 34/71] 3.8.1 Document RFC 8032 compatibility --- crypto/schnorr/src/lib.rs | 7 +++++++ crypto/schnorr/src/tests.rs | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs index fed606f4..44eef31e 100644 --- a/crypto/schnorr/src/lib.rs +++ b/crypto/schnorr/src/lib.rs @@ -20,6 +20,13 @@ pub mod aggregate; mod tests; /// A Schnorr signature of the form (R, s) where s = r + cx. +/// +/// These are intended to be strict. It is generic over Ciphersuite which is for PrimeGroups, +/// and mandates canonical encodings in its read function. +/// +/// RFC 8032 has an alternative verification formula, 8R = 8s - 8cX, which is intended to handle +/// torsioned nonces/public keys. Due to this library's strict requirements, such signatures will +/// not be verifiable with this library. 
#[allow(non_snake_case)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] pub struct SchnorrSignature { diff --git a/crypto/schnorr/src/tests.rs b/crypto/schnorr/src/tests.rs index 2d24a0a6..7a912767 100644 --- a/crypto/schnorr/src/tests.rs +++ b/crypto/schnorr/src/tests.rs @@ -54,7 +54,7 @@ pub(crate) fn batch_verify() { for (i, sig) in sigs.iter().enumerate() { sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i].deref(), challenges[i]); } - batch.verify_with_vartime_blame().unwrap(); + batch.verify_vartime_with_vartime_blame().unwrap(); } // Shift 1 from s from one to another and verify it fails @@ -70,7 +70,7 @@ pub(crate) fn batch_verify() { } sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i].deref(), challenges[i]); } - if let Err(blame) = batch.verify_with_vartime_blame() { + if let Err(blame) = batch.verify_vartime_with_vartime_blame() { assert!((blame == 1) || (blame == 2)); } else { panic!("Batch verification considered malleated signatures valid"); From 08f92871079926b20a179f9ca415a016e166a4b2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 09:06:03 -0500 Subject: [PATCH 35/71] 3.8.2 Add Ed25519 RFC 8032 test vectors --- crypto/schnorr/Cargo.toml | 5 +- crypto/schnorr/src/{tests.rs => tests/mod.rs} | 24 ++++---- crypto/schnorr/src/tests/rfc8032.rs | 60 +++++++++++++++++++ 3 files changed, 75 insertions(+), 14 deletions(-) rename crypto/schnorr/src/{tests.rs => tests/mod.rs} (90%) create mode 100644 crypto/schnorr/src/tests/rfc8032.rs diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index e99fe96d..f26b3e13 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -25,6 +25,7 @@ ciphersuite = { path = "../ciphersuite", version = "0.1" } multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } [dev-dependencies] -blake2 = "0.10" +hex = "0.4" +sha2 = "0.10" dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" } -ciphersuite = { path = 
"../ciphersuite", version = "0.1", features = ["ristretto"] } +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["ed25519"] } diff --git a/crypto/schnorr/src/tests.rs b/crypto/schnorr/src/tests/mod.rs similarity index 90% rename from crypto/schnorr/src/tests.rs rename to crypto/schnorr/src/tests/mod.rs index 7a912767..e70460d9 100644 --- a/crypto/schnorr/src/tests.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -1,22 +1,22 @@ use core::ops::Deref; +use zeroize::Zeroizing; use rand_core::OsRng; -use zeroize::Zeroizing; - -use blake2::{digest::typenum::U32, Blake2b}; -type Blake2b256 = Blake2b; +use sha2::Sha256; use group::{ff::Field, Group}; - use multiexp::BatchVerifier; -use ciphersuite::{Ciphersuite, Ristretto}; +use ciphersuite::{Ciphersuite, Ed25519}; + use crate::{ SchnorrSignature, aggregate::{SchnorrAggregator, SchnorrAggregate}, }; +mod rfc8032; + pub(crate) fn sign() { let private_key = Zeroizing::new(C::random_nonzero_F(&mut OsRng)); let nonce = Zeroizing::new(C::random_nonzero_F(&mut OsRng)); @@ -82,7 +82,7 @@ pub(crate) fn aggregate() { // Create 5 signatures let mut keys = vec![]; let mut challenges = vec![]; - let mut aggregator = SchnorrAggregator::::new(); + let mut aggregator = SchnorrAggregator::::new(); for i in 0 .. 
5 { keys.push(Zeroizing::new(C::random_nonzero_F(&mut OsRng))); challenges.push(C::random_nonzero_F(&mut OsRng)); @@ -100,7 +100,7 @@ pub(crate) fn aggregate() { let aggregate = aggregator.complete().unwrap(); let aggregate = SchnorrAggregate::::read::<&[u8]>(&mut aggregate.serialize().as_ref()).unwrap(); - assert!(aggregate.verify::( + assert!(aggregate.verify::( keys .iter() .map(|key| C::generator() * key.deref()) @@ -112,8 +112,8 @@ pub(crate) fn aggregate() { #[test] fn test() { - sign::(); - verify::(); - batch_verify::(); - aggregate::(); + sign::(); + verify::(); + batch_verify::(); + aggregate::(); } diff --git a/crypto/schnorr/src/tests/rfc8032.rs b/crypto/schnorr/src/tests/rfc8032.rs new file mode 100644 index 00000000..001927ac --- /dev/null +++ b/crypto/schnorr/src/tests/rfc8032.rs @@ -0,0 +1,60 @@ +// RFC 8032 Ed25519 test vectors +// The s = r + cx format modernly used for Schnorr signatures was popularized by EdDSA +// While not all RFC 8032 signatures will work with this library, any canonical ones will, and +// these vectors are canonical + +use sha2::{Digest, Sha512}; + +use group::GroupEncoding; +use dalek_ff_group::Scalar; +use ciphersuite::{Ciphersuite, Ed25519}; + +use crate::SchnorrSignature; + +// Public key, message, signature +#[rustfmt::skip] +const VECTORS: [(&str, &str, &str); 5] = [ + ( + "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a", + "", + "e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b" + ), + + ( + "3d4017c3e843895a92b70aa74d1b7ebc9c982ccf2ec4968cc0cd55f12af4660c", + "72", + "92a009a9f0d4cab8720e820b5f642540a2b27b5416503f8fb3762223ebdb69da085ac1e43e15996e458f3613d0f11d8c387b2eaeb4302aeeb00d291612bb0c00" + ), + + ( + "fc51cd8e6218a1a38da47ed00230f0580816ed13ba3303ac5deb911548908025", + "af82", + "6291d657deec24024827e69c3abe01a30ce548a284743a445e3680d7db5ac3ac18ff9b538d16f290ae67f760984dc6594a7c15e9716ed28dc027beceea1ec40a" + ), 
+ + ( + "278117fc144c72340f67d0f2316e8386ceffbf2b2428c9c51fef7c597f1d426e", + "08b8b2b733424243760fe426a4b54908632110a66c2f6591eabd3345e3e4eb98fa6e264bf09efe12ee50f8f54e9f77b1e355f6c50544e23fb1433ddf73be84d879de7c0046dc4996d9e773f4bc9efe5738829adb26c81b37c93a1b270b20329d658675fc6ea534e0810a4432826bf58c941efb65d57a338bbd2e26640f89ffbc1a858efcb8550ee3a5e1998bd177e93a7363c344fe6b199ee5d02e82d522c4feba15452f80288a821a579116ec6dad2b3b310da903401aa62100ab5d1a36553e06203b33890cc9b832f79ef80560ccb9a39ce767967ed628c6ad573cb116dbefefd75499da96bd68a8a97b928a8bbc103b6621fcde2beca1231d206be6cd9ec7aff6f6c94fcd7204ed3455c68c83f4a41da4af2b74ef5c53f1d8ac70bdcb7ed185ce81bd84359d44254d95629e9855a94a7c1958d1f8ada5d0532ed8a5aa3fb2d17ba70eb6248e594e1a2297acbbb39d502f1a8c6eb6f1ce22b3de1a1f40cc24554119a831a9aad6079cad88425de6bde1a9187ebb6092cf67bf2b13fd65f27088d78b7e883c8759d2c4f5c65adb7553878ad575f9fad878e80a0c9ba63bcbcc2732e69485bbc9c90bfbd62481d9089beccf80cfe2df16a2cf65bd92dd597b0707e0917af48bbb75fed413d238f5555a7a569d80c3414a8d0859dc65a46128bab27af87a71314f318c782b23ebfe808b82b0ce26401d2e22f04d83d1255dc51addd3b75a2b1ae0784504df543af8969be3ea7082ff7fc9888c144da2af58429ec96031dbcad3dad9af0dcbaaaf268cb8fcffead94f3c7ca495e056a9b47acdb751fb73e666c6c655ade8297297d07ad1ba5e43f1bca32301651339e22904cc8c42f58c30c04aafdb038dda0847dd988dcda6f3bfd15c4b4c4525004aa06eeff8ca61783aacec57fb3d1f92b0fe2fd1a85f6724517b65e614ad6808d6f6ee34dff7310fdc82aebfd904b01e1dc54b2927094b2db68d6f903b68401adebf5a7e08d78ff4ef5d63653a65040cf9bfd4aca7984a74d37145986780fc0b16ac451649de6188a7dbdf191f64b5fc5e2ab47b57f7f7276cd419c17a3ca8e1b939ae49e488acba6b965610b5480109c8b17b80e1b7b750dfc7598d5d5011fd2dcc5600a32ef5b52a1ecc820e308aa342721aac0943bf6686b64b2579376504ccc493d97e6aed3fb0f9cd71a43dd497f01f17c0e2cb3797aa2a2f256656168e6c496afc5fb93246f6b1116398a346f1a641f3b041e989f7914f90cc2c7fff357876e506b50d334ba77c225bc307ba537152f3f1610e4eafe595f6d9d90d11faa933a15ef1369546868a7f3a45a96768d40fd9d03412c091c6315cf4fde7cb68606937380db
2eaaa707b4c4185c32eddcdd306705e4dc1ffc872eeee475a64dfac86aba41c0618983f8741c5ef68d3a101e8a3b8cac60c905c15fc910840b94c00a0b9d0", + "0aab4c900501b3e24d7cdf4663326a3a87df5e4843b2cbdb67cbf6e460fec350aa5371b1508f9f4528ecea23c436d94b5e8fcd4f681e30a6ac00a9704a188a03" + ), + + ( + "ec172b93ad5e563bf4932c70e1245034c35467ef2efd4d64ebf819683467e2bf", + "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", + "dc2a4459e7369633a52b1bf277839a00201009a3efbf3ecb69bea2186c26b58909351fc9ac90b3ecfdfbc7c66431e0303dca179c138ac17ad9bef1177331a704" + ), +]; + +#[test] +fn test_rfc8032() { + for vector in VECTORS { + let key = Ed25519::read_G::<&[u8]>(&mut hex::decode(vector.0).unwrap().as_ref()).unwrap(); + let sig = + SchnorrSignature::::read::<&[u8]>(&mut hex::decode(vector.2).unwrap().as_ref()) + .unwrap(); + let hram = Sha512::new_with_prefix( + &[sig.R.to_bytes().as_ref(), &key.to_bytes(), &hex::decode(vector.1).unwrap()].concat(), + ); + assert!(sig.verify(key, Scalar::from_hash(hram))); + } +} From 053f07a28146d633cfd86011d0a6808b2b816779 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 09:08:53 -0500 Subject: [PATCH 36/71] 3.8.3 Document challenge requirements --- crypto/schnorr/src/lib.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs index 44eef31e..972d3864 100644 --- a/crypto/schnorr/src/lib.rs +++ b/crypto/schnorr/src/lib.rs @@ -54,6 +54,10 @@ impl SchnorrSignature { } /// Sign a Schnorr signature with the given nonce for the specified challenge. + /// + /// This challenge must be properly crafted, which means being binding to the public key, nonce, + /// and any message. Failure to do so will let a malicious adversary to forge signatures for + /// different keys/messages. 
pub fn sign( private_key: &Zeroizing, nonce: Zeroizing, @@ -83,12 +87,20 @@ impl SchnorrSignature { } /// Verify a Schnorr signature for the given key with the specified challenge. + /// + /// This challenge must be properly crafted, which means being binding to the public key, nonce, + /// and any message. Failure to do so will let a malicious adversary to forge signatures for + /// different keys/messages. #[must_use] pub fn verify(&self, public_key: C::G, challenge: C::F) -> bool { multiexp_vartime(&self.batch_statements(public_key, challenge)).is_identity().into() } /// Queue a signature for batch verification. + /// + /// This challenge must be properly crafted, which means being binding to the public key, nonce, + /// and any message. Failure to do so will let a malicious adversary to forge signatures for + /// different keys/messages. pub fn batch_verify( &self, rng: &mut R, From 8b7e7b1a1c5c98ef31a99df6b726c420e38fd221 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 09:14:36 -0500 Subject: [PATCH 37/71] 3.8.4 Don't additionally transcript keys with challenges --- crypto/schnorr/src/aggregate.rs | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index 6afadb67..933470de 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -20,15 +20,6 @@ fn digest() -> D { D::new_with_prefix(b"Schnorr Aggregate") } -// A secure challenge will include the nonce and whatever message -// Depending on the environment, a secure challenge *may* not include the public key, even if -// the modern consensus is it should -// Accordingly, transcript both here, even if ideally only the latter would need to be -fn digest_accumulate(digest: &mut D, key: G, challenge: G::Scalar) { - digest.update(key.to_bytes().as_ref()); - digest.update(challenge.to_repr().as_ref()); -} - // Performs a big-endian modular reduction of the hash value // This 
is used by the below aggregator to prevent mutability // Only an 128-bit scalar is needed to offer 128-bits of security against malleability per @@ -82,7 +73,7 @@ impl SchnorrAggregate { Ok(SchnorrAggregate { Rs, s: C::read_F(reader)? }) } - /// Write a SchnorrAggregate to something implementing Read. + /// Write a SchnorrAggregate to something implementing Write. pub fn write(&self, writer: &mut W) -> io::Result<()> { writer.write_all( &u32::try_from(self.Rs.len()) @@ -104,6 +95,10 @@ impl SchnorrAggregate { } /// Perform signature verification. + /// + /// This challenge must be properly crafted, which means being binding to the public key, nonce, + /// and any message. Failure to do so will let a malicious adversary to forge signatures for + /// different keys/messages. #[must_use] pub fn verify(&self, keys_and_challenges: &[(C::G, C::F)]) -> bool { if self.Rs.len() != keys_and_challenges.len() { @@ -112,7 +107,7 @@ impl SchnorrAggregate { let mut digest = digest::(); for (key, challenge) in keys_and_challenges { - digest_accumulate(&mut digest, *key, *challenge); + digest.update(challenge.to_repr().as_ref()); } let mut pairs = Vec::with_capacity((2 * keys_and_challenges.len()) + 1); @@ -147,7 +142,7 @@ impl SchnorrAggregator { /// Aggregate a signature. pub fn aggregate(&mut self, public_key: C::G, challenge: C::F, sig: SchnorrSignature) { - digest_accumulate(&mut self.digest, public_key, challenge); + self.digest.update(challenge.to_repr().as_ref()); self.sigs.push(sig); } From 530671795ada0f42f593d680cf892ee222047af2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 09:29:37 -0500 Subject: [PATCH 38/71] 3.8.5 Let the caller pass in a DST for the aggregation hash function Also moves the aggregator over to Digest. While a bit verbose for this context, as all appended items were fixed length, it's length prefixing is solid and the API is pleasant. The downside is the additional dependency which is in tree and quite compact. 
--- crypto/schnorr/Cargo.toml | 4 +- crypto/schnorr/src/aggregate.rs | 72 ++++++++++++++++----------------- crypto/schnorr/src/tests/mod.rs | 9 +++-- 3 files changed, 42 insertions(+), 43 deletions(-) diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index f26b3e13..9606206d 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -18,11 +18,11 @@ rand_core = "0.6" zeroize = { version = "1.5", features = ["zeroize_derive"] } digest = "0.10" +transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2" } group = "0.12" -ciphersuite = { path = "../ciphersuite", version = "0.1" } - multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } +ciphersuite = { path = "../ciphersuite", version = "0.1" } [dev-dependencies] hex = "0.4" diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index 933470de..c528d9b3 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -2,32 +2,27 @@ use std::io::{self, Read, Write}; use zeroize::Zeroize; -use digest::Digest; +use transcript::{Transcript, SecureDigest, DigestTranscript}; use group::{ ff::{Field, PrimeField}, Group, GroupEncoding, - prime::PrimeGroup, }; - use multiexp::multiexp_vartime; - use ciphersuite::Ciphersuite; use crate::SchnorrSignature; -fn digest() -> D { - D::new_with_prefix(b"Schnorr Aggregate") -} - // Performs a big-endian modular reduction of the hash value // This is used by the below aggregator to prevent mutability // Only an 128-bit scalar is needed to offer 128-bits of security against malleability per // https://cr.yp.to/badbatch/badbatch-20120919.pdf // Accordingly, while a 256-bit hash used here with a 256-bit ECC will have bias, it shouldn't be // an issue -fn scalar_from_digest(digest: D) -> F { - let bytes = digest.finalize(); +fn scalar_from_digest( + digest: &mut DigestTranscript, +) -> F { + let bytes = digest.challenge(b"aggregation_weight"); 
debug_assert_eq!(bytes.len() % 8, 0); let mut res = F::zero(); @@ -44,12 +39,6 @@ fn scalar_from_digest(digest: D) -> F { res } -fn digest_yield(digest: D, i: usize) -> F { - scalar_from_digest(digest.chain_update( - u32::try_from(i).expect("more than 4 billion signatures in aggregate").to_le_bytes(), - )) -} - /// Aggregate Schnorr signature as defined in . #[allow(non_snake_case)] #[derive(Clone, PartialEq, Eq, Debug, Zeroize)] @@ -96,23 +85,31 @@ impl SchnorrAggregate { /// Perform signature verification. /// - /// This challenge must be properly crafted, which means being binding to the public key, nonce, - /// and any message. Failure to do so will let a malicious adversary to forge signatures for + /// Challenges must be properly crafted, which means being binding to the public key, nonce, and + /// any message. Failure to do so will let a malicious adversary to forge signatures for /// different keys/messages. + /// + /// The DST used here must prevent a collision with whatever hash function produced the + /// challenges. 
#[must_use] - pub fn verify(&self, keys_and_challenges: &[(C::G, C::F)]) -> bool { + pub fn verify( + &self, + dst: &'static [u8], + keys_and_challenges: &[(C::G, C::F)], + ) -> bool { if self.Rs.len() != keys_and_challenges.len() { return false; } - let mut digest = digest::(); - for (key, challenge) in keys_and_challenges { - digest.update(challenge.to_repr().as_ref()); + let mut digest = DigestTranscript::::new(dst); + digest.domain_separate(b"signatures"); + for (_, challenge) in keys_and_challenges { + digest.append_message(b"challenge", challenge.to_repr()); } let mut pairs = Vec::with_capacity((2 * keys_and_challenges.len()) + 1); for (i, (key, challenge)) in keys_and_challenges.iter().enumerate() { - let z = digest_yield(digest.clone(), i); + let z = scalar_from_digest(&mut digest); pairs.push((z, self.Rs[i])); pairs.push((z * challenge, *key)); } @@ -123,31 +120,30 @@ impl SchnorrAggregate { #[allow(non_snake_case)] #[derive(Clone, Debug, Zeroize)] -pub struct SchnorrAggregator { - digest: D, +pub struct SchnorrAggregator { + digest: DigestTranscript, sigs: Vec>, } -impl Default for SchnorrAggregator { - fn default() -> Self { - Self { digest: digest(), sigs: vec![] } - } -} - -impl SchnorrAggregator { +impl SchnorrAggregator { /// Create a new aggregator. - pub fn new() -> Self { - Self::default() + /// + /// The DST used here must prevent a collision with whatever hash function produced the + /// challenges. + pub fn new(dst: &'static [u8]) -> Self { + let mut res = Self { digest: DigestTranscript::::new(dst), sigs: vec![] }; + res.digest.domain_separate(b"signatures"); + res } /// Aggregate a signature. 
- pub fn aggregate(&mut self, public_key: C::G, challenge: C::F, sig: SchnorrSignature) { - self.digest.update(challenge.to_repr().as_ref()); + pub fn aggregate(&mut self, challenge: C::F, sig: SchnorrSignature) { + self.digest.append_message(b"challenge", challenge.to_repr()); self.sigs.push(sig); } /// Complete aggregation, returning None if none were aggregated. - pub fn complete(self) -> Option> { + pub fn complete(mut self) -> Option> { if self.sigs.is_empty() { return None; } @@ -156,7 +152,7 @@ impl SchnorrAggregator { SchnorrAggregate { Rs: Vec::with_capacity(self.sigs.len()), s: C::F::zero() }; for i in 0 .. self.sigs.len() { aggregate.Rs.push(self.sigs[i].R); - aggregate.s += self.sigs[i].s * digest_yield::<_, C::F>(self.digest.clone(), i); + aggregate.s += self.sigs[i].s * scalar_from_digest::<_, C::F>(&mut self.digest); } Some(aggregate) } diff --git a/crypto/schnorr/src/tests/mod.rs b/crypto/schnorr/src/tests/mod.rs index e70460d9..c2791870 100644 --- a/crypto/schnorr/src/tests/mod.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -79,15 +79,17 @@ pub(crate) fn batch_verify() { } pub(crate) fn aggregate() { + const DST: &[u8] = b"Schnorr Aggregator Test"; + // Create 5 signatures let mut keys = vec![]; let mut challenges = vec![]; - let mut aggregator = SchnorrAggregator::::new(); + let mut aggregator = SchnorrAggregator::::new(DST); for i in 0 .. 
5 { keys.push(Zeroizing::new(C::random_nonzero_F(&mut OsRng))); + // In practice, this MUST be a secure challenge binding to the nonce, key, and any message challenges.push(C::random_nonzero_F(&mut OsRng)); aggregator.aggregate( - C::generator() * keys[i].deref(), challenges[i], SchnorrSignature::::sign( &keys[i], @@ -101,12 +103,13 @@ pub(crate) fn aggregate() { let aggregate = SchnorrAggregate::::read::<&[u8]>(&mut aggregate.serialize().as_ref()).unwrap(); assert!(aggregate.verify::( + DST, keys .iter() .map(|key| C::generator() * key.deref()) .zip(challenges.iter().cloned()) .collect::>() - .as_ref() + .as_ref(), )); } From 97374a3e243ad8fcc707018f861390e7220b7ceb Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 10:04:18 -0500 Subject: [PATCH 39/71] 3.8.6 Correct transcript to scalar derivation Replaces the externally passed in Digest with C::H since C is available. --- Cargo.lock | 6 ++- crypto/ciphersuite/Cargo.toml | 1 + crypto/ciphersuite/src/lib.rs | 5 ++- crypto/schnorr/Cargo.toml | 1 - crypto/schnorr/src/aggregate.rs | 71 ++++++++++++++++++++------------- crypto/schnorr/src/tests/mod.rs | 6 +-- 6 files changed, 54 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b740c7f0..a4913d97 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -912,6 +912,7 @@ dependencies = [ "elliptic-curve", "ff", "ff-group-tests", + "flexible-transcript", "group", "hex", "k256", @@ -7493,13 +7494,14 @@ dependencies = [ name = "schnorr-signatures" version = "0.2.0" dependencies = [ - "blake2", "ciphersuite", "dalek-ff-group", - "digest 0.10.6", + "flexible-transcript", "group", + "hex", "multiexp", "rand_core 0.6.4", + "sha2 0.10.6", "zeroize", ] diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index f3f12f78..4497299d 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -19,6 +19,7 @@ zeroize = { version = "1.5", features = ["zeroize_derive"] } subtle = "2" digest = "0.10" +transcript = { 
package = "flexible-transcript", path = "../transcript", version = "0.2" } sha2 = { version = "0.10", optional = true } sha3 = { version = "0.10", optional = true } diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 076da606..ab5a0a4b 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -11,7 +11,8 @@ use rand_core::{RngCore, CryptoRng}; use zeroize::Zeroize; use subtle::ConstantTimeEq; -use digest::{core_api::BlockSizeUser, Digest}; +use digest::{core_api::BlockSizeUser, Digest, HashMarker}; +use transcript::SecureDigest; use group::{ ff::{Field, PrimeField, PrimeFieldBits}, @@ -49,7 +50,7 @@ pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize { type G: Group + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq; /// Hash algorithm used with this curve. // Requires BlockSizeUser so it can be used within Hkdf which requies that. - type H: Clone + BlockSizeUser + Digest; + type H: Clone + BlockSizeUser + Digest + HashMarker + SecureDigest; /// ID for this curve. 
const ID: &'static [u8]; diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 9606206d..2a46c47a 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -17,7 +17,6 @@ rand_core = "0.6" zeroize = { version = "1.5", features = ["zeroize_derive"] } -digest = "0.10" transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2" } group = "0.12" diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index c528d9b3..bcd36910 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -13,28 +13,49 @@ use ciphersuite::Ciphersuite; use crate::SchnorrSignature; -// Performs a big-endian modular reduction of the hash value -// This is used by the below aggregator to prevent mutability -// Only an 128-bit scalar is needed to offer 128-bits of security against malleability per -// https://cr.yp.to/badbatch/badbatch-20120919.pdf -// Accordingly, while a 256-bit hash used here with a 256-bit ECC will have bias, it shouldn't be -// an issue -fn scalar_from_digest( - digest: &mut DigestTranscript, -) -> F { - let bytes = digest.challenge(b"aggregation_weight"); +// Returns a unbiased scalar weight to use on a signature in order to prevent malleability +fn weight(digest: &mut DigestTranscript) -> F { + let mut bytes = digest.challenge(b"aggregation_weight"); debug_assert_eq!(bytes.len() % 8, 0); + // This should be guaranteed thanks to SecureDigest + debug_assert!(bytes.len() >= 32); let mut res = F::zero(); let mut i = 0; - while i < bytes.len() { - if i != 0 { - for _ in 0 .. 
8 { + + // Derive a scalar from enough bits of entropy that bias is < 2^128 + // This can't be const due to its usage of a generic + // Also due to the usize::try_from, yet that could be replaced with an `as` + // The + 7 forces it to round up + #[allow(non_snake_case)] + let BYTES: usize = usize::try_from(((F::NUM_BITS + 128) + 7) / 8).unwrap(); + + let mut remaining = BYTES; + + // We load bits in as u64s + const WORD_LEN_IN_BITS: usize = 64; + const WORD_LEN_IN_BYTES: usize = WORD_LEN_IN_BITS / 8; + + let mut first = true; + while i < remaining { + // Shift over the already loaded bits + if !first { + for _ in 0 .. WORD_LEN_IN_BITS { res += res; } } - res += F::from(u64::from_be_bytes(bytes[i .. (i + 8)].try_into().unwrap())); - i += 8; + first = false; + + // Add the next 64 bits + res += F::from(u64::from_be_bytes(bytes[i .. (i + WORD_LEN_IN_BYTES)].try_into().unwrap())); + i += WORD_LEN_IN_BYTES; + + // If we've exhausted this challenge, get another + if i == bytes.len() { + bytes = digest.challenge(b"aggregation_weight_continued"); + remaining -= i; + i = 0; + } } res } @@ -92,16 +113,12 @@ impl SchnorrAggregate { /// The DST used here must prevent a collision with whatever hash function produced the /// challenges. 
#[must_use] - pub fn verify( - &self, - dst: &'static [u8], - keys_and_challenges: &[(C::G, C::F)], - ) -> bool { + pub fn verify(&self, dst: &'static [u8], keys_and_challenges: &[(C::G, C::F)]) -> bool { if self.Rs.len() != keys_and_challenges.len() { return false; } - let mut digest = DigestTranscript::::new(dst); + let mut digest = DigestTranscript::::new(dst); digest.domain_separate(b"signatures"); for (_, challenge) in keys_and_challenges { digest.append_message(b"challenge", challenge.to_repr()); @@ -109,7 +126,7 @@ impl SchnorrAggregate { let mut pairs = Vec::with_capacity((2 * keys_and_challenges.len()) + 1); for (i, (key, challenge)) in keys_and_challenges.iter().enumerate() { - let z = scalar_from_digest(&mut digest); + let z = weight(&mut digest); pairs.push((z, self.Rs[i])); pairs.push((z * challenge, *key)); } @@ -120,18 +137,18 @@ impl SchnorrAggregate { #[allow(non_snake_case)] #[derive(Clone, Debug, Zeroize)] -pub struct SchnorrAggregator { - digest: DigestTranscript, +pub struct SchnorrAggregator { + digest: DigestTranscript, sigs: Vec>, } -impl SchnorrAggregator { +impl SchnorrAggregator { /// Create a new aggregator. /// /// The DST used here must prevent a collision with whatever hash function produced the /// challenges. pub fn new(dst: &'static [u8]) -> Self { - let mut res = Self { digest: DigestTranscript::::new(dst), sigs: vec![] }; + let mut res = Self { digest: DigestTranscript::::new(dst), sigs: vec![] }; res.digest.domain_separate(b"signatures"); res } @@ -152,7 +169,7 @@ impl SchnorrAggregator { SchnorrAggregate { Rs: Vec::with_capacity(self.sigs.len()), s: C::F::zero() }; for i in 0 .. 
self.sigs.len() { aggregate.Rs.push(self.sigs[i].R); - aggregate.s += self.sigs[i].s * scalar_from_digest::<_, C::F>(&mut self.digest); + aggregate.s += self.sigs[i].s * weight::<_, C::F>(&mut self.digest); } Some(aggregate) } diff --git a/crypto/schnorr/src/tests/mod.rs b/crypto/schnorr/src/tests/mod.rs index c2791870..1b4fe212 100644 --- a/crypto/schnorr/src/tests/mod.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -3,8 +3,6 @@ use core::ops::Deref; use zeroize::Zeroizing; use rand_core::OsRng; -use sha2::Sha256; - use group::{ff::Field, Group}; use multiexp::BatchVerifier; @@ -84,7 +82,7 @@ pub(crate) fn aggregate() { // Create 5 signatures let mut keys = vec![]; let mut challenges = vec![]; - let mut aggregator = SchnorrAggregator::::new(DST); + let mut aggregator = SchnorrAggregator::::new(DST); for i in 0 .. 5 { keys.push(Zeroizing::new(C::random_nonzero_F(&mut OsRng))); // In practice, this MUST be a secure challenge binding to the nonce, key, and any message @@ -102,7 +100,7 @@ pub(crate) fn aggregate() { let aggregate = aggregator.complete().unwrap(); let aggregate = SchnorrAggregate::::read::<&[u8]>(&mut aggregate.serialize().as_ref()).unwrap(); - assert!(aggregate.verify::( + assert!(aggregate.verify( DST, keys .iter() From 2f4f1de488272bc0d4f3a040e253534c24f7e7bb Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 10:57:22 -0500 Subject: [PATCH 40/71] 3.9.1 Fix SecureDigest trait bound --- crypto/transcript/src/lib.rs | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 1cd385ba..69374d8a 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -6,7 +6,12 @@ mod merlin; #[cfg(feature = "merlin")] pub use crate::merlin::MerlinTranscript; -use digest::{typenum::type_operators::IsGreaterOrEqual, consts::U256, Digest, Output, HashMarker}; +use digest::{ + typenum::{ + consts::U32, marker_traits::NonZero, 
type_operators::IsGreaterOrEqual, operator_aliases::GrEq, + }, + Digest, Output, HashMarker, +}; pub trait Transcript { type Challenge: Clone + Send + Sync + AsRef<[u8]>; @@ -53,7 +58,15 @@ impl DigestTranscriptMember { /// A trait defining cryptographic Digests with at least a 256-byte output size, assuming at least /// a 128-bit level of security accordingly. pub trait SecureDigest: Digest + HashMarker {} -impl SecureDigest for D where D::OutputSize: IsGreaterOrEqual {} +impl SecureDigest for D +where + // This just lets us perform the comparison + D::OutputSize: IsGreaterOrEqual, + // Perform the comparison and make sure it's true (not zero), meaning D::OutputSize is >= U32 + // This should be U32 as it's length in bytes, not bits + GrEq: NonZero, +{ +} /// A simple transcript format constructed around the specified hash algorithm. #[derive(Clone, Debug)] From 6fec95b1a738c16cbe0935d3d2bb8d526cfb1198 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 11:16:00 -0500 Subject: [PATCH 41/71] 3.7.2 Remove code randomizing which side odd elements end up on This could still be gamed. For [1, 2, 3], the options were ([1], [2, 3]) or ([1, 2], [3]). This means 2 would always have the maximum round count, and thus this is still game-able. There's no point to keeping its complexity accordingly when the algorithm is as efficient as it is. While a proper random could be used to satisfy 3.7.2, it'd break the expected determinism. --- crypto/multiexp/src/batch.rs | 50 +++++++----------------------------- 1 file changed, 9 insertions(+), 41 deletions(-) diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 945c8bcb..81458914 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -22,13 +22,9 @@ where /// A batch verifier intended to verify a series of statements are each equivalent to zero. 
#[allow(clippy::type_complexity)] #[derive(Clone, Zeroize)] -pub struct BatchVerifier +pub struct BatchVerifier(Zeroizing)>>) where - ::Scalar: PrimeFieldBits + Zeroize, -{ - split: u64, - statements: Zeroizing)>>, -} + ::Scalar: PrimeFieldBits + Zeroize; impl BatchVerifier where @@ -37,7 +33,7 @@ where /// Create a new batch verifier, expected to verify the following amount of statements. /// This is a size hint and is not required to be accurate. pub fn new(capacity: usize) -> BatchVerifier { - BatchVerifier { split: 0, statements: Zeroizing::new(Vec::with_capacity(capacity)) } + BatchVerifier(Zeroizing::new(Vec::with_capacity(capacity))) } /// Queue a statement for batch verification. @@ -47,15 +43,8 @@ where id: Id, pairs: I, ) { - // If this is the first time we're queueing a value, grab a u64 (AKA 64 bits) to determine - // which side to handle odd splits with during blame (preventing malicious actors from gaming - // the system by maximizing the round length) - if self.statements.len() == 0 { - self.split = rng.next_u64(); - } - // Define a unique scalar factor for this set of variables so individual items can't overlap - let u = if self.statements.is_empty() { + let u = if self.0.is_empty() { G::Scalar::one() } else { let mut weight; @@ -98,20 +87,20 @@ where }; self - .statements + .0 .push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); } /// Perform batch verification, returning a boolean of if the statements equaled zero. #[must_use] pub fn verify(&self) -> bool { - multiexp(&flat(&self.statements)).is_identity().into() + multiexp(&flat(&self.0)).is_identity().into() } /// Perform batch verification in variable time. 
#[must_use] pub fn verify_vartime(&self) -> bool { - multiexp_vartime(&flat(&self.statements)).is_identity().into() + multiexp_vartime(&flat(&self.0)).is_identity().into() } /// Perform a binary search to identify which statement does not equal 0, returning None if all @@ -119,30 +108,9 @@ where /// multiple are invalid. // A constant time variant may be beneficial for robust protocols pub fn blame_vartime(&self) -> Option { - let mut slice = self.statements.as_slice(); - let mut split_side = self.split; - + let mut slice = self.0.as_slice(); while slice.len() > 1 { - let mut split = slice.len() / 2; - // If there's an odd number of elements, this can be gamed to increase the amount of rounds - // For [0, 1, 2], where 2 is invalid, this will take three rounds - // ([0], [1, 2]), then ([1], [2]), before just 2 - // If 1 and 2 were valid, this would've only taken 2 rounds to complete - // To prevent this from being gamed, if there's an odd number of elements, randomize which - // side the split occurs on - - // This does risk breaking determinism - // The concern is if the select split point causes different paths to be taken when multiple - // invalid elements exist - // While the split point may move an element from the right to the left, always choosing the - // left side (if it's invalid) means this will still always return the left-most, - // invalid element - - if slice.len() % 2 == 1 { - split += usize::try_from(split_side & 1).unwrap(); - split_side >>= 1; - } - + let split = slice.len() / 2; if multiexp_vartime(&flat(&slice[.. 
split])).is_identity().into() { slice = &slice[split ..]; } else { From 79124b9a3394ae1eaadf066fb7bc431afdaf5be3 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Thu, 2 Mar 2023 11:19:26 -0500 Subject: [PATCH 42/71] 3.9.2 Better document rng_seed is allowed to conflict with challenge --- crypto/transcript/src/lib.rs | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 69374d8a..e8af6161 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -25,13 +25,19 @@ pub trait Transcript { /// Append a message to the transcript. fn append_message>(&mut self, label: &'static [u8], message: M); - /// Produce a challenge. This MUST update the transcript as it does so, preventing the same - /// challenge from being generated multiple times. + /// Produce a challenge. + /// + /// Implementors MUST update the transcript as it does so, preventing the same challenge from + /// being generated multiple times. fn challenge(&mut self, label: &'static [u8]) -> Self::Challenge; - /// Produce a RNG seed. Helper function for parties needing to generate random data from an - /// agreed upon state. Internally calls the challenge function for the needed bytes, converting - /// them to the seed format rand_core expects. + /// Produce a RNG seed. + /// + /// Helper function for parties needing to generate random data from an agreed upon state. + /// + /// Implementors MAY internally call the challenge function for the needed bytes, and accordingly + /// produce a transcript conflict between two transcripts, one which called challenge(label) and + /// one which called rng_seed(label) at the same point. 
fn rng_seed(&mut self, label: &'static [u8]) -> [u8; 32]; } From 7efedb9a9136ec3e37fec1cb35f33a8426d89a90 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 6 Mar 2023 07:16:04 -0500 Subject: [PATCH 43/71] 3.9.1 Also correct invalid doc comment --- crypto/transcript/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index e8af6161..455b3df0 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -61,8 +61,8 @@ impl DigestTranscriptMember { } } -/// A trait defining cryptographic Digests with at least a 256-byte output size, assuming at least -/// a 128-bit level of security accordingly. +/// A trait defining cryptographic Digests with at least a 256-bit output size, assuming at least a +/// 128-bit level of security accordingly. pub trait SecureDigest: Digest + HashMarker {} impl SecureDigest for D where From 20a33079f8a6d3b6f69a9221432844406440f21b Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 6 Mar 2023 20:16:57 -0500 Subject: [PATCH 44/71] 3.9.3 Document Merlin domain_separate conflict potential and add an assert --- crypto/transcript/src/merlin.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/crypto/transcript/src/merlin.rs b/crypto/transcript/src/merlin.rs index 5fe7eb68..ce4ee4d9 100644 --- a/crypto/transcript/src/merlin.rs +++ b/crypto/transcript/src/merlin.rs @@ -2,8 +2,18 @@ use core::fmt::{Debug, Formatter}; use crate::Transcript; +/// A wrapper around a Merlin transcript which satisfiees the Transcript API. +/// +/// Challenges are fixed to 64 bytes, despite Merlin supporting variable length challenges. +/// +/// This implementation is intended to remain in the spirit of Merlin more than it's intended to be +/// in the spirit of the provided DigestTranscript. 
While DigestTranscript uses flags for each of +/// its different field types, the domain_separate function simply appends a message with a label +/// of "dom-sep", Merlin's preferred domain separation label. Since this could introduce transcript +/// conflicts between a domain separation and a message with a label of "dom-sep", the +/// append_message function uses an assertion to prevent such labels. #[derive(Clone)] -pub struct MerlinTranscript(pub merlin::Transcript); +pub struct MerlinTranscript(merlin::Transcript); // Merlin doesn't implement Debug so provide a stub which won't panic impl Debug for MerlinTranscript { fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { @@ -12,11 +22,10 @@ impl Debug for MerlinTranscript { } impl Transcript for MerlinTranscript { - // Uses a challenge length of 64 bytes to support wide reduction on generated scalars - // From a security level standpoint, this should just be 32 bytes + // Uses a challenge length of 64 bytes to support wide reduction on commonly used EC scalars + // From a security level standpoint (Merlin targets 128-bits), this should just be 32 bytes // From a Merlin standpoint, this should be variable per call - // From a practical standpoint, this is a demo file not planned to be used and anything using - // this wrapper should be secure with this setting + // From a practical standpoint, this should be practical type Challenge = [u8; 64]; fn new(name: &'static [u8]) -> Self { @@ -24,10 +33,14 @@ impl Transcript for MerlinTranscript { } fn domain_separate(&mut self, label: &'static [u8]) { - self.append_message(b"dom-sep", label); + self.0.append_message(b"dom-sep", label); } fn append_message>(&mut self, label: &'static [u8], message: M) { + assert!( + label != "dom-sep".as_bytes(), + "\"dom-sep\" is reserved for the domain_separate function", + ); self.0.append_message(label, message.as_ref()); } From a053454ae4135cad006dc38afdac008412300a81 Mon Sep 17 00:00:00 2001 From: Luke Parker 
Date: Tue, 7 Mar 2023 02:25:10 -0500 Subject: [PATCH 45/71] 3.9.4 Add tests to the transcript crate --- crypto/transcript/Cargo.toml | 4 ++ crypto/transcript/src/lib.rs | 3 ++ crypto/transcript/src/tests.rs | 95 ++++++++++++++++++++++++++++++++++ 3 files changed, 102 insertions(+) create mode 100644 crypto/transcript/src/tests.rs diff --git a/crypto/transcript/Cargo.toml b/crypto/transcript/Cargo.toml index b75d7173..b7a6d9d0 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -18,6 +18,10 @@ digest = "0.10" blake2 = { version = "0.10", optional = true } merlin = { version = "3", optional = true } +[dev-dependencies] +blake2 = "0.10" + [features] recommended = ["blake2"] merlin = ["dep:merlin"] +tests = [] diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 455b3df0..2f4ef615 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -6,6 +6,9 @@ mod merlin; #[cfg(feature = "merlin")] pub use crate::merlin::MerlinTranscript; +#[cfg(any(test, feature = "tests"))] +pub mod tests; + use digest::{ typenum::{ consts::U32, marker_traits::NonZero, type_operators::IsGreaterOrEqual, operator_aliases::GrEq, diff --git a/crypto/transcript/src/tests.rs b/crypto/transcript/src/tests.rs new file mode 100644 index 00000000..0a461ba5 --- /dev/null +++ b/crypto/transcript/src/tests.rs @@ -0,0 +1,95 @@ +use crate::Transcript; + +pub fn test_transcript() +where + T::Challenge: PartialEq, +{ + // Ensure distinct names cause distinct challenges + { + let mut t1 = T::new(b"1"); + let mut t2 = T::new(b"2"); + assert!(t1.challenge(b"c") != t2.challenge(b"c")); + } + + // Ensure names can't lead into labels + { + let mut t1 = T::new(b"12"); + let c1 = t1.challenge(b"c"); + let mut t2 = T::new(b"1"); + let c2 = t2.challenge(b"2c"); + assert!(c1 != c2); + } + + let t = || T::new(b"name"); + let c = |mut t: T| t.challenge(b"c"); + + // Ensure domain separators do something + { + let mut t1 = t(); + 
t1.domain_separate(b"d"); + assert!(c(t1) != c(t())); + } + + // Ensure distinct domain separators create distinct challenges + { + let mut t1 = t(); + let mut t2 = t(); + t1.domain_separate(b"d1"); + t2.domain_separate(b"d2"); + assert!(c(t1) != c(t2)); + } + + // Ensure distinct messages create distinct challenges + { + // By label + { + let mut t1 = t(); + let mut t2 = t(); + t1.append_message(b"msg", b"a"); + t2.append_message(b"msg", b"b"); + assert!(c(t1) != c(t2)); + } + + // By value + { + let mut t1 = t(); + let mut t2 = t(); + t1.append_message(b"a", b"val"); + t2.append_message(b"b", b"val"); + assert!(c(t1) != c(t2)); + } + } + + // Ensure challenges advance the transcript + { + let mut t = t(); + let c1 = t.challenge(b"c"); + let c2 = t.challenge(b"c"); + assert!(c1 != c2); + } + + // Ensure distinct challenge labels produce distinct challenges + assert!(t().challenge(b"a") != t().challenge(b"b")); + + // Ensure RNG seed calls advance the transcript + { + let mut t = t(); + let s1 = t.rng_seed(b"s"); + let s2 = t.rng_seed(b"s"); + assert!(s1 != s2); + } + + // Ensure distinct RNG seed labels produce distinct seeds + assert!(t().rng_seed(b"a") != t().rng_seed(b"b")); +} + +#[test] +fn test_digest() { + test_transcript::>(); +} + +#[cfg(feature = "merlin")] +#[test] +fn test_merlin() { + test_transcript::(); +} From c37cc0b4e21dd9ca9897d1961daf07836775bba9 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 02:29:59 -0500 Subject: [PATCH 46/71] Update Zeroize pin to ^1.5 from 1.5 --- coins/monero/Cargo.toml | 2 +- common/zalloc/Cargo.toml | 2 +- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dalek-ff-group/Cargo.toml | 2 +- crypto/dkg/Cargo.toml | 2 +- crypto/ed448/Cargo.toml | 2 +- crypto/frost/Cargo.toml | 2 +- crypto/multiexp/Cargo.toml | 2 +- crypto/multiexp/src/batch.rs | 8 ++++---- crypto/schnorr/Cargo.toml | 2 +- processor/Cargo.toml | 2 +- 11 files changed, 14 insertions(+), 14 deletions(-) diff --git a/coins/monero/Cargo.toml 
b/coins/monero/Cargo.toml index 6525ab25..e3255099 100644 --- a/coins/monero/Cargo.toml +++ b/coins/monero/Cargo.toml @@ -21,7 +21,7 @@ rand_chacha = { version = "0.3", optional = true } rand = "0.8" rand_distr = "0.4" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2.4" sha3 = "0.10" diff --git a/common/zalloc/Cargo.toml b/common/zalloc/Cargo.toml index db38f3e2..2a18cf3c 100644 --- a/common/zalloc/Cargo.toml +++ b/common/zalloc/Cargo.toml @@ -13,7 +13,7 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] -zeroize = "1.5" +zeroize = "^1.5" [features] # Commented for now as it requires nightly and we don't use nightly diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 4497299d..16bb2118 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -15,7 +15,7 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] rand_core = "0.6" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2" digest = "0.10" diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index fa6a1395..a0b336a2 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -16,7 +16,7 @@ rustdoc-args = ["--cfg", "docsrs"] rand_core = "0.6" digest = "0.10" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2.4" ff = "0.12" diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index fbdcba75..c45f3c00 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -17,7 +17,7 @@ thiserror = "1" rand_core = "0.6" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2" hex = "0.4" diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml 
index fe061fc6..78758090 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -18,7 +18,7 @@ lazy_static = "1" rand_core = "0.6" digest = "0.10" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2.4" ff = "0.12" diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 91eaa91d..f03d7a87 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -18,7 +18,7 @@ thiserror = "1" rand_core = "0.6" rand_chacha = "0.3" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } subtle = "2" hex = "0.4" diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index 0a935289..ecddf764 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -13,7 +13,7 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } ff = "0.12" group = "0.12" diff --git a/crypto/multiexp/src/batch.rs b/crypto/multiexp/src/batch.rs index 81458914..c47e2845 100644 --- a/crypto/multiexp/src/batch.rs +++ b/crypto/multiexp/src/batch.rs @@ -22,7 +22,9 @@ where /// A batch verifier intended to verify a series of statements are each equivalent to zero. #[allow(clippy::type_complexity)] #[derive(Clone, Zeroize)] -pub struct BatchVerifier(Zeroizing)>>) +pub struct BatchVerifier( + Zeroizing)>>, +) where ::Scalar: PrimeFieldBits + Zeroize; @@ -86,9 +88,7 @@ where weight }; - self - .0 - .push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); + self.0.push((id, pairs.into_iter().map(|(scalar, point)| (scalar * u, point)).collect())); } /// Perform batch verification, returning a boolean of if the statements equaled zero. 
diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index 2a46c47a..f6a2ea89 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -15,7 +15,7 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] rand_core = "0.6" -zeroize = { version = "1.5", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2" } diff --git a/processor/Cargo.toml b/processor/Cargo.toml index 695b16e4..f86fb7a6 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -15,7 +15,7 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] async-trait = "0.1" -zeroize = "1.5" +zeroize = "^1.5" thiserror = "1" rand_core = "0.6" From 2729882d6557ea776eee555239e77e84b3c84229 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Mon, 6 Mar 2023 08:22:04 -0500 Subject: [PATCH 47/71] Update to {k, p}256 0.12 --- Cargo.lock | 75 ++++++++++++++++++++++++++------ coins/ethereum/Cargo.toml | 3 +- coins/ethereum/tests/contract.rs | 2 +- coins/ethereum/tests/crypto.rs | 25 ++++++----- crypto/ciphersuite/Cargo.toml | 4 +- crypto/dleq/Cargo.toml | 2 +- crypto/ff-group-tests/Cargo.toml | 4 +- crypto/multiexp/Cargo.toml | 2 +- 8 files changed, 84 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a4913d97..7b6fa723 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -915,7 +915,7 @@ dependencies = [ "flexible-transcript", "group", "hex", - "k256", + "k256 0.12.0", "minimal-ed448", "p256", "rand_core 0.6.4", @@ -1034,7 +1034,7 @@ dependencies = [ "digest 0.10.6", "getrandom 0.2.8", "hmac 0.12.1", - "k256", + "k256 0.11.6", "lazy_static", "serde", "sha2 0.10.6", @@ -1716,7 +1716,7 @@ dependencies = [ "flexible-transcript", "group", "hex-literal", - "k256", + "k256 0.12.0", "multiexp", "rand_core 0.6.4", "thiserror", @@ -1793,7 +1793,19 @@ dependencies = [ "der", "elliptic-curve", "rfc6979", - "signature", + "signature 1.6.4", +] + +[[package]] 
+name = "ecdsa" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12844141594ad74185a926d030f3b605f6a903b4e3fec351f3ea338ac5b7637e" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature 2.0.0", ] [[package]] @@ -1802,7 +1814,7 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9c280362032ea4203659fc489832d0204ef09f247a0506f170dafcac08c369" dependencies = [ - "signature", + "signature 1.6.4", ] [[package]] @@ -1998,11 +2010,12 @@ dependencies = [ "eyre", "group", "hex-literal", - "k256", + "k256 0.12.0", "modular-frost", "rand_core 0.6.4", "serde", "serde_json", + "sha2 0.10.6", "sha3", "thiserror", "tokio", @@ -2126,7 +2139,7 @@ dependencies = [ "ethabi", "generic-array 0.14.6", "hex", - "k256", + "k256 0.11.6", "once_cell", "open-fastrlp", "proc-macro2", @@ -2341,7 +2354,7 @@ name = "ff-group-tests" version = "0.12.0" dependencies = [ "group", - "k256", + "k256 0.12.0", "p256", "rand_core 0.6.4", ] @@ -2593,7 +2606,7 @@ dependencies = [ "frame-metadata", "frame-support-procedural", "impl-trait-for-tuples", - "k256", + "k256 0.11.6", "log", "once_cell", "parity-scale-codec", @@ -3785,12 +3798,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ "cfg-if", - "ecdsa", + "ecdsa 0.14.8", "elliptic-curve", "sha2 0.10.6", "sha3", ] +[[package]] +name = "k256" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92a55e0ff3b72c262bcf041d9e97f1b84492b68f1c1a384de2323d3dc9403397" +dependencies = [ + "cfg-if", + "ecdsa 0.15.1", + "elliptic-curve", + "once_cell", + "sha2 0.10.6", + "signature 2.0.0", +] + [[package]] name = "keccak" version = "0.1.3" @@ -4765,7 +4792,7 @@ dependencies = [ "dalek-ff-group", "ff", "group", - "k256", + "k256 0.12.0", "rand_core 0.6.4", "zeroize", ] @@ -5177,12 +5204,13 @@ 
checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" [[package]] name = "p256" -version = "0.11.1" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +checksum = "49c124b3cbce43bcbac68c58ec181d98ed6cc7e6d0aa7c3ba97b2563410b0e55" dependencies = [ - "ecdsa", + "ecdsa 0.15.1", "elliptic-curve", + "primeorder", "sha2 0.10.6", ] @@ -5844,6 +5872,15 @@ dependencies = [ "syn", ] +[[package]] +name = "primeorder" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b54f7131b3dba65a2f414cf5bd25b66d4682e4608610668eae785750ba4c5b2" +dependencies = [ + "elliptic-curve", +] + [[package]] name = "primitive-types" version = "0.12.1" @@ -7963,6 +8000,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "signature" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + [[package]] name = "simba" version = "0.5.1" diff --git a/coins/ethereum/Cargo.toml b/coins/ethereum/Cargo.toml index 9d59efd8..4e837f0e 100644 --- a/coins/ethereum/Cargo.toml +++ b/coins/ethereum/Cargo.toml @@ -20,10 +20,11 @@ rand_core = "0.6" serde_json = "1.0" serde = "1.0" +sha2 = "0.10" sha3 = "0.10" group = "0.12" -k256 = { version = "0.11", features = ["arithmetic", "keccak256", "ecdsa"] } +k256 = { version = "0.12", features = ["arithmetic", "ecdsa"] } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] } eyre = "0.6" diff --git a/coins/ethereum/tests/contract.rs b/coins/ethereum/tests/contract.rs index 057f0b4c..70f3320a 100644 --- a/coins/ethereum/tests/contract.rs +++ b/coins/ethereum/tests/contract.rs @@ -2,7 +2,7 @@ use std::{convert::TryFrom, sync::Arc, time::Duration}; use rand_core::OsRng; 
-use k256::{elliptic_curve::bigint::ArrayEncoding, U256}; +use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256}; use ethers::{ prelude::*, diff --git a/coins/ethereum/tests/crypto.rs b/coins/ethereum/tests/crypto.rs index f48f1869..82f61e01 100644 --- a/coins/ethereum/tests/crypto.rs +++ b/coins/ethereum/tests/crypto.rs @@ -1,29 +1,32 @@ -use ethereum_serai::crypto::*; -use frost::curve::Secp256k1; use k256::{ elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint}, ProjectivePoint, Scalar, U256, }; +use frost::curve::Secp256k1; + +use ethereum_serai::crypto::*; #[test] fn test_ecrecover() { - use k256::ecdsa::{ - recoverable::Signature, - signature::{Signer, Verifier}, - SigningKey, VerifyingKey, - }; use rand_core::OsRng; + use sha2::Sha256; + use sha3::{Digest, Keccak256}; + use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey}; let private = SigningKey::random(&mut OsRng); let public = VerifyingKey::from(&private); const MESSAGE: &[u8] = b"Hello, World!"; - let sig: Signature = private.sign(MESSAGE); - public.verify(MESSAGE, &sig).unwrap(); + let (sig, recovery_id) = private + .as_nonzero_scalar() + .try_sign_prehashed_rfc6979::(Keccak256::digest(MESSAGE), b"") + .unwrap(); + assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ()); assert_eq!( - ecrecover(hash_to_scalar(MESSAGE), sig.as_ref()[64], *sig.r(), *sig.s()).unwrap(), - address(&ProjectivePoint::from(public)) + ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s()) + .unwrap(), + address(&ProjectivePoint::from(public.as_affine())) ); } diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 16bb2118..163e7981 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -29,8 +29,8 @@ group = "0.12" dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true } elliptic-curve = { version = "0.12", 
features = ["hash2curve"], optional = true } -p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } -k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true } +p256 = { version = "0.12", features = ["arithmetic", "bits", "hash2curve"], optional = true } +k256 = { version = "0.12", features = ["arithmetic", "bits", "hash2curve"], optional = true } minimal-ed448 = { path = "../ed448", version = "^0.1.2", optional = true } diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index 15ba6b6d..71745a11 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -31,7 +31,7 @@ hex-literal = "0.3" blake2 = "0.10" -k256 = { version = "0.11", features = ["arithmetic", "bits"] } +k256 = { version = "0.12", features = ["arithmetic", "bits"] } dalek-ff-group = { path = "../dalek-ff-group" } transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] } diff --git a/crypto/ff-group-tests/Cargo.toml b/crypto/ff-group-tests/Cargo.toml index 94fb8099..67e9eaaf 100644 --- a/crypto/ff-group-tests/Cargo.toml +++ b/crypto/ff-group-tests/Cargo.toml @@ -17,5 +17,5 @@ rand_core = "0.6" group = "0.12" [dev-dependencies] -k256 = { version = "0.11", features = ["bits"] } -p256 = { version = "0.11", features = ["bits"] } +k256 = { version = "0.12", features = ["bits"] } +p256 = { version = "0.12", features = ["bits"] } diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index ecddf764..a54ae0d8 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -23,7 +23,7 @@ rand_core = { version = "0.6", optional = true } [dev-dependencies] rand_core = "0.6" -k256 = { version = "0.11", features = ["bits"] } +k256 = { version = "0.12", features = ["bits"] } dalek-ff-group = { path = "../dalek-ff-group" } [features] From 0bbf511062aea9439ab244d64fdd6a5491d41daa Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 02:38:47 -0500 
Subject: [PATCH 48/71] Add 'static/Send/Sync to specific traits in crypto These were proven necessary by our real world usage. --- crypto/ciphersuite/src/lib.rs | 4 +++- crypto/frost/src/algorithm.rs | 8 ++++---- crypto/frost/src/sign.rs | 6 +++--- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index ab5a0a4b..fde5306c 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -42,7 +42,9 @@ mod ed448; pub use ed448::*; /// Unified trait defining a ciphersuite around an elliptic curve. -pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize { +pub trait Ciphersuite: + 'static + Send + Sync + Clone + Copy + PartialEq + Eq + Debug + Zeroize +{ /// Scalar field element type. // This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses type F: PrimeField + PrimeFieldBits + Zeroize; diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index ca6108dc..43a11a3d 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -21,11 +21,11 @@ impl WriteAddendum for () { } /// Trait alias for the requirements to be used as an addendum. -pub trait Addendum: Clone + PartialEq + Debug + WriteAddendum {} -impl Addendum for A {} +pub trait Addendum: Send + Clone + PartialEq + Debug + WriteAddendum {} +impl Addendum for A {} /// Algorithm trait usable by the FROST signing machine to produce signatures.. -pub trait Algorithm: Clone { +pub trait Algorithm: Send + Clone { /// The transcript format this algorithm uses. This likely should NOT be the IETF-compatible /// transcript included in this crate. type Transcript: Clone + Debug + Transcript; @@ -120,7 +120,7 @@ mod sealed { pub(crate) use sealed::IetfTranscript; /// HRAm usable by the included Schnorr signature algorithm to generate challenges. -pub trait Hram: Clone { +pub trait Hram: Send + Clone { /// HRAm function to generate a challenge. 
/// H2 from the IETF draft, despite having a different argument set (not being pre-formatted). #[allow(non_snake_case)] diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 9634b161..089c82c3 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -89,7 +89,7 @@ impl Writable for Preprocess { pub struct CachedPreprocess(pub Zeroizing<[u8; 32]>); /// Trait for the initial state machine of a two-round signing protocol. -pub trait PreprocessMachine { +pub trait PreprocessMachine: Send { /// Preprocess message for this machine. type Preprocess: Clone + PartialEq + Writable; /// Signature produced by this machine. @@ -203,7 +203,7 @@ impl SignatureShare { } /// Trait for the second machine of a two-round signing protocol. -pub trait SignMachine: Sized { +pub trait SignMachine: Send + Sized { /// Params used to instantiate this machine which can be used to rebuild from a cache. type Params: Clone; /// Keys used for signing operations. @@ -436,7 +436,7 @@ impl> SignMachine for AlgorithmSignMachi } /// Trait for the final machine of a two-round signing protocol. -pub trait SignatureMachine { +pub trait SignatureMachine: Send { /// SignatureShare message for this machine. 
type SignatureShare: Clone + PartialEq + Writable; From d36fc026dda67b97c0954fb46dd289312dcd8d7d Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 02:40:09 -0500 Subject: [PATCH 49/71] Remove unused generic in frost --- crypto/frost/src/nonce.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs index c4c5f361..2268c755 100644 --- a/crypto/frost/src/nonce.rs +++ b/crypto/frost/src/nonce.rs @@ -78,7 +78,7 @@ pub(crate) struct NonceCommitments { } impl NonceCommitments { - pub(crate) fn new( + pub(crate) fn new( rng: &mut R, secret_share: &Zeroizing, generators: &[C::G], @@ -99,10 +99,7 @@ impl NonceCommitments { (nonce, NonceCommitments { generators: commitments }) } - fn read( - reader: &mut R, - generators: &[C::G], - ) -> io::Result> { + fn read(reader: &mut R, generators: &[C::G]) -> io::Result> { Ok(NonceCommitments { generators: (0 .. generators.len()) .map(|_| GeneratorCommitments::read(reader)) @@ -157,7 +154,7 @@ impl Commitments { let mut dleq_nonces = vec![]; for generators in planned_nonces { let (nonce, these_commitments): (Nonce, _) = - NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators); + NonceCommitments::new(&mut *rng, secret_share, generators); if generators.len() > 1 { dleq_generators.push(generators.clone()); @@ -205,7 +202,7 @@ impl Commitments { context: &[u8], ) -> io::Result { let nonces = (0 .. 
generators.len()) - .map(|i| NonceCommitments::read::<_, T>(reader, &generators[i])) + .map(|i| NonceCommitments::read(reader, &generators[i])) .collect::>, _>>()?; let mut dleq_generators = vec![]; From 0e8c55e05070fcfcbb68fad1ac79032d781c67ce Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:06:46 -0500 Subject: [PATCH 50/71] Update and remove unused dependencies --- Cargo.lock | 14 -------------- coins/monero/Cargo.toml | 7 ++++--- coins/monero/generators/Cargo.toml | 2 +- crypto/ciphersuite/Cargo.toml | 2 +- crypto/dalek-ff-group/Cargo.toml | 6 +++--- crypto/dalek-ff-group/src/field.rs | 2 +- crypto/dalek-ff-group/src/lib.rs | 7 +++++-- crypto/dkg/Cargo.toml | 3 --- crypto/dkg/src/encryption.rs | 2 +- crypto/dleq/Cargo.toml | 6 +++--- crypto/ed448/Cargo.toml | 5 ++--- crypto/ed448/src/backend.rs | 2 +- crypto/ed448/src/point.rs | 7 +++++-- crypto/frost/Cargo.toml | 15 +++++---------- crypto/frost/src/sign.rs | 7 +++---- 15 files changed, 35 insertions(+), 52 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b6fa723..e244b6dc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1696,11 +1696,9 @@ dependencies = [ "dleq", "flexible-transcript", "group", - "hex", "multiexp", "rand_core 0.6.4", "schnorr-signatures", - "subtle", "thiserror", "zeroize", ] @@ -3081,15 +3079,6 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0" -[[package]] -name = "hkdf" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" -dependencies = [ - "hmac 0.12.1", -] - [[package]] name = "hmac" version = "0.8.1" @@ -4610,7 +4599,6 @@ version = "0.1.2" dependencies = [ "crypto-bigint", "dalek-ff-group", - "digest 0.10.6", "ff", "ff-group-tests", "generic-array 0.14.6", @@ -4680,7 +4668,6 @@ dependencies = [ name = "modular-frost" version = "0.5.0" 
dependencies = [ - "chacha20 0.9.0", "ciphersuite", "dalek-ff-group", "digest 0.10.6", @@ -4689,7 +4676,6 @@ dependencies = [ "flexible-transcript", "group", "hex", - "hkdf", "minimal-ed448", "multiexp", "rand_chacha 0.3.1", diff --git a/coins/monero/Cargo.toml b/coins/monero/Cargo.toml index e3255099..4093f745 100644 --- a/coins/monero/Cargo.toml +++ b/coins/monero/Cargo.toml @@ -12,7 +12,6 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] -hex-literal = "0.3" lazy_static = "1" thiserror = "1" @@ -22,12 +21,12 @@ rand = "0.8" rand_distr = "0.4" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2.4" +subtle = "^2.4" sha3 = "0.10" blake2 = { version = "0.10", optional = true } -curve25519-dalek = { version = "3", features = ["std"] } +curve25519-dalek = { version = "^3.2", features = ["std"] } group = { version = "0.12" } dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" } @@ -54,6 +53,8 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" } monero-generators = { path = "generators", version = "0.1" } [dev-dependencies] +hex-literal = "0.3" + tokio = { version = "1", features = ["full"] } frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519", "tests"] } diff --git a/coins/monero/generators/Cargo.toml b/coins/monero/generators/Cargo.toml index 0f626f17..b450e2ea 100644 --- a/coins/monero/generators/Cargo.toml +++ b/coins/monero/generators/Cargo.toml @@ -14,7 +14,7 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] lazy_static = "1" -subtle = "2.4" +subtle = "^2.4" sha3 = "0.10" diff --git a/crypto/ciphersuite/Cargo.toml b/crypto/ciphersuite/Cargo.toml index 163e7981..ec7c3d71 100644 --- a/crypto/ciphersuite/Cargo.toml +++ b/crypto/ciphersuite/Cargo.toml @@ -16,7 +16,7 @@ rustdoc-args = ["--cfg", "docsrs"] rand_core = "0.6" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2" +subtle = "^2.4" digest = 
"0.10" transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2" } diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index a0b336a2..bea86d89 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -17,15 +17,15 @@ rand_core = "0.6" digest = "0.10" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2.4" +subtle = "^2.4" -ff = "0.12" +ff = { version = "0.12", features = ["bits"] } group = "0.12" crypto-bigint = "0.4" sha2 = "0.9" -curve25519-dalek = "3.2" +curve25519-dalek = "^3.2" [dev-dependencies] ff-group-tests = { path = "../ff-group-tests" } diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index 694de783..8c9a927d 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -9,7 +9,7 @@ use subtle::{ use crypto_bigint::{Integer, Encoding, U256, U512}; -use ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; +use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; use crate::{constant_time, math, from_uint}; diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 1d2e7083..9ff5f76a 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -32,8 +32,11 @@ use dalek::{ }, }; -use ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; -use group::{Group, GroupEncoding, prime::PrimeGroup}; +use group::{ + ff::{Field, PrimeField, FieldBits, PrimeFieldBits}, + Group, GroupEncoding, + prime::PrimeGroup, +}; pub mod field; diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index c45f3c00..00fa973a 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -18,9 +18,6 @@ thiserror = "1" rand_core = "0.6" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2" - -hex = "0.4" transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } 
chacha20 = { version = "0.9", features = ["zeroize"] } diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index e4aab0dd..e7047065 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -66,7 +66,7 @@ impl EncryptionKeyMessage { buf } - // Used by tests + #[cfg(any(test, feature = "tests"))] pub(crate) fn enc_key(&self) -> C::G { self.enc_key } diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index 71745a11..da114e0f 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -12,10 +12,10 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] -thiserror = "1" +thiserror = { version = "1", optional = true } rand_core = "0.6" -zeroize = { version = "1.3", features = ["zeroize_derive"] } +zeroize = { version = "^1.5", features = ["zeroize_derive"] } digest = "0.10" @@ -39,7 +39,7 @@ transcript = { package = "flexible-transcript", path = "../transcript", features [features] std = [] serialize = ["std"] -experimental = ["std", "multiexp"] +experimental = ["std", "thiserror", "multiexp"] secure_capacity_difference = [] # Only applies to experimental, yet is default to ensure security diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index 78758090..50a98c2f 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -16,12 +16,11 @@ rustdoc-args = ["--cfg", "docsrs"] lazy_static = "1" rand_core = "0.6" -digest = "0.10" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2.4" +subtle = "^2.4" -ff = "0.12" +ff = { version = "0.12", features = ["bits"] } group = "0.12" generic-array = "0.14" diff --git a/crypto/ed448/src/backend.rs b/crypto/ed448/src/backend.rs index 484df359..617dfb5e 100644 --- a/crypto/ed448/src/backend.rs +++ b/crypto/ed448/src/backend.rs @@ -11,7 +11,7 @@ macro_rules! 
field { use generic_array::{typenum::U57, GenericArray}; use crypto_bigint::{Integer, Encoding}; - use ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; + use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; // Needed to publish for some reason? Yet not actually needed #[allow(unused_imports)] diff --git a/crypto/ed448/src/point.rs b/crypto/ed448/src/point.rs index 2cce0a37..25916b95 100644 --- a/crypto/ed448/src/point.rs +++ b/crypto/ed448/src/point.rs @@ -12,8 +12,11 @@ use subtle::{Choice, CtOption, ConstantTimeEq, ConditionallySelectable, Conditio use crypto_bigint::U512; -use ff::{Field, PrimeField, PrimeFieldBits}; -use group::{Group, GroupEncoding, prime::PrimeGroup}; +use group::{ + ff::{Field, PrimeField, PrimeFieldBits}, + Group, GroupEncoding, + prime::PrimeGroup, +}; use crate::{ scalar::{Scalar, MODULUS as SCALAR_MODULUS}, diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index f03d7a87..2cbfcf89 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -19,24 +19,19 @@ rand_core = "0.6" rand_chacha = "0.3" zeroize = { version = "^1.5", features = ["zeroize_derive"] } -subtle = "2" +subtle = "^2.4" -hex = "0.4" +hex = { version = "0.4", optional = true } digest = "0.10" - -hkdf = "0.12" -chacha20 = { version = "0.9", features = ["zeroize"] } +transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } group = "0.12" +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true } minimal-ed448 = { path = "../ed448", version = "^0.1.2", optional = true } -ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } - -transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } - multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } schnorr = { package = 
"schnorr-signatures", path = "../schnorr", version = "0.2" } @@ -58,4 +53,4 @@ p256 = ["ciphersuite/p256"] ed448 = ["minimal-ed448", "ciphersuite/ed448"] -tests = ["dkg/tests"] +tests = ["hex", "dkg/tests"] diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index 089c82c3..a92732eb 100644 --- a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -11,10 +11,7 @@ use zeroize::{Zeroize, Zeroizing}; use transcript::Transcript; -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, -}; +use group::{ff::PrimeField, GroupEncoding}; use multiexp::BatchVerifier; use crate::{ @@ -198,6 +195,8 @@ impl Writable for SignatureShare { #[cfg(any(test, feature = "tests"))] impl SignatureShare { pub(crate) fn invalidate(&mut self) { + use group::ff::Field; + self.0 += C::F::one(); } } From b1ea2dfba6464814be0c5c7f8d60b447e668f635 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:10:55 -0500 Subject: [PATCH 51/71] Add support for hashing (as in HashMap) dalek points --- crypto/dalek-ff-group/src/lib.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 9ff5f76a..56aaf275 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -2,9 +2,10 @@ #![no_std] use core::{ - ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}, borrow::Borrow, + ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}, iter::{Iterator, Sum}, + hash::{Hash, Hasher}, }; use zeroize::Zeroize; @@ -413,6 +414,16 @@ macro_rules! 
dalek_group { $Point(&b.0 * &self.0) } } + + // Support being used as a key in a table + // While it is expensive as a key, due to the field operations required, there's frequently + // use cases for public key -> value lookups + #[allow(clippy::derived_hash_with_manual_eq)] + impl Hash for $Point { + fn hash(&self, state: &mut H) { + self.to_bytes().hash(state); + } + } }; } From 1a99629a4a1bd6c161dd25d50c05a23ee7e3d679 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:13:55 -0500 Subject: [PATCH 52/71] Add feature-gated serde support for Participant/ThresholdParams These don't have secret data yet sometimes have value to be communicated. --- Cargo.lock | 1 + crypto/dkg/Cargo.toml | 3 +++ crypto/dkg/src/lib.rs | 2 ++ 3 files changed, 6 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index e244b6dc..252ef1a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1699,6 +1699,7 @@ dependencies = [ "multiexp", "rand_core 0.6.4", "schnorr-signatures", + "serde", "thiserror", "zeroize", ] diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 00fa973a..22403f7b 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -19,6 +19,8 @@ rand_core = "0.6" zeroize = { version = "^1.5", features = ["zeroize_derive"] } +serde = { version = "1", features = ["derive"], optional = true } + transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } chacha20 = { version = "0.9", features = ["zeroize"] } @@ -33,4 +35,5 @@ dleq = { path = "../dleq", version = "0.2", features = ["serialize"] } ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std", "ristretto"] } [features] +serde = ["dep:serde"] tests = [] diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 4c63104f..47ffc895 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -38,6 +38,7 @@ pub mod tests; /// The ID of a participant, defined as a non-zero u16. 
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Participant(pub(crate) u16); impl Participant { pub fn new(i: u16) -> Option { @@ -123,6 +124,7 @@ pub(crate) fn validate_map( /// Parameters for a multisig. // These fields should not be made public as they should be static #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct ThresholdParams { /// Participants needed to sign on behalf of the group. t: u16, From 5b26115f81f496f23c7806050965a61d8f3c0a65 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:25:16 -0500 Subject: [PATCH 53/71] Add Debug implementations to dkg --- crypto/dkg/src/encryption.rs | 19 +++++++++++--- crypto/dkg/src/frost.rs | 48 +++++++++++++++++++++++++++++------- crypto/dkg/src/lib.rs | 13 ++++++++++ 3 files changed, 67 insertions(+), 13 deletions(-) diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index e7047065..cbf61a99 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -1,6 +1,5 @@ -use core::fmt::Debug; +use core::{ops::Deref, fmt}; use std::{ - ops::Deref, io::{self, Read, Write}, collections::HashMap, }; @@ -39,8 +38,8 @@ pub trait ReadWrite: Sized { } } -pub trait Message: Clone + PartialEq + Eq + Debug + Zeroize + ReadWrite {} -impl Message for M {} +pub trait Message: Clone + PartialEq + Eq + fmt::Debug + Zeroize + ReadWrite {} +impl Message for M {} /// Wraps a message with a key to use for encryption in the future. 
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)] @@ -347,6 +346,18 @@ pub(crate) struct Encryption { enc_keys: HashMap, } +impl fmt::Debug for Encryption { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("Encryption") + .field("context", &self.context) + .field("i", &self.i) + .field("enc_pub_key", &self.enc_pub_key) + .field("enc_keys", &self.enc_keys) + .finish_non_exhaustive() + } +} + impl Zeroize for Encryption { fn zeroize(&mut self) { self.enc_key.zeroize(); diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index 054d7bd9..e2e7c64b 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -1,8 +1,4 @@ -use core::{ - marker::PhantomData, - ops::Deref, - fmt::{Debug, Formatter}, -}; +use core::{marker::PhantomData, ops::Deref, fmt}; use std::{ io::{self, Read, Write}, collections::HashMap, @@ -85,6 +81,7 @@ impl ReadWrite for Commitments { } /// State machine to begin the key generation protocol. +#[derive(Debug, Zeroize)] pub struct KeyGenMachine { params: ThresholdParams, context: String, @@ -186,8 +183,8 @@ impl AsMut<[u8]> for SecretShare { self.0.as_mut() } } -impl Debug for SecretShare { - fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { +impl fmt::Debug for SecretShare { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("SecretShare").finish_non_exhaustive() } } @@ -198,7 +195,7 @@ impl Zeroize for SecretShare { } // Still manually implement ZeroizeOnDrop to ensure these don't stick around. // We could replace Zeroizing with a bound M: ZeroizeOnDrop. -// Doing so would potentially fail to highlight thr expected behavior with these and remove a layer +// Doing so would potentially fail to highlight the expected behavior with these and remove a layer // of depth. 
impl Drop for SecretShare { fn drop(&mut self) { @@ -229,6 +226,18 @@ pub struct SecretShareMachine { encryption: Encryption, } +impl fmt::Debug for SecretShareMachine { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("SecretShareMachine") + .field("params", &self.params) + .field("context", &self.context) + .field("our_commitments", &self.our_commitments) + .field("encryption", &self.encryption) + .finish_non_exhaustive() + } +} + impl SecretShareMachine { /// Verify the data from the previous round (canonicity, PoKs, message authenticity) #[allow(clippy::type_complexity)] @@ -322,6 +331,17 @@ pub struct KeyMachine { encryption: Encryption, } +impl fmt::Debug for KeyMachine { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("KeyMachine") + .field("params", &self.params) + .field("commitments", &self.commitments) + .field("encryption", &self.encryption) + .finish_non_exhaustive() + } +} + impl Zeroize for KeyMachine { fn zeroize(&mut self) { self.params.zeroize(); @@ -457,6 +477,16 @@ pub struct BlameMachine { result: ThresholdCore, } +impl fmt::Debug for BlameMachine { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("BlameMachine") + .field("commitments", &self.commitments) + .field("encryption", &self.encryption) + .finish_non_exhaustive() + } +} + impl Zeroize for BlameMachine { fn zeroize(&mut self) { for (_, commitments) in self.commitments.iter_mut() { @@ -542,7 +572,7 @@ impl BlameMachine { } } -#[derive(Zeroize)] +#[derive(Debug, Zeroize)] pub struct AdditionalBlameMachine(BlameMachine); impl AdditionalBlameMachine { /// Given an accusation of fault, determine the faulty party (either the sender, who sent an diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 47ffc895..20c3c06a 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -344,6 +344,19 @@ pub struct ThresholdView { verification_shares: HashMap, } +impl fmt::Debug 
for ThresholdView { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt + .debug_struct("ThresholdView") + .field("offset", &self.offset) + .field("group_key", &self.group_key) + .field("included", &self.included) + .field("original_verification_shares", &self.original_verification_shares) + .field("verification_shares", &self.verification_shares) + .finish_non_exhaustive() + } +} + impl Zeroize for ThresholdView { fn zeroize(&mut self) { self.offset.zeroize(); From 5037962d3cbd9e04c1a644a6ebbef3beebd271e2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:37:25 -0500 Subject: [PATCH 54/71] Rename dkg serialize/deserialize to write/read --- crypto/dkg/src/lib.rs | 34 +++++++++++++++++++------------ crypto/dkg/src/tests/mod.rs | 2 +- crypto/frost/src/tests/vectors.rs | 2 +- 3 files changed, 23 insertions(+), 15 deletions(-) diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index 20c3c06a..ca2c0204 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -10,7 +10,7 @@ use core::{ fmt::{self, Debug}, ops::Deref, }; -use std::{io::Read, sync::Arc, collections::HashMap}; +use std::{io, sync::Arc, collections::HashMap}; use thiserror::Error; @@ -251,21 +251,29 @@ impl ThresholdCore { self.verification_shares.clone() } - pub fn serialize(&self) -> Vec { - let mut serialized = vec![]; - serialized.extend(u32::try_from(C::ID.len()).unwrap().to_le_bytes()); - serialized.extend(C::ID); - serialized.extend(self.params.t.to_le_bytes()); - serialized.extend(self.params.n.to_le_bytes()); - serialized.extend(self.params.i.to_bytes()); - serialized.extend(self.secret_share.to_repr().as_ref()); - for l in (1 ..= self.params.n).map(Participant) { - serialized.extend(self.verification_shares[&l].to_bytes().as_ref()); + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&u32::try_from(C::ID.len()).unwrap().to_le_bytes())?; + writer.write_all(C::ID)?; + writer.write_all(&self.params.t.to_le_bytes())?; + 
writer.write_all(&self.params.n.to_le_bytes())?; + writer.write_all(&self.params.i.to_bytes())?; + let mut share_bytes = self.secret_share.to_repr(); + writer.write_all(share_bytes.as_ref())?; + share_bytes.as_mut().zeroize(); + for l in 1 ..= self.params.n { + writer + .write_all(self.verification_shares[&Participant::new(l).unwrap()].to_bytes().as_ref())?; } + Ok(()) + } + + pub fn serialize(&self) -> Zeroizing> { + let mut serialized = Zeroizing::new(vec![]); + self.write::>(serialized.as_mut()).unwrap(); serialized } - pub fn deserialize(reader: &mut R) -> Result, DkgError<()>> { + pub fn read(reader: &mut R) -> Result, DkgError<()>> { { let missing = DkgError::InternalError("ThresholdCore serialization is missing its curve"); let different = DkgError::InternalError("deserializing ThresholdCore for another curve"); @@ -413,7 +421,7 @@ impl ThresholdKeys { self.core.verification_shares() } - pub fn serialize(&self) -> Vec { + pub fn serialize(&self) -> Zeroizing> { self.core.serialize() } diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs index 81e8a9ab..db14f250 100644 --- a/crypto/dkg/src/tests/mod.rs +++ b/crypto/dkg/src/tests/mod.rs @@ -53,7 +53,7 @@ pub fn key_gen( .drain() .map(|(i, core)| { assert_eq!( - &ThresholdCore::::deserialize::<&[u8]>(&mut core.serialize().as_ref()).unwrap(), + &ThresholdCore::::read::<&[u8]>(&mut core.serialize().as_ref()).unwrap(), &core ); (i, ThresholdKeys::new(core)) diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index c7aaf0ac..14c303af 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -128,7 +128,7 @@ fn vectors_to_multisig_keys(vectors: &Vectors) -> HashMap::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap(); + let these_keys = ThresholdCore::::read::<&[u8]>(&mut serialized.as_ref()).unwrap(); assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(usize::from(these_keys.params().n()), shares.len()); let 
participant = Participant::new(i).unwrap(); From af5702fccd6a846fc8458e917c216c55bce3dd80 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:37:30 -0500 Subject: [PATCH 55/71] Make encryption public It's necessary in order to read encryption messages over the network. --- crypto/dkg/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index ca2c0204..e2e2c70d 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -23,7 +23,8 @@ use group::{ use ciphersuite::Ciphersuite; -mod encryption; +/// Encryption types and utilities used to secure DKG messages. +pub mod encryption; /// The distributed key generation protocol described in the /// [FROST paper](https://eprint.iacr.org/2020/852). From e08adcc1ac66888658909935f36df8d5ac42c467 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:46:16 -0500 Subject: [PATCH 56/71] Have Ciphersuite re-export Group --- Cargo.lock | 3 --- crypto/ciphersuite/src/lib.rs | 1 + crypto/dkg/Cargo.toml | 3 +-- crypto/dkg/src/encryption.rs | 9 ++++----- crypto/dkg/src/frost.rs | 10 ++++++---- crypto/dkg/src/lib.rs | 11 ++++++----- crypto/dkg/src/promote.rs | 4 +--- crypto/dkg/src/tests/mod.rs | 4 +--- crypto/dkg/src/tests/promote.rs | 4 +--- crypto/frost/Cargo.toml | 5 ++--- crypto/frost/src/curve/ed448.rs | 5 +---- crypto/frost/src/curve/kp256.rs | 4 +--- crypto/frost/src/curve/mod.rs | 11 ++++++----- crypto/frost/src/nonce.rs | 2 +- crypto/frost/src/sign.rs | 4 ++-- crypto/frost/src/tests/nonces.rs | 2 +- crypto/frost/src/tests/vectors.rs | 2 +- crypto/schnorr/Cargo.toml | 3 +-- crypto/schnorr/src/aggregate.rs | 10 ++++++---- crypto/schnorr/src/lib.rs | 12 ++++++------ crypto/schnorr/src/tests/mod.rs | 7 ++++--- crypto/schnorr/src/tests/rfc8032.rs | 3 +-- 22 files changed, 54 insertions(+), 65 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 252ef1a4..e2a311f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,7 +1695,6 
@@ dependencies = [ "ciphersuite", "dleq", "flexible-transcript", - "group", "multiexp", "rand_core 0.6.4", "schnorr-signatures", @@ -4675,7 +4674,6 @@ dependencies = [ "dkg", "dleq", "flexible-transcript", - "group", "hex", "minimal-ed448", "multiexp", @@ -7521,7 +7519,6 @@ dependencies = [ "ciphersuite", "dalek-ff-group", "flexible-transcript", - "group", "hex", "multiexp", "rand_core 0.6.4", diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index fde5306c..11533a73 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -14,6 +14,7 @@ use subtle::ConstantTimeEq; use digest::{core_api::BlockSizeUser, Digest, HashMarker}; use transcript::SecureDigest; +pub use group; use group::{ ff::{Field, PrimeField, PrimeFieldBits}, Group, GroupOps, diff --git a/crypto/dkg/Cargo.toml b/crypto/dkg/Cargo.toml index 22403f7b..1822cb4c 100644 --- a/crypto/dkg/Cargo.toml +++ b/crypto/dkg/Cargo.toml @@ -24,9 +24,8 @@ serde = { version = "1", features = ["derive"], optional = true } transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } chacha20 = { version = "0.9", features = ["zeroize"] } -group = "0.12" -multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } +multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" } dleq = { path = "../dleq", version = "0.2", features = ["serialize"] } diff --git a/crypto/dkg/src/encryption.rs b/crypto/dkg/src/encryption.rs index cbf61a99..c37221ec 100644 --- a/crypto/dkg/src/encryption.rs +++ b/crypto/dkg/src/encryption.rs @@ -17,9 +17,8 @@ use chacha20::{ use transcript::{Transcript, RecommendedTranscript}; #[cfg(test)] -use group::ff::Field; -use group::GroupEncoding; -use ciphersuite::Ciphersuite; +use ciphersuite::group::ff::Field; 
+use ciphersuite::{group::GroupEncoding, Ciphersuite}; use multiexp::BatchVerifier; use schnorr::SchnorrSignature; @@ -222,7 +221,7 @@ impl EncryptedMessage { from: Participant, to: C::G, ) { - use group::ff::PrimeField; + use ciphersuite::group::ff::PrimeField; let mut repr = ::Repr::default(); for b in repr.as_mut().iter_mut() { @@ -246,7 +245,7 @@ impl EncryptedMessage { from: Participant, to: C::G, ) { - use group::ff::PrimeField; + use ciphersuite::group::ff::PrimeField; // Assumes the share isn't randomly 1 let repr = C::F::one().to_repr(); diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index e2e7c64b..f137e04b 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -10,11 +10,13 @@ use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing}; use transcript::{Transcript, RecommendedTranscript}; -use group::{ - ff::{Field, PrimeField}, - Group, GroupEncoding, +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + Group, GroupEncoding, + }, + Ciphersuite, }; -use ciphersuite::Ciphersuite; use multiexp::{multiexp_vartime, BatchVerifier}; use schnorr::SchnorrSignature; diff --git a/crypto/dkg/src/lib.rs b/crypto/dkg/src/lib.rs index e2e2c70d..81e58b1b 100644 --- a/crypto/dkg/src/lib.rs +++ b/crypto/dkg/src/lib.rs @@ -16,13 +16,14 @@ use thiserror::Error; use zeroize::{Zeroize, Zeroizing}; -use group::{ - ff::{Field, PrimeField}, - GroupEncoding, +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + GroupEncoding, + }, + Ciphersuite, }; -use ciphersuite::Ciphersuite; - /// Encryption types and utilities used to secure DKG messages. 
pub mod encryption; diff --git a/crypto/dkg/src/promote.rs b/crypto/dkg/src/promote.rs index 9399224d..7edb85d5 100644 --- a/crypto/dkg/src/promote.rs +++ b/crypto/dkg/src/promote.rs @@ -7,9 +7,7 @@ use std::{ use rand_core::{RngCore, CryptoRng}; -use group::GroupEncoding; - -use ciphersuite::Ciphersuite; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use transcript::{Transcript, RecommendedTranscript}; use dleq::DLEqProof; diff --git a/crypto/dkg/src/tests/mod.rs b/crypto/dkg/src/tests/mod.rs index db14f250..5617121f 100644 --- a/crypto/dkg/src/tests/mod.rs +++ b/crypto/dkg/src/tests/mod.rs @@ -3,9 +3,7 @@ use std::collections::HashMap; use rand_core::{RngCore, CryptoRng}; -use group::ff::Field; - -use ciphersuite::Ciphersuite; +use ciphersuite::{group::ff::Field, Ciphersuite}; use crate::{Participant, ThresholdCore, ThresholdKeys, lagrange}; diff --git a/crypto/dkg/src/tests/promote.rs b/crypto/dkg/src/tests/promote.rs index e9fefc07..99c00433 100644 --- a/crypto/dkg/src/tests/promote.rs +++ b/crypto/dkg/src/tests/promote.rs @@ -5,9 +5,7 @@ use rand_core::{RngCore, CryptoRng}; use zeroize::Zeroize; -use group::Group; - -use ciphersuite::Ciphersuite; +use ciphersuite::{group::Group, Ciphersuite}; use crate::{ promote::{GeneratorPromotion, GeneratorProof}, diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 2cbfcf89..637c1498 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -26,12 +26,11 @@ hex = { version = "0.4", optional = true } digest = "0.10" transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] } -group = "0.12" -ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } - dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true } minimal-ed448 = { path = "../ed448", version = "^0.1.2", optional = true } +ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] } + multiexp = { path = 
"../multiexp", version = "0.2", features = ["batch"] } schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" } diff --git a/crypto/frost/src/curve/ed448.rs b/crypto/frost/src/curve/ed448.rs index 05877dbe..5b24dad0 100644 --- a/crypto/frost/src/curve/ed448.rs +++ b/crypto/frost/src/curve/ed448.rs @@ -1,10 +1,7 @@ use digest::Digest; -use group::GroupEncoding; - use minimal_ed448::{Scalar, Point}; - -pub use ciphersuite::{Shake256_114, Ed448}; +pub use ciphersuite::{group::GroupEncoding, Shake256_114, Ed448}; use crate::{curve::Curve, algorithm::Hram}; diff --git a/crypto/frost/src/curve/kp256.rs b/crypto/frost/src/curve/kp256.rs index 653d5c18..582d9d1d 100644 --- a/crypto/frost/src/curve/kp256.rs +++ b/crypto/frost/src/curve/kp256.rs @@ -1,6 +1,4 @@ -use group::GroupEncoding; - -use ciphersuite::Ciphersuite; +use ciphersuite::{group::GroupEncoding, Ciphersuite}; use crate::{curve::Curve, algorithm::Hram}; diff --git a/crypto/frost/src/curve/mod.rs b/crypto/frost/src/curve/mod.rs index f4e705d2..a941bbe7 100644 --- a/crypto/frost/src/curve/mod.rs +++ b/crypto/frost/src/curve/mod.rs @@ -8,13 +8,14 @@ use subtle::ConstantTimeEq; use digest::{Digest, Output}; -use group::{ - ff::{Field, PrimeField}, - Group, +pub use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + Group, + }, + Ciphersuite, }; -pub use ciphersuite::Ciphersuite; - #[cfg(any(feature = "ristretto", feature = "ed25519"))] mod dalek; #[cfg(feature = "ristretto")] diff --git a/crypto/frost/src/nonce.rs b/crypto/frost/src/nonce.rs index 2268c755..6b156212 100644 --- a/crypto/frost/src/nonce.rs +++ b/crypto/frost/src/nonce.rs @@ -21,7 +21,7 @@ use zeroize::{Zeroize, Zeroizing}; use transcript::Transcript; -use group::{ff::PrimeField, Group, GroupEncoding}; +use ciphersuite::group::{ff::PrimeField, Group, GroupEncoding}; use multiexp::multiexp_vartime; use dleq::MultiDLEqProof; diff --git a/crypto/frost/src/sign.rs b/crypto/frost/src/sign.rs index a92732eb..52507e7f 100644 --- 
a/crypto/frost/src/sign.rs +++ b/crypto/frost/src/sign.rs @@ -11,7 +11,7 @@ use zeroize::{Zeroize, Zeroizing}; use transcript::Transcript; -use group::{ff::PrimeField, GroupEncoding}; +use ciphersuite::group::{ff::PrimeField, GroupEncoding}; use multiexp::BatchVerifier; use crate::{ @@ -195,7 +195,7 @@ impl Writable for SignatureShare { #[cfg(any(test, feature = "tests"))] impl SignatureShare { pub(crate) fn invalidate(&mut self) { - use group::ff::Field; + use ciphersuite::group::ff::Field; self.0 += C::F::one(); } diff --git a/crypto/frost/src/tests/nonces.rs b/crypto/frost/src/tests/nonces.rs index 134d4a28..a4fdd19e 100644 --- a/crypto/frost/src/tests/nonces.rs +++ b/crypto/frost/src/tests/nonces.rs @@ -7,7 +7,7 @@ use rand_chacha::ChaCha20Rng; use transcript::{Transcript, RecommendedTranscript}; -use group::{ff::Field, Group, GroupEncoding}; +use ciphersuite::group::{ff::Field, Group, GroupEncoding}; use dleq::MultiDLEqProof; pub use dkg::tests::{key_gen, recover_key}; diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index 14c303af..e92474b2 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -9,7 +9,7 @@ use zeroize::Zeroizing; use rand_core::{RngCore, CryptoRng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use group::{ff::PrimeField, GroupEncoding}; +use ciphersuite::group::{ff::PrimeField, GroupEncoding}; use crate::{ curve::Curve, diff --git a/crypto/schnorr/Cargo.toml b/crypto/schnorr/Cargo.toml index f6a2ea89..e6d9e1c6 100644 --- a/crypto/schnorr/Cargo.toml +++ b/crypto/schnorr/Cargo.toml @@ -19,9 +19,8 @@ zeroize = { version = "^1.5", features = ["zeroize_derive"] } transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2" } -group = "0.12" -multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } ciphersuite = { path = "../ciphersuite", version = "0.1" } +multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] } 
[dev-dependencies] hex = "0.4" diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index bcd36910..c45abff9 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -4,12 +4,14 @@ use zeroize::Zeroize; use transcript::{Transcript, SecureDigest, DigestTranscript}; -use group::{ - ff::{Field, PrimeField}, - Group, GroupEncoding, +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + Group, GroupEncoding, + }, + Ciphersuite, }; use multiexp::multiexp_vartime; -use ciphersuite::Ciphersuite; use crate::SchnorrSignature; diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs index 972d3864..f53fb26c 100644 --- a/crypto/schnorr/src/lib.rs +++ b/crypto/schnorr/src/lib.rs @@ -5,15 +5,15 @@ use rand_core::{RngCore, CryptoRng}; use zeroize::{Zeroize, Zeroizing}; -use group::{ - ff::{Field, PrimeField}, - Group, GroupEncoding, +use ciphersuite::{ + group::{ + ff::{Field, PrimeField}, + Group, GroupEncoding, + }, + Ciphersuite, }; - use multiexp::{multiexp_vartime, BatchVerifier}; -use ciphersuite::Ciphersuite; - pub mod aggregate; #[cfg(test)] diff --git a/crypto/schnorr/src/tests/mod.rs b/crypto/schnorr/src/tests/mod.rs index 1b4fe212..fc434b97 100644 --- a/crypto/schnorr/src/tests/mod.rs +++ b/crypto/schnorr/src/tests/mod.rs @@ -3,11 +3,12 @@ use core::ops::Deref; use zeroize::Zeroizing; use rand_core::OsRng; -use group::{ff::Field, Group}; +use ciphersuite::{ + group::{ff::Field, Group}, + Ciphersuite, Ed25519, +}; use multiexp::BatchVerifier; -use ciphersuite::{Ciphersuite, Ed25519}; - use crate::{ SchnorrSignature, aggregate::{SchnorrAggregator, SchnorrAggregate}, diff --git a/crypto/schnorr/src/tests/rfc8032.rs b/crypto/schnorr/src/tests/rfc8032.rs index 001927ac..991cf450 100644 --- a/crypto/schnorr/src/tests/rfc8032.rs +++ b/crypto/schnorr/src/tests/rfc8032.rs @@ -5,9 +5,8 @@ use sha2::{Digest, Sha512}; -use group::GroupEncoding; use dalek_ff_group::Scalar; -use ciphersuite::{Ciphersuite, 
Ed25519}; +use ciphersuite::{group::GroupEncoding, Ciphersuite, Ed25519}; use crate::SchnorrSignature; From 7d12c785b7e5e3554cf059fac581a5ad05e4a342 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:46:55 -0500 Subject: [PATCH 57/71] Correct error comment in ff-group-tests --- crypto/ff-group-tests/src/group.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/ff-group-tests/src/group.rs b/crypto/ff-group-tests/src/group.rs index 59b51376..e0fddb69 100644 --- a/crypto/ff-group-tests/src/group.rs +++ b/crypto/ff-group-tests/src/group.rs @@ -11,7 +11,7 @@ use crate::prime_field::{test_prime_field, test_prime_field_bits}; pub fn test_eq() { assert_eq!(G::identity(), G::identity(), "identity != identity"); assert_eq!(G::generator(), G::generator(), "generator != generator"); - assert!(G::identity() != G::generator(), "identity != generator"); + assert!(G::identity() != G::generator(), "identity == generator"); } /// Test identity. From 5a9a42f0252587d2c57a8fce270351d89add1c50 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:48:16 -0500 Subject: [PATCH 58/71] Use variable time for verifying PoKs in the DKG --- crypto/dkg/src/frost.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/dkg/src/frost.rs b/crypto/dkg/src/frost.rs index f137e04b..2c4d74d4 100644 --- a/crypto/dkg/src/frost.rs +++ b/crypto/dkg/src/frost.rs @@ -274,7 +274,7 @@ impl SecretShareMachine { }) .collect::>(); - batch.verify_with_vartime_blame().map_err(FrostError::InvalidProofOfKnowledge)?; + batch.verify_vartime_with_vartime_blame().map_err(FrostError::InvalidProofOfKnowledge)?; commitments.insert(self.params.i, self.our_commitments.drain(..).collect()); Ok(commitments) From 2bad06e5d99634dbade981dac23f5bbe76492434 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 03:55:58 -0500 Subject: [PATCH 59/71] Fix #200 --- crypto/transcript/src/lib.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 
deletion(-) diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 2f4ef615..6cd6485c 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -50,6 +50,8 @@ enum DigestTranscriptMember { Label, Value, Challenge, + Continued, + Challenged, } impl DigestTranscriptMember { @@ -60,6 +62,8 @@ impl DigestTranscriptMember { DigestTranscriptMember::Label => 2, DigestTranscriptMember::Value => 3, DigestTranscriptMember::Challenge => 4, + DigestTranscriptMember::Continued => 5, + DigestTranscriptMember::Challenged => 6, } } } @@ -110,7 +114,13 @@ impl Transcript for DigestTranscript { fn challenge(&mut self, label: &'static [u8]) -> Self::Challenge { self.append(DigestTranscriptMember::Challenge, label); - self.0.clone().finalize() + let mut cloned = self.0.clone(); + + // Explicitly fork these transcripts to prevent length extension attacks from being possible + // (at least, without the additional ability to remove a byte from a finalized hash) + self.0.update([DigestTranscriptMember::Continued.as_u8()]); + cloned.update([DigestTranscriptMember::Challenged.as_u8()]); + cloned.finalize() } fn rng_seed(&mut self, label: &'static [u8]) -> [u8; 32] { From ed056cceaf95fd4f2ce5cf3afb893e95c95feb06 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 04:05:56 -0500 Subject: [PATCH 60/71] 3.5.2 Test non-canonical from_repr Unfortunately, G::from_bytes doesn't require canonicity so that still can't be properly tested for. While we could try to detect SEC1, and write tests on that, there's not a suitably stable/wide enough solution to be worth it. 
--- crypto/ff-group-tests/src/prime_field.rs | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/crypto/ff-group-tests/src/prime_field.rs b/crypto/ff-group-tests/src/prime_field.rs index 46e8486b..0f639372 100644 --- a/crypto/ff-group-tests/src/prime_field.rs +++ b/crypto/ff-group-tests/src/prime_field.rs @@ -70,6 +70,24 @@ pub fn test_encoding() { test(F::one(), "1"); test(F::one() + F::one(), "2"); test(-F::one(), "-1"); + + // Also check if a non-canonical encoding is possible + let mut high = (F::zero() - F::one()).to_repr(); + let mut possible_non_canon = false; + for byte in high.as_mut() { + // The fact a bit isn't set in the highest possible value suggests there's unused bits + // If there's unused bits, mark the possibility of a non-canonical encoding and set the bits + if *byte != 255 { + possible_non_canon = true; + *byte = 255; + break; + } + } + + // Any non-canonical encoding should fail to be read + if possible_non_canon { + assert!(!bool::from(F::from_repr(high).is_some())); + } } /// Run all tests on fields implementing PrimeField. From c358090f1681c06ac8a015f19b299fad7a162ea2 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 04:22:57 -0500 Subject: [PATCH 61/71] Use black_box to help obscure the dalek-ff-group bool -> Choice conversion I have no idea if this will actually help, yet it can't hurt. Feature gated due to MSRV requirements. Fixes #242. 
--- crypto/dalek-ff-group/Cargo.toml | 3 +++ crypto/dalek-ff-group/src/lib.rs | 9 ++++++++- processor/Cargo.toml | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/crypto/dalek-ff-group/Cargo.toml b/crypto/dalek-ff-group/Cargo.toml index bea86d89..3403282b 100644 --- a/crypto/dalek-ff-group/Cargo.toml +++ b/crypto/dalek-ff-group/Cargo.toml @@ -29,3 +29,6 @@ curve25519-dalek = "^3.2" [dev-dependencies] ff-group-tests = { path = "../ff-group-tests" } + +[features] +black_box = [] diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 56aaf275..4b5d5fc3 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -43,7 +43,14 @@ pub mod field; // Convert a boolean to a Choice in a *presumably* constant time manner fn choice(value: bool) -> Choice { - Choice::from(u8::from(value)) + #[cfg(not(feature = "black_box"))] + let res = Choice::from(u8::from(value)); + #[cfg(feature = "black_box")] + let res = { + use core::hint::black_box; + Choice::from(black_box(u8::from(black_box(value)))) + }; + res } macro_rules! 
deref_borrow { diff --git a/processor/Cargo.toml b/processor/Cargo.toml index f86fb7a6..127d5bd6 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -24,7 +24,7 @@ group = "0.12" curve25519-dalek = { version = "3", features = ["std"] } transcript = { package = "flexible-transcript", path = "../crypto/transcript", features = ["recommended"] } -dalek-ff-group = { path = "../crypto/dalek-ff-group" } +dalek-ff-group = { path = "../crypto/dalek-ff-group", features = ["black_box"] } frost = { package = "modular-frost", path = "../crypto/frost", features = ["ed25519"] } monero-serai = { path = "../coins/monero", features = ["multisig"] } From 0b8a4ab3d0dea0a79251c5b01813808282483127 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 04:26:19 -0500 Subject: [PATCH 62/71] Use a backwards compatible clippy lint for impl Hash --- crypto/dalek-ff-group/src/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 4b5d5fc3..22492e79 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -425,7 +425,9 @@ macro_rules! 
dalek_group { // Support being used as a key in a table // While it is expensive as a key, due to the field operations required, there's frequently // use cases for public key -> value lookups + #[allow(unknown_lints)] #[allow(clippy::derived_hash_with_manual_eq)] + #[allow(clippy::derived_hash_xor_eq)] impl Hash for $Point { fn hash(&self, state: &mut H) { self.to_bytes().hash(state); From 2e78d617520271ea845c589e9d2cb167fcd547d5 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 04:39:15 -0500 Subject: [PATCH 63/71] Fix last commit --- crypto/dalek-ff-group/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 22492e79..ebc4824b 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -426,8 +426,7 @@ macro_rules! dalek_group { // While it is expensive as a key, due to the field operations required, there's frequently // use cases for public key -> value lookups #[allow(unknown_lints)] - #[allow(clippy::derived_hash_with_manual_eq)] - #[allow(clippy::derived_hash_xor_eq)] + #[allow(clippy::derived_hash_with_manual_eq, clippy::derived_hash_xor_eq)] impl Hash for $Point { fn hash(&self, state: &mut H) { self.to_bytes().hash(state); From b0730e3fdf4f7c1f2e5c47a45771a19f04261ca4 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 04:47:06 -0500 Subject: [PATCH 64/71] Fix last commit again --- crypto/dalek-ff-group/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index ebc4824b..559a42da 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -426,7 +426,7 @@ macro_rules! 
dalek_group { // While it is expensive as a key, due to the field operations required, there's frequently // use cases for public key -> value lookups #[allow(unknown_lints)] - #[allow(clippy::derived_hash_with_manual_eq, clippy::derived_hash_xor_eq)] + #[allow(clippy::derived_hash_with_manual_eq, clippy::derive_hash_xor_eq)] impl Hash for $Point { fn hash(&self, state: &mut H) { self.to_bytes().hash(state); From 6bff3866ea3b9720418eb7059d7db42e708708b4 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 05:10:14 -0500 Subject: [PATCH 65/71] Correct Ethereum --- Cargo.lock | 1 - coins/ethereum/Cargo.toml | 1 - coins/ethereum/tests/contract.rs | 3 ++- coins/ethereum/tests/crypto.rs | 11 +++++++---- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e2a311f3..b6c86581 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2007,7 +2007,6 @@ dependencies = [ "ethers-solc", "eyre", "group", - "hex-literal", "k256 0.12.0", "modular-frost", "rand_core 0.6.4", diff --git a/coins/ethereum/Cargo.toml b/coins/ethereum/Cargo.toml index 4e837f0e..262be7ae 100644 --- a/coins/ethereum/Cargo.toml +++ b/coins/ethereum/Cargo.toml @@ -13,7 +13,6 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [dependencies] -hex-literal = "0.3" thiserror = "1" rand_core = "0.6" diff --git a/coins/ethereum/tests/contract.rs b/coins/ethereum/tests/contract.rs index 70f3320a..06b18d65 100644 --- a/coins/ethereum/tests/contract.rs +++ b/coins/ethereum/tests/contract.rs @@ -11,6 +11,7 @@ use ethers::{ use frost::{ curve::Secp256k1, + Participant, algorithm::Schnorr as Algo, tests::{key_gen, algorithm_machines, sign}, }; @@ -44,7 +45,7 @@ async fn test_ecrecover_hack() { let chain_id = U256::from(chain_id); let keys = key_gen::<_, Secp256k1>(&mut OsRng); - let group_key = keys[&1].group_key(); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); const MESSAGE: &[u8] = b"Hello, World!"; let hashed_message = keccak256(MESSAGE); diff --git 
a/coins/ethereum/tests/crypto.rs b/coins/ethereum/tests/crypto.rs index 82f61e01..01413ece 100644 --- a/coins/ethereum/tests/crypto.rs +++ b/coins/ethereum/tests/crypto.rs @@ -2,7 +2,7 @@ use k256::{ elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint}, ProjectivePoint, Scalar, U256, }; -use frost::curve::Secp256k1; +use frost::{curve::Secp256k1, Participant}; use ethereum_serai::crypto::*; @@ -21,7 +21,10 @@ fn test_ecrecover() { .as_nonzero_scalar() .try_sign_prehashed_rfc6979::(Keccak256::digest(MESSAGE), b"") .unwrap(); - assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ()); + #[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result + { + assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ()); + } assert_eq!( ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s()) @@ -39,7 +42,7 @@ fn test_signing() { use rand_core::OsRng; let keys = key_gen::<_, Secp256k1>(&mut OsRng); - let _group_key = keys[&1].group_key(); + let _group_key = keys[&Participant::new(1).unwrap()].group_key(); const MESSAGE: &[u8] = b"Hello, World!"; @@ -62,7 +65,7 @@ fn test_ecrecover_hack() { use rand_core::OsRng; let keys = key_gen::<_, Secp256k1>(&mut OsRng); - let group_key = keys[&1].group_key(); + let group_key = keys[&Participant::new(1).unwrap()].group_key(); let group_key_encoded = group_key.to_encoded_point(true); let group_key_compressed = group_key_encoded.as_ref(); let group_key_x = Scalar::from_uint_reduced(U256::from_be_slice(&group_key_compressed[1 .. 
33])); From 837c77629740a2af4c266cac94ae321f10e93965 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 05:30:21 -0500 Subject: [PATCH 66/71] Make Schnorr modular to its transcript --- crypto/ciphersuite/src/lib.rs | 2 +- crypto/frost/src/algorithm.rs | 36 +++++++++++++++++++++---------- crypto/frost/src/tests/mod.rs | 12 +++++------ crypto/frost/src/tests/vectors.rs | 8 ++++--- crypto/schnorr/src/aggregate.rs | 2 +- crypto/transcript/src/lib.rs | 10 ++++----- 6 files changed, 43 insertions(+), 27 deletions(-) diff --git a/crypto/ciphersuite/src/lib.rs b/crypto/ciphersuite/src/lib.rs index 11533a73..c4bbcf55 100644 --- a/crypto/ciphersuite/src/lib.rs +++ b/crypto/ciphersuite/src/lib.rs @@ -53,7 +53,7 @@ pub trait Ciphersuite: type G: Group + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq; /// Hash algorithm used with this curve. // Requires BlockSizeUser so it can be used within Hkdf which requies that. - type H: Clone + BlockSizeUser + Digest + HashMarker + SecureDigest; + type H: Send + Clone + BlockSizeUser + Digest + HashMarker + SecureDigest; /// ID for this curve. const ID: &'static [u8]; diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index 43a11a3d..a8222168 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -127,28 +127,42 @@ pub trait Hram: Send + Clone { fn hram(R: &C::G, A: &C::G, m: &[u8]) -> C::F; } -/// IETF-compliant Schnorr signature algorithm ((R, s) where s = r + cx). +/// Schnorr signature algorithm ((R, s) where s = r + cx). #[derive(Clone)] -pub struct Schnorr> { - transcript: IetfTranscript, +pub struct Schnorr> { + transcript: T, c: Option, _hram: PhantomData, } -impl> Default for Schnorr { - fn default() -> Self { - Self::new() +/// IETF-compliant Schnorr signature algorithm. +/// +/// This algorithm specifically uses the transcript format defined in the FROST IETF draft. 
+/// It's a naive transcript format not viable for usage in larger protocols, yet is presented here +/// in order to provide compatibility. +/// +/// Usage of this with key offsets will break the intended compatibility as the IETF draft does not +/// specify a protocol for offsets. +pub type IetfSchnorr = Schnorr; + +impl> Schnorr { + /// Construct a Schnorr algorithm continuing the specified transcript. + pub fn new(transcript: T) -> Schnorr { + Schnorr { transcript, c: None, _hram: PhantomData } } } -impl> Schnorr { - pub fn new() -> Schnorr { - Schnorr { transcript: IetfTranscript(vec![]), c: None, _hram: PhantomData } +impl> IetfSchnorr { + /// Construct a IETF-compatible Schnorr algorithm. + /// + /// Please see the IetfSchnorr documentation for the full details of this. + pub fn ietf() -> IetfSchnorr { + Schnorr::new(IetfTranscript(vec![])) } } -impl> Algorithm for Schnorr { - type Transcript = IetfTranscript; +impl> Algorithm for Schnorr { + type Transcript = T; type Addendum = (); type Signature = SchnorrSignature; diff --git a/crypto/frost/src/tests/mod.rs b/crypto/frost/src/tests/mod.rs index a0d2e8fe..1297cdd9 100644 --- a/crypto/frost/src/tests/mod.rs +++ b/crypto/frost/src/tests/mod.rs @@ -6,7 +6,7 @@ pub use dkg::tests::{key_gen, recover_key}; use crate::{ Curve, Participant, ThresholdKeys, FrostError, - algorithm::{Algorithm, Hram, Schnorr}, + algorithm::{Algorithm, Hram, IetfSchnorr}, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}, }; @@ -197,8 +197,8 @@ pub fn test_schnorr>(rng: &mut R) { const MSG: &[u8] = b"Hello, World!"; let keys = key_gen(&mut *rng); - let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); - let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); + let machines = algorithm_machines(&mut *rng, IetfSchnorr::::ietf(), &keys); + let sig = sign(&mut *rng, IetfSchnorr::::ietf(), keys.clone(), machines, MSG); let group_key = 
keys[&Participant::new(1).unwrap()].group_key(); assert!(sig.verify(group_key, H::hram(&sig.R, &group_key, MSG))); } @@ -217,8 +217,8 @@ pub fn test_offset_schnorr>(rng: &m assert_eq!(keys.group_key(), offset_key); } - let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); - let sig = sign(&mut *rng, Schnorr::::new(), keys.clone(), machines, MSG); + let machines = algorithm_machines(&mut *rng, IetfSchnorr::::ietf(), &keys); + let sig = sign(&mut *rng, IetfSchnorr::::ietf(), keys.clone(), machines, MSG); let group_key = keys[&Participant::new(1).unwrap()].group_key(); assert!(sig.verify(offset_key, H::hram(&sig.R, &group_key, MSG))); } @@ -228,7 +228,7 @@ pub fn test_schnorr_blame>(rng: &mu const MSG: &[u8] = b"Hello, World!"; let keys = key_gen(&mut *rng); - let machines = algorithm_machines(&mut *rng, Schnorr::::new(), &keys); + let machines = algorithm_machines(&mut *rng, IetfSchnorr::::ietf(), &keys); let (mut machines, shares) = preprocess_and_shares(&mut *rng, machines, |_, _| {}, MSG); diff --git a/crypto/frost/src/tests/vectors.rs b/crypto/frost/src/tests/vectors.rs index e92474b2..84d3676e 100644 --- a/crypto/frost/src/tests/vectors.rs +++ b/crypto/frost/src/tests/vectors.rs @@ -14,7 +14,7 @@ use ciphersuite::group::{ff::PrimeField, GroupEncoding}; use crate::{ curve::Curve, Participant, ThresholdCore, ThresholdKeys, - algorithm::{IetfTranscript, Hram, Schnorr}, + algorithm::{IetfTranscript, Hram, IetfSchnorr}, sign::{ Writable, Nonce, GeneratorCommitments, NonceCommitments, Commitments, Preprocess, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, @@ -160,7 +160,8 @@ pub fn test_with_vectors>( let mut machines = vec![]; for i in &vectors.included { - machines.push((i, AlgorithmMachine::new(Schnorr::::new(), keys[i].clone()).unwrap())); + machines + .push((i, AlgorithmMachine::new(IetfSchnorr::::ietf(), keys[i].clone()).unwrap())); } let mut commitments = HashMap::new(); @@ -342,7 +343,8 @@ pub fn test_with_vectors>( // 
Create the machines let mut machines = vec![]; for i in &vectors.included { - machines.push((i, AlgorithmMachine::new(Schnorr::::new(), keys[i].clone()).unwrap())); + machines + .push((i, AlgorithmMachine::new(IetfSchnorr::::ietf(), keys[i].clone()).unwrap())); } for (i, machine) in machines.drain(..) { diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index c45abff9..52a9554f 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -16,7 +16,7 @@ use multiexp::multiexp_vartime; use crate::SchnorrSignature; // Returns a unbiased scalar weight to use on a signature in order to prevent malleability -fn weight(digest: &mut DigestTranscript) -> F { +fn weight(digest: &mut DigestTranscript) -> F { let mut bytes = digest.challenge(b"aggregation_weight"); debug_assert_eq!(bytes.len() % 8, 0); // This should be guaranteed thanks to SecureDigest diff --git a/crypto/transcript/src/lib.rs b/crypto/transcript/src/lib.rs index 6cd6485c..13bb750a 100644 --- a/crypto/transcript/src/lib.rs +++ b/crypto/transcript/src/lib.rs @@ -16,8 +16,8 @@ use digest::{ Digest, Output, HashMarker, }; -pub trait Transcript { - type Challenge: Clone + Send + Sync + AsRef<[u8]>; +pub trait Transcript: Send + Clone { + type Challenge: Send + Sync + Clone + AsRef<[u8]>; /// Create a new transcript with the specified name. fn new(name: &'static [u8]) -> Self; @@ -83,9 +83,9 @@ where /// A simple transcript format constructed around the specified hash algorithm. 
#[derive(Clone, Debug)] -pub struct DigestTranscript(D); +pub struct DigestTranscript(D); -impl DigestTranscript { +impl DigestTranscript { fn append(&mut self, kind: DigestTranscriptMember, value: &[u8]) { self.0.update([kind.as_u8()]); // Assumes messages don't exceed 16 exabytes @@ -94,7 +94,7 @@ impl DigestTranscript { } } -impl Transcript for DigestTranscript { +impl Transcript for DigestTranscript { type Challenge = Output; fn new(name: &'static [u8]) -> Self { From 11114dcb74d02aadbb5050f8f9088b2fe6e49155 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 05:31:02 -0500 Subject: [PATCH 67/71] Further fix the clippy lint controls for Hash on dalek_ff_group::*Point --- crypto/dalek-ff-group/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index 559a42da..a05725ff 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ b/crypto/dalek-ff-group/src/lib.rs @@ -425,7 +425,7 @@ macro_rules! 
dalek_group { // Support being used as a key in a table // While it is expensive as a key, due to the field operations required, there's frequently // use cases for public key -> value lookups - #[allow(unknown_lints)] + #[allow(unknown_lints, renamed_and_removed_lints)] #[allow(clippy::derived_hash_with_manual_eq, clippy::derive_hash_xor_eq)] impl Hash for $Point { fn hash(&self, state: &mut H) { From 1e201562dfc80ff1e8e2ad639ab5298c5b43211e Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 05:34:29 -0500 Subject: [PATCH 68/71] Correct doc comments re: HTML tags --- crypto/dleq/src/lib.rs | 4 ++-- crypto/frost/src/algorithm.rs | 2 +- crypto/schnorr/src/aggregate.rs | 2 +- crypto/schnorr/src/lib.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crypto/dleq/src/lib.rs b/crypto/dleq/src/lib.rs index 71b9cb4b..25062feb 100644 --- a/crypto/dleq/src/lib.rs +++ b/crypto/dleq/src/lib.rs @@ -191,7 +191,7 @@ impl DLEqProof { Ok(DLEqProof { c: read_scalar(r)?, s: read_scalar(r)? }) } - /// Serialize a DLEq proof to a Vec. + /// Serialize a DLEq proof to a `Vec`. #[cfg(feature = "serialize")] pub fn serialize(&self) -> Vec { let mut res = vec![]; @@ -315,7 +315,7 @@ impl MultiDLEqProof { Ok(MultiDLEqProof { c, s }) } - /// Serialize a multi-DLEq proof to a Vec. + /// Serialize a multi-DLEq proof to a `Vec`. #[cfg(feature = "serialize")] pub fn serialize(&self) -> Vec { let mut res = vec![]; diff --git a/crypto/frost/src/algorithm.rs b/crypto/frost/src/algorithm.rs index a8222168..6c5cac9a 100644 --- a/crypto/frost/src/algorithm.rs +++ b/crypto/frost/src/algorithm.rs @@ -155,7 +155,7 @@ impl> Schnorr { impl> IetfSchnorr { /// Construct a IETF-compatible Schnorr algorithm. /// - /// Please see the IetfSchnorr documentation for the full details of this. + /// Please see the `IetfSchnorr` documentation for the full details of this. 
pub fn ietf() -> IetfSchnorr { Schnorr::new(IetfTranscript(vec![])) } diff --git a/crypto/schnorr/src/aggregate.rs b/crypto/schnorr/src/aggregate.rs index 52a9554f..592ed051 100644 --- a/crypto/schnorr/src/aggregate.rs +++ b/crypto/schnorr/src/aggregate.rs @@ -99,7 +99,7 @@ impl SchnorrAggregate { writer.write_all(self.s.to_repr().as_ref()) } - /// Serialize a SchnorrAggregate, returning a Vec. + /// Serialize a SchnorrAggregate, returning a `Vec`. pub fn serialize(&self) -> Vec { let mut buf = vec![]; self.write(&mut buf).unwrap(); diff --git a/crypto/schnorr/src/lib.rs b/crypto/schnorr/src/lib.rs index f53fb26c..f852eee2 100644 --- a/crypto/schnorr/src/lib.rs +++ b/crypto/schnorr/src/lib.rs @@ -46,7 +46,7 @@ impl SchnorrSignature { writer.write_all(self.s.to_repr().as_ref()) } - /// Serialize a SchnorrSignature, returning a Vec. + /// Serialize a SchnorrSignature, returning a `Vec`. pub fn serialize(&self) -> Vec { let mut buf = vec![]; self.write(&mut buf).unwrap(); From 62dfc63532f1dbd97ea1273ae9ac9f0761e94ac8 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Tue, 7 Mar 2023 06:25:21 -0500 Subject: [PATCH 69/71] Fix Ethereum, again --- coins/ethereum/tests/contract.rs | 4 ++-- coins/ethereum/tests/crypto.rs | 10 +++++----- crypto/frost/Cargo.toml | 1 + 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/coins/ethereum/tests/contract.rs b/coins/ethereum/tests/contract.rs index 06b18d65..f4dbc8ef 100644 --- a/coins/ethereum/tests/contract.rs +++ b/coins/ethereum/tests/contract.rs @@ -12,7 +12,7 @@ use ethers::{ use frost::{ curve::Secp256k1, Participant, - algorithm::Schnorr as Algo, + algorithm::IetfSchnorr, tests::{key_gen, algorithm_machines, sign}, }; @@ -52,7 +52,7 @@ async fn test_ecrecover_hack() { let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat(); - let algo = Algo::::new(); + let algo = IetfSchnorr::::ietf(); let sig = sign( &mut OsRng, algo.clone(), diff --git a/coins/ethereum/tests/crypto.rs 
b/coins/ethereum/tests/crypto.rs index 01413ece..cc8359ed 100644 --- a/coins/ethereum/tests/crypto.rs +++ b/coins/ethereum/tests/crypto.rs @@ -36,7 +36,7 @@ fn test_ecrecover() { #[test] fn test_signing() { use frost::{ - algorithm::Schnorr, + algorithm::IetfSchnorr, tests::{algorithm_machines, key_gen, sign}, }; use rand_core::OsRng; @@ -46,12 +46,12 @@ fn test_signing() { const MESSAGE: &[u8] = b"Hello, World!"; - let algo = Schnorr::::new(); + let algo = IetfSchnorr::::ietf(); let _sig = sign( &mut OsRng, algo, keys.clone(), - algorithm_machines(&mut OsRng, Schnorr::::new(), &keys), + algorithm_machines(&mut OsRng, IetfSchnorr::::ietf(), &keys), MESSAGE, ); } @@ -59,7 +59,7 @@ fn test_signing() { #[test] fn test_ecrecover_hack() { use frost::{ - algorithm::Schnorr, + algorithm::IetfSchnorr, tests::{algorithm_machines, key_gen, sign}, }; use rand_core::OsRng; @@ -76,7 +76,7 @@ fn test_ecrecover_hack() { let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat(); - let algo = Schnorr::::new(); + let algo = IetfSchnorr::::ietf(); let sig = sign( &mut OsRng, algo.clone(), diff --git a/crypto/frost/Cargo.toml b/crypto/frost/Cargo.toml index 637c1498..1ce90194 100644 --- a/crypto/frost/Cargo.toml +++ b/crypto/frost/Cargo.toml @@ -39,6 +39,7 @@ dleq = { path = "../dleq", version = "0.2", features = ["serialize"] } dkg = { path = "../dkg", version = "0.2" } [dev-dependencies] +hex = "0.4" serde_json = "1" dkg = { path = "../dkg", version = "0.2", features = ["tests"] } From ad470bc969c58e30192e2bcd020b970b90cab60a Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Fri, 10 Mar 2023 06:27:44 -0500 Subject: [PATCH 70/71] \#242 Expand usage of black_box/zeroize This commit greatly expands the usage of black_box/zeroize on bits, as it originally should have. It is likely overkill, leading to less efficient code generation, yet does its best to be comprehensive where comprehensiveness is extremely annoying to achieve. 
In the future, this usage of black_box may be desirable to move to its own crate. Credit to @AaronFeickert for identifying the original commit was incomplete. --- crypto/dalek-ff-group/src/field.rs | 10 +++-- crypto/dalek-ff-group/src/lib.rs | 54 ++++++++++++++++++++------- crypto/dleq/Cargo.toml | 8 +++- crypto/dleq/src/cross_group/mod.rs | 34 +++++++++++++---- crypto/dleq/src/cross_group/scalar.rs | 21 ++++++----- crypto/ed448/Cargo.toml | 3 ++ crypto/ed448/src/backend.rs | 37 +++++++++++++++--- crypto/ed448/src/point.rs | 11 ++++-- crypto/multiexp/Cargo.toml | 1 + crypto/multiexp/src/lib.rs | 31 +++++++++++++-- 10 files changed, 163 insertions(+), 47 deletions(-) diff --git a/crypto/dalek-ff-group/src/field.rs b/crypto/dalek-ff-group/src/field.rs index 8c9a927d..2afe6396 100644 --- a/crypto/dalek-ff-group/src/field.rs +++ b/crypto/dalek-ff-group/src/field.rs @@ -1,5 +1,6 @@ -use core::ops::{Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}; +use core::ops::{DerefMut, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}; +use zeroize::Zeroize; use rand_core::RngCore; use subtle::{ @@ -11,7 +12,7 @@ use crypto_bigint::{Integer, Encoding, U256, U512}; use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits}; -use crate::{constant_time, math, from_uint}; +use crate::{u8_from_bool, constant_time, math, from_uint}; // 2^255 - 19 // Uses saturating_sub because checked_sub isn't available at compile time @@ -200,10 +201,11 @@ impl FieldElement { let mut res = FieldElement::one(); let mut bits = 0; - for (i, bit) in other.to_le_bits().iter().rev().enumerate() { + for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() { bits <<= 1; - let bit = u8::from(*bit); + let mut bit = u8_from_bool(bit.deref_mut()); bits |= bit; + bit.zeroize(); if ((i + 1) % 4) == 0 { if i != 3 { diff --git a/crypto/dalek-ff-group/src/lib.rs b/crypto/dalek-ff-group/src/lib.rs index a05725ff..5a63375e 100644 --- a/crypto/dalek-ff-group/src/lib.rs +++ 
b/crypto/dalek-ff-group/src/lib.rs @@ -3,7 +3,7 @@ use core::{ borrow::Borrow, - ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}, + ops::{Deref, DerefMut, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign}, iter::{Iterator, Sum}, hash::{Hash, Hasher}, }; @@ -41,18 +41,34 @@ use group::{ pub mod field; -// Convert a boolean to a Choice in a *presumably* constant time manner -fn choice(value: bool) -> Choice { - #[cfg(not(feature = "black_box"))] - let res = Choice::from(u8::from(value)); - #[cfg(feature = "black_box")] - let res = { - use core::hint::black_box; - Choice::from(black_box(u8::from(black_box(value)))) - }; +// Feature gated due to MSRV requirements +#[cfg(feature = "black_box")] +pub(crate) fn black_box(val: T) -> T { + core::hint::black_box(val) +} + +#[cfg(not(feature = "black_box"))] +pub(crate) fn black_box(val: T) -> T { + val +} + +fn u8_from_bool(bit_ref: &mut bool) -> u8 { + let bit_ref = black_box(bit_ref); + + let mut bit = black_box(*bit_ref); + let res = black_box(bit as u8); + bit.zeroize(); + debug_assert!((res | 1) == 1); + + bit_ref.zeroize(); res } +// Convert a boolean to a Choice in a *presumably* constant time manner +fn choice(mut value: bool) -> Choice { + Choice::from(u8_from_bool(&mut value)) +} + macro_rules! deref_borrow { ($Source: ident, $Target: ident) => { impl Deref for $Source { @@ -202,10 +218,11 @@ impl Scalar { let mut res = Scalar::one(); let mut bits = 0; - for (i, bit) in other.to_le_bits().iter().rev().enumerate() { + for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() { bits <<= 1; - let bit = u8::from(*bit); + let mut bit = u8_from_bool(bit.deref_mut()); bits |= bit; + bit.zeroize(); if ((i + 1) % 4) == 0 { if i != 3 { @@ -288,7 +305,18 @@ impl PrimeField for Scalar { // The number of leading zero bits in the little-endian bit representation of (modulus - 1) const S: u32 = 2; fn is_odd(&self) -> Choice { - choice(self.to_le_bits()[0]) + // This is probably overkill? 
Yet it's better safe than sorry since this is a complete + // decomposition of the scalar + let mut bits = self.to_le_bits(); + let res = choice(bits[0]); + // This shouldn't need mut since it should be a mutable reference + // Per the bitvec docs, writing through a derefence requires mut, writing through one of its + // methods does not + // We do not use one of its methods to ensure we write via zeroize + for mut bit in bits.iter_mut() { + bit.deref_mut().zeroize(); + } + res } fn multiplicative_generator() -> Self { // This was calculated with the method from the ff crate docs diff --git a/crypto/dleq/Cargo.toml b/crypto/dleq/Cargo.toml index da114e0f..0032c447 100644 --- a/crypto/dleq/Cargo.toml +++ b/crypto/dleq/Cargo.toml @@ -39,8 +39,14 @@ transcript = { package = "flexible-transcript", path = "../transcript", features [features] std = [] serialize = ["std"] -experimental = ["std", "thiserror", "multiexp"] + +# Needed for cross-group DLEqs +black_box = [] secure_capacity_difference = [] +experimental = ["std", "thiserror", "multiexp"] # Only applies to experimental, yet is default to ensure security +# experimental doesn't mandate it itself in case two curves with extreme +# capacity differences are desired to be used together, in which case the user +# must specify experimental without default features default = ["secure_capacity_difference"] diff --git a/crypto/dleq/src/cross_group/mod.rs b/crypto/dleq/src/cross_group/mod.rs index e5095b45..b59a84fc 100644 --- a/crypto/dleq/src/cross_group/mod.rs +++ b/crypto/dleq/src/cross_group/mod.rs @@ -1,4 +1,6 @@ -use core::ops::Deref; +use core::ops::{Deref, DerefMut}; +#[cfg(feature = "serialize")] +use std::io::{Read, Write}; use thiserror::Error; @@ -27,8 +29,28 @@ pub(crate) mod aos; mod bits; use bits::{BitSignature, Bits}; -#[cfg(feature = "serialize")] -use std::io::{Read, Write}; +// Feature gated due to MSRV requirements +#[cfg(feature = "black_box")] +pub(crate) fn black_box(val: T) -> T { + 
core::hint::black_box(val) +} + +#[cfg(not(feature = "black_box"))] +pub(crate) fn black_box(val: T) -> T { + val +} + +fn u8_from_bool(bit_ref: &mut bool) -> u8 { + let bit_ref = black_box(bit_ref); + + let mut bit = black_box(*bit_ref); + let res = black_box(bit as u8); + bit.zeroize(); + debug_assert!((res | 1) == 1); + + bit_ref.zeroize(); + res +} #[cfg(feature = "serialize")] pub(crate) fn read_point(r: &mut R) -> std::io::Result { @@ -224,15 +246,13 @@ where let mut these_bits: u8 = 0; // Needed to zero out the bits #[allow(unused_assignments)] - for (i, mut raw_bit) in raw_bits.iter_mut().enumerate() { + for (i, mut bit) in raw_bits.iter_mut().enumerate() { if i == capacity { break; } - let mut bit = u8::from(*raw_bit); - *raw_bit = false; - // Accumulate this bit + let mut bit = u8_from_bool(bit.deref_mut()); these_bits |= bit << (i % bits_per_group); bit.zeroize(); diff --git a/crypto/dleq/src/cross_group/scalar.rs b/crypto/dleq/src/cross_group/scalar.rs index fefd819d..14ef71c9 100644 --- a/crypto/dleq/src/cross_group/scalar.rs +++ b/crypto/dleq/src/cross_group/scalar.rs @@ -1,21 +1,26 @@ +use core::ops::DerefMut; + use ff::PrimeFieldBits; use zeroize::Zeroize; +use crate::cross_group::u8_from_bool; + /// Convert a uniform scalar into one usable on both fields, clearing the top bits as needed. pub fn scalar_normalize( mut scalar: F0, ) -> (F0, F1) { let mutual_capacity = F0::CAPACITY.min(F1::CAPACITY); - // The security of a mutual key is the security of the lower field. 
Accordingly, this bans a - // difference of more than 4 bits + // A mutual key is only as secure as its weakest group + // Accordingly, this bans a capacity difference of more than 4 bits to prevent a curve generally + // offering n-bits of security from being forced into a situation with much fewer bits #[cfg(feature = "secure_capacity_difference")] - assert!((F0::CAPACITY.max(F1::CAPACITY) - mutual_capacity) < 4); + assert!((F0::CAPACITY.max(F1::CAPACITY) - mutual_capacity) <= 4); let mut res1 = F0::zero(); let mut res2 = F1::zero(); - // Uses the bit view API to ensure a consistent endianess + // Uses the bits API to ensure a consistent endianess let mut bits = scalar.to_le_bits(); scalar.zeroize(); // Convert it to big endian @@ -24,9 +29,9 @@ pub fn scalar_normalize( let mut skip = bits.len() - usize::try_from(mutual_capacity).unwrap(); // Needed to zero out the bits #[allow(unused_assignments)] - for mut raw_bit in bits.iter_mut() { + for mut bit in bits.iter_mut() { if skip > 0 { - *raw_bit = false; + bit.deref_mut().zeroize(); skip -= 1; continue; } @@ -34,9 +39,7 @@ pub fn scalar_normalize( res1 = res1.double(); res2 = res2.double(); - let mut bit = u8::from(*raw_bit); - *raw_bit = false; - + let mut bit = u8_from_bool(bit.deref_mut()); res1 += F0::from(bit.into()); res2 += F1::from(bit.into()); bit.zeroize(); diff --git a/crypto/ed448/Cargo.toml b/crypto/ed448/Cargo.toml index 50a98c2f..fb2da204 100644 --- a/crypto/ed448/Cargo.toml +++ b/crypto/ed448/Cargo.toml @@ -32,3 +32,6 @@ dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" } hex = "0.4" ff-group-tests = { path = "../ff-group-tests" } + +[features] +black_box = [] diff --git a/crypto/ed448/src/backend.rs b/crypto/ed448/src/backend.rs index 617dfb5e..7daae853 100644 --- a/crypto/ed448/src/backend.rs +++ b/crypto/ed448/src/backend.rs @@ -1,12 +1,36 @@ +use zeroize::Zeroize; + +// Feature gated due to MSRV requirements +#[cfg(feature = "black_box")] +pub(crate) fn black_box(val: T) -> T { 
+ core::hint::black_box(val) +} + +#[cfg(not(feature = "black_box"))] +pub(crate) fn black_box(val: T) -> T { + val +} + +pub(crate) fn u8_from_bool(bit_ref: &mut bool) -> u8 { + let bit_ref = black_box(bit_ref); + + let mut bit = black_box(*bit_ref); + let res = black_box(bit as u8); + bit.zeroize(); + debug_assert!((res | 1) == 1); + + bit_ref.zeroize(); + res +} + #[doc(hidden)] #[macro_export] macro_rules! field { ($FieldName: ident, $MODULUS: ident, $WIDE_MODULUS: ident, $NUM_BITS: literal) => { - use core::ops::{Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign}; - - use rand_core::RngCore; + use core::ops::{DerefMut, Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign}; use subtle::{Choice, CtOption, ConstantTimeEq, ConstantTimeLess, ConditionallySelectable}; + use rand_core::RngCore; use generic_array::{typenum::U57, GenericArray}; use crypto_bigint::{Integer, Encoding}; @@ -18,6 +42,8 @@ macro_rules! field { use dalek_ff_group::{from_wrapper, math_op}; use dalek_ff_group::{constant_time, from_uint, math}; + use $crate::backend::u8_from_bool; + fn reduce(x: U1024) -> U512 { U512::from_le_slice(&x.reduce(&$WIDE_MODULUS).unwrap().to_le_bytes()[.. 64]) } @@ -59,10 +85,11 @@ macro_rules! 
field { let mut res = Self(U512::ONE); let mut bits = 0; - for (i, bit) in other.to_le_bits().iter().rev().enumerate() { + for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() { bits <<= 1; - let bit = u8::from(*bit); + let mut bit = u8_from_bool(bit.deref_mut()); bits |= bit; + bit.zeroize(); if ((i + 1) % 4) == 0 { if i != 3 { diff --git a/crypto/ed448/src/point.rs b/crypto/ed448/src/point.rs index 25916b95..54e8caac 100644 --- a/crypto/ed448/src/point.rs +++ b/crypto/ed448/src/point.rs @@ -1,5 +1,5 @@ use core::{ - ops::{Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign}, + ops::{DerefMut, Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign}, iter::Sum, }; @@ -19,6 +19,7 @@ use group::{ }; use crate::{ + backend::u8_from_bool, scalar::{Scalar, MODULUS as SCALAR_MODULUS}, field::{FieldElement, MODULUS as FIELD_MODULUS, Q_4}, }; @@ -218,7 +219,7 @@ impl<'a> Sum<&'a Point> for Point { impl Mul for Point { type Output = Point; - fn mul(self, other: Scalar) -> Point { + fn mul(self, mut other: Scalar) -> Point { // Precompute the optimal amount that's a multiple of 2 let mut table = [Point::identity(); 16]; table[1] = self; @@ -228,10 +229,11 @@ impl Mul for Point { let mut res = Self::identity(); let mut bits = 0; - for (i, bit) in other.to_le_bits().iter().rev().enumerate() { + for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() { bits <<= 1; - let bit = u8::from(*bit); + let mut bit = u8_from_bool(bit.deref_mut()); bits |= bit; + bit.zeroize(); if ((i + 1) % 4) == 0 { if i != 3 { @@ -243,6 +245,7 @@ impl Mul for Point { bits = 0; } } + other.zeroize(); res } } diff --git a/crypto/multiexp/Cargo.toml b/crypto/multiexp/Cargo.toml index a54ae0d8..24ebd399 100644 --- a/crypto/multiexp/Cargo.toml +++ b/crypto/multiexp/Cargo.toml @@ -27,4 +27,5 @@ k256 = { version = "0.12", features = ["bits"] } dalek-ff-group = { path = "../dalek-ff-group" } [features] +black_box = [] batch = ["rand_core"] diff --git a/crypto/multiexp/src/lib.rs 
b/crypto/multiexp/src/lib.rs index df4da3e3..9d309637 100644 --- a/crypto/multiexp/src/lib.rs +++ b/crypto/multiexp/src/lib.rs @@ -1,5 +1,7 @@ #![cfg_attr(docsrs, feature(doc_auto_cfg))] +use core::ops::DerefMut; + use zeroize::Zeroize; use ff::PrimeFieldBits; @@ -19,6 +21,29 @@ pub use batch::BatchVerifier; #[cfg(test)] mod tests; +// Feature gated due to MSRV requirements +#[cfg(feature = "black_box")] +pub(crate) fn black_box(val: T) -> T { + core::hint::black_box(val) +} + +#[cfg(not(feature = "black_box"))] +pub(crate) fn black_box(val: T) -> T { + val +} + +fn u8_from_bool(bit_ref: &mut bool) -> u8 { + let bit_ref = black_box(bit_ref); + + let mut bit = black_box(*bit_ref); + let res = black_box(bit as u8); + bit.zeroize(); + debug_assert!((res | 1) == 1); + + bit_ref.zeroize(); + res +} + // Convert scalars to `window`-sized bit groups, as needed to index a table // This algorithm works for `window <= 8` pub(crate) fn prep_bits(pairs: &[(G::Scalar, G)], window: u8) -> Vec> @@ -33,10 +58,8 @@ where let mut bits = pair.0.to_le_bits(); groupings.push(vec![0; (bits.len() + (w_usize - 1)) / w_usize]); - for (i, mut raw_bit) in bits.iter_mut().enumerate() { - let mut bit = u8::from(*raw_bit); - (*raw_bit).zeroize(); - + for (i, mut bit) in bits.iter_mut().enumerate() { + let mut bit = u8_from_bool(bit.deref_mut()); groupings[p][i / w_usize] |= bit << (i % w_usize); bit.zeroize(); } From 669d2dbffc1dafb82a09d9419ea182667115df06 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Wed, 15 Mar 2023 19:55:07 -0400 Subject: [PATCH 71/71] 3.10.2 Explicitly test RecommendedTranscript --- Cargo.lock | 1 + crypto/transcript/Cargo.toml | 1 + crypto/transcript/src/tests.rs | 7 +++++++ 3 files changed, 9 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index b6c86581..ee05a685 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2436,6 +2436,7 @@ dependencies = [ "blake2", "digest 0.10.6", "merlin 3.0.0", + "sha2 0.10.6", ] [[package]] diff --git a/crypto/transcript/Cargo.toml 
b/crypto/transcript/Cargo.toml index b7a6d9d0..d884ee96 100644 --- a/crypto/transcript/Cargo.toml +++ b/crypto/transcript/Cargo.toml @@ -19,6 +19,7 @@ blake2 = { version = "0.10", optional = true } merlin = { version = "3", optional = true } [dev-dependencies] +sha2 = "0.10" blake2 = "0.10" [features] diff --git a/crypto/transcript/src/tests.rs b/crypto/transcript/src/tests.rs index 0a461ba5..4a9f9dfc 100644 --- a/crypto/transcript/src/tests.rs +++ b/crypto/transcript/src/tests.rs @@ -85,9 +85,16 @@ where #[test] fn test_digest() { + test_transcript::>(); test_transcript::>(); } +#[cfg(feature = "recommended")] +#[test] +fn test_recommended() { + test_transcript::(); +} + #[cfg(feature = "merlin")] #[test] fn test_merlin() {