Correct discrepancies with the IETF draft

While the transcript/extension code works as expected and doesn't cause any
conflicts, n was still capped at u64::MAX at creation when it needs to fit in a
u16. Furthermore, the participant index and scalars/points were serialized
little endian instead of big endian/curve dependent.
Luke Parker 2022-05-06 07:49:18 -04:00
parent b443747994
commit 3dab26cd94
7 changed files with 32 additions and 35 deletions
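
As an illustration of the convention this commit moves to (a minimal sketch, not code from the diff; the function name is hypothetical), participant indices are now bounded by u16 and serialized big endian, matching the IETF draft, rather than written as little-endian u64:

    // Hedged sketch: a participant index must fit in a u16 and is appended to
    // transcripts/challenges big endian. Panics past u16::MAX, mirroring the new
    // bound MultisigParams places on n.
    fn participant_index_bytes(l: usize) -> [u8; 2] {
      u16::try_from(l).unwrap().to_be_bytes()
    }

    fn main() {
      assert_eq!(participant_index_bytes(1), [0, 1]);
      assert_eq!(participant_index_bytes(258), [1, 2]);
    }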


@@ -162,7 +162,7 @@ impl Algorithm<Ed25519> for Multisig {
     // Given this is guaranteed to match commitments, which FROST commits to, this also technically
     // doesn't need to be committed to if a canonical serialization is guaranteed
     // It, again, doesn't hurt to include and ensures security boundaries are well formed
-    self.transcript.append_message(b"participant", &u64::try_from(l).unwrap().to_le_bytes());
+    self.transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
     self.transcript.append_message(b"commitments_H", &serialized[0 .. 64]);
     #[allow(non_snake_case)]


@@ -77,15 +77,14 @@ impl Curve for Ed25519 {
     32
   }
-  fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
+  fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
     let scalar = Self::F::from_repr(
       slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
     );
-    if scalar.is_some().unwrap_u8() == 1 {
-      Ok(scalar.unwrap())
-    } else {
-      Err(CurveError::InvalidScalar)
+    if scalar.is_some().unwrap_u8() == 0 {
+      Err(CurveError::InvalidScalar)?;
     }
+    Ok(scalar.unwrap())
   }
   fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
@@ -105,7 +104,7 @@ impl Curve for Ed25519 {
     }
   }
-  fn F_to_le_bytes(f: &Self::F) -> Vec<u8> {
+  fn F_to_bytes(f: &Self::F) -> Vec<u8> {
     f.to_repr().to_vec()
   }
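
The F_from_slice implementation above rejects non-canonical scalar encodings via the CtOption returned by from_repr. A hedged sketch of the same pattern written against the generic ff::PrimeField trait, rather than this crate's Curve trait (the helper name is illustrative):

    use ff::PrimeField;

    // Returns None for wrong-length or non-canonical encodings: from_repr yields a
    // CtOption that is None unless the bytes are the canonical representation.
    fn scalar_from_slice<F: PrimeField>(slice: &[u8]) -> Option<F> {
      let mut repr = F::Repr::default();
      if slice.len() != repr.as_ref().len() {
        return None;
      }
      repr.as_mut().copy_from_slice(slice);
      Option::from(F::from_repr(repr))
    }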


@@ -64,6 +64,8 @@ impl SignableTransaction {
       // These outputs can only be spent once. Therefore, it forces all RNGs derived from this
       // transcript (such as the one used to create one time keys) to be unique
       transcript.append_message(b"input_hash", &input.tx.0);
+      // TODO: Should this be u8, u16, or u32? Right now, outputs are solely up to 16, but what
+      // about the future?
       transcript.append_message(b"input_output_index", &u64::try_from(input.o).unwrap().to_le_bytes());
       // Not including this, with a doxxed list of payments, would allow brute forcing the inputs
       // to determine RNG seeds and therefore the true spends


@@ -9,8 +9,8 @@ use crate::{Curve, MultisigParams, MultisigKeys, FrostError};
 #[allow(non_snake_case)]
 fn challenge<C: Curve>(l: usize, context: &str, R: &[u8], Am: &[u8]) -> C::F {
-  let mut c = Vec::with_capacity(8 + context.len() + R.len() + Am.len());
-  c.extend(&u64::try_from(l).unwrap().to_le_bytes());
+  let mut c = Vec::with_capacity(2 + context.len() + R.len() + Am.len());
+  c.extend(&u16::try_from(l).unwrap().to_be_bytes());
   c.extend(context.as_bytes());
   c.extend(R); // R
   c.extend(Am); // A of the first commitment, which is what we're proving we have the private key
@@ -59,7 +59,7 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
   let s = k + (coefficients[0] * c);
   serialized.extend(&C::G_to_bytes(&R));
-  serialized.extend(&C::F_to_le_bytes(&s));
+  serialized.extend(&C::F_to_bytes(&s));
   // Step 4: Broadcast
   (coefficients, commitments, serialized)
@@ -148,7 +148,7 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
     );
     scalars.push(
-      -C::F_from_le_slice(
+      -C::F_from_slice(
         &serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
       ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))? * u
     );
@@ -184,7 +184,7 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
       &serialized[l][commitments_len .. commitments_len + C::G_len()]
     ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
-    let s = C::F_from_le_slice(
+    let s = C::F_from_slice(
       &serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
     ).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
@@ -248,7 +248,7 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
       continue
     }
-    res.push(C::F_to_le_bytes(&polynomial(&coefficients, i)));
+    res.push(C::F_to_bytes(&polynomial(&coefficients, i)));
   }
   // Calculate our own share
@@ -298,7 +298,7 @@ fn complete_r2<C: Curve>(
       shares.push(C::F::zero());
       continue;
     }
-    shares.push(C::F_from_le_slice(&serialized[i]).map_err(|_| FrostError::InvalidShare(i))?);
+    shares.push(C::F_from_slice(&serialized[i]).map_err(|_| FrostError::InvalidShare(i))?);
   }
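
The hunks above serialize (generate_key_r1) and check (verify_r1) a Schnorr proof of knowledge of each participant's constant coefficient; the actual code batch-verifies them with a multiexp. A hedged sketch of the per-proof relation, written against the generic group/ff traits with an opaque challenge closure standing in for challenge() above (names are illustrative):

    use ff::Field;
    use group::Group;
    use rand_core::OsRng;

    // Prove knowledge of a0: R = kG, s = k + a0 * c, where c = challenge(R, A)
    fn prove<G: Group>(a0: G::Scalar, challenge: impl Fn(&G, &G) -> G::Scalar) -> (G, G::Scalar, G) {
      let big_a = G::generator() * a0;
      let k = G::Scalar::random(&mut OsRng);
      let big_r = G::generator() * k;
      let s = k + (a0 * challenge(&big_r, &big_a));
      (big_r, s, big_a)
    }

    // Accept iff sG == R + cA, the relation the multiexp in verify_r1 accumulates
    fn verify<G: Group>(big_r: G, s: G::Scalar, big_a: G, challenge: impl Fn(&G, &G) -> G::Scalar) -> bool {
      (G::generator() * s) == big_r + (big_a * challenge(&big_r, &big_a))
    }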


@@ -98,7 +98,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
   // While they do technically exist, their usage of Self::Repr breaks all potential library usage
   // without helper functions like this
   #[allow(non_snake_case)]
-  fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError>;
+  fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError>;
   /// Group element from slice. Must require canonicity or risks differing binding factors
   #[allow(non_snake_case)]
@@ -106,7 +106,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
   /// Obtain a vector of the byte encoding of F
   #[allow(non_snake_case)]
-  fn F_to_le_bytes(f: &Self::F) -> Vec<u8>;
+  fn F_to_bytes(f: &Self::F) -> Vec<u8>;
   /// Obtain a vector of the byte encoding of G
   #[allow(non_snake_case)]
@@ -135,8 +135,8 @@ impl MultisigParams {
       Err(FrostError::ZeroParameter(t, n))?;
     }
-    if u64::try_from(n).is_err() {
-      Err(FrostError::TooManyParticipants(n, u64::MAX))?;
+    if u16::try_from(n).is_err() {
+      Err(FrostError::TooManyParticipants(n, u16::MAX))?;
     }
     // When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
@@ -161,7 +161,7 @@ pub enum FrostError {
   #[error("a parameter was 0 (required {0}, participants {1})")]
   ZeroParameter(usize, usize),
   #[error("too many participants (max {1}, got {0})")]
-  TooManyParticipants(usize, u64),
+  TooManyParticipants(usize, u16),
   #[error("invalid amount of required participants (max {1}, got {0})")]
   InvalidRequiredQuantity(usize, usize),
   #[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
@@ -296,7 +296,7 @@ impl<C: Curve> MultisigKeys<C> {
     serialized.extend(&(self.params.n as u64).to_le_bytes());
     serialized.extend(&(self.params.t as u64).to_le_bytes());
     serialized.extend(&(self.params.i as u64).to_le_bytes());
-    serialized.extend(&C::F_to_le_bytes(&self.secret_share));
+    serialized.extend(&C::F_to_bytes(&self.secret_share));
     serialized.extend(&C::G_to_bytes(&self.group_key));
     for i in 1 ..= self.params.n {
       serialized.extend(&C::G_to_bytes(&self.verification_shares[i]));
@@ -345,7 +345,7 @@ impl<C: Curve> MultisigKeys<C> {
       .map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
     cursor += 8;
-    let secret_share = C::F_from_le_slice(&serialized[cursor .. (cursor + C::F_len())])
+    let secret_share = C::F_from_slice(&serialized[cursor .. (cursor + C::F_len())])
      .map_err(|_| FrostError::InternalError("invalid secret share".to_string()))?;
     cursor += C::F_len();
     let group_key = C::G_from_slice(&serialized[cursor .. (cursor + C::G_len())])


@@ -149,7 +149,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
     let transcript = params.algorithm.transcript();
     transcript.domain_separate(b"FROST");
     if params.keys.offset.is_some() {
-      transcript.append_message(b"offset", &C::F_to_le_bytes(&params.keys.offset.unwrap()));
+      transcript.append_message(b"offset", &C::F_to_bytes(&params.keys.offset.unwrap()));
     }
   }
@@ -170,7 +170,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
       B.push(Some(our_preprocess.commitments));
       {
         let transcript = params.algorithm.transcript();
-        transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_le_bytes());
+        transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
         transcript.append_message(
           b"commitments",
           &our_preprocess.serialized[0 .. (C::G_len() * 2)]
@@ -206,7 +206,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
       B.push(Some([D, E]));
       {
         let transcript = params.algorithm.transcript();
-        transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_le_bytes());
+        transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
         transcript.append_message(b"commitments", &commitments[0 .. commitments_len]);
       }
     }
@@ -255,7 +255,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
     our_preprocess.nonces[0] + (our_preprocess.nonces[1] * binding),
     msg
   );
-  Ok((Package { Ris, R, share }, C::F_to_le_bytes(&share)))
+  Ok((Package { Ris, R, share }, C::F_to_bytes(&share)))
 }
 // This doesn't check the signing set is as expected and unexpected changes can cause false blames
@@ -291,7 +291,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
       Err(FrostError::InvalidShare(l))?;
     }
-    let part = C::F_from_le_slice(serialized[l].as_ref().unwrap())
+    let part = C::F_from_slice(serialized[l].as_ref().unwrap())
       .map_err(|_| FrostError::InvalidShare(l))?;
     sum += part;
     responses.push(Some(part));
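
For context, the share serialized above is the standard FROST signature share, z_i = d_i + e_i * rho + lambda_i * s_i * c (nonces d/e, binding factor rho, Lagrange coefficient lambda, challenge c); in this crate the c-dependent term comes from the Algorithm implementation. A hedged, purely illustrative sketch:

    use ff::Field;

    // z_i = d_i + e_i * rho + lambda_i * s_i * c
    fn signature_share<F: Field>(d: F, e: F, rho: F, lambda: F, secret_share: F, c: F) -> F {
      d + (e * rho) + (lambda * secret_share * c)
    }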


@@ -61,11 +61,9 @@ impl Curve for Secp256k1 {
     33
   }
-  fn F_from_le_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
-    let mut bytes: [u8; 32] = slice.try_into().map_err(
-      |_| CurveError::InvalidLength(32, slice.len())
-    )?;
-    bytes.reverse();
+  fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
+    let bytes: [u8; 32] = slice.try_into()
+      .map_err(|_| CurveError::InvalidLength(32, slice.len()))?;
     let scalar = Scalar::from_repr(bytes.into());
     if scalar.is_none().unwrap_u8() == 1 {
       Err(CurveError::InvalidScalar)?;
@@ -81,10 +79,8 @@ impl Curve for Secp256k1 {
     Ok(point.unwrap())
   }
-  fn F_to_le_bytes(f: &Self::F) -> Vec<u8> {
-    let mut res: [u8; 32] = f.to_bytes().into();
-    res.reverse();
-    res.to_vec()
+  fn F_to_bytes(f: &Self::F) -> Vec<u8> {
+    (&f.to_bytes()).to_vec()
   }
   fn G_to_bytes(g: &Self::G) -> Vec<u8> {