Move FROST to Read

Fixes https://github.com/serai-dex/serai/issues/33 and
https://github.com/serai-dex/serai/issues/35. Also fixes a few potential
panic/DoS vectors, as far as I can tell.
This commit is contained in:
Luke Parker 2022-07-13 02:38:29 -04:00
parent c0c8915698
commit 6cc8ce840e
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
13 changed files with 357 additions and 349 deletions

View file

@ -1,4 +1,4 @@
use std::{convert::TryInto, io::Cursor}; use std::io::Read;
use thiserror::Error; use thiserror::Error;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@ -47,17 +47,14 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
pub(crate) fn read_dleq( pub(crate) fn read_dleq<Re: Read>(
serialized: &[u8], serialized: &mut Re,
H: EdwardsPoint, H: EdwardsPoint,
l: u16, l: u16,
xG: dfg::EdwardsPoint xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> { ) -> Result<dfg::EdwardsPoint, MultisigError> {
if serialized.len() != 96 { let mut bytes = [0; 32];
Err(MultisigError::InvalidDLEqProof(l))?; serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
}
let bytes = (&serialized[.. 32]).try_into().unwrap();
// dfg ensures the point is torsion free // dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from( let xH = Option::<dfg::EdwardsPoint>::from(
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l) dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
@ -68,7 +65,7 @@ pub(crate) fn read_dleq(
} }
DLEqProof::<dfg::EdwardsPoint>::deserialize( DLEqProof::<dfg::EdwardsPoint>::deserialize(
&mut Cursor::new(&serialized[32 ..]) serialized
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify( ).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
&mut transcript(), &mut transcript(),
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)), Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),

View file

@ -1,5 +1,5 @@
use core::fmt::Debug; use core::fmt::Debug;
use std::sync::{Arc, RwLock}; use std::{io::Read, sync::{Arc, RwLock}};
use rand_core::{RngCore, CryptoRng, SeedableRng}; use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng; use rand_chacha::ChaCha12Rng;
@ -104,7 +104,7 @@ impl ClsagMultisig {
) )
} }
pub fn serialized_len() -> usize { pub const fn serialized_len() -> usize {
32 + (2 * 32) 32 + (2 * 32)
} }
@ -136,17 +136,12 @@ impl Algorithm<Ed25519> for ClsagMultisig {
serialized serialized
} }
fn process_addendum( fn process_addendum<Re: Read>(
&mut self, &mut self,
view: &FrostView<Ed25519>, view: &FrostView<Ed25519>,
l: u16, l: u16,
serialized: &[u8] serialized: &mut Re
) -> Result<(), FrostError> { ) -> Result<(), FrostError> {
if serialized.len() != Self::serialized_len() {
// Not an optimal error but...
Err(FrostError::InvalidCommitment(l))?;
}
if self.image.is_identity().into() { if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG"); self.transcript.domain_separate(b"CLSAG");
self.input().transcript(&mut self.transcript); self.input().transcript(&mut self.transcript);
@ -154,13 +149,14 @@ impl Algorithm<Ed25519> for ClsagMultisig {
} }
self.transcript.append_message(b"participant", &l.to_be_bytes()); self.transcript.append_message(b"participant", &l.to_be_bytes());
self.transcript.append_message(b"key_image_share", &serialized[.. 32]); let image = read_dleq(
self.image += read_dleq(
serialized, serialized,
self.H, self.H,
l, l,
view.verification_share(l) view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0; ).map_err(|_| FrostError::InvalidCommitment(l))?.0;
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
self.image += image;
Ok(()) Ok(())
} }

View file

@ -1,4 +1,4 @@
use std::{sync::{Arc, RwLock}, collections::HashMap}; use std::{io::{Read, Cursor}, sync::{Arc, RwLock}, collections::HashMap};
use rand_core::{RngCore, CryptoRng, SeedableRng}; use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng; use rand_chacha::ChaCha12Rng;
@ -202,57 +202,57 @@ impl PreprocessMachine for TransactionMachine {
impl SignMachine<Transaction> for TransactionSignMachine { impl SignMachine<Transaction> for TransactionSignMachine {
type SignatureMachine = TransactionSignatureMachine; type SignatureMachine = TransactionSignatureMachine;
fn sign( fn sign<Re: Read>(
mut self, mut self,
mut commitments: HashMap<u16, Vec<u8>>, mut commitments: HashMap<u16, Re>,
msg: &[u8] msg: &[u8]
) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> { ) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> {
if msg.len() != 0 { if msg.len() != 0 {
Err( Err(
FrostError::InternalError( FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own".to_string() "message was passed to the TransactionMachine when it generates its own"
) )
)?; )?;
} }
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves
commitments.insert(self.i, self.our_preprocess);
for l in &self.included {
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
// FROST itself will error if this is None, so let it
if let Some(preprocess) = commitments.get(l) {
self.transcript.append_message(b"preprocess", preprocess);
}
}
// FROST commitments and their DLEqs, and the image and its DLEq // FROST commitments and their DLEqs, and the image and its DLEq
let clsag_len = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len(); const CLSAG_LEN: usize = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len();
for (l, commitments) in &commitments {
if commitments.len() != (self.clsags.len() * clsag_len) {
Err(FrostError::InvalidCommitment(*l))?;
}
}
// Convert the unified commitments to a Vec of the individual commitments // Convert the unified commitments to a Vec of the individual commitments
let mut commitments = (0 .. self.clsags.len()).map(|_| commitments.iter_mut().map(
|(l, commitments)| (*l, commitments.drain(.. clsag_len).collect::<Vec<_>>())
).collect::<HashMap<_, _>>()).collect::<Vec<_>>();
// Calculate the key images
// Clsag will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our change output needs these
// to be unique)
let mut images = vec![EdwardsPoint::identity(); self.clsags.len()]; let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
for c in 0 .. self.clsags.len() { let mut commitments = (0 .. self.clsags.len()).map(|c| {
for (l, preprocess) in &commitments[c] { let mut buf = [0; CLSAG_LEN];
(&self.included).iter().map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf).map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our outputs may need
// these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY( images[c] += CompressedEdwardsY(
preprocess[(clsag_len - 96) .. (clsag_len - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))? buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
).decompress().ok_or(FrostError::InvalidCommitment(*l))?; ).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
}
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()
}).collect::<Result<Vec<_>, _>>()?;
// Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
// above
for map in commitments.iter_mut() {
map.remove(&self.i);
} }
// Create the actual transaction // Create the actual transaction
@ -345,16 +345,18 @@ impl SignMachine<Transaction> for TransactionSignMachine {
} }
impl SignatureMachine<Transaction> for TransactionSignatureMachine { impl SignatureMachine<Transaction> for TransactionSignatureMachine {
fn complete(self, mut shares: HashMap<u16, Vec<u8>>) -> Result<Transaction, FrostError> { fn complete<Re: Read>(self, mut shares: HashMap<u16, Re>) -> Result<Transaction, FrostError> {
let mut tx = self.tx; let mut tx = self.tx;
match tx.rct_signatures.prunable { match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"), RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => { RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for clsag in self.clsags { for clsag in self.clsags {
let (clsag, pseudo_out) = clsag.complete( let (clsag, pseudo_out) = clsag.complete(
shares.iter_mut().map( shares.iter_mut().map(|(l, shares)| {
|(l, shares)| (*l, shares.drain(.. 32).collect()) let mut buf = [0; 32];
).collect::<HashMap<_, _>>() shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()?
)?; )?;
clsags.push(clsag); clsags.push(clsag);
pseudo_outs.push(pseudo_out); pseudo_outs.push(pseudo_out);

View file

@ -1,4 +1,5 @@
use core::{marker::PhantomData, fmt::Debug}; use core::{marker::PhantomData, fmt::Debug};
use std::io::Read;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@ -28,11 +29,11 @@ pub trait Algorithm<C: Curve>: Clone {
) -> Vec<u8>; ) -> Vec<u8>;
/// Proccess the addendum for the specified participant. Guaranteed to be ordered /// Proccess the addendum for the specified participant. Guaranteed to be ordered
fn process_addendum( fn process_addendum<Re: Read>(
&mut self, &mut self,
params: &FrostView<C>, params: &FrostView<C>,
l: u16, l: u16,
serialized: &[u8], reader: &mut Re,
) -> Result<(), FrostError>; ) -> Result<(), FrostError>;
/// Sign a share with the given secret/nonce /// Sign a share with the given secret/nonce
@ -133,11 +134,11 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
vec![] vec![]
} }
fn process_addendum( fn process_addendum<Re: Read>(
&mut self, &mut self,
_: &FrostView<C>, _: &FrostView<C>,
_: u16, _: u16,
_: &[u8], _: &mut Re,
) -> Result<(), FrostError> { ) -> Result<(), FrostError> {
Ok(()) Ok(())
} }

View file

@ -1,3 +1,5 @@
use std::io::Cursor;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use sha2::{digest::Update, Digest, Sha256}; use sha2::{digest::Update, Digest, Sha256};
@ -6,7 +8,7 @@ use group::{ff::Field, GroupEncoding};
use elliptic_curve::{bigint::{Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}}; use elliptic_curve::{bigint::{Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}};
use crate::{curve::{Curve, F_from_slice}, algorithm::Hram}; use crate::{curve::Curve, algorithm::Hram};
macro_rules! kp_curve { macro_rules! kp_curve {
( (
@ -58,16 +60,18 @@ macro_rules! kp_curve {
let mut modulus = vec![0; 16]; let mut modulus = vec![0; 16];
modulus.extend((Self::F::zero() - Self::F::one()).to_bytes()); modulus.extend((Self::F::zero() - Self::F::one()).to_bytes());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE); let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
F_from_slice::<Self::F>( Self::read_F(
&U384::from_be_slice(&{ &mut Cursor::new(
let mut bytes = [0; 48]; &U384::from_be_slice(&{
ExpandMsgXmd::<Sha256>::expand_message( let mut bytes = [0; 48];
&[msg], ExpandMsgXmd::<Sha256>::expand_message(
dst, &[msg],
48 dst,
).unwrap().fill_bytes(&mut bytes); 48
bytes ).unwrap().fill_bytes(&mut bytes);
}).reduce(&modulus).unwrap().to_be_bytes()[16 ..] bytes
}).reduce(&modulus).unwrap().to_be_bytes()[16 ..]
)
).unwrap() ).unwrap()
} }
} }

View file

@ -1,4 +1,5 @@
use core::fmt::Debug; use core::fmt::Debug;
use std::io::Read;
use thiserror::Error; use thiserror::Error;
@ -77,41 +78,37 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
// hash_msg and hash_binding_factor // hash_msg and hash_binding_factor
#[allow(non_snake_case)] #[allow(non_snake_case)]
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F; fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
}
#[allow(non_snake_case)] #[allow(non_snake_case)]
pub(crate) fn F_len<C: Curve>() -> usize { fn F_len() -> usize {
<C::F as PrimeField>::Repr::default().as_ref().len() <Self::F as PrimeField>::Repr::default().as_ref().len()
}
#[allow(non_snake_case)]
pub(crate) fn G_len<C: Curve>() -> usize {
<C::G as GroupEncoding>::Repr::default().as_ref().len()
}
/// Field element from slice
#[allow(non_snake_case)]
pub(crate) fn F_from_slice<F: PrimeField>(slice: &[u8]) -> Result<F, CurveError> {
let mut encoding = F::Repr::default();
encoding.as_mut().copy_from_slice(slice);
let point = Option::<F>::from(F::from_repr(encoding)).ok_or(CurveError::InvalidScalar)?;
if point.to_repr().as_ref() != slice {
Err(CurveError::InvalidScalar)?;
} }
Ok(point)
}
/// Group element from slice #[allow(non_snake_case)]
#[allow(non_snake_case)] fn G_len() -> usize {
pub(crate) fn G_from_slice<G: PrimeGroup>(slice: &[u8]) -> Result<G, CurveError> { <Self::G as GroupEncoding>::Repr::default().as_ref().len()
let mut encoding = G::Repr::default(); }
encoding.as_mut().copy_from_slice(slice);
#[allow(non_snake_case)]
let point = Option::<G>::from(G::from_bytes(&encoding)).ok_or(CurveError::InvalidPoint)?; fn read_F<R: Read>(r: &mut R) -> Result<Self::F, CurveError> {
// Ban the identity, per the FROST spec, and non-canonical points let mut encoding = <Self::F as PrimeField>::Repr::default();
if (point.is_identity().into()) || (point.to_bytes().as_ref() != slice) { r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidScalar)?;
Err(CurveError::InvalidPoint)?; // ff mandates this is canonical
Option::<Self::F>::from(Self::F::from_repr(encoding)).ok_or(CurveError::InvalidScalar)
}
#[allow(non_snake_case)]
fn read_G<R: Read>(r: &mut R) -> Result<Self::G, CurveError> {
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidPoint)?;
let point = Option::<Self::G>::from(
Self::G::from_bytes(&encoding)
).ok_or(CurveError::InvalidPoint)?;
// Ban the identity, per the FROST spec, and non-canonical points
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
Err(CurveError::InvalidPoint)?;
}
Ok(point)
} }
Ok(point)
} }

View file

@ -1,4 +1,4 @@
use std::{marker::PhantomData, collections::HashMap}; use std::{marker::PhantomData, io::{Read, Cursor}, collections::HashMap};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@ -7,7 +7,7 @@ use group::{ff::{Field, PrimeField}, GroupEncoding};
use multiexp::{multiexp_vartime, BatchVerifier}; use multiexp::{multiexp_vartime, BatchVerifier};
use crate::{ use crate::{
curve::{Curve, F_len, G_len, F_from_slice, G_from_slice}, curve::Curve,
FrostError, FrostParams, FrostKeys, FrostError, FrostParams, FrostKeys,
schnorr::{self, SchnorrSignature}, schnorr::{self, SchnorrSignature},
validate_map validate_map
@ -31,11 +31,11 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
) -> (Vec<C::F>, Vec<u8>) { ) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) {
let t = usize::from(params.t); let t = usize::from(params.t);
let mut coefficients = Vec::with_capacity(t); let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t); let mut commitments = Vec::with_capacity(t);
let mut serialized = Vec::with_capacity((G_len::<C>() * t) + G_len::<C>() + F_len::<C>()); let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len());
for i in 0 .. t { for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with // Step 1: Generate t random values to form a polynomial with
@ -66,58 +66,55 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
); );
// Step 4: Broadcast // Step 4: Broadcast
(coefficients, serialized) (coefficients, commitments, serialized)
} }
// Verify the received data from the first round of key generation // Verify the received data from the first round of key generation
fn verify_r1<R: RngCore + CryptoRng, C: Curve>( fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
our_commitments: Vec<u8>, our_commitments: Vec<C::G>,
mut serialized: HashMap<u16, Vec<u8>>, mut serialized: HashMap<u16, Re>,
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> { ) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
validate_map( validate_map(&mut serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), our_commitments)
)?;
let commitments_len = usize::from(params.t()) * G_len::<C>();
let mut commitments = HashMap::new(); let mut commitments = HashMap::new();
commitments.insert(params.i, our_commitments);
#[allow(non_snake_case)]
let R_bytes = |l| &serialized[&l][commitments_len .. commitments_len + G_len::<C>()];
#[allow(non_snake_case)]
let R = |l| G_from_slice::<C::G>(R_bytes(l)).map_err(|_| FrostError::InvalidProofOfKnowledge(l));
#[allow(non_snake_case)]
let Am = |l| &serialized[&l][0 .. commitments_len];
let s = |l| F_from_slice::<C::F>(
&serialized[&l][commitments_len + G_len::<C>() ..]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l));
let mut signatures = Vec::with_capacity(usize::from(params.n() - 1)); let mut signatures = Vec::with_capacity(usize::from(params.n() - 1));
for l in 1 ..= params.n() { for l in 1 ..= params.n() {
if l == params.i {
continue;
}
let invalid = FrostError::InvalidCommitment(l.try_into().unwrap());
// Read the entire list of commitments as the key we're providing a PoK for (A) and the message
#[allow(non_snake_case)]
let mut Am = vec![0; usize::from(params.t()) * C::G_len()];
serialized.get_mut(&l).unwrap().read_exact(&mut Am).map_err(|_| invalid)?;
let mut these_commitments = vec![]; let mut these_commitments = vec![];
for c in 0 .. usize::from(params.t()) { let mut cursor = Cursor::new(&Am);
these_commitments.push( for _ in 0 .. usize::from(params.t()) {
G_from_slice::<C::G>( these_commitments.push(C::read_G(&mut cursor).map_err(|_| invalid)?);
&serialized[&l][(c * G_len::<C>()) .. ((c + 1) * G_len::<C>())]
).map_err(|_| FrostError::InvalidCommitment(l.try_into().unwrap()))?
);
} }
// Don't bother validating our own proof of knowledge // Don't bother validating our own proof of knowledge
if l != params.i() { if l != params.i() {
let cursor = serialized.get_mut(&l).unwrap();
#[allow(non_snake_case)]
let R = C::read_G(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
let s = C::read_F(cursor).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
// Step 5: Validate each proof of knowledge // Step 5: Validate each proof of knowledge
// This is solely the prep step for the latter batch verification // This is solely the prep step for the latter batch verification
signatures.push(( signatures.push((
l, l,
these_commitments[0], these_commitments[0],
challenge::<C>(context, l, R_bytes(l), Am(l)), challenge::<C>(context, l, R.to_bytes().as_ref(), &Am),
SchnorrSignature::<C> { R: R(l)?, s: s(l)? } SchnorrSignature::<C> { R, s }
)); ));
} }
@ -147,15 +144,15 @@ fn polynomial<F: PrimeField>(
// Implements round 1, step 5 and round 2, step 1 of FROST key generation // Implements round 1, step 5 and round 2, step 1 of FROST key generation
// Returns our secret share part, commitments for the next step, and a vector for each // Returns our secret share part, commitments for the next step, and a vector for each
// counterparty to receive // counterparty to receive
fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>( fn generate_key_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: &FrostParams, params: &FrostParams,
context: &str, context: &str,
coefficients: Vec<C::F>, coefficients: Vec<C::F>,
our_commitments: Vec<u8>, our_commitments: Vec<C::G>,
commitments: HashMap<u16, Vec<u8>>, commitments: HashMap<u16, Re>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> { ) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> {
let commitments = verify_r1::<R, C>(rng, params, context, our_commitments, commitments)?; let commitments = verify_r1::<_, _, C>(rng, params, context, our_commitments, commitments)?;
// Step 1: Generate secret shares for all other parties // Step 1: Generate secret shares for all other parties
let mut res = HashMap::new(); let mut res = HashMap::new();
@ -188,25 +185,21 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
/// issue, yet simply confirming protocol completion without issue is enough to confirm the same /// issue, yet simply confirming protocol completion without issue is enough to confirm the same
/// key was generated as long as a lack of duplicated commitments was also confirmed when they were /// key was generated as long as a lack of duplicated commitments was also confirmed when they were
/// broadcasted initially /// broadcasted initially
fn complete_r2<R: RngCore + CryptoRng, C: Curve>( fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
rng: &mut R, rng: &mut R,
params: FrostParams, params: FrostParams,
mut secret_share: C::F, mut secret_share: C::F,
commitments: HashMap<u16, Vec<C::G>>, commitments: HashMap<u16, Vec<C::G>>,
// Vec to preserve ownership mut serialized: HashMap<u16, Re>,
mut serialized: HashMap<u16, Vec<u8>>,
) -> Result<FrostKeys<C>, FrostError> { ) -> Result<FrostKeys<C>, FrostError> {
validate_map( validate_map(&mut serialized, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), secret_share.to_repr().as_ref().to_vec())
)?;
// Step 2. Verify each share // Step 2. Verify each share
let mut shares = HashMap::new(); let mut shares = HashMap::new();
for (l, share) in serialized { for (l, share) in serialized.iter_mut() {
shares.insert(l, F_from_slice::<C::F>(&share).map_err(|_| FrostError::InvalidShare(l))?); shares.insert(*l, C::read_F(share).map_err(|_| FrostError::InvalidShare(*l))?);
} }
shares.insert(params.i(), secret_share);
// Calculate the exponent for a given participant and apply it to a series of commitments // Calculate the exponent for a given participant and apply it to a series of commitments
// Initially used with the actual commitments to verify the secret share, later used with stripes // Initially used with the actual commitments to verify the secret share, later used with stripes
@ -282,7 +275,7 @@ pub struct SecretShareMachine<C: Curve> {
params: FrostParams, params: FrostParams,
context: String, context: String,
coefficients: Vec<C::F>, coefficients: Vec<C::F>,
our_commitments: Vec<u8>, our_commitments: Vec<C::G>,
} }
pub struct KeyMachine<C: Curve> { pub struct KeyMachine<C: Curve> {
@ -303,15 +296,20 @@ impl<C: Curve> KeyGenMachine<C> {
/// channel. If any party submits multiple sets of commitments, they MUST be treated as malicious /// channel. If any party submits multiple sets of commitments, they MUST be treated as malicious
pub fn generate_coefficients<R: RngCore + CryptoRng>( pub fn generate_coefficients<R: RngCore + CryptoRng>(
self, self,
rng: &mut R rng: &mut R,
) -> (SecretShareMachine<C>, Vec<u8>) { ) -> (SecretShareMachine<C>, Vec<u8>) {
let (coefficients, serialized) = generate_key_r1::<R, C>(rng, &self.params, &self.context); let (
coefficients,
our_commitments,
serialized
) = generate_key_r1::<_, C>(rng, &self.params, &self.context);
( (
SecretShareMachine { SecretShareMachine {
params: self.params, params: self.params,
context: self.context, context: self.context,
coefficients, coefficients,
our_commitments: serialized.clone() our_commitments,
}, },
serialized, serialized,
) )
@ -324,12 +322,12 @@ impl<C: Curve> SecretShareMachine<C> {
/// index = Vec index. An empty vector is expected at index 0 to allow for this. An empty vector /// index = Vec index. An empty vector is expected at index 0 to allow for this. An empty vector
/// is also expected at index i which is locally handled. Returns a byte vector representing a /// is also expected at index i which is locally handled. Returns a byte vector representing a
/// secret share for each other participant which should be encrypted before sending /// secret share for each other participant which should be encrypted before sending
pub fn generate_secret_shares<R: RngCore + CryptoRng>( pub fn generate_secret_shares<Re: Read, R: RngCore + CryptoRng>(
self, self,
rng: &mut R, rng: &mut R,
commitments: HashMap<u16, Vec<u8>>, commitments: HashMap<u16, Re>,
) -> Result<(KeyMachine<C>, HashMap<u16, Vec<u8>>), FrostError> { ) -> Result<(KeyMachine<C>, HashMap<u16, Vec<u8>>), FrostError> {
let (secret, commitments, shares) = generate_key_r2::<R, C>( let (secret, commitments, shares) = generate_key_r2::<_, _, C>(
rng, rng,
&self.params, &self.params,
&self.context, &self.context,
@ -348,10 +346,10 @@ impl<C: Curve> KeyMachine<C> {
/// group's public key, while setting a valid secret share inside the machine. > t participants /// group's public key, while setting a valid secret share inside the machine. > t participants
/// must report completion without issue before this key can be considered usable, yet you should /// must report completion without issue before this key can be considered usable, yet you should
/// wait for all participants to report as such /// wait for all participants to report as such
pub fn complete<R: RngCore + CryptoRng>( pub fn complete<Re: Read, R: RngCore + CryptoRng>(
self, self,
rng: &mut R, rng: &mut R,
shares: HashMap<u16, Vec<u8>>, shares: HashMap<u16, Re>,
) -> Result<FrostKeys<C>, FrostError> { ) -> Result<FrostKeys<C>, FrostError> {
complete_r2(rng, self.params, self.secret, self.commitments, shares) complete_r2(rng, self.params, self.secret, self.commitments, shares)
} }

View file

@ -1,5 +1,5 @@
use core::fmt::Debug; use core::fmt::Debug;
use std::collections::HashMap; use std::{io::Read, collections::HashMap};
use thiserror::Error; use thiserror::Error;
@ -8,7 +8,7 @@ use group::{ff::{Field, PrimeField}, GroupEncoding};
mod schnorr; mod schnorr;
pub mod curve; pub mod curve;
use curve::{Curve, F_len, G_len, F_from_slice, G_from_slice}; use curve::Curve;
pub mod key_gen; pub mod key_gen;
pub mod algorithm; pub mod algorithm;
pub mod sign; pub mod sign;
@ -54,7 +54,7 @@ impl FrostParams {
pub fn i(&self) -> u16 { self.i } pub fn i(&self) -> u16 { self.i }
} }
#[derive(Clone, Error, Debug)] #[derive(Copy, Clone, Error, Debug)]
pub enum FrostError { pub enum FrostError {
#[error("a parameter was 0 (required {0}, participants {1})")] #[error("a parameter was 0 (required {0}, participants {1})")]
ZeroParameter(u16, u16), ZeroParameter(u16, u16),
@ -66,11 +66,11 @@ pub enum FrostError {
InvalidParticipantIndex(u16, u16), InvalidParticipantIndex(u16, u16),
#[error("invalid signing set ({0})")] #[error("invalid signing set ({0})")]
InvalidSigningSet(String), InvalidSigningSet(&'static str),
#[error("invalid participant quantity (expected {0}, got {1})")] #[error("invalid participant quantity (expected {0}, got {1})")]
InvalidParticipantQuantity(usize, usize), InvalidParticipantQuantity(usize, usize),
#[error("duplicated participant index ({0})")] #[error("duplicated participant index ({0})")]
DuplicatedIndex(usize), DuplicatedIndex(u16),
#[error("missing participant {0}")] #[error("missing participant {0}")]
MissingParticipant(u16), MissingParticipant(u16),
#[error("invalid commitment (participant {0})")] #[error("invalid commitment (participant {0})")]
@ -81,7 +81,7 @@ pub enum FrostError {
InvalidShare(u16), InvalidShare(u16),
#[error("internal error ({0})")] #[error("internal error ({0})")]
InternalError(String), InternalError(&'static str),
} }
// View of keys passable to algorithm implementations // View of keys passable to algorithm implementations
@ -182,7 +182,7 @@ impl<C: Curve> FrostKeys<C> {
pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> { pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> {
if (included.len() < self.params.t.into()) || (usize::from(self.params.n) < included.len()) { if (included.len() < self.params.t.into()) || (usize::from(self.params.n) < included.len()) {
Err(FrostError::InvalidSigningSet("invalid amount of participants included".to_string()))?; Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
} }
let secret_share = self.secret_share * lagrange::<C::F>(self.params.i, &included); let secret_share = self.secret_share * lagrange::<C::F>(self.params.i, &included);
@ -203,12 +203,12 @@ impl<C: Curve> FrostKeys<C> {
} }
pub fn serialized_len(n: u16) -> usize { pub fn serialized_len(n: u16) -> usize {
8 + C::ID.len() + (3 * 2) + F_len::<C>() + G_len::<C>() + (usize::from(n) * G_len::<C>()) 8 + C::ID.len() + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len())
} }
pub fn serialize(&self) -> Vec<u8> { pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(FrostKeys::<C>::serialized_len(self.params.n)); let mut serialized = Vec::with_capacity(FrostKeys::<C>::serialized_len(self.params.n));
serialized.extend(u64::try_from(C::ID.len()).unwrap().to_be_bytes()); serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID); serialized.extend(C::ID);
serialized.extend(&self.params.t.to_be_bytes()); serialized.extend(&self.params.t.to_be_bytes());
serialized.extend(&self.params.n.to_be_bytes()); serialized.extend(&self.params.n.to_be_bytes());
@ -221,59 +221,51 @@ impl<C: Curve> FrostKeys<C> {
serialized serialized
} }
pub fn deserialize(serialized: &[u8]) -> Result<FrostKeys<C>, FrostError> { pub fn deserialize<R: Read>(cursor: &mut R) -> Result<FrostKeys<C>, FrostError> {
let mut start = u64::try_from(C::ID.len()).unwrap().to_be_bytes().to_vec(); {
start.extend(C::ID); let missing = FrostError::InternalError("FrostKeys serialization is missing its curve");
let mut cursor = start.len(); let different = FrostError::InternalError("deserializing FrostKeys for another curve");
if serialized.len() < (cursor + 4) { let mut id_len = [0; 4];
Err( cursor.read_exact(&mut id_len).map_err(|_| missing)?;
FrostError::InternalError( if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len {
"FrostKeys serialization is missing its curve/participant quantities".to_string() Err(different)?;
) }
)?;
} let mut id = vec![0; C::ID.len()];
if &start != &serialized[.. cursor] { cursor.read_exact(&mut id).map_err(|_| missing)?;
Err( if &id != &C::ID {
FrostError::InternalError( Err(different)?;
"curve is distinct between serialization and deserialization".to_string() }
)
)?;
} }
let t = u16::from_be_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap()); let (t, n, i) = {
cursor += 2; let mut read_u16 = || {
let mut value = [0; 2];
cursor.read_exact(&mut value).map_err(
|_| FrostError::InternalError("missing participant quantities")
)?;
Ok(u16::from_be_bytes(value))
};
(read_u16()?, read_u16()?, read_u16()?)
};
let n = u16::from_be_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap()); let secret_share = C::read_F(cursor)
cursor += 2; .map_err(|_| FrostError::InternalError("invalid secret share"))?;
if serialized.len() != FrostKeys::<C>::serialized_len(n) { let group_key = C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid group key"))?;
Err(FrostError::InternalError("incorrect serialization length".to_string()))?;
}
let i = u16::from_be_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap());
cursor += 2;
let secret_share = F_from_slice::<C::F>(&serialized[cursor .. (cursor + F_len::<C>())])
.map_err(|_| FrostError::InternalError("invalid secret share".to_string()))?;
cursor += F_len::<C>();
let group_key = G_from_slice::<C::G>(&serialized[cursor .. (cursor + G_len::<C>())])
.map_err(|_| FrostError::InternalError("invalid group key".to_string()))?;
cursor += G_len::<C>();
let mut verification_shares = HashMap::new(); let mut verification_shares = HashMap::new();
for l in 1 ..= n { for l in 1 ..= n {
verification_shares.insert( verification_shares.insert(
l, l,
G_from_slice::<C::G>(&serialized[cursor .. (cursor + G_len::<C>())]) C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?
.map_err(|_| FrostError::InternalError("invalid verification share".to_string()))?
); );
cursor += G_len::<C>();
} }
Ok( Ok(
FrostKeys { FrostKeys {
params: FrostParams::new(t, n, i) params: FrostParams::new(t, n, i)
.map_err(|_| FrostError::InternalError("invalid parameters".to_string()))?, .map_err(|_| FrostError::InternalError("invalid parameters"))?,
secret_share, secret_share,
group_key, group_key,
verification_shares, verification_shares,
@ -287,15 +279,20 @@ impl<C: Curve> FrostKeys<C> {
pub(crate) fn validate_map<T>( pub(crate) fn validate_map<T>(
map: &mut HashMap<u16, T>, map: &mut HashMap<u16, T>,
included: &[u16], included: &[u16],
ours: (u16, T) ours: u16
) -> Result<(), FrostError> { ) -> Result<(), FrostError> {
map.insert(ours.0, ours.1); if (map.len() + 1) != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
if map.len() != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len()))?;
} }
for included in included { for included in included {
if *included == ours {
if map.contains_key(included) {
Err(FrostError::DuplicatedIndex(*included))?;
}
continue;
}
if !map.contains_key(included) { if !map.contains_key(included) {
Err(FrostError::MissingParticipant(*included))?; Err(FrostError::MissingParticipant(*included))?;
} }

View file

@ -4,7 +4,7 @@ use group::{ff::{Field, PrimeField}, GroupEncoding};
use multiexp::BatchVerifier; use multiexp::BatchVerifier;
use crate::{Curve, F_len, G_len}; use crate::Curve;
#[allow(non_snake_case)] #[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
@ -15,7 +15,7 @@ pub struct SchnorrSignature<C: Curve> {
impl<C: Curve> SchnorrSignature<C> { impl<C: Curve> SchnorrSignature<C> {
pub fn serialize(&self) -> Vec<u8> { pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(G_len::<C>() + F_len::<C>()); let mut res = Vec::with_capacity(C::G_len() + C::F_len());
res.extend(self.R.to_bytes().as_ref()); res.extend(self.R.to_bytes().as_ref());
res.extend(self.s.to_repr().as_ref()); res.extend(self.s.to_repr().as_ref());
res res

View file

@ -1,5 +1,5 @@
use core::fmt; use core::fmt;
use std::{sync::Arc, collections::HashMap}; use std::{io::{Read, Cursor}, sync::Arc, collections::HashMap};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@ -11,9 +11,8 @@ use multiexp::multiexp_vartime;
use dleq::{Generators, DLEqProof}; use dleq::{Generators, DLEqProof};
use crate::{ use crate::{
curve::{Curve, F_len, G_len, F_from_slice, G_from_slice}, curve::Curve,
FrostError, FrostError, FrostParams, FrostKeys, FrostView,
FrostParams, FrostKeys, FrostView,
algorithm::Algorithm, algorithm::Algorithm,
validate_map validate_map
}; };
@ -38,7 +37,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
// Included < threshold // Included < threshold
if included.len() < usize::from(keys.params.t) { if included.len() < usize::from(keys.params.t) {
Err(FrostError::InvalidSigningSet("not enough signers".to_string()))?; Err(FrostError::InvalidSigningSet("not enough signers"))?;
} }
// Invalid index // Invalid index
if included[0] == 0 { if included[0] == 0 {
@ -51,12 +50,12 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
// Same signer included multiple times // Same signer included multiple times
for i in 0 .. included.len() - 1 { for i in 0 .. included.len() - 1 {
if included[i] == included[i + 1] { if included[i] == included[i + 1] {
Err(FrostError::DuplicatedIndex(included[i].into()))?; Err(FrostError::DuplicatedIndex(included[i]))?;
} }
} }
// Not included // Not included
if !included.contains(&keys.params.i) { if !included.contains(&keys.params.i) {
Err(FrostError::InvalidSigningSet("signing despite not being included".to_string()))?; Err(FrostError::InvalidSigningSet("signing despite not being included"))?;
} }
// Out of order arguments to prevent additional cloning // Out of order arguments to prevent additional cloning
@ -78,7 +77,8 @@ fn nonce_transcript<T: Transcript>() -> T {
pub(crate) struct PreprocessPackage<C: Curve> { pub(crate) struct PreprocessPackage<C: Curve> {
pub(crate) nonces: Vec<[C::F; 2]>, pub(crate) nonces: Vec<[C::F; 2]>,
pub(crate) serialized: Vec<u8>, pub(crate) commitments: Vec<Vec<[C::G; 2]>>,
pub(crate) addendum: Vec<u8>,
} }
// This library unifies the preprocessing step with signing due to security concerns and to provide // This library unifies the preprocessing step with signing due to security concerns and to provide
@ -86,26 +86,29 @@ pub(crate) struct PreprocessPackage<C: Curve> {
fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>( fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
rng: &mut R, rng: &mut R,
params: &mut Params<C, A>, params: &mut Params<C, A>,
) -> PreprocessPackage<C> { ) -> (PreprocessPackage<C>, Vec<u8>) {
let mut serialized = Vec::with_capacity(2 * G_len::<C>()); let mut serialized = Vec::with_capacity(2 * C::G_len());
let nonces = params.algorithm.nonces().iter().cloned().map( let (nonces, commitments) = params.algorithm.nonces().iter().cloned().map(
|mut generators| { |mut generators| {
let nonces = [ let nonces = [
C::random_nonce(params.view().secret_share(), &mut *rng), C::random_nonce(params.view().secret_share(), &mut *rng),
C::random_nonce(params.view().secret_share(), &mut *rng) C::random_nonce(params.view().secret_share(), &mut *rng)
]; ];
let commit = |generator: C::G| { let commit = |generator: C::G, buf: &mut Vec<u8>| {
let commitments = [generator * nonces[0], generator * nonces[1]]; let commitments = [generator * nonces[0], generator * nonces[1]];
[commitments[0].to_bytes().as_ref(), commitments[1].to_bytes().as_ref()].concat().to_vec() buf.extend(commitments[0].to_bytes().as_ref());
buf.extend(commitments[1].to_bytes().as_ref());
commitments
}; };
let mut commitments = Vec::with_capacity(generators.len());
let first = generators.remove(0); let first = generators.remove(0);
serialized.extend(commit(first)); commitments.push(commit(first, &mut serialized));
// Iterate over the rest // Iterate over the rest
for generator in generators.iter() { for generator in generators.iter() {
serialized.extend(commit(*generator)); commitments.push(commit(*generator, &mut serialized));
// Provide a DLEq to verify these commitments are for the same nonce // Provide a DLEq to verify these commitments are for the same nonce
// TODO: Provide a single DLEq. See https://github.com/serai-dex/serai/issues/34 // TODO: Provide a single DLEq. See https://github.com/serai-dex/serai/issues/34
for nonce in nonces { for nonce in nonces {
@ -120,102 +123,109 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
} }
} }
nonces (nonces, commitments)
} }
).collect::<Vec<_>>(); ).unzip();
serialized.extend(&params.algorithm.preprocess_addendum(rng, &params.view)); let addendum = params.algorithm.preprocess_addendum(rng, &params.view);
serialized.extend(&addendum);
PreprocessPackage { nonces, serialized } (PreprocessPackage { nonces, commitments, addendum }, serialized)
}
#[allow(non_snake_case)]
fn read_D_E<Re: Read, C: Curve>(cursor: &mut Re, l: u16) -> Result<[C::G; 2], FrostError> {
Ok([
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?
])
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct Package<C: Curve> { struct Package<C: Curve> {
B: HashMap<u16, (Vec<Vec<[C::G; 2]>>, C::F)>, B: HashMap<u16, (Vec<Vec<[C::G; 2]>>, C::F)>,
Rs: Vec<Vec<C::G>>, Rs: Vec<Vec<C::G>>,
share: Vec<u8> share: C::F,
} }
// Has every signer perform the role of the signature aggregator // Has every signer perform the role of the signature aggregator
// Step 1 was already deprecated by performing nonce generation as needed // Step 1 was already deprecated by performing nonce generation as needed
// Step 2 is simply the broadcast round from step 1 // Step 2 is simply the broadcast round from step 1
fn sign_with_share<C: Curve, A: Algorithm<C>>( fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
params: &mut Params<C, A>, params: &mut Params<C, A>,
our_preprocess: PreprocessPackage<C>, our_preprocess: PreprocessPackage<C>,
mut commitments: HashMap<u16, Vec<u8>>, mut commitments: HashMap<u16, Re>,
msg: &[u8], msg: &[u8],
) -> Result<(Package<C>, Vec<u8>), FrostError> { ) -> Result<(Package<C>, Vec<u8>), FrostError> {
let multisig_params = params.multisig_params(); let multisig_params = params.multisig_params();
validate_map( validate_map(&mut commitments, &params.view.included, multisig_params.i)?;
&mut commitments,
&params.view.included,
(multisig_params.i, our_preprocess.serialized)
)?;
{ {
let transcript = params.algorithm.transcript();
// Domain separate FROST // Domain separate FROST
transcript.domain_separate(b"FROST"); params.algorithm.transcript().domain_separate(b"FROST");
} }
let nonces = params.algorithm.nonces();
#[allow(non_snake_case)] #[allow(non_snake_case)]
let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len()); let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len());
// Get the binding factors
let nonces = params.algorithm.nonces();
let mut addendums = HashMap::new();
{ {
let transcript = params.algorithm.transcript();
// Parse the commitments // Parse the commitments
for l in &params.view.included { for l in &params.view.included {
transcript.append_message(b"participant", &l.to_be_bytes()); {
let serialized = commitments.remove(l).unwrap(); params.algorithm.transcript().append_message(b"participant", &l.to_be_bytes());
}
let mut read_commitment = |c, label| {
let commitment = &serialized[c .. (c + G_len::<C>())];
transcript.append_message(label, commitment);
G_from_slice::<C::G>(commitment).map_err(|_| FrostError::InvalidCommitment(*l))
};
// While this doesn't note which nonce/basepoint this is for, those are expected to be // While this doesn't note which nonce/basepoint this is for, those are expected to be
// static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring // static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring
// consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is // consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is
// documented accordingly // documented accordingly
#[allow(non_snake_case)] let transcript = |t: &mut A::Transcript, commitments: [C::G; 2]| {
let mut read_D_E = |c| Ok([ t.append_message(b"commitment_D", commitments[0].to_bytes().as_ref());
read_commitment(c, b"commitment_D")?, t.append_message(b"commitment_E", commitments[1].to_bytes().as_ref());
read_commitment(c + G_len::<C>(), b"commitment_E")? };
]);
let mut c = 0; if *l == params.keys.params.i {
let mut commitments = Vec::with_capacity(nonces.len()); for nonce_commitments in &our_preprocess.commitments {
for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() { for commitments in nonce_commitments {
commitments.push(Vec::with_capacity(nonce_generators.len())); transcript(params.algorithm.transcript(), *commitments);
let first = nonce_generators.remove(0);
commitments[n].push(read_D_E(c)?);
c += 2 * G_len::<C>();
let mut c = 2 * G_len::<C>();
for generator in nonce_generators {
commitments[n].push(read_D_E(c)?);
c += 2 * G_len::<C>();
for de in 0 .. 2 {
DLEqProof::deserialize(
&mut std::io::Cursor::new(&serialized[c .. (c + (2 * F_len::<C>()))])
).map_err(|_| FrostError::InvalidCommitment(*l))?.verify(
&mut nonce_transcript::<A::Transcript>(),
Generators::new(first, *generator),
(commitments[n][0][de], commitments[n][commitments[n].len() - 1][de])
).map_err(|_| FrostError::InvalidCommitment(*l))?;
c += 2 * F_len::<C>();
} }
} }
addendums.insert(*l, serialized[c ..].to_vec()); B.insert(*l, (our_preprocess.commitments.clone(), C::F::zero()));
params.algorithm.process_addendum(
&params.view,
*l,
&mut Cursor::new(our_preprocess.addendum.clone())
)?;
} else {
let mut cursor = commitments.remove(l).unwrap();
let mut commitments = Vec::with_capacity(nonces.len());
for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() {
commitments.push(Vec::with_capacity(nonce_generators.len()));
let first = nonce_generators.remove(0);
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
transcript(params.algorithm.transcript(), commitments[n][0]);
for generator in nonce_generators {
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1]);
for de in 0 .. 2 {
DLEqProof::deserialize(
&mut cursor
).map_err(|_| FrostError::InvalidCommitment(*l))?.verify(
&mut nonce_transcript::<A::Transcript>(),
Generators::new(first, *generator),
(commitments[n][0][de], commitments[n][commitments[n].len() - 1][de])
).map_err(|_| FrostError::InvalidCommitment(*l))?;
}
}
}
B.insert(*l, (commitments, C::F::zero()));
params.algorithm.process_addendum(&params.view, *l, &mut cursor)?;
} }
B.insert(*l, (commitments, C::F::zero()));
} }
// Re-format into the FROST-expected rho transcript // Re-format into the FROST-expected rho transcript
@ -225,7 +235,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
// protocol // protocol
rho_transcript.append_message( rho_transcript.append_message(
b"commitments", b"commitments",
&C::hash_msg(transcript.challenge(b"commitments").as_ref()) &C::hash_msg(params.algorithm.transcript().challenge(b"commitments").as_ref())
); );
// Include the offset, if one exists // Include the offset, if one exists
// While this isn't part of the FROST-expected rho transcript, the offset being here coincides // While this isn't part of the FROST-expected rho transcript, the offset being here coincides
@ -243,12 +253,10 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
// Merge the rho transcript back into the global one to ensure its advanced while committing to // Merge the rho transcript back into the global one to ensure its advanced while committing to
// everything // everything
transcript.append_message(b"rho_transcript", rho_transcript.challenge(b"merge").as_ref()); params.algorithm.transcript().append_message(
} b"rho_transcript",
rho_transcript.challenge(b"merge").as_ref()
// Process the addendums );
for l in &params.view.included {
params.algorithm.process_addendum(&params.view, *l, &addendums[l])?;
} }
#[allow(non_snake_case)] #[allow(non_snake_case)]
@ -275,23 +283,26 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
|nonces| nonces[0] + (nonces[1] * B[&params.keys.params.i()].1) |nonces| nonces[0] + (nonces[1] * B[&params.keys.params.i()].1)
).collect::<Vec<_>>(), ).collect::<Vec<_>>(),
msg msg
).to_repr().as_ref().to_vec(); );
Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec()))
Ok((Package { B, Rs, share: share.clone() }, share))
} }
fn complete<C: Curve, A: Algorithm<C>>( fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
sign_params: &Params<C, A>, sign_params: &Params<C, A>,
sign: Package<C>, sign: Package<C>,
mut shares: HashMap<u16, Vec<u8>>, mut shares: HashMap<u16, Re>,
) -> Result<A::Signature, FrostError> { ) -> Result<A::Signature, FrostError> {
let params = sign_params.multisig_params(); let params = sign_params.multisig_params();
validate_map(&mut shares, &sign_params.view.included, (params.i(), sign.share))?; validate_map(&mut shares, &sign_params.view.included, params.i)?;
let mut responses = HashMap::new(); let mut responses = HashMap::new();
let mut sum = C::F::zero(); let mut sum = C::F::zero();
for l in &sign_params.view.included { for l in &sign_params.view.included {
let part = F_from_slice::<C::F>(&shares[l]).map_err(|_| FrostError::InvalidShare(*l))?; let part = if *l == params.i {
sign.share
} else {
C::read_F(shares.get_mut(l).unwrap()).map_err(|_| FrostError::InvalidShare(*l))?
};
sum += part; sum += part;
responses.insert(*l, part); responses.insert(*l, part);
} }
@ -322,9 +333,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
// If everyone has a valid share and there were enough participants, this should've worked // If everyone has a valid share and there were enough participants, this should've worked
Err( Err(
FrostError::InternalError( FrostError::InternalError("everyone had a valid share yet the signature was still invalid")
"everyone had a valid share yet the signature was still invalid".to_string()
)
) )
} }
@ -349,9 +358,9 @@ pub trait SignMachine<S> {
/// index = Vec index. None is expected at index 0 to allow for this. None is also expected at /// index = Vec index. None is expected at index 0 to allow for this. None is also expected at
/// index i which is locally handled. Returns a byte vector representing a share of the signature /// index i which is locally handled. Returns a byte vector representing a share of the signature
/// for every other participant to receive, over an authenticated channel /// for every other participant to receive, over an authenticated channel
fn sign( fn sign<Re: Read>(
self, self,
commitments: HashMap<u16, Vec<u8>>, commitments: HashMap<u16, Re>,
msg: &[u8], msg: &[u8],
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError>; ) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError>;
} }
@ -361,7 +370,7 @@ pub trait SignatureMachine<S> {
/// Takes in everyone elses' shares submitted to us as a Vec, expecting participant index = /// Takes in everyone elses' shares submitted to us as a Vec, expecting participant index =
/// Vec index with None at index 0 and index i. Returns a byte vector representing the serialized /// Vec index with None at index 0 and index i. Returns a byte vector representing the serialized
/// signature /// signature
fn complete(self, shares: HashMap<u16, Vec<u8>>) -> Result<S, FrostError>; fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<S, FrostError>;
} }
/// State machine which manages signing for an arbitrary signature algorithm /// State machine which manages signing for an arbitrary signature algorithm
@ -392,9 +401,8 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
pub(crate) fn unsafe_override_preprocess( pub(crate) fn unsafe_override_preprocess(
self, self,
preprocess: PreprocessPackage<C> preprocess: PreprocessPackage<C>
) -> (AlgorithmSignMachine<C, A>, Vec<u8>) { ) -> AlgorithmSignMachine<C, A> {
let serialized = preprocess.serialized.clone(); AlgorithmSignMachine { params: self.params, preprocess }
(AlgorithmSignMachine { params: self.params, preprocess }, serialized)
} }
} }
@ -407,8 +415,7 @@ impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
rng: &mut R rng: &mut R
) -> (Self::SignMachine, Vec<u8>) { ) -> (Self::SignMachine, Vec<u8>) {
let mut params = self.params; let mut params = self.params;
let preprocess = preprocess::<R, C, A>(rng, &mut params); let (preprocess, serialized) = preprocess::<R, C, A>(rng, &mut params);
let serialized = preprocess.serialized.clone();
(AlgorithmSignMachine { params, preprocess }, serialized) (AlgorithmSignMachine { params, preprocess }, serialized)
} }
} }
@ -416,9 +423,9 @@ impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> { impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> {
type SignatureMachine = AlgorithmSignatureMachine<C, A>; type SignatureMachine = AlgorithmSignatureMachine<C, A>;
fn sign( fn sign<Re: Read>(
self, self,
commitments: HashMap<u16, Vec<u8>>, commitments: HashMap<u16, Re>,
msg: &[u8] msg: &[u8]
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError> { ) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError> {
let mut params = self.params; let mut params = self.params;
@ -431,7 +438,7 @@ impl<
C: Curve, C: Curve,
A: Algorithm<C> A: Algorithm<C>
> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> { > SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> {
fn complete(self, shares: HashMap<u16, Vec<u8>>) -> Result<A::Signature, FrostError> { fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<A::Signature, FrostError> {
complete(&self.params, self.sign, shares) complete(&self.params, self.sign, shares)
} }
} }

View file

@ -1,3 +1,5 @@
use std::io::Cursor;
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use group::{ff::Field, Group}; use group::{ff::Field, Group};
@ -13,7 +15,7 @@ fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
// Test serialization of generated keys // Test serialization of generated keys
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) { fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
for (_, keys) in key_gen::<_, C>(rng) { for (_, keys) in key_gen::<_, C>(rng) {
assert_eq!(&FrostKeys::<C>::deserialize(&keys.serialize()).unwrap(), &*keys); assert_eq!(&FrostKeys::<C>::deserialize(&mut Cursor::new(keys.serialize())).unwrap(), &*keys);
} }
} }

View file

@ -1,4 +1,4 @@
use std::{sync::Arc, collections::HashMap}; use std::{io::Cursor, sync::Arc, collections::HashMap};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
@ -46,15 +46,13 @@ pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(
); );
let (machine, these_commitments) = machine.generate_coefficients(rng); let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine); machines.insert(i, machine);
commitments.insert(i, these_commitments); commitments.insert(i, Cursor::new(these_commitments));
} }
let mut secret_shares = HashMap::new(); let mut secret_shares = HashMap::new();
let mut machines = machines.drain().map(|(l, machine)| { let mut machines = machines.drain().map(|(l, machine)| {
let (machine, shares) = machine.generate_secret_shares( let (machine, shares) = machine.generate_secret_shares(
rng, rng,
// clone_without isn't necessary, as this machine's own data will be inserted without
// conflict, yet using it ensures the machine's own data is actually inserted as expected
clone_without(&commitments, &l) clone_without(&commitments, &l)
).unwrap(); ).unwrap();
secret_shares.insert(l, shares); secret_shares.insert(l, shares);
@ -69,7 +67,7 @@ pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(
if i == *l { if i == *l {
continue; continue;
} }
our_secret_shares.insert(*l, shares[&i].clone()); our_secret_shares.insert(*l, Cursor::new(shares[&i].clone()));
} }
let these_keys = machine.complete(rng, our_secret_shares).unwrap(); let these_keys = machine.complete(rng, our_secret_shares).unwrap();
@ -140,14 +138,14 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
let mut commitments = HashMap::new(); let mut commitments = HashMap::new();
let mut machines = machines.drain().map(|(i, machine)| { let mut machines = machines.drain().map(|(i, machine)| {
let (machine, preprocess) = machine.preprocess(rng); let (machine, preprocess) = machine.preprocess(rng);
commitments.insert(i, preprocess); commitments.insert(i, Cursor::new(preprocess));
(i, machine) (i, machine)
}).collect::<HashMap<_, _>>(); }).collect::<HashMap<_, _>>();
let mut shares = HashMap::new(); let mut shares = HashMap::new();
let mut machines = machines.drain().map(|(i, machine)| { let mut machines = machines.drain().map(|(i, machine)| {
let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap(); let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();
shares.insert(i, share); shares.insert(i, Cursor::new(share));
(i, machine) (i, machine)
}).collect::<HashMap<_, _>>(); }).collect::<HashMap<_, _>>();

View file

@ -1,14 +1,14 @@
use std::{sync::Arc, collections::HashMap}; use std::{io::Cursor, sync::Arc, collections::HashMap};
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use group::{ff::PrimeField, GroupEncoding}; use group::{ff::PrimeField, GroupEncoding};
use crate::{ use crate::{
curve::{Curve, F_from_slice, G_from_slice}, FrostKeys, curve::Curve, FrostKeys,
algorithm::{Schnorr, Hram}, algorithm::{Schnorr, Hram},
sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine}, sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine},
tests::{curve::test_curve, schnorr::test_schnorr, recover} tests::{clone_without, curve::test_curve, schnorr::test_schnorr, recover}
}; };
pub struct Vectors { pub struct Vectors {
@ -27,7 +27,7 @@ pub struct Vectors {
// Load these vectors into FrostKeys using a custom serialization it'll deserialize // Load these vectors into FrostKeys using a custom serialization it'll deserialize
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKeys<C>> { fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKeys<C>> {
let shares = vectors.shares.iter().map( let shares = vectors.shares.iter().map(
|secret| F_from_slice::<C::F>(&hex::decode(secret).unwrap()).unwrap() |secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap()
).collect::<Vec<_>>(); ).collect::<Vec<_>>();
let verification_shares = shares.iter().map( let verification_shares = shares.iter().map(
|secret| C::GENERATOR * secret |secret| C::GENERATOR * secret
@ -36,7 +36,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
let mut keys = HashMap::new(); let mut keys = HashMap::new();
for i in 1 ..= u16::try_from(shares.len()).unwrap() { for i in 1 ..= u16::try_from(shares.len()).unwrap() {
let mut serialized = vec![]; let mut serialized = vec![];
serialized.extend(u64::try_from(C::ID.len()).unwrap().to_be_bytes()); serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
serialized.extend(C::ID); serialized.extend(C::ID);
serialized.extend(vectors.threshold.to_be_bytes()); serialized.extend(vectors.threshold.to_be_bytes());
serialized.extend(u16::try_from(shares.len()).unwrap().to_be_bytes()); serialized.extend(u16::try_from(shares.len()).unwrap().to_be_bytes());
@ -47,7 +47,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
serialized.extend(share.to_bytes().as_ref()); serialized.extend(share.to_bytes().as_ref());
} }
let these_keys = FrostKeys::<C>::deserialize(&serialized).unwrap(); let these_keys = FrostKeys::<C>::deserialize(&mut Cursor::new(serialized)).unwrap();
assert_eq!(these_keys.params().t(), vectors.threshold); assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len()); assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i); assert_eq!(these_keys.params().i(), i);
@ -70,14 +70,14 @@ pub fn test_with_vectors<
// Test against the vectors // Test against the vectors
let keys = vectors_to_multisig_keys::<C>(&vectors); let keys = vectors_to_multisig_keys::<C>(&vectors);
let group_key = G_from_slice::<C::G>(&hex::decode(vectors.group_key).unwrap()).unwrap(); let group_key = C::read_G(&mut Cursor::new(hex::decode(vectors.group_key).unwrap())).unwrap();
assert_eq!( assert_eq!(
C::GENERATOR * F_from_slice::<C::F>(&hex::decode(vectors.group_secret).unwrap()).unwrap(), C::GENERATOR * C::read_F(&mut Cursor::new(hex::decode(vectors.group_secret).unwrap())).unwrap(),
group_key group_key
); );
assert_eq!( assert_eq!(
recover(&keys), recover(&keys),
F_from_slice::<C::F>(&hex::decode(vectors.group_secret).unwrap()).unwrap() C::read_F(&mut Cursor::new(hex::decode(vectors.group_secret).unwrap())).unwrap()
); );
let mut machines = vec![]; let mut machines = vec![];
@ -96,19 +96,28 @@ pub fn test_with_vectors<
let mut c = 0; let mut c = 0;
let mut machines = machines.drain(..).map(|(i, machine)| { let mut machines = machines.drain(..).map(|(i, machine)| {
let nonces = [ let nonces = [
F_from_slice::<C::F>(&hex::decode(vectors.nonces[c][0]).unwrap()).unwrap(), C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][0]).unwrap())).unwrap(),
F_from_slice::<C::F>(&hex::decode(vectors.nonces[c][1]).unwrap()).unwrap() C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][1]).unwrap())).unwrap()
]; ];
c += 1; c += 1;
let these_commitments = vec![[C::GENERATOR * nonces[0], C::GENERATOR * nonces[1]]];
let mut serialized = (C::GENERATOR * nonces[0]).to_bytes().as_ref().to_vec(); let machine = machine.unsafe_override_preprocess(
serialized.extend((C::GENERATOR * nonces[1]).to_bytes().as_ref()); PreprocessPackage {
nonces: vec![nonces],
let (machine, serialized) = machine.unsafe_override_preprocess( commitments: vec![these_commitments.clone()],
PreprocessPackage { nonces: vec![nonces], serialized: serialized.clone() } addendum: vec![]
}
); );
commitments.insert(i, serialized); commitments.insert(
i,
Cursor::new(
[
these_commitments[0][0].to_bytes().as_ref(),
these_commitments[0][1].to_bytes().as_ref()
].concat().to_vec()
)
);
(i, machine) (i, machine)
}).collect::<Vec<_>>(); }).collect::<Vec<_>>();
@ -116,19 +125,19 @@ pub fn test_with_vectors<
c = 0; c = 0;
let mut machines = machines.drain(..).map(|(i, machine)| { let mut machines = machines.drain(..).map(|(i, machine)| {
let (machine, share) = machine.sign( let (machine, share) = machine.sign(
commitments.clone(), clone_without(&commitments, &i),
&hex::decode(vectors.msg).unwrap() &hex::decode(vectors.msg).unwrap()
).unwrap(); ).unwrap();
assert_eq!(share, hex::decode(vectors.sig_shares[c]).unwrap()); assert_eq!(share, hex::decode(vectors.sig_shares[c]).unwrap());
c += 1; c += 1;
shares.insert(i, share); shares.insert(i, Cursor::new(share));
(i, machine) (i, machine)
}).collect::<HashMap<_, _>>(); }).collect::<HashMap<_, _>>();
for (_, machine) in machines.drain() { for (i, machine) in machines.drain() {
let sig = machine.complete(shares.clone()).unwrap(); let sig = machine.complete(clone_without(&shares, &i)).unwrap();
let mut serialized = sig.R.to_bytes().as_ref().to_vec(); let mut serialized = sig.R.to_bytes().as_ref().to_vec();
serialized.extend(sig.s.to_repr().as_ref()); serialized.extend(sig.s.to_repr().as_ref());
assert_eq!(hex::encode(serialized), vectors.sig); assert_eq!(hex::encode(serialized), vectors.sig);