Move FROST to HashMaps

Honestly, the borrowed keys are frustrating, and this probably reduces
performance while no longer offering an order when iterating. That said,
HashMaps enable full u16 indexing and should mildly improve the API.

Also cleans up the Proof of Knowledge handling present in key gen.
Luke Parker 2022-05-24 21:41:14 -04:00
parent 5ff65bd268
commit d10c6e16dc
12 changed files with 585 additions and 747 deletions
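
As a minimal standalone sketch of the pattern this commit adopts (illustrative only, not code from the repository): per-participant data is now keyed by u16 index, and a map is pruned of a participant's own entry before being handed to them, mirroring the clone_without helper the updated tests use. The main() body, the three dummy participants, and the 32-byte placeholder values are assumptions for the example.

use std::collections::HashMap;

// Drop one participant's entry before handing the map to that participant,
// mirroring the clone_without helper added to the tests in this commit.
fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
  map: &HashMap<K, V>,
  without: &K
) -> HashMap<K, V> {
  let mut res = map.clone();
  res.remove(without).unwrap();
  res
}

fn main() {
  // Participant indexes are u16 keys instead of (index - 1) Vec positions
  let mut commitments = HashMap::new();
  for i in 1u16 ..= 3 {
    commitments.insert(i, vec![i as u8; 32]);
  }

  // What participant 2 receives: everyone's commitments except their own
  let for_two = clone_without(&commitments, &2);
  assert_eq!(for_two.len(), 2);
  assert!(!for_two.contains_key(&2));
}

Since HashMap iteration offers no order, code that needs a stable ordering (such as the transaction machine below) sorts its included list before use.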

View file

@ -28,9 +28,9 @@ pub enum MultisigError {
#[error("internal error ({0})")]
InternalError(String),
#[error("invalid discrete log equality proof")]
InvalidDLEqProof(usize),
InvalidDLEqProof(u16),
#[error("invalid key image {0}")]
InvalidKeyImage(usize)
InvalidKeyImage(u16)
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
@ -168,7 +168,7 @@ impl DLEqProof {
pub fn verify(
&self,
H: &DPoint,
l: usize,
l: u16,
xG: &DPoint,
xH: &DPoint
) -> Result<(), MultisigError> {
@ -214,7 +214,7 @@ pub fn read_dleq(
serialized: &[u8],
start: usize,
H: &DPoint,
l: usize,
l: u16,
xG: &DPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
// Not using G_from_slice here would enable non-canonical points and break blame

View file

@ -148,13 +148,13 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn process_addendum(
&mut self,
view: &MultisigView<Ed25519>,
l: usize,
l: u16,
commitments: &[dfg::EdwardsPoint; 2],
serialized: &[u8]
) -> Result<(), FrostError> {
if serialized.len() != ClsagMultisig::serialized_len() {
// Not an optimal error but...
Err(FrostError::InvalidCommitmentQuantity(l, 9, serialized.len() / 32))?;
Err(FrostError::InvalidCommitment(l))?;
}
if self.AH.0.is_identity().into() {
@ -163,29 +163,28 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.transcript.append_message(b"mask", &self.mask().to_bytes());
}
let share = read_dleq(
// Uses the same format FROST does for the expected commitments (nonce * G where this is nonce * H)
// The following technically shouldn't need to be committed to, as we've committed to equivalents,
// yet it doesn't hurt and may resolve some unknown issues
self.transcript.append_message(b"participant", &l.to_be_bytes());
let mut cursor = 0;
self.transcript.append_message(b"image_share", &serialized[cursor .. (cursor + 32)]);
self.image += read_dleq(
serialized,
0,
cursor,
&self.H,
l,
&view.verification_share(l).0
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
// Given the fact there's only ever one possible value for this, this may technically not need
// to be committed to. If signing a TX, it'll be double committed to thanks to the message
// It doesn't hurt to have though and ensures security boundaries are well formed
self.transcript.append_message(b"image_share", &share.compress().to_bytes());
self.image += share;
cursor += 96;
// Uses the same format FROST does for the expected commitments (nonce * G where this is nonce * H)
// Given this is guaranteed to match commitments, which FROST commits to, this also technically
// doesn't need to be committed to if a canonical serialization is guaranteed
// It, again, doesn't hurt to include and ensures security boundaries are well formed
self.transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
self.transcript.append_message(b"commitment_D_H", &serialized[cursor .. (cursor + 32)]);
self.AH.0 += read_dleq(serialized, cursor, &self.H, l, &commitments[0]).map_err(|_| FrostError::InvalidCommitment(l))?;
cursor += 96;
self.transcript.append_message(b"commitment_D_H", &serialized[0 .. 32]);
self.AH.0 += read_dleq(serialized, 96, &self.H, l, &commitments[0]).map_err(|_| FrostError::InvalidCommitment(l))?;
self.transcript.append_message(b"commitment_E_H", &serialized[0 .. 32]);
self.AH.1 += read_dleq(serialized, 192, &self.H, l, &commitments[1]).map_err(|_| FrostError::InvalidCommitment(l))?;
self.transcript.append_message(b"commitment_E_H", &serialized[cursor .. (cursor + 32)]);
self.AH.1 += read_dleq(serialized, cursor, &self.H, l, &commitments[1]).map_err(|_| FrostError::InvalidCommitment(l))?;
Ok(())
}

View file

@ -1,5 +1,5 @@
#[cfg(feature = "multisig")]
use std::{cell::RefCell, rc::Rc};
use std::{cell::RefCell, rc::Rc, collections::HashMap};
use rand::{RngCore, rngs::OsRng};
@ -71,7 +71,6 @@ fn clsag() {
#[test]
fn clsag_multisig() -> Result<(), MultisigError> {
let (keys, group_private) = generate_keys();
let t = keys[0].params().t();
let randomness = random_scalar(&mut OsRng);
let mut ring = vec![];
@ -92,9 +91,10 @@ fn clsag_multisig() -> Result<(), MultisigError> {
}
let mask_sum = random_scalar(&mut OsRng);
let mut machines = Vec::with_capacity(t);
for i in 1 ..= t {
machines.push(
let mut machines = HashMap::new();
for i in 1 ..= THRESHOLD {
machines.insert(
i,
sign::AlgorithmMachine::new(
ClsagMultisig::new(
Transcript::new(b"Monero Serai CLSAG Test".to_vec()),
@ -112,15 +112,15 @@ fn clsag_multisig() -> Result<(), MultisigError> {
)
)))
).unwrap(),
Rc::new(keys[i - 1].clone()),
&(1 ..= THRESHOLD).collect::<Vec<usize>>()
Rc::new(keys[&i].clone()),
&(1 ..= THRESHOLD).collect::<Vec<_>>()
).unwrap()
);
}
let mut signatures = sign(&mut machines, &[1; 32]);
let signature = signatures.swap_remove(0);
for s in 0 .. (t - 1) {
for s in 0 .. usize::from(THRESHOLD - 1) {
// Verify the commitments and the non-decoy s scalar are identical to every other signature
// FROST will already have called verify on the produced signature, before checking individual
// key shares. For FROST Schnorr, it's cheaper. For CLSAG, it may be more expensive? Yet it

View file

@ -1,5 +1,7 @@
#![cfg(feature = "multisig")]
use std::collections::HashMap;
use rand::rngs::OsRng;
use ff::Field;
@ -7,102 +9,87 @@ use dalek_ff_group::{ED25519_BASEPOINT_TABLE, Scalar};
pub use frost::{
FrostError, MultisigParams, MultisigKeys,
key_gen, algorithm::Algorithm, sign::{self, lagrange}
lagrange, key_gen, algorithm::Algorithm, sign
};
use crate::frost::Ed25519;
pub const THRESHOLD: usize = 3;
pub const PARTICIPANTS: usize = 5;
pub const THRESHOLD: u16 = 3;
pub const PARTICIPANTS: u16 = 5;
pub fn generate_keys() -> (Vec<MultisigKeys<Ed25519>>, Scalar) {
let mut params = vec![];
let mut machines = vec![];
let mut commitments = vec![vec![]];
fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
pub fn generate_keys() -> (HashMap<u16, MultisigKeys<Ed25519>>, Scalar) {
let mut params = HashMap::new();
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
params.push(
params.insert(
i,
MultisigParams::new(THRESHOLD, PARTICIPANTS, i).unwrap()
);
machines.push(
machines.insert(
i,
key_gen::StateMachine::<Ed25519>::new(
params[i - 1],
params[&i],
"monero-sign-rs test suite".to_string()
)
);
commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
commitments.insert(i, machines.get_mut(&i).unwrap().generate_coefficients(&mut OsRng).unwrap());
}
let mut secret_shares = vec![];
for i in 1 ..= PARTICIPANTS {
secret_shares.push(
machines[i - 1].generate_secret_shares(
&mut OsRng,
commitments
.iter()
.enumerate()
.map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
.collect()
).unwrap()
let mut secret_shares = HashMap::new();
for (i, machine) in machines.iter_mut() {
secret_shares.insert(
*i,
machine.generate_secret_shares(&mut OsRng, clone_without(&commitments, i)).unwrap()
);
}
let mut keys = vec![];
for i in 1 ..= PARTICIPANTS {
let mut our_secret_shares = vec![vec![]];
our_secret_shares.extend(
secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
);
keys.push(machines[i - 1].complete(our_secret_shares).unwrap().clone());
let mut keys = HashMap::new();
for (i, machine) in machines.iter_mut() {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == l {
continue;
}
our_secret_shares.insert(*l, shares[&i].clone());
}
keys.insert(*i, machine.complete(our_secret_shares).unwrap().clone());
}
let mut group_private = Scalar::zero();
for i in 1 ..= THRESHOLD {
group_private += keys[i - 1].secret_share() * lagrange::<Scalar>(
i,
&(1 ..= THRESHOLD).collect::<Vec<usize>>()
);
group_private += keys[&i].secret_share() * lagrange::<Scalar>(i, &(1 ..= THRESHOLD).collect::<Vec<_>>());
}
assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[0].group_key());
assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[&1].group_key());
(keys, group_private)
}
#[allow(dead_code)] // Currently has some false positive
pub fn sign<S, M: sign::StateMachine<Signature = S>>(machines: &mut Vec<M>, msg: &[u8]) -> Vec<S> {
assert!(machines.len() >= THRESHOLD);
pub fn sign<S, M: sign::StateMachine<Signature = S>>(machines: &mut HashMap<u16, M>, msg: &[u8]) -> Vec<S> {
assert!(machines.len() >= THRESHOLD.into());
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
commitments.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
let mut commitments = HashMap::new();
for (i, machine) in machines.iter_mut() {
commitments.insert(*i, machine.preprocess(&mut OsRng).unwrap());
}
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
shares.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
shares[i] = Some(
machines[i - 1].sign(
&commitments
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>(),
msg
).unwrap()
);
let mut shares = HashMap::new();
for (i, machine) in machines.iter_mut() {
shares.insert(*i, machine.sign(clone_without(&commitments, i), msg).unwrap());
}
let mut res = Vec::with_capacity(THRESHOLD);
for i in 1 ..= THRESHOLD {
res.push(
machines[i - 1].complete(
&shares
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>()
).unwrap()
);
let mut res = vec![];
for (i, machine) in machines.iter_mut() {
res.push(machine.complete(clone_without(&shares, i)).unwrap())
}
res
}

View file

@ -1,4 +1,4 @@
use std::{rc::Rc, cell::RefCell};
use std::{cell::RefCell, rc::Rc, collections::HashMap};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
@ -18,7 +18,8 @@ use crate::{
pub struct TransactionMachine {
signable: SignableTransaction,
i: usize,
i: u16,
included: Vec<u16>,
transcript: Transcript,
decoys: Vec<Decoys>,
@ -41,7 +42,7 @@ impl SignableTransaction {
rpc: &Rpc,
height: usize,
keys: MultisigKeys<Ed25519>,
included: &[usize]
mut included: Vec<u16>
) -> Result<TransactionMachine, TransactionError> {
let mut images = vec![];
images.resize(self.inputs.len(), EdwardsPoint::identity());
@ -95,6 +96,9 @@ impl SignableTransaction {
&self.inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
// Sort included before cloning it around
included.sort_unstable();
for (i, input) in self.inputs.iter().enumerate() {
clsags.push(
AlgorithmMachine::new(
@ -103,7 +107,7 @@ impl SignableTransaction {
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
Rc::new(keys.offset(dalek_ff_group::Scalar(input.key_offset))),
included
&included
).map_err(|e| TransactionError::FrostError(e))?
);
}
@ -114,6 +118,7 @@ impl SignableTransaction {
Ok(TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
@ -142,12 +147,9 @@ impl StateMachine for TransactionMachine {
}
// Iterate over each CLSAG calling preprocess
let mut serialized = vec![];
for (i, clsag) in self.clsags.iter_mut().enumerate() {
let preprocess = clsag.preprocess(rng)?;
// First 64 bytes are FROST's commitments
self.images[i] = CompressedEdwardsY(preprocess[64 .. 96].try_into().unwrap()).decompress().unwrap();
serialized.extend(&preprocess);
let mut serialized = Vec::with_capacity(self.clsags.len() * (64 + ClsagMultisig::serialized_len()));
for clsag in self.clsags.iter_mut() {
serialized.extend(&clsag.preprocess(rng)?);
}
self.our_preprocess = serialized.clone();
@ -165,7 +167,7 @@ impl StateMachine for TransactionMachine {
fn sign(
&mut self,
commitments: &[Option<Vec<u8>>],
mut commitments: HashMap<u16, Vec<u8>>,
// Drop FROST's 'msg' since we calculate the actual message in this function
_: &[u8]
) -> Result<Vec<u8>, FrostError> {
@ -177,29 +179,32 @@ impl StateMachine for TransactionMachine {
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves
for c in 0 .. commitments.len() {
self.transcript.append_message(b"participant", &u16::try_from(c).unwrap().to_le_bytes());
if c == self.i {
self.transcript.append_message(b"preprocess", &self.our_preprocess);
} else if let Some(commitments) = commitments[c].as_ref() {
self.transcript.append_message(b"preprocess", commitments);
commitments.insert(self.i, self.our_preprocess.clone());
for l in &self.included {
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
// FROST itself will error if this is None, so let it
if let Some(preprocess) = commitments.get(l) {
self.transcript.append_message(b"preprocess", preprocess);
}
}
// FROST commitments, image, H commitments, and their proofs
let clsag_len = 64 + ClsagMultisig::serialized_len();
let mut commitments = (0 .. self.clsags.len()).map(|c| commitments.iter().map(
|(l, commitments)| (*l, commitments[(c * clsag_len) .. ((c + 1) * clsag_len)].to_vec())
).collect::<HashMap<_, _>>()).collect::<Vec<_>>();
for c in 0 .. self.clsags.len() {
// Calculate the key images
// Multisig will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our change output needs these
// to be unique)
for (l, serialized) in commitments.iter().enumerate().filter(|(_, s)| s.is_some()) {
for (l, preprocess) in &commitments[c] {
self.images[c] += CompressedEdwardsY(
serialized.as_ref().unwrap()[((c * clsag_len) + 64) .. ((c * clsag_len) + 96)]
.try_into().map_err(|_| FrostError::InvalidCommitment(l))?
).decompress().ok_or(FrostError::InvalidCommitment(l))?;
preprocess[64 .. 96].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
}
}
@ -231,12 +236,6 @@ impl StateMachine for TransactionMachine {
)
};
let mut commitments = (0 .. self.inputs.len()).map(|c| commitments.iter().map(
|commitments| commitments.clone().map(
|commitments| commitments[(c * clsag_len) .. ((c * clsag_len) + clsag_len)].to_vec()
)
).collect::<Vec<_>>()).collect::<Vec<_>>();
let mut sorted = Vec::with_capacity(self.decoys.len());
while self.decoys.len() != 0 {
sorted.push((
@ -291,14 +290,14 @@ impl StateMachine for TransactionMachine {
// Iterate over each CLSAG calling sign
let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
for (c, clsag) in self.clsags.iter_mut().enumerate() {
serialized.extend(&clsag.sign(&commitments[c], &msg)?);
for clsag in self.clsags.iter_mut() {
serialized.extend(&clsag.sign(commitments.remove(0), &msg)?);
}
Ok(serialized)
}
fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<Transaction, FrostError> {
fn complete(&mut self, shares: HashMap<u16, Vec<u8>>) -> Result<Transaction, FrostError> {
if self.state() != State::Signed {
Err(FrostError::InvalidSignTransition(State::Signed, self.state()))?;
}
@ -308,9 +307,9 @@ impl StateMachine for TransactionMachine {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for (c, clsag) in self.clsags.iter_mut().enumerate() {
let (clsag, pseudo_out) = clsag.complete(&shares.iter().map(
|share| share.clone().map(|share| share[(c * 32) .. ((c * 32) + 32)].to_vec())
).collect::<Vec<_>>())?;
let (clsag, pseudo_out) = clsag.complete(shares.iter().map(
|(l, shares)| (*l, shares[(c * 32) .. ((c + 1) * 32)].to_vec())
).collect::<HashMap<_, _>>())?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);
}

View file

@ -1,5 +1,7 @@
#![cfg(feature = "multisig")]
use std::collections::HashMap;
use rand::rngs::OsRng;
use ff::Field;
@ -7,102 +9,87 @@ use dalek_ff_group::{ED25519_BASEPOINT_TABLE, Scalar};
pub use frost::{
FrostError, MultisigParams, MultisigKeys,
key_gen, algorithm::Algorithm, sign::{self, lagrange}
lagrange, key_gen, algorithm::Algorithm, sign
};
use monero_serai::frost::Ed25519;
pub const THRESHOLD: usize = 3;
pub const PARTICIPANTS: usize = 5;
pub const THRESHOLD: u16 = 3;
pub const PARTICIPANTS: u16 = 5;
pub fn generate_keys() -> (Vec<MultisigKeys<Ed25519>>, Scalar) {
let mut params = vec![];
let mut machines = vec![];
let mut commitments = vec![vec![]];
fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
pub fn generate_keys() -> (HashMap<u16, MultisigKeys<Ed25519>>, Scalar) {
let mut params = HashMap::new();
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
params.push(
params.insert(
i,
MultisigParams::new(THRESHOLD, PARTICIPANTS, i).unwrap()
);
machines.push(
machines.insert(
i,
key_gen::StateMachine::<Ed25519>::new(
params[i - 1],
params[&i],
"monero-sign-rs test suite".to_string()
)
);
commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
commitments.insert(i, machines.get_mut(&i).unwrap().generate_coefficients(&mut OsRng).unwrap());
}
let mut secret_shares = vec![];
for i in 1 ..= PARTICIPANTS {
secret_shares.push(
machines[i - 1].generate_secret_shares(
&mut OsRng,
commitments
.iter()
.enumerate()
.map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
.collect()
).unwrap()
let mut secret_shares = HashMap::new();
for (i, machine) in machines.iter_mut() {
secret_shares.insert(
*i,
machine.generate_secret_shares(&mut OsRng, clone_without(&commitments, i)).unwrap()
);
}
let mut keys = vec![];
for i in 1 ..= PARTICIPANTS {
let mut our_secret_shares = vec![vec![]];
our_secret_shares.extend(
secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
);
keys.push(machines[i - 1].complete(our_secret_shares).unwrap().clone());
let mut keys = HashMap::new();
for (i, machine) in machines.iter_mut() {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == l {
continue;
}
our_secret_shares.insert(*l, shares[&i].clone());
}
keys.insert(*i, machine.complete(our_secret_shares).unwrap().clone());
}
let mut group_private = Scalar::zero();
for i in 1 ..= THRESHOLD {
group_private += keys[i - 1].secret_share() * lagrange::<Scalar>(
i,
&(1 ..= THRESHOLD).collect::<Vec<usize>>()
);
group_private += keys[&i].secret_share() * lagrange::<Scalar>(i, &(1 ..= THRESHOLD).collect::<Vec<_>>());
}
assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[0].group_key());
assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[&1].group_key());
(keys, group_private)
}
#[allow(dead_code)] // Currently has some false positive
pub fn sign<S, M: sign::StateMachine<Signature = S>>(machines: &mut Vec<M>, msg: &[u8]) -> Vec<S> {
assert!(machines.len() >= THRESHOLD);
pub fn sign<S, M: sign::StateMachine<Signature = S>>(machines: &mut HashMap<u16, M>, msg: &[u8]) -> Vec<S> {
assert!(machines.len() >= THRESHOLD.into());
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
commitments.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
let mut commitments = HashMap::new();
for (i, machine) in machines.iter_mut() {
commitments.insert(*i, machine.preprocess(&mut OsRng).unwrap());
}
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
shares.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
shares[i] = Some(
machines[i - 1].sign(
&commitments
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>(),
msg
).unwrap()
);
let mut shares = HashMap::new();
for (i, machine) in machines.iter_mut() {
shares.insert(*i, machine.sign(clone_without(&commitments, i), msg).unwrap());
}
let mut res = Vec::with_capacity(THRESHOLD);
for i in 1 ..= THRESHOLD {
res.push(
machines[i - 1].complete(
&shares
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>()
).unwrap()
);
let mut res = vec![];
for (i, machine) in machines.iter_mut() {
res.push(machine.complete(clone_without(&shares, i)).unwrap())
}
res
}

View file

@ -1,4 +1,4 @@
use std::sync::Mutex;
use std::{sync::Mutex, collections::HashMap};
use lazy_static::lazy_static;
@ -60,8 +60,6 @@ async fn send_core(test: usize, multisig: bool) {
#[cfg(feature = "multisig")]
let (keys, _) = generate_keys();
#[cfg(feature = "multisig")]
let t = keys[0].params().t();
if multisig {
#[cfg(not(feature = "multisig"))]
@ -69,7 +67,7 @@ async fn send_core(test: usize, multisig: bool) {
#[cfg(feature = "multisig")]
{
view = Scalar::from_hash(Blake2b512::new().chain("Monero Serai Transaction Test")).0;
spend_pub = keys[0].group_key().0;
spend_pub = keys[&1].group_key().0;
}
}
@ -132,23 +130,24 @@ async fn send_core(test: usize, multisig: bool) {
} else {
#[cfg(feature = "multisig")]
{
let mut machines = Vec::with_capacity(t);
for i in 1 ..= t {
machines.push(
let mut machines = HashMap::new();
for i in 1 ..= THRESHOLD {
machines.insert(
i,
signable.clone().multisig(
b"Monero Serai Test Transaction".to_vec(),
&mut OsRng,
&rpc,
rpc.get_height().await.unwrap() - 10,
keys[i - 1].clone(),
&(1 ..= THRESHOLD).collect::<Vec<usize>>()
keys[&i].clone(),
(1 ..= THRESHOLD).collect::<Vec<_>>()
).await.unwrap()
);
}
let mut txs = sign(&mut machines, &vec![]);
for s in 0 .. (t - 1) {
assert_eq!(txs[s].hash(), txs[0].hash());
for s in 1 .. THRESHOLD {
assert_eq!(txs[usize::from(s)].hash(), txs[0].hash());
}
tx = Some(txs.swap_remove(0));
}

View file

@ -28,7 +28,7 @@ pub trait Algorithm<C: Curve>: Clone {
fn process_addendum(
&mut self,
params: &MultisigView<C>,
l: usize,
l: u16,
commitments: &[C::G; 2],
serialized: &[u8],
) -> Result<(), FrostError>;
@ -131,7 +131,7 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
fn process_addendum(
&mut self,
_: &MultisigView<C>,
_: usize,
_: u16,
_: &[C::G; 2],
_: &[u8],
) -> Result<(), FrostError> {

View file

@ -1,16 +1,17 @@
use core::{convert::TryFrom, cmp::min, fmt};
use core::{convert::TryFrom, fmt};
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use ff::{Field, PrimeField};
use group::Group;
use crate::{Curve, MultisigParams, MultisigKeys, FrostError};
use crate::{Curve, MultisigParams, MultisigKeys, FrostError, validate_map};
#[allow(non_snake_case)]
fn challenge<C: Curve>(l: usize, context: &str, R: &[u8], Am: &[u8]) -> C::F {
fn challenge<C: Curve>(l: u16, context: &str, R: &[u8], Am: &[u8]) -> C::F {
let mut c = Vec::with_capacity(2 + context.len() + R.len() + Am.len());
c.extend(&u16::try_from(l).unwrap().to_be_bytes());
c.extend(l.to_be_bytes());
c.extend(context.as_bytes());
c.extend(R); // R
c.extend(Am); // A of the first commitment, which is what we're proving we have the private key
@ -21,48 +22,41 @@ fn challenge<C: Curve>(l: usize, context: &str, R: &[u8], Am: &[u8]) -> C::F {
// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
// the serialized commitments to be broadcasted over an authenticated channel to all parties
// TODO: This potentially could return a much more robust serialized message, including a signature
// of its entirety. The issue is it can't use its own key as it has no chain of custody behind it.
// While we could ask for a key to be passed in, explicitly declaring the need for authenticated
// communications in the API itself, systems will likely already provide an authenticated
// communication method making this redundant. It also doesn't guarantee the system which passed
// the key is correctly using it, meaning we can only minimize risk so much
// One notable improvement would be to include the index in the message. While the system must
// still track this to determine if it's ready for the next step, and to remove duplicates, it
// would ensure no counterparties presume the same index and this system didn't mislabel a
// counterparty
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &MultisigParams,
context: &str,
) -> (Vec<C::F>, Vec<C::G>, Vec<u8>) {
let mut coefficients = Vec::with_capacity(params.t);
let mut commitments = Vec::with_capacity(params.t);
let mut serialized = Vec::with_capacity((C::G_len() * params.t) + C::G_len() + C::F_len());
for j in 0 .. params.t {
) -> (Vec<C::F>, Vec<u8>) {
let t = usize::from(params.t);
let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t);
let mut serialized = Vec::with_capacity((C::G_len() * t) + C::G_len() + C::F_len());
for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with
coefficients.push(C::F::random(&mut *rng));
// Step 3: Generate public commitments
commitments.push(C::generator_table() * coefficients[j]);
commitments.push(C::generator_table() * coefficients[i]);
// Serialize them for publication
serialized.extend(&C::G_to_bytes(&commitments[j]));
serialized.extend(&C::G_to_bytes(&commitments[i]));
}
// Step 2: Provide a proof of knowledge
// This can be deterministic as the PoK is a singleton never opened up to cooperative discussion
// There's also no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism
let k = C::F::random(rng);
let r = C::F::random(rng);
#[allow(non_snake_case)]
let R = C::generator_table() * k;
let c = challenge::<C>(params.i, context, &C::G_to_bytes(&R), &serialized);
let s = k + (coefficients[0] * c);
let R = C::generator_table() * r;
let s = r + (
coefficients[0] * challenge::<C>(params.i(), context, &C::G_to_bytes(&R), &serialized)
);
serialized.extend(&C::G_to_bytes(&R));
serialized.extend(&C::F_to_bytes(&s));
// Step 4: Broadcast
(coefficients, commitments, serialized)
(coefficients, serialized)
}
// Verify the received data from the first round of key generation
@ -70,69 +64,48 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
params: &MultisigParams,
context: &str,
our_commitments: Vec<C::G>,
serialized: &[Vec<u8>],
) -> Result<Vec<Vec<C::G>>, FrostError> {
// Deserialize all of the commitments, validating the input buffers as needed
if serialized.len() != (params.n + 1) {
Err(
// Prevents a panic if serialized.len() == 0
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
)?;
}
our_commitments: Vec<u8>,
mut serialized: HashMap<u16, Vec<u8>>,
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
validate_map(
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), our_commitments)
)?;
// Expect a null set of commitments for index 0 so the vector is guaranteed to line up with
// actual indexes. Even if we did the offset internally, the system would need to write the vec
// with the same offset in mind. Therefore, this trick which is probably slightly less efficient
// yet keeps everything simple is preferred
if serialized[0] != vec![] {
Err(FrostError::NonEmptyParticipantZero)?;
}
let commitments_len = usize::from(params.t()) * C::G_len();
let commitments_len = params.t * C::G_len();
let mut commitments = Vec::with_capacity(params.n + 1);
commitments.push(vec![]);
let mut commitments = HashMap::new();
#[allow(non_snake_case)]
let R_bytes = |l| &serialized[&l][commitments_len .. commitments_len + C::G_len()];
#[allow(non_snake_case)]
let R = |l| C::G_from_slice(R_bytes(l)).map_err(|_| FrostError::InvalidProofOfKnowledge(l));
#[allow(non_snake_case)]
let Am = |l| &serialized[&l][0 .. commitments_len];
let s = |l| C::F_from_slice(
&serialized[&l][commitments_len + C::G_len() ..]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l));
let signature_len = C::G_len() + C::F_len();
let mut first = true;
let mut scalars = Vec::with_capacity((params.n - 1) * 3);
let mut points = Vec::with_capacity((params.n - 1) * 3);
for l in 1 ..= params.n {
if l == params.i {
if serialized[l].len() != 0 {
Err(FrostError::DuplicatedIndex(l))?;
}
commitments.push(vec![]);
continue;
}
if serialized[l].len() != (commitments_len + signature_len) {
// Return an error with an approximation for how many commitments were included
// Prevents errors if not even the signature was included
if serialized[l].len() < signature_len {
Err(FrostError::InvalidCommitmentQuantity(l, params.t, 0))?;
}
Err(
FrostError::InvalidCommitmentQuantity(
l,
params.t,
// Could technically be x.y despite this returning x, yet any y is negligible
// It could help with debugging to know a partial piece of data was read but this error
// alone should be enough
(serialized[l].len() - signature_len) / C::G_len()
)
)?;
}
commitments.push(Vec::with_capacity(params.t));
for o in 0 .. params.t {
commitments[l].push(
let mut scalars = Vec::with_capacity((usize::from(params.n()) - 1) * 3);
let mut points = Vec::with_capacity((usize::from(params.n()) - 1) * 3);
for l in 1 ..= params.n() {
let mut these_commitments = vec![];
for c in 0 .. usize::from(params.t()) {
these_commitments.push(
C::G_from_slice(
&serialized[l][(o * C::G_len()) .. ((o + 1) * C::G_len())]
).map_err(|_| FrostError::InvalidCommitment(l))?
&serialized[&l][(c * C::G_len()) .. ((c + 1) * C::G_len())]
).map_err(|_| FrostError::InvalidCommitment(l.try_into().unwrap()))?
);
}
commitments.insert(l, these_commitments);
// Don't bother validating our own proof of knowledge
if l == params.i() {
continue;
}
// Step 5: Validate each proof of knowledge (prep)
let mut u = C::F::one();
@ -140,62 +113,35 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
u = C::F::random(&mut *rng);
}
// uR
scalars.push(u);
points.push(
C::G_from_slice(
&serialized[l][commitments_len .. commitments_len + C::G_len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?
);
points.push(R(l)?);
scalars.push(
-C::F_from_slice(
&serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))? * u
);
// -usG
scalars.push(-s(l)? * u);
points.push(C::generator());
let c = challenge::<C>(
l,
context,
&serialized[l][commitments_len .. commitments_len + C::G_len()],
&serialized[l][0 .. commitments_len]
);
if first {
scalars.push(c);
first = false;
} else {
scalars.push(c * u);
}
points.push(commitments[l][0]);
// ucA
let c = challenge::<C>(l, context, R_bytes(l), Am(l));
scalars.push(if first { first = false; c } else { c * u});
points.push(commitments[&l][0]);
}
// Step 5: Implementation
// Uses batch verification to optimize the success case dramatically
// On failure, the cost is now this + blame, yet that should happen infrequently
// s = r + ca
// sG == R + cA
// R + cA - sG == 0
if C::multiexp_vartime(&scalars, &points) != C::G::identity() {
for l in 1 ..= params.n {
if l == params.i {
for l in 1 ..= params.n() {
if l == params.i() {
continue;
}
#[allow(non_snake_case)]
let R = C::G_from_slice(
&serialized[l][commitments_len .. commitments_len + C::G_len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
let s = C::F_from_slice(
&serialized[l][commitments_len + C::G_len() .. serialized[l].len()]
).map_err(|_| FrostError::InvalidProofOfKnowledge(l))?;
let c = challenge::<C>(
l,
context,
&serialized[l][commitments_len .. commitments_len + C::G_len()],
&serialized[l][0 .. commitments_len]
);
if R != ((C::generator_table() * s) + (commitments[l][0] * (C::F::zero() - &c))) {
if (C::generator_table() * s(l)?) != (
R(l)? + (commitments[&l][0] * challenge::<C>(l, context, R_bytes(l), Am(l)))
) {
Err(FrostError::InvalidProofOfKnowledge(l))?;
}
}
@ -203,22 +149,19 @@ fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
Err(FrostError::InternalError("batch validation is broken".to_string()))?;
}
// Write in our own commitments
commitments[params.i] = our_commitments;
Ok(commitments)
}
fn polynomial<F: PrimeField>(
coefficients: &[F],
i: usize
l: u16
) -> F {
let i = F::from(u64::try_from(i).unwrap());
let l = F::from(u64::from(l));
let mut share = F::zero();
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
share += coefficient;
if idx != (coefficients.len() - 1) {
share *= i;
share *= l;
}
}
share
@ -232,27 +175,25 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
params: &MultisigParams,
context: &str,
coefficients: Vec<C::F>,
our_commitments: Vec<C::G>,
commitments: &[Vec<u8>],
) -> Result<(C::F, Vec<Vec<C::G>>, Vec<Vec<u8>>), FrostError> {
our_commitments: Vec<u8>,
commitments: HashMap<u16, Vec<u8>>,
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, Vec<u8>>), FrostError> {
let commitments = verify_r1::<R, C>(rng, params, context, our_commitments, commitments)?;
// Step 1: Generate secret shares for all other parties
let mut res = Vec::with_capacity(params.n + 1);
res.push(vec![]);
for i in 1 ..= params.n {
// Don't push our own to the byte buffer which is meant to be sent around
let mut res = HashMap::new();
for l in 1 ..= params.n() {
// Don't insert our own shares to the byte buffer which is meant to be sent around
// An app developer could accidentally send it. Best to keep this black boxed
if i == params.i {
res.push(vec![]);
continue
if l == params.i() {
continue;
}
res.push(C::F_to_bytes(&polynomial(&coefficients, i)));
res.insert(l, C::F_to_bytes(&polynomial(&coefficients, l)));
}
// Calculate our own share
let share = polynomial(&coefficients, params.i);
let share = polynomial(&coefficients, params.i());
// The secret shares are discarded here, not cleared. While any system which leaves its memory
// accessible is likely totally lost already, making the distinction meaningless when the key gen
@ -273,87 +214,67 @@ fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
fn complete_r2<C: Curve>(
params: MultisigParams,
share: C::F,
commitments: &[Vec<C::G>],
commitments: HashMap<u16, Vec<C::G>>,
// Vec to preserve ownership
serialized: Vec<Vec<u8>>,
mut serialized: HashMap<u16, Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
validate_map(
&mut serialized,
&(1 ..= params.n()).into_iter().collect::<Vec<_>>(),
(params.i(), C::F_to_bytes(&share))
)?;
// Step 2. Verify each share
if serialized.len() != (params.n + 1) {
Err(
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
)?;
let mut shares = HashMap::new();
for (l, share) in serialized {
shares.insert(l, C::F_from_slice(&share).map_err(|_| FrostError::InvalidShare(params.i()))?);
}
if (commitments[0].len() != 0) || (serialized[0].len() != 0) {
Err(FrostError::NonEmptyParticipantZero)?;
}
// Deserialize them
let mut shares: Vec<C::F> = vec![C::F::zero()];
for i in 1 .. serialized.len() {
if i == params.i {
if serialized[i].len() != 0 {
Err(FrostError::DuplicatedIndex(i))?;
}
shares.push(C::F::zero());
continue;
}
shares.push(C::F_from_slice(&serialized[i]).map_err(|_| FrostError::InvalidShare(i))?);
}
for l in 1 ..= params.n {
if l == params.i {
for (l, share) in &shares {
if *l == params.i() {
continue;
}
let i_scalar = C::F::from(u64::try_from(params.i).unwrap());
let i_scalar = C::F::from(params.i.into());
let mut exp = C::F::one();
let mut exps = Vec::with_capacity(params.t);
for _ in 0 .. params.t {
let mut exps = Vec::with_capacity(usize::from(params.t()));
for _ in 0 .. params.t() {
exps.push(exp);
exp *= i_scalar;
}
// Doesn't use multiexp_vartime with -shares[l] due to not being able to push to commitments
if C::multiexp_vartime(&exps, &commitments[l]) != (C::generator_table() * shares[l]) {
Err(FrostError::InvalidCommitment(l))?;
if C::multiexp_vartime(&exps, &commitments[&l]) != (C::generator_table() * *share) {
Err(FrostError::InvalidCommitment(*l))?;
}
}
// TODO: Clear the original share
let mut secret_share = share;
for remote_share in shares {
secret_share += remote_share;
let mut secret_share = C::F::zero();
for (_, share) in shares {
secret_share += share;
}
let mut verification_shares = vec![C::G::identity()];
for i in 1 ..= params.n {
let mut verification_shares = HashMap::new();
for l in 1 ..= params.n() {
let mut exps = vec![];
let mut cs = vec![];
for j in 1 ..= params.n {
for k in 0 .. params.t {
for i in 1 ..= params.n() {
for j in 0 .. params.t() {
let mut exp = C::F::one();
for _ in 0 .. k {
exp *= C::F::from(u64::try_from(i).unwrap());
for _ in 0 .. j {
exp *= C::F::from(u64::try_from(l).unwrap());
}
exps.push(exp);
cs.push(commitments[j][k]);
cs.push(commitments[&i][usize::from(j)]);
}
}
verification_shares.push(C::multiexp_vartime(&exps, &cs));
verification_shares.insert(l, C::multiexp_vartime(&exps, &cs));
}
debug_assert_eq!(C::generator_table() * secret_share, verification_shares[&params.i()]);
debug_assert_eq!(
C::generator_table() * secret_share,
verification_shares[params.i]
);
let mut group_key = C::G::identity();
for j in 1 ..= params.n {
group_key += commitments[j][0];
}
let group_key = commitments.iter().map(|(_, commitments)| commitments[0]).sum();
// TODO: Clear serialized and shares
@ -382,9 +303,9 @@ pub struct StateMachine<C: Curve> {
context: String,
state: State,
coefficients: Option<Vec<C::F>>,
our_commitments: Option<Vec<C::G>>,
our_commitments: Option<Vec<u8>>,
secret: Option<C::F>,
commitments: Option<Vec<Vec<C::G>>>
commitments: Option<HashMap<u16, Vec<C::G>>>
}
impl<C: Curve> StateMachine<C> {
@ -413,14 +334,14 @@ impl<C: Curve> StateMachine<C> {
Err(FrostError::InvalidKeyGenTransition(State::Fresh, self.state))?;
}
let (coefficients, commitments, serialized) = generate_key_r1::<R, C>(
let (coefficients, serialized) = generate_key_r1::<R, C>(
rng,
&self.params,
&self.context,
);
self.coefficients = Some(coefficients);
self.our_commitments = Some(commitments);
self.our_commitments = Some(serialized.clone());
self.state = State::GeneratedCoefficients;
Ok(serialized)
}
@ -433,8 +354,8 @@ impl<C: Curve> StateMachine<C> {
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
commitments: Vec<Vec<u8>>,
) -> Result<Vec<Vec<u8>>, FrostError> {
commitments: HashMap<u16, Vec<u8>>,
) -> Result<HashMap<u16, Vec<u8>>, FrostError> {
if self.state != State::GeneratedCoefficients {
Err(FrostError::InvalidKeyGenTransition(State::GeneratedCoefficients, self.state))?;
}
@ -445,7 +366,7 @@ impl<C: Curve> StateMachine<C> {
&self.context,
self.coefficients.take().unwrap(),
self.our_commitments.take().unwrap(),
&commitments,
commitments,
)?;
self.secret = Some(secret);
@ -462,8 +383,8 @@ impl<C: Curve> StateMachine<C> {
/// wait for all participants to report as such
pub fn complete(
&mut self,
shares: Vec<Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
shares: HashMap<u16, Vec<u8>>,
) -> Result<MultisigKeys<C>, FrostError> {
if self.state != State::GeneratedSecretShares {
Err(FrostError::InvalidKeyGenTransition(State::GeneratedSecretShares, self.state))?;
}
@ -471,7 +392,7 @@ impl<C: Curve> StateMachine<C> {
let keys = complete_r2(
self.params,
self.secret.take().unwrap(),
&self.commitments.take().unwrap(),
self.commitments.take().unwrap(),
shares,
)?;
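
For reference, the proof of knowledge the rewritten key generation verifies is the plain Schnorr relation already sketched in the comments above. Spelled out (the notation is ours: a_{l,0} is participant l's constant coefficient, A_{l,0} its first commitment, R_l and s_l the proof, u_l the random batching weights):

c_l = H(l, \text{context}, R_l, A_l)
s_l = r_l + c_l \cdot a_{l,0}
s_l G = R_l + c_l A_{l,0} \qquad \text{(per-participant check)}
\sum_l u_l \left( R_l + c_l A_{l,0} - s_l G \right) = 0 \qquad \text{(batched check with random } u_l\text{)}

If the batched multiexp is non-zero, each relation is rechecked individually to assign blame, which is what the fallback loop above does.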

View file

@ -1,4 +1,5 @@
use core::{ops::Mul, fmt::Debug};
use std::collections::HashMap;
use thiserror::Error;
@ -10,7 +11,6 @@ pub use multiexp::multiexp_vartime;
pub mod key_gen;
pub mod algorithm;
pub mod sign;
use sign::lagrange;
/// Set of errors for curve-related operations, namely encoding and decoding
#[derive(Error, Debug)]
@ -118,27 +118,23 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MultisigParams {
/// Participants needed to sign on behalf of the group
t: usize,
t: u16,
/// Amount of participants
n: usize,
n: u16,
/// Index of the participant being acted for
i: usize,
i: u16,
}
impl MultisigParams {
pub fn new(
t: usize,
n: usize,
i: usize
t: u16,
n: u16,
i: u16
) -> Result<MultisigParams, FrostError> {
if (t == 0) || (n == 0) {
Err(FrostError::ZeroParameter(t, n))?;
}
if u16::try_from(n).is_err() {
Err(FrostError::TooManyParticipants(n, u16::MAX))?;
}
// When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
// but it's not invalid to do so
if t > n {
@ -151,21 +147,21 @@ impl MultisigParams {
Ok(MultisigParams{ t, n, i })
}
pub fn t(&self) -> usize { self.t }
pub fn n(&self) -> usize { self.n }
pub fn i(&self) -> usize { self.i }
pub fn t(&self) -> u16 { self.t }
pub fn n(&self) -> u16 { self.n }
pub fn i(&self) -> u16 { self.i }
}
#[derive(Error, Debug)]
pub enum FrostError {
#[error("a parameter was 0 (required {0}, participants {1})")]
ZeroParameter(usize, usize),
ZeroParameter(u16, u16),
#[error("too many participants (max {1}, got {0})")]
TooManyParticipants(usize, u16),
#[error("invalid amount of required participants (max {1}, got {0})")]
InvalidRequiredQuantity(usize, usize),
InvalidRequiredQuantity(u16, u16),
#[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
InvalidParticipantIndex(usize, usize),
InvalidParticipantIndex(u16, u16),
#[error("invalid signing set ({0})")]
InvalidSigningSet(String),
@ -173,16 +169,14 @@ pub enum FrostError {
InvalidParticipantQuantity(usize, usize),
#[error("duplicated participant index ({0})")]
DuplicatedIndex(usize),
#[error("participant 0 provided data despite not existing")]
NonEmptyParticipantZero,
#[error("invalid commitment quantity (participant {0}, expected {1}, got {2})")]
InvalidCommitmentQuantity(usize, usize, usize),
#[error("missing participant {0}")]
MissingParticipant(u16),
#[error("invalid commitment (participant {0})")]
InvalidCommitment(usize),
InvalidCommitment(u16),
#[error("invalid proof of knowledge (participant {0})")]
InvalidProofOfKnowledge(usize),
InvalidProofOfKnowledge(u16),
#[error("invalid share (participant {0})")]
InvalidShare(usize),
InvalidShare(u16),
#[error("invalid key generation state machine transition (expected {0}, was {1})")]
InvalidKeyGenTransition(key_gen::State, key_gen::State),
@ -197,9 +191,9 @@ pub enum FrostError {
#[derive(Clone)]
pub struct MultisigView<C: Curve> {
group_key: C::G,
included: Vec<usize>,
included: Vec<u16>,
secret_share: C::F,
verification_shares: Vec<C::G>,
verification_shares: HashMap<u16, C::G>,
}
impl<C: Curve> MultisigView<C> {
@ -207,7 +201,7 @@ impl<C: Curve> MultisigView<C> {
self.group_key
}
pub fn included(&self) -> Vec<usize> {
pub fn included(&self) -> Vec<u16> {
self.included.clone()
}
@ -215,11 +209,33 @@ impl<C: Curve> MultisigView<C> {
self.secret_share
}
pub fn verification_share(&self, l: usize) -> C::G {
self.verification_shares[l]
pub fn verification_share(&self, l: u16) -> C::G {
self.verification_shares[&l]
}
}
/// Calculate the lagrange coefficient for a signing set
pub fn lagrange<F: PrimeField>(
i: u16,
included: &[u16],
) -> F {
let mut num = F::one();
let mut denom = F::one();
for l in included {
if i == *l {
continue;
}
let share = F::from(u64::try_from(*l).unwrap());
num *= share;
denom *= share - F::from(u64::try_from(i).unwrap());
}
// Safe as this will only be 0 if we're part of the above loop
// (which we have an if case to avoid)
num * denom.invert().unwrap()
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MultisigKeys<C: Curve> {
/// Multisig Parameters
@ -230,7 +246,7 @@ pub struct MultisigKeys<C: Curve> {
/// Group key
group_key: C::G,
/// Verification shares
verification_shares: Vec<C::G>,
verification_shares: HashMap<u16, C::G>,
/// Offset applied to these keys
offset: Option<C::F>,
@ -258,12 +274,12 @@ impl<C: Curve> MultisigKeys<C> {
self.group_key
}
pub fn verification_shares(&self) -> Vec<C::G> {
pub fn verification_shares(&self) -> HashMap<u16, C::G> {
self.verification_shares.clone()
}
pub fn view(&self, included: &[usize]) -> Result<MultisigView<C>, FrostError> {
if (included.len() < self.params.t) || (self.params.n < included.len()) {
pub fn view(&self, included: &[u16]) -> Result<MultisigView<C>, FrostError> {
if (included.len() < self.params.t.into()) || (usize::from(self.params.n) < included.len()) {
Err(FrostError::InvalidSigningSet("invalid amount of participants included".to_string()))?;
}
@ -274,16 +290,18 @@ impl<C: Curve> MultisigKeys<C> {
Ok(MultisigView {
group_key: self.group_key + (C::generator_table() * offset),
secret_share: secret_share + offset_share,
verification_shares: self.verification_shares.clone().iter().enumerate().map(
|(l, share)| (*share * lagrange::<C::F>(l, &included)) +
(C::generator_table() * offset_share)
verification_shares: self.verification_shares.iter().map(
|(l, share)| (
*l,
(*share * lagrange::<C::F>(*l, &included)) + (C::generator_table() * offset_share)
)
).collect(),
included: included.to_vec(),
})
}
pub fn serialized_len(n: usize) -> usize {
1 + usize::from(C::id_len()) + (3 * 8) + C::F_len() + C::G_len() + (n * C::G_len())
pub fn serialized_len(n: u16) -> usize {
1 + usize::from(C::id_len()) + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len())
}
pub fn serialize(&self) -> Vec<u8> {
@ -292,13 +310,13 @@ impl<C: Curve> MultisigKeys<C> {
);
serialized.push(C::id_len());
serialized.extend(C::id().as_bytes());
serialized.extend(&(self.params.n as u64).to_le_bytes());
serialized.extend(&(self.params.t as u64).to_le_bytes());
serialized.extend(&(self.params.i as u64).to_le_bytes());
serialized.extend(&self.params.n.to_le_bytes());
serialized.extend(&self.params.t.to_le_bytes());
serialized.extend(&self.params.i.to_le_bytes());
serialized.extend(&C::F_to_bytes(&self.secret_share));
serialized.extend(&C::G_to_bytes(&self.group_key));
for i in 1 ..= self.params.n {
serialized.extend(&C::G_to_bytes(&self.verification_shares[i]));
for l in 1 ..= self.params.n.into() {
serialized.extend(&C::G_to_bytes(&self.verification_shares[&l]));
}
serialized
@ -330,19 +348,16 @@ impl<C: Curve> MultisigKeys<C> {
Err(FrostError::InternalError("participant quantity wasn't included".to_string()))?;
}
let n = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
.map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
cursor += 8;
let n = u16::from_le_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap());
cursor += 2;
if serialized.len() != MultisigKeys::<C>::serialized_len(n) {
Err(FrostError::InternalError("incorrect serialization length".to_string()))?;
}
let t = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
.map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
cursor += 8;
let i = u64::from_le_bytes(serialized[cursor .. (cursor + 8)].try_into().unwrap()).try_into()
.map_err(|_| FrostError::InternalError("parameter doesn't fit into usize".to_string()))?;
cursor += 8;
let t = u16::from_le_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap());
cursor += 2;
let i = u16::from_le_bytes(serialized[cursor .. (cursor + 2)].try_into().unwrap());
cursor += 2;
let secret_share = C::F_from_slice(&serialized[cursor .. (cursor + C::F_len())])
.map_err(|_| FrostError::InternalError("invalid secret share".to_string()))?;
@ -351,10 +366,10 @@ impl<C: Curve> MultisigKeys<C> {
.map_err(|_| FrostError::InternalError("invalid group key".to_string()))?;
cursor += C::G_len();
let mut verification_shares = vec![C::G::identity()];
verification_shares.reserve_exact(n + 1);
for _ in 0 .. n {
verification_shares.push(
let mut verification_shares = HashMap::new();
for l in 1 ..= n {
verification_shares.insert(
l,
C::G_from_slice(&serialized[cursor .. (cursor + C::G_len())])
.map_err(|_| FrostError::InternalError("invalid verification share".to_string()))?
);
@ -373,3 +388,24 @@ impl<C: Curve> MultisigKeys<C> {
)
}
}
// Validate a map of serialized values to have the expected included participants
pub(crate) fn validate_map<T>(
map: &mut HashMap<u16, T>,
included: &[u16],
ours: (u16, T)
) -> Result<(), FrostError> {
map.insert(ours.0, ours.1);
if map.len() != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len()))?;
}
for included in included {
if !map.contains_key(included) {
Err(FrostError::MissingParticipant(*included))?;
}
}
Ok(())
}
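
As a quick worked example of the relocated lagrange function (illustrative; the lambda notation is ours), for the signing set S = {1, 2, 3} the interpolation-at-zero coefficients it returns are:

\lambda_i = \prod_{l \in S,\, l \neq i} \frac{l}{l - i}
\lambda_1 = \frac{2 \cdot 3}{(2-1)(3-1)} = 3, \quad \lambda_2 = \frac{1 \cdot 3}{(1-2)(3-2)} = -3, \quad \lambda_3 = \frac{1 \cdot 2}{(1-3)(2-3)} = 1

so \sum_i \lambda_i f(i) = f(0) for any polynomial of degree under 3, which is what the tests rely on when they reconstruct group_private from the first THRESHOLD shares.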

View file

@ -1,36 +1,20 @@
use core::{convert::TryFrom, cmp::min, fmt};
use std::rc::Rc;
use core::fmt;
use std::{rc::Rc, collections::HashMap};
use rand_core::{RngCore, CryptoRng};
use ff::{Field, PrimeField};
use ff::Field;
use group::Group;
use transcript::Transcript;
use crate::{Curve, FrostError, MultisigParams, MultisigKeys, MultisigView, algorithm::Algorithm};
/// Calculate the lagrange coefficient
pub fn lagrange<F: PrimeField>(
i: usize,
included: &[usize],
) -> F {
let mut num = F::one();
let mut denom = F::one();
for l in included {
if i == *l {
continue;
}
let share = F::from(u64::try_from(*l).unwrap());
num *= share;
denom *= share - F::from(u64::try_from(i).unwrap());
}
// Safe as this will only be 0 if we're part of the above loop
// (which we have an if case to avoid)
num * denom.invert().unwrap()
}
use crate::{
Curve,
FrostError,
MultisigParams, MultisigKeys, MultisigView,
algorithm::Algorithm,
validate_map
};
/// Pairing of an Algorithm with a MultisigKeys instance and this specific signing set
#[derive(Clone)]
@ -45,13 +29,13 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
pub fn new(
algorithm: A,
keys: Rc<MultisigKeys<C>>,
included: &[usize],
included: &[u16],
) -> Result<Params<C, A>, FrostError> {
let mut included = included.to_vec();
(&mut included).sort_unstable();
// Included < threshold
if included.len() < keys.params.t {
if included.len() < usize::from(keys.params.t) {
Err(FrostError::InvalidSigningSet("not enough signers".to_string()))?;
}
// Invalid index
@ -65,7 +49,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
// Same signer included multiple times
for i in 0 .. included.len() - 1 {
if included[i] == included[i + 1] {
Err(FrostError::DuplicatedIndex(included[i]))?;
Err(FrostError::DuplicatedIndex(included[i].into()))?;
}
}
// Not included
@ -88,7 +72,6 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
struct PreprocessPackage<C: Curve> {
nonces: [C::F; 2],
commitments: [C::G; 2],
serialized: Vec<u8>,
}
@ -111,14 +94,14 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
)
);
PreprocessPackage { nonces, commitments, serialized }
PreprocessPackage { nonces, serialized }
}
#[allow(non_snake_case)]
struct Package<C: Curve> {
Ris: Vec<C::G>,
Ris: HashMap<u16, C::G>,
R: C::G,
share: C::F
share: Vec<u8>
}
// Has every signer perform the role of the signature aggregator
@ -127,22 +110,15 @@ struct Package<C: Curve> {
fn sign_with_share<C: Curve, A: Algorithm<C>>(
params: &mut Params<C, A>,
our_preprocess: PreprocessPackage<C>,
commitments: &[Option<Vec<u8>>],
mut commitments: HashMap<u16, Vec<u8>>,
msg: &[u8],
) -> Result<(Package<C>, Vec<u8>), FrostError> {
let multisig_params = params.multisig_params();
if commitments.len() != (multisig_params.n + 1) {
Err(
FrostError::InvalidParticipantQuantity(
multisig_params.n,
commitments.len() - min(1, commitments.len())
)
)?;
}
if commitments[0].is_some() {
Err(FrostError::NonEmptyParticipantZero)?;
}
validate_map(
&mut commitments,
&params.view.included,
(multisig_params.i, our_preprocess.serialized)
)?;
{
let transcript = params.algorithm.transcript();
@ -155,108 +131,66 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
}
#[allow(non_snake_case)]
let mut B = Vec::with_capacity(multisig_params.n + 1);
B.push(None);
let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len());
// Commitments + a presumed 32-byte hash of the message
let commitments_len = 2 * C::G_len();
// Parse the commitments and prepare the binding factor
for l in 1 ..= multisig_params.n {
if l == multisig_params.i {
if commitments[l].is_some() {
Err(FrostError::DuplicatedIndex(l))?;
}
B.push(Some(our_preprocess.commitments));
{
let transcript = params.algorithm.transcript();
transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
transcript.append_message(
b"commitments",
&our_preprocess.serialized[0 .. (C::G_len() * 2)]
);
}
continue;
}
let included = params.view.included.contains(&l);
if commitments[l].is_some() && (!included) {
Err(FrostError::InvalidCommitmentQuantity(l, 0, commitments.len() / C::G_len()))?;
}
if commitments[l].is_none() {
if included {
Err(FrostError::InvalidCommitmentQuantity(l, 2, 0))?;
}
B.push(None);
continue;
}
let commitments = commitments[l].as_ref().unwrap();
if commitments.len() < commitments_len {
Err(FrostError::InvalidCommitmentQuantity(l, 2, commitments.len() / C::G_len()))?;
}
#[allow(non_snake_case)]
let D = C::G_from_slice(&commitments[0 .. C::G_len()])
.map_err(|_| FrostError::InvalidCommitment(l))?;
#[allow(non_snake_case)]
let E = C::G_from_slice(&commitments[C::G_len() .. commitments_len])
.map_err(|_| FrostError::InvalidCommitment(l))?;
B.push(Some([D, E]));
{
let transcript = params.algorithm.transcript();
transcript.append_message(b"participant", &u16::try_from(l).unwrap().to_be_bytes());
transcript.append_message(b"commitments", &commitments[0 .. commitments_len]);
}
}
// Add the message to the binding factor
// Get the binding factor
let mut addendums = HashMap::new();
let binding = {
let transcript = params.algorithm.transcript();
// Parse the commitments
for l in &params.view.included {
transcript.append_message(b"participant", &l.to_be_bytes());
let commitments = commitments.remove(l).unwrap();
let mut read_commitment = |c, label| {
let commitment = &commitments[c .. c + C::G_len()];
transcript.append_message(label, commitment);
C::G_from_slice(commitment).map_err(|_| FrostError::InvalidCommitment(*l))
};
#[allow(non_snake_case)]
let mut read_D_E = || Ok(
[read_commitment(0, b"commitment_D")?, read_commitment(C::G_len(), b"commitment_E")?]
);
B.insert(*l, read_D_E()?);
addendums.insert(*l, commitments[(C::G_len() * 2) ..].to_vec());
}
// Append the message to the transcript
transcript.append_message(b"message", &C::hash_msg(&msg));
// Calculate the binding factor
C::hash_to_F(&transcript.challenge(b"binding"))
};
// Process the commitments and addendums
let view = &params.view;
// Process the addendums
for l in &params.view.included {
params.algorithm.process_addendum(
view,
*l,
B[*l].as_ref().unwrap(),
if *l == multisig_params.i {
&our_preprocess.serialized[commitments_len .. our_preprocess.serialized.len()]
} else {
&commitments[*l].as_ref().unwrap()[
commitments_len .. commitments[*l].as_ref().unwrap().len()
]
}
)?;
params.algorithm.process_addendum(&params.view, *l, &B[l], &addendums[l])?;
}
#[allow(non_snake_case)]
let mut Ris = vec![];
let mut Ris = HashMap::with_capacity(params.view.included.len());
#[allow(non_snake_case)]
let mut R = C::G::identity();
for i in 0 .. params.view.included.len() {
let commitments = B[params.view.included[i]].unwrap();
for l in &params.view.included {
#[allow(non_snake_case)]
let this_R = commitments[0] + (commitments[1] * binding);
Ris.push(this_R);
let this_R = B[l][0] + (B[l][1] * binding);
Ris.insert(*l, this_R);
R += this_R;
}
let view = &params.view;
let share = params.algorithm.sign_share(
view,
R,
binding,
our_preprocess.nonces[0] + (our_preprocess.nonces[1] * binding),
msg
let share = C::F_to_bytes(
&params.algorithm.sign_share(
&params.view,
R,
binding,
our_preprocess.nonces[0] + (our_preprocess.nonces[1] * binding),
msg
)
);
Ok((Package { Ris, R, share }, C::F_to_bytes(&share)))
Ok((Package { Ris, R, share: share.clone() }, share))
}
// This doesn't check the signing set is as expected and unexpected changes can cause false blames
@ -265,37 +199,17 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
fn complete<C: Curve, A: Algorithm<C>>(
sign_params: &Params<C, A>,
sign: Package<C>,
serialized: &[Option<Vec<u8>>],
mut shares: HashMap<u16, Vec<u8>>,
) -> Result<A::Signature, FrostError> {
let params = sign_params.multisig_params();
if serialized.len() != (params.n + 1) {
Err(
FrostError::InvalidParticipantQuantity(params.n, serialized.len() - min(1, serialized.len()))
)?;
}
validate_map(&mut shares, &sign_params.view.included, (params.i(), sign.share))?;
if serialized[0].is_some() {
Err(FrostError::NonEmptyParticipantZero)?;
}
let mut responses = Vec::with_capacity(params.t);
let mut sum = sign.share;
for i in 0 .. sign_params.view.included.len() {
let l = sign_params.view.included[i];
if l == params.i {
responses.push(None);
continue;
}
// Make sure they actually provided a share
if serialized[l].is_none() {
Err(FrostError::InvalidShare(l))?;
}
let part = C::F_from_slice(serialized[l].as_ref().unwrap())
.map_err(|_| FrostError::InvalidShare(l))?;
let mut responses = HashMap::new();
let mut sum = C::F::zero();
for l in &sign_params.view.included {
let part = C::F_from_slice(&shares[l]).map_err(|_| FrostError::InvalidShare(*l))?;
sum += part;
responses.push(Some(part));
responses.insert(*l, part);
}
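  // sum is now the candidate aggregate response s = the sum of every included participant's s_l.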
// Perform signature validation instead of individual share validation
@@ -307,21 +221,13 @@ fn complete<C: Curve, A: Algorithm<C>>(
}
// Find out who misbehaved
for i in 0 .. sign_params.view.included.len() {
match responses[i] {
Some(part) => {
let l = sign_params.view.included[i];
if !sign_params.algorithm.verify_share(
sign_params.view.verification_share(l),
sign.Ris[i],
part
) {
Err(FrostError::InvalidShare(l))?;
}
},
// Happens when l == i
None => {}
for l in &sign_params.view.included {
if !sign_params.algorithm.verify_share(
sign_params.view.verification_share(*l),
sign.Ris[l],
responses[l]
) {
Err(FrostError::InvalidShare(*l))?;
}
}
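// validate_map isn't shown in this diff; a hypothetical sketch of the behavior its call site
// above relies on — insert our own entry, then require the map's keys to match the included
// signing set exactly (the real helper, and the error variants it uses, may differ):
//
//   fn validate_map<T>(
//     map: &mut HashMap<u16, T>,
//     included: &[u16],
//     ours: (u16, T)
//   ) -> Result<(), FrostError> {
//     map.insert(ours.0, ours.1);
//     for l in included {
//       if !map.contains_key(l) {
//         Err(/* missing-participant error variant */)?;
//       }
//     }
//     if map.len() != included.len() {
//       Err(/* unexpected-participant error variant */)?;
//     }
//     Ok(())
//   }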
@@ -366,7 +272,7 @@ pub trait StateMachine {
/// for every other participant to receive, over an authenticated channel
fn sign(
&mut self,
commitments: &[Option<Vec<u8>>],
commitments: HashMap<u16, Vec<u8>>,
msg: &[u8],
) -> Result<Vec<u8>, FrostError>;
@@ -374,7 +280,7 @@ pub trait StateMachine {
/// Takes in everyone else's shares, keyed by participant index, as submitted to us over an
/// authenticated channel. Returns the resulting signature
fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<Self::Signature, FrostError>;
fn complete(&mut self, shares: HashMap<u16, Vec<u8>>) -> Result<Self::Signature, FrostError>;
fn multisig_params(&self) -> MultisigParams;
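  // Expected flow for each party: preprocess() -> broadcast the serialized preprocess ->
  // sign() with the other participants' preprocesses keyed by index -> broadcast our share ->
  // complete() with the other participants' shares keyed by index. Our own entry is inserted
  // internally (see the validate_map call in complete()), and the maps must cover exactly the
  // included signing set.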
@@ -395,7 +301,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
pub fn new(
algorithm: A,
keys: Rc<MultisigKeys<C>>,
included: &[usize],
included: &[u16],
) -> Result<AlgorithmMachine<C, A>, FrostError> {
Ok(
AlgorithmMachine {
@@ -427,7 +333,7 @@ impl<C: Curve, A: Algorithm<C>> StateMachine for AlgorithmMachine<C, A> {
fn sign(
&mut self,
commitments: &[Option<Vec<u8>>],
commitments: HashMap<u16, Vec<u8>>,
msg: &[u8],
) -> Result<Vec<u8>, FrostError> {
if self.state != State::Preprocessed {
@@ -446,7 +352,7 @@ impl<C: Curve, A: Algorithm<C>> StateMachine for AlgorithmMachine<C, A> {
Ok(serialized)
}
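  // sign() requires the machine to have just preprocessed, and complete() requires sign() to
  // have already run; out-of-order calls error with InvalidSignTransition.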
fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<A::Signature, FrostError> {
fn complete(&mut self, shares: HashMap<u16, Vec<u8>>) -> Result<A::Signature, FrostError> {
if self.state != State::Signed {
Err(FrostError::InvalidSignTransition(State::Signed, self.state))?;
}

View file

@@ -1,8 +1,6 @@
use std::rc::Rc;
use std::{rc::Rc, collections::HashMap};
use rand::{RngCore, rngs::OsRng};
use sha2::{Digest, Sha256};
use rand::rngs::OsRng;
use frost::{
Curve,
@@ -15,51 +13,120 @@ use frost::{
mod common;
use common::{Secp256k1, TestHram};
const PARTICIPANTS: usize = 8;
const PARTICIPANTS: u16 = 8;
fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
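// Used below so each test machine receives every participant's message except its own, e.g.
// machine.sign(clone_without(&commitments, i), b"Hello World").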
fn key_gen<C: Curve>() -> HashMap<u16, Rc<MultisigKeys<C>>> {
let mut params = HashMap::new();
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
params.insert(
i,
MultisigParams::new(
((PARTICIPANTS / 3) * 2) + 1,
PARTICIPANTS,
i
).unwrap()
);
machines.insert(
i,
key_gen::StateMachine::<C>::new(
params[&i],
"FROST test key_gen".to_string()
)
);
commitments.insert(
i,
machines.get_mut(&i).unwrap().generate_coefficients(&mut OsRng).unwrap()
);
}
let mut secret_shares = HashMap::new();
for (l, machine) in machines.iter_mut() {
secret_shares.insert(
*l,
machine.generate_secret_shares(&mut OsRng, clone_without(&commitments, l)).unwrap()
);
}
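  // secret_shares is keyed by sender; each inner map returned by generate_secret_shares is
  // keyed by the intended recipient.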
let mut verification_shares = None;
let mut group_key = None;
let mut keys = HashMap::new();
for (i, machine) in machines.iter_mut() {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == l {
continue;
}
our_secret_shares.insert(*l, shares[i].clone());
}
let these_keys = machine.complete(our_secret_shares).unwrap();
// Test serialization
assert_eq!(
MultisigKeys::<C>::deserialize(&these_keys.serialize()).unwrap(),
these_keys
);
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
keys.insert(*i, Rc::new(these_keys.clone()));
}
keys
}
fn sign<C: Curve, A: Algorithm<C, Signature = SchnorrSignature<C>>>(
algorithm: A,
keys: Vec<Rc<MultisigKeys<C>>>
keys: &HashMap<u16, Rc<MultisigKeys<C>>>
) {
let t = keys[0].params().t();
let mut machines = vec![];
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
commitments.resize(PARTICIPANTS + 1, None);
let t = keys[&1].params().t();
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= t {
machines.push(
machines.insert(
i,
AlgorithmMachine::new(
algorithm.clone(),
keys[i - 1].clone(),
&(1 ..= t).collect::<Vec<usize>>()
keys[&i].clone(),
&(1 ..= t).collect::<Vec<_>>()
).unwrap()
);
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
commitments.insert(
i,
machines.get_mut(&i).unwrap().preprocess(&mut OsRng).unwrap()
);
}
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
shares.resize(PARTICIPANTS + 1, None);
for i in 1 ..= t {
shares[i] = Some(
machines[i - 1].sign(
&commitments
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>(),
b"Hello World"
).unwrap()
let mut shares = HashMap::new();
for (i, machine) in machines.iter_mut() {
shares.insert(
*i,
machine.sign(clone_without(&commitments, i), b"Hello World").unwrap()
);
}
let mut signature = None;
for i in 1 ..= t {
let sig = machines[i - 1].complete(
&shares
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>()
).unwrap();
for (i, machine) in machines.iter_mut() {
let sig = machine.complete(clone_without(&shares, i)).unwrap();
if signature.is_none() {
signature = Some(sig);
}
@@ -69,75 +136,12 @@ fn sign<C: Curve, A: Algorithm<C, Signature = SchnorrSignature<C>>>(
#[test]
fn key_gen_and_sign() {
let mut params = vec![];
let mut machines = vec![];
let mut commitments = vec![vec![]];
for i in 1 ..= PARTICIPANTS {
params.push(
MultisigParams::new(
((PARTICIPANTS / 3) * 2) + 1,
PARTICIPANTS,
i
).unwrap()
);
machines.push(
key_gen::StateMachine::<Secp256k1>::new(
params[i - 1],
"FF/Group Rust key_gen test".to_string()
)
);
commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
let mut keys = key_gen::<Secp256k1>();
sign(Schnorr::<Secp256k1, TestHram>::new(), &keys);
for i in 1 ..= u16::try_from(PARTICIPANTS).unwrap() {
keys.insert(i, Rc::new(keys[&i].offset(Secp256k1::hash_to_F(b"offset"))));
}
let mut secret_shares = vec![];
for i in 1 ..= PARTICIPANTS {
secret_shares.push(
machines[i - 1].generate_secret_shares(
&mut OsRng,
commitments
.iter()
.enumerate()
.map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
.collect()
).unwrap()
);
}
let mut verification_shares = vec![];
let mut group_key = None;
let mut keys = vec![];
for i in 1 ..= PARTICIPANTS {
let mut our_secret_shares = vec![vec![]];
our_secret_shares.extend(
secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
);
let these_keys = machines[i - 1].complete(our_secret_shares).unwrap();
assert_eq!(
MultisigKeys::<Secp256k1>::deserialize(&these_keys.serialize()).unwrap(),
these_keys
);
keys.push(Rc::new(these_keys.clone()));
if verification_shares.len() == 0 {
verification_shares = these_keys.verification_shares();
}
assert_eq!(verification_shares, these_keys.verification_shares());
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
}
sign(Schnorr::<Secp256k1, TestHram>::new(), keys.clone());
let mut randomization = [0; 64];
(&mut OsRng).fill_bytes(&mut randomization);
sign(
Schnorr::<Secp256k1, TestHram>::new(),
keys.iter().map(
|keys| Rc::new(keys.offset(Secp256k1::hash_to_F(&Sha256::digest(&randomization))))
).collect()
);
sign(Schnorr::<Secp256k1, TestHram>::new(), &keys);
}
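// The second sign() call exercises offset keys: every participant applies the same offset
// scalar, which presumably shifts the group key by offset * G while leaving the threshold
// structure intact, and the full signing flow is re-run to confirm such keys still produce a
// valid signature.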