// FROST defines its nonce as sum(Di, Ei * bi)
// Monero, however, needs the nonce not just over G, but also over H
// Then there is a signature (a modified Chaum-Pedersen proof) using multiple nonces at once
//
// Accordingly, in order for this library to be robust, it supports generating an arbitrary number
// of nonces, each against an arbitrary list of basepoints
//
// Each nonce remains of the form (d, e) and is made into a proper nonce with d + (e * b)
// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity
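//
// As an illustrative sketch of the algebra (the symbols here are only for exposition), for a
// nonce planned against generators [G, H] with binding factor b:
//   commitments: (D_G, E_G) = (d * G, e * G) and (D_H, E_H) = (d * H, e * H)
//   bound nonce over G: D_G + (b * E_G) = (d + (b * e)) * G
//   bound nonce over H: D_H + (b * E_H) = (d + (b * e)) * H
// The two DLEq proofs assert the same d, and the same e, were used against every generator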
use core::ops::Deref;
use std::{
  io::{self, Read, Write},
  collections::HashMap,
};

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, Zeroizing};

use transcript::Transcript;

use group::{ff::PrimeField, Group, GroupEncoding};
use multiexp::multiexp_vartime;

use dleq::DLEqProof;

use crate::curve::Curve;
// Every participant provides proofs for their commitments at the start of the protocol
// These proofs are verified sequentially, requiring independent transcripts
// In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is
// challenged in order to create a commitment to it, which is carried in each independent
// transcript (effectively forking the original transcript)
//
// For FROST, as defined by the IETF, this will do nothing (and this transcript will never even be
// constructed). For higher-level protocols, the transcript may have contextual info these proofs
// will then be bound to
fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
  let mut transcript = T::new(b"FROST_commitments");
  transcript.append_message(b"context", context);
  transcript
}
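// As a sketch of the intended use by a higher-level protocol (the names below are hypothetical,
// not items defined in this file):
//   let context = calling_transcript.challenge(b"dleq_context");
//   let mut proof_transcript = dleq_transcript::<T>(context.as_ref());
// Each independent proof transcript is then bound to the state of the calling protocol's
// transcript, without needing to clone and pass that full transcript around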
// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
// This is considered a single nonce as r = d + be
#[derive(Clone, Zeroize)]
pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);

// Commitments to a specific generator for this nonce
#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
impl<C: Curve> GeneratorCommitments<C> {
  fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
    Ok(GeneratorCommitments([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
  }

  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.0[0].to_bytes().as_ref())?;
    writer.write_all(self.0[1].to_bytes().as_ref())
  }
}

// A single nonce's commitments and relevant proofs
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct NonceCommitments<C: Curve> {
  // Called generators as these commitments are indexed by generator
  pub(crate) generators: Vec<GeneratorCommitments<C>>,
  // DLEq Proofs proving that these commitments are generated using the same scalar pair
  // This could be further optimized with a multi-nonce proof, offering just one proof for all
  // nonces. See https://github.com/serai-dex/serai/issues/38
  // TODO
  pub(crate) dleqs: Option<[DLEqProof<C::G>; 2]>,
}

impl<C: Curve> NonceCommitments<C> {
  pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    secret_share: &Zeroizing<C::F>,
    generators: &[C::G],
    context: &[u8],
  ) -> (Nonce<C>, NonceCommitments<C>) {
    let nonce = Nonce::<C>([
      C::random_nonce(secret_share, &mut *rng),
      C::random_nonce(secret_share, &mut *rng),
    ]);

    let mut commitments = Vec::with_capacity(generators.len());
    for generator in generators {
      commitments.push(GeneratorCommitments([
        *generator * nonce.0[0].deref(),
        *generator * nonce.0[1].deref(),
      ]));
    }

    let mut dleqs = None;
    if generators.len() >= 2 {
      let mut dleq = |nonce| {
        // Uses an independent transcript as each signer must prove this with their commitments,
        // yet they're validated while processing everyone's data sequentially, by the global order
        // This avoids needing to clone and fork the transcript around
        DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(context), generators, nonce)
      };
      dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]);
    }

    (nonce, NonceCommitments { generators: commitments, dleqs })
  }

  fn read<R: Read, T: Transcript>(
    reader: &mut R,
    generators: &[C::G],
    context: &[u8],
  ) -> io::Result<NonceCommitments<C>> {
    let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
      .map(|_| GeneratorCommitments::read(reader))
      .collect::<Result<_, _>>()?;

    let mut dleqs = None;
    if generators.len() >= 2 {
      let mut verify = |i| -> io::Result<_> {
        let dleq = DLEqProof::deserialize(reader)?;
        dleq
          .verify(
            &mut dleq_transcript::<T>(context),
            generators,
            &commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
          )
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
        Ok(dleq)
      };
      dleqs = Some([verify(0)?, verify(1)?]);
    }

    Ok(NonceCommitments { generators: commitments, dleqs })
  }

  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for generator in &self.generators {
      generator.write(writer)?;
    }
    if let Some(dleqs) = &self.dleqs {
      dleqs[0].serialize(writer)?;
      dleqs[1].serialize(writer)?;
    }
    Ok(())
  }
}
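// Note on the wire format of NonceCommitments, per read/write above: each GeneratorCommitments is
// serialized as its D point followed by its E point, in the order of the planned generators, and,
// when two or more generators are in use, the two DLEq proofs (for d, then for e) follow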
#[derive(Clone, PartialEq, Eq)]
pub(crate) struct Commitments<C: Curve> {
  // Called nonces as these commitments are indexed by nonce
  pub(crate) nonces: Vec<NonceCommitments<C>>,
}

impl<C: Curve> Commitments<C> {
  pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    secret_share: &Zeroizing<C::F>,
    planned_nonces: &[Vec<C::G>],
    context: &[u8],
  ) -> (Vec<Nonce<C>>, Commitments<C>) {
    let mut nonces = vec![];
    let mut commitments = vec![];
    for generators in planned_nonces {
      let (nonce, these_commitments) =
        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators, context);
      nonces.push(nonce);
      commitments.push(these_commitments);
    }
    (nonces, Commitments { nonces: commitments })
  }

  pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
    for nonce in &self.nonces {
      for commitments in &nonce.generators {
        t.append_message(b"commitment_D", commitments.0[0].to_bytes());
        t.append_message(b"commitment_E", commitments.0[1].to_bytes());
      }

      // Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce
      // This means it shouldn't be possible for variadic generators to cause conflicts, as they're
      // committed to as their entire series per-nonce, not in isolation
      if let Some(dleqs) = &nonce.dleqs {
        let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
          let mut buf = vec![];
          dleq.serialize(&mut buf).unwrap();
          t.append_message(label, &buf);
        };
        transcript_dleq(b"dleq_D", &dleqs[0]);
        transcript_dleq(b"dleq_E", &dleqs[1]);
      }
    }
  }

  pub(crate) fn read<R: Read, T: Transcript>(
    reader: &mut R,
    nonces: &[Vec<C::G>],
    context: &[u8],
  ) -> io::Result<Self> {
    Ok(Commitments {
      nonces: (0 .. nonces.len())
        .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i], context))
        .collect::<Result<_, _>>()?,
    })
  }

  pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for nonce in &self.nonces {
      nonce.write(writer)?;
    }
    Ok(())
  }
}
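// Illustrative preprocess-side flow (a sketch; `rng`, `share`, `planned`, `CONTEXT`, `transcript`,
// and `serialized` are hypothetical bindings, not items defined here):
//   let (nonces, commitments) = Commitments::<C>::new::<_, T>(&mut rng, &share, &planned, CONTEXT);
//   commitments.transcript(&mut transcript); // bind the commitments into the wider transcript
//   commitments.write(&mut serialized)?;     // encode them for the other participants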
pub(crate) struct IndividualBinding<C: Curve> {
  commitments: Commitments<C>,
  binding_factors: Option<Vec<C::F>>,
}

pub(crate) struct BindingFactor<C: Curve>(pub(crate) HashMap<u16, IndividualBinding<C>>);

impl<C: Curve> BindingFactor<C> {
  pub(crate) fn insert(&mut self, i: u16, commitments: Commitments<C>) {
    self.0.insert(i, IndividualBinding { commitments, binding_factors: None });
  }

  pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
    for (l, binding) in self.0.iter_mut() {
      let mut transcript = transcript.clone();
      transcript.append_message(b"participant", C::F::from(u64::from(*l)).to_repr());
      // It *should* be perfectly fine to reuse a binding factor for multiple nonces
      // This generates a binding factor per nonce just to ensure it never comes up as a question
      binding.binding_factors = Some(
        (0 .. binding.commitments.nonces.len())
          .map(|_| C::hash_binding_factor(transcript.challenge(b"rho").as_ref()))
          .collect(),
      );
    }
  }

  pub(crate) fn binding_factors(&self, i: u16) -> &[C::F] {
    self.0[&i].binding_factors.as_ref().unwrap()
  }

  // Get the bound nonces for a specific party
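  // i.e. for each of the party's nonces, and each generator within that nonce, D + (rho * E),
  // using that nonce's binding factor rho as calculated above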
  pub(crate) fn bound(&self, l: u16) -> Vec<Vec<C::G>> {
    let mut res = vec![];
    for (i, (nonce, rho)) in
      self.0[&l].commitments.nonces.iter().zip(self.binding_factors(l).iter()).enumerate()
    {
      res.push(vec![]);
      for generator in &nonce.generators {
        res[i].push(generator.0[0] + (generator.0[1] * rho));
      }
    }
    res
  }

  // Get the nonces for this signing session
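  // i.e. the aggregated nonces: for each planned nonce and generator, the sum of every
  // participant's D plus the sum of each participant's E scaled by their binding factor,
  // evaluated with a single variable-time multiexp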
  pub(crate) fn nonces(&self, planned_nonces: &[Vec<C::G>]) -> Vec<Vec<C::G>> {
    let mut nonces = Vec::with_capacity(planned_nonces.len());
    for n in 0 .. planned_nonces.len() {
      nonces.push(Vec::with_capacity(planned_nonces[n].len()));
      for g in 0 .. planned_nonces[n].len() {
        #[allow(non_snake_case)]
        let mut D = C::G::identity();
        let mut statements = Vec::with_capacity(self.0.len());
        #[allow(non_snake_case)]
        for IndividualBinding { commitments, binding_factors } in self.0.values() {
          D += commitments.nonces[n].generators[g].0[0];
          statements
            .push((binding_factors.as_ref().unwrap()[n], commitments.nonces[n].generators[g].0[1]));
        }
        nonces[n].push(D + multiexp_vartime(&statements));
      }
    }
    nonces
  }
}
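// Illustrative verification-side flow (a sketch; `binding`, `transcript`, `planned`, `l`, and
// `commitments` are hypothetical bindings, not items defined here):
//   let mut binding = BindingFactor::<C>(HashMap::new());
//   binding.insert(l, commitments);                     // per participant, after Commitments::read
//   binding.calculate_binding_factors(&mut transcript);
//   let per_party = binding.bound(l);                   // each party's bound nonces
//   let aggregated = binding.nonces(&planned);          // the session-wide nonces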