Luke Parker 2022-07-12 01:28:01 -04:00
parent cf28967754
commit 5eb61f3a87
GPG key ID: F9F1386DB1E119B6
11 changed files with 198 additions and 123 deletions

View file

@@ -7,7 +7,7 @@ use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{Group, GroupEncoding};

-use transcript::RecommendedTranscript;
+use transcript::{Transcript, RecommendedTranscript};

use dalek_ff_group as dfg;
use dleq::{Generators, DLEqProof};

@@ -21,6 +21,10 @@ pub enum MultisigError {
  InvalidKeyImage(u16)
}

+fn transcript() -> RecommendedTranscript {
+  RecommendedTranscript::new(b"monero_key_image_dleq")
+}
+
#[allow(non_snake_case)]
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
  rng: &mut R,

@@ -35,7 +39,7 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
    // the proper order if they want to reach consensus
    // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
    // merge later in some form, when it should instead just merge xH (as it does)
-    &mut RecommendedTranscript::new(b"DLEq Proof"),
+    &mut transcript(),
    Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
    dfg::Scalar(x)
  ).serialize(&mut res).unwrap();

@@ -45,16 +49,15 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
#[allow(non_snake_case)]
pub(crate) fn read_dleq(
  serialized: &[u8],
-  start: usize,
  H: EdwardsPoint,
  l: u16,
  xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
-  if serialized.len() < start + 96 {
+  if serialized.len() != 96 {
    Err(MultisigError::InvalidDLEqProof(l))?;
  }

-  let bytes = (&serialized[(start + 0) .. (start + 32)]).try_into().unwrap();
+  let bytes = (&serialized[.. 32]).try_into().unwrap();
  // dfg ensures the point is torsion free
  let xH = Option::<dfg::EdwardsPoint>::from(
    dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)

@@ -64,13 +67,13 @@ pub(crate) fn read_dleq(
    Err(MultisigError::InvalidDLEqProof(l))?;
  }

-  let proof = DLEqProof::<dfg::EdwardsPoint>::deserialize(
-    &mut Cursor::new(&serialized[(start + 32) .. (start + 96)])
-  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
-  let mut transcript = RecommendedTranscript::new(b"DLEq Proof");
-  proof.verify(&mut transcript, Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)), (xG, xH))
-    .map_err(|_| MultisigError::InvalidDLEqProof(l))?;
+  DLEqProof::<dfg::EdwardsPoint>::deserialize(
+    &mut Cursor::new(&serialized[32 ..])
+  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
+    &mut transcript(),
+    Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
+    (xG, xH)
+  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?;

  Ok(xH)
}
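
For orientation (not part of the commit): after this change the key image addendum read_dleq parses is a fixed 96-byte buffer, which is why the start offset parameter could be dropped. A minimal sketch of that layout, with an illustrative helper name:

  // Sketch only; offsets inferred from the hunks above
  fn split_key_image_addendum(serialized: &[u8]) -> Option<(&[u8], &[u8])> {
    if serialized.len() != 96 {
      return None;
    }
    // [0 .. 32]  compressed xH, this signer's key image share
    // [32 .. 96] DLEq proof that xG and xH share a discrete logarithm, bound to
    //            the "monero_key_image_dleq" transcript
    Some((&serialized[.. 32], &serialized[32 ..]))
  }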

View file

@@ -6,7 +6,7 @@ use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
-  traits::Identity,
+  traits::{Identity, IsIdentity},
  scalar::Scalar,
  edwards::EdwardsPoint
};

@@ -76,7 +76,6 @@ pub struct ClsagMultisig {
  H: EdwardsPoint,
  // Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
  image: EdwardsPoint,
-  AH: (dfg::EdwardsPoint, dfg::EdwardsPoint),

  details: Arc<RwLock<Option<ClsagDetails>>>,

@@ -87,15 +86,15 @@ pub struct ClsagMultisig {
impl ClsagMultisig {
  pub fn new(
    transcript: RecommendedTranscript,
+    output_key: EdwardsPoint,
    details: Arc<RwLock<Option<ClsagDetails>>>
  ) -> Result<ClsagMultisig, MultisigError> {
    Ok(
      ClsagMultisig {
        transcript,
-        H: EdwardsPoint::identity(),
+        H: hash_to_point(output_key),
        image: EdwardsPoint::identity(),
-        AH: (dfg::EdwardsPoint::identity(), dfg::EdwardsPoint::identity()),

        details,

@@ -106,7 +105,7 @@ impl ClsagMultisig {
  }

  pub fn serialized_len() -> usize {
-    3 * (32 + 64)
+    32 + (2 * 32)
  }

  fn input(&self) -> ClsagInput {

@@ -122,22 +121,18 @@ impl Algorithm<Ed25519> for ClsagMultisig {
  type Transcript = RecommendedTranscript;
  type Signature = (Clsag, EdwardsPoint);

+  fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
+    vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)]]
+  }
+
  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
-    view: &FrostView<Ed25519>,
-    nonces: &[dfg::Scalar; 2]
+    view: &FrostView<Ed25519>
  ) -> Vec<u8> {
-    self.H = hash_to_point(view.group_key().0);
-    let mut serialized = Vec::with_capacity(ClsagMultisig::serialized_len());
+    let mut serialized = Vec::with_capacity(Self::serialized_len());
    serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
    serialized.extend(write_dleq(rng, self.H, view.secret_share().0));
-    serialized.extend((nonces[0].0 * self.H).compress().to_bytes());
-    serialized.extend(write_dleq(rng, self.H, nonces[0].0));
-    serialized.extend((nonces[1].0 * self.H).compress().to_bytes());
-    serialized.extend(write_dleq(rng, self.H, nonces[1].0));
    serialized
  }

@@ -145,42 +140,27 @@ impl Algorithm<Ed25519> for ClsagMultisig {
    &mut self,
    view: &FrostView<Ed25519>,
    l: u16,
-    commitments: &[dfg::EdwardsPoint; 2],
    serialized: &[u8]
  ) -> Result<(), FrostError> {
-    if serialized.len() != ClsagMultisig::serialized_len() {
+    if serialized.len() != Self::serialized_len() {
      // Not an optimal error but...
      Err(FrostError::InvalidCommitment(l))?;
    }

-    if self.AH.0.is_identity().into() {
+    if self.image.is_identity().into() {
      self.transcript.domain_separate(b"CLSAG");
      self.input().transcript(&mut self.transcript);
      self.transcript.append_message(b"mask", &self.mask().to_bytes());
    }

-    // Uses the same format FROST does for the expected commitments (nonce * G where this is nonce * H)
-    // The following technically shouldn't need to be committed to, as we've committed to equivalents,
-    // yet it doesn't hurt and may resolve some unknown issues
    self.transcript.append_message(b"participant", &l.to_be_bytes());
+    self.transcript.append_message(b"key_image_share", &serialized[.. 32]);

-    let mut cursor = 0;
-    self.transcript.append_message(b"image_share", &serialized[cursor .. (cursor + 32)]);
    self.image += read_dleq(
      serialized,
-      cursor,
      self.H,
      l,
      view.verification_share(l)
    ).map_err(|_| FrostError::InvalidCommitment(l))?.0;

-    cursor += 96;
-    self.transcript.append_message(b"commitment_D_H", &serialized[cursor .. (cursor + 32)]);
-    self.AH.0 += read_dleq(serialized, cursor, self.H, l, commitments[0]).map_err(|_| FrostError::InvalidCommitment(l))?;
-    cursor += 96;
-    self.transcript.append_message(b"commitment_E_H", &serialized[cursor .. (cursor + 32)]);
-    self.AH.1 += read_dleq(serialized, cursor, self.H, l, commitments[1]).map_err(|_| FrostError::InvalidCommitment(l))?;

    Ok(())
  }

@@ -192,14 +172,10 @@ impl Algorithm<Ed25519> for ClsagMultisig {
  fn sign_share(
    &mut self,
    view: &FrostView<Ed25519>,
-    nonce_sum: dfg::EdwardsPoint,
-    b: dfg::Scalar,
-    nonce: dfg::Scalar,
+    nonce_sums: &[Vec<dfg::EdwardsPoint>],
+    nonces: &[dfg::Scalar],
    msg: &[u8]
  ) -> dfg::Scalar {
-    // Apply the binding factor to the H variant of the nonce
-    self.AH.0 += self.AH.1 * b;

    // Use the transcript to get a seeded random number generator
    // The transcript contains private data, preventing passive adversaries from recreating this
    // process even if they have access to commitments (specifically, the ring index being signed

@@ -216,12 +192,12 @@ impl Algorithm<Ed25519> for ClsagMultisig {
      &self.input(),
      self.mask(),
      &self.msg.as_ref().unwrap(),
-      nonce_sum.0,
-      self.AH.0.0
+      nonce_sums[0][0].0,
+      nonce_sums[0][1].0
    );
    self.interim = Some(Interim { p, c, clsag, pseudo_out });

-    let share = dfg::Scalar(nonce.0 - (p * view.secret_share().0));
+    let share = dfg::Scalar(nonces[0].0 - (p * view.secret_share().0));
    share
  }

@@ -230,7 +206,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
  fn verify(
    &self,
    _: dfg::EdwardsPoint,
-    _: dfg::EdwardsPoint,
+    _: &[Vec<dfg::EdwardsPoint>],
    sum: dfg::Scalar
  ) -> Option<Self::Signature> {
    let interim = self.interim.as_ref().unwrap();

@@ -251,12 +227,12 @@ impl Algorithm<Ed25519> for ClsagMultisig {
  fn verify_share(
    &self,
    verification_share: dfg::EdwardsPoint,
-    nonce: dfg::EdwardsPoint,
+    nonces: &[Vec<dfg::EdwardsPoint>],
    share: dfg::Scalar,
  ) -> bool {
    let interim = self.interim.as_ref().unwrap();
    return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
-      nonce.0 - (interim.p * verification_share.0)
+      nonces[0][0].0 - (interim.p * verification_share.0)
    );
  }
}

View file

@@ -6,7 +6,7 @@ use rand::{RngCore, rngs::OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

#[cfg(feature = "multisig")]
-use transcript::RecommendedTranscript;
+use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::curve::Ed25519;

@@ -102,6 +102,7 @@ fn clsag_multisig() -> Result<(), MultisigError> {
      &mut OsRng,
      ClsagMultisig::new(
        RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
+        keys[&1].group_key().0,
        Arc::new(RwLock::new(Some(
          ClsagDetails::new(
            ClsagInput::new(

View file

@@ -112,6 +112,7 @@ impl SignableTransaction {
        AlgorithmMachine::new(
          ClsagMultisig::new(
            transcript.clone(),
+            input.key,
            inputs[i].clone()
          ).map_err(|e| TransactionError::MultisigError(e))?,
          Arc::new(offset),

@@ -159,7 +160,10 @@ impl PreprocessMachine for TransactionMachine {
    rng: &mut R
  ) -> (TransactionSignMachine, Vec<u8>) {
    // Iterate over each CLSAG calling preprocess
-    let mut serialized = Vec::with_capacity(self.clsags.len() * (64 + ClsagMultisig::serialized_len()));
+    let mut serialized = Vec::with_capacity(
+      // D_{G, H}, E_{G, H}, DLEqs, key image addendum
+      self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len())
+    );
    let clsags = self.clsags.drain(..).map(|clsag| {
      let (clsag, preprocess) = clsag.preprocess(rng);
      serialized.extend(&preprocess);

@@ -224,8 +228,8 @@ impl SignMachine<Transaction> for TransactionSignMachine {
      }
    }

-    // FROST commitments, image, H commitments, and their proofs
-    let clsag_len = 64 + ClsagMultisig::serialized_len();
+    // FROST commitments and their DLEqs, and the image and its DLEq
+    let clsag_len = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len();
    for (l, commitments) in &commitments {
      if commitments.len() != (self.clsags.len() * clsag_len) {
        Err(FrostError::InvalidCommitment(*l))?;

@@ -246,7 +250,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
    for c in 0 .. self.clsags.len() {
      for (l, preprocess) in &commitments[c] {
        images[c] += CompressedEdwardsY(
-          preprocess[64 .. 96].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
+          preprocess[(clsag_len - 96) .. (clsag_len - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
        ).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
      }
    }
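
The offset arithmetic above assumes a fixed per-CLSAG preprocess layout. A sketch of that layout as inferred from this diff (the constant and function names are illustrative, not from the codebase):

  const NONCE_COMMITMENTS: usize = 2 * (32 + 32);  // D_G, E_G, then D_H, E_H
  const NONCE_DLEQS: usize = 2 * (32 + 32);        // a 64-byte DLEq each for D and E
  const KEY_IMAGE_ADDENDUM: usize = 32 + (2 * 32); // key image share plus its DLEq

  fn clsag_preprocess_len() -> usize {
    NONCE_COMMITMENTS + NONCE_DLEQS + KEY_IMAGE_ADDENDUM // 352 bytes per CLSAG
  }

  fn key_image_share_range() -> std::ops::Range<usize> {
    // The addendum trails the commitments, so the 32-byte key image share sits
    // 96 bytes from the end of each per-CLSAG chunk: [256 .. 288]
    (clsag_preprocess_len() - 96) .. (clsag_preprocess_len() - 64)
  }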

View file

@@ -14,7 +14,7 @@ use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
#[cfg(feature = "multisig")]
use dalek_ff_group::Scalar;
#[cfg(feature = "multisig")]
-use transcript::RecommendedTranscript;
+use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::{curve::Ed25519, tests::{THRESHOLD, key_gen, sign}};

View file

@@ -28,6 +28,8 @@ transcript = { package = "flexible-transcript", path = "../transcript", version
multiexp = { path = "../multiexp", version = "0.1", features = ["batch"] }

+dleq = { package = "dleq", path = "../dleq", version = "0.1", features = ["serialize"] }
+
[dev-dependencies]
rand = "0.8"

View file

@@ -13,14 +13,18 @@ pub trait Algorithm<C: Curve>: Clone {
  /// The resulting type of the signatures this algorithm will produce
  type Signature: Clone + PartialEq + Debug;

+  /// Obtain a mutable borrow of the underlying transcript
  fn transcript(&mut self) -> &mut Self::Transcript;

+  /// Obtain the list of nonces to generate, as specified by the basepoints to create commitments
+  /// against per-nonce. These are not committed to by FROST on the underlying transcript
+  fn nonces(&self) -> Vec<Vec<C::G>>;
+
  /// Generate an addendum to FROST's preprocessing stage
  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    params: &FrostView<C>,
-    nonces: &[C::F; 2],
  ) -> Vec<u8>;

  /// Proccess the addendum for the specified participant. Guaranteed to be ordered

@@ -28,7 +32,6 @@ pub trait Algorithm<C: Curve>: Clone {
    &mut self,
    params: &FrostView<C>,
    l: u16,
-    commitments: &[C::G; 2],
    serialized: &[u8],
  ) -> Result<(), FrostError>;

@@ -39,15 +42,14 @@ pub trait Algorithm<C: Curve>: Clone {
  fn sign_share(
    &mut self,
    params: &FrostView<C>,
-    nonce_sum: C::G,
-    binding: C::F,
-    nonce: C::F,
+    nonce_sums: &[Vec<C::G>],
+    nonces: &[C::F],
    msg: &[u8],
  ) -> C::F;

  /// Verify a signature
  #[must_use]
-  fn verify(&self, group_key: C::G, nonce: C::G, sum: C::F) -> Option<Self::Signature>;
+  fn verify(&self, group_key: C::G, nonces: &[Vec<C::G>], sum: C::F) -> Option<Self::Signature>;

  /// Verify a specific share given as a response. Used to determine blame if signature
  /// verification fails

@@ -55,7 +57,7 @@ pub trait Algorithm<C: Curve>: Clone {
  fn verify_share(
    &self,
    verification_share: C::G,
-    nonce: C::G,
+    nonces: &[Vec<C::G>],
    share: C::F,
  ) -> bool;
}

@@ -66,6 +68,10 @@ pub struct IetfTranscript(Vec<u8>);
impl Transcript for IetfTranscript {
  type Challenge = Vec<u8>;

+  fn new(_: &'static [u8]) -> IetfTranscript {
+    unimplemented!("IetfTranscript should not be used with multiple nonce protocols");
+  }
+
  fn domain_separate(&mut self, _: &[u8]) {}

  fn append_message(&mut self, _: &'static [u8], message: &[u8]) {

@@ -115,11 +121,14 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
    &mut self.transcript
  }

+  fn nonces(&self) -> Vec<Vec<C::G>> {
+    vec![vec![C::GENERATOR]]
+  }
+
  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    _: &mut R,
    _: &FrostView<C>,
-    _: &[C::F; 2],
  ) -> Vec<u8> {
    vec![]
  }

@@ -128,7 +137,6 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
    &mut self,
    _: &FrostView<C>,
    _: u16,
-    _: &[C::G; 2],
    _: &[u8],
  ) -> Result<(), FrostError> {
    Ok(())

@@ -137,19 +145,18 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
  fn sign_share(
    &mut self,
    params: &FrostView<C>,
-    nonce_sum: C::G,
-    _: C::F,
-    nonce: C::F,
+    nonce_sums: &[Vec<C::G>],
+    nonces: &[C::F],
    msg: &[u8],
  ) -> C::F {
-    let c = H::hram(&nonce_sum, &params.group_key(), msg);
+    let c = H::hram(&nonce_sums[0][0], &params.group_key(), msg);
    self.c = Some(c);
-    schnorr::sign::<C>(params.secret_share(), nonce, c).s
+    schnorr::sign::<C>(params.secret_share(), nonces[0], c).s
  }

  #[must_use]
-  fn verify(&self, group_key: C::G, nonce: C::G, sum: C::F) -> Option<Self::Signature> {
-    let sig = SchnorrSignature { R: nonce, s: sum };
+  fn verify(&self, group_key: C::G, nonces: &[Vec<C::G>], sum: C::F) -> Option<Self::Signature> {
+    let sig = SchnorrSignature { R: nonces[0][0], s: sum };
    if schnorr::verify::<C>(group_key, self.c.unwrap(), &sig) {
      Some(sig)
    } else {

@@ -161,13 +168,13 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
  fn verify_share(
    &self,
    verification_share: C::G,
-    nonce: C::G,
+    nonces: &[Vec<C::G>],
    share: C::F,
  ) -> bool {
    schnorr::verify::<C>(
      verification_share,
      self.c.unwrap(),
-      &SchnorrSignature { R: nonce, s: share}
+      &SchnorrSignature { R: nonces[0][0], s: share}
    )
  }
}
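
For readers of the new signatures (an observation, not part of the diff): the convention used by both implementations in this commit is that nonce_sums[i][j] is the aggregated commitment for the i-th nonce declared by nonces() against its j-th basepoint, while nonces[i] is this signer's binding-adjusted scalar for that nonce. A minimal sketch under that assumption:

  // Sketch only: how a single-nonce, single-basepoint algorithm (the Schnorr case
  // above) reads the new arguments
  fn single_nonce_view<G: Copy, F: Copy>(nonce_sums: &[Vec<G>], nonces: &[F]) -> (G, F) {
    // R for the first declared nonce over its first basepoint, plus the matching scalar
    (nonce_sums[0][0], nonces[0])
  }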

View file

@@ -3,12 +3,14 @@ use std::{sync::Arc, collections::HashMap};
use rand_core::{RngCore, CryptoRng};

-use group::{ff::{Field, PrimeField}, GroupEncoding};
+use group::{ff::{Field, PrimeField}, Group, GroupEncoding};

use transcript::Transcript;

+use dleq::{Generators, DLEqProof};
+
use crate::{
-  curve::{Curve, G_len, F_from_slice, G_from_slice},
+  curve::{Curve, F_len, G_len, F_from_slice, G_from_slice},
  FrostError,
  FrostParams, FrostKeys, FrostView,
  algorithm::Algorithm,

@@ -69,8 +71,12 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
  }
}

+fn nonce_transcript<T: Transcript>() -> T {
+  T::new(b"FROST_nonce_dleq")
+}
+
pub(crate) struct PreprocessPackage<C: Curve> {
-  pub(crate) nonces: [C::F; 2],
+  pub(crate) nonces: Vec<[C::F; 2]>,
  pub(crate) serialized: Vec<u8>,
}

@@ -80,30 +86,53 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
  rng: &mut R,
  params: &mut Params<C, A>,
) -> PreprocessPackage<C> {
-  let nonces = [
-    C::random_nonce(params.view().secret_share(), &mut *rng),
-    C::random_nonce(params.view().secret_share(), &mut *rng)
-  ];
-  let commitments = [C::GENERATOR * nonces[0], C::GENERATOR * nonces[1]];
-  let mut serialized = commitments[0].to_bytes().as_ref().to_vec();
-  serialized.extend(commitments[1].to_bytes().as_ref());
-  serialized.extend(
-    &params.algorithm.preprocess_addendum(
-      rng,
-      &params.view,
-      &nonces
-    )
-  );
+  let mut serialized = Vec::with_capacity(2 * G_len::<C>());
+  let nonces = params.algorithm.nonces().iter().cloned().map(
+    |mut generators| {
+      let nonces = [
+        C::random_nonce(params.view().secret_share(), &mut *rng),
+        C::random_nonce(params.view().secret_share(), &mut *rng)
+      ];
+
+      let commit = |generator: C::G| {
+        let commitments = [generator * nonces[0], generator * nonces[1]];
+        [commitments[0].to_bytes().as_ref(), commitments[1].to_bytes().as_ref()].concat().to_vec()
+      };
+
+      let first = generators.remove(0);
+      serialized.extend(commit(first));
+
+      // Iterate over the rest
+      for generator in generators.iter() {
+        serialized.extend(commit(*generator));
+        // Provide a DLEq to verify these commitments are for the same nonce
+        // TODO: Provide a single DLEq. See https://github.com/serai-dex/serai/issues/34
+        for nonce in nonces {
+          DLEqProof::prove(
+            &mut *rng,
+            // Uses an independent transcript as each signer must do this now, yet we validate them
+            // sequentially by the global order. Avoids needing to clone the transcript around
+            &mut nonce_transcript::<A::Transcript>(),
+            Generators::new(first, *generator),
+            nonce
+          ).serialize(&mut serialized).unwrap();
+        }
+      }
+
+      nonces
+    }
+  ).collect::<Vec<_>>();
+
+  serialized.extend(&params.algorithm.preprocess_addendum(rng, &params.view));

  PreprocessPackage { nonces, serialized }
}

#[allow(non_snake_case)]
struct Package<C: Curve> {
-  B: HashMap<u16, [C::G; 2]>,
+  B: HashMap<u16, Vec<Vec<[C::G; 2]>>>,
  binding: C::F,
-  R: C::G,
+  Rs: Vec<Vec<C::G>>,
  share: Vec<u8>
}

@@ -137,27 +166,59 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
  let mut B = HashMap::<u16, _>::with_capacity(params.view.included.len());

  // Get the binding factor
+  let nonces = params.algorithm.nonces();
  let mut addendums = HashMap::new();
  let binding = {
    let transcript = params.algorithm.transcript();
    // Parse the commitments
    for l in &params.view.included {
      transcript.append_message(b"participant", &l.to_be_bytes());

-      let commitments = commitments.remove(l).unwrap();
+      let serialized = commitments.remove(l).unwrap();

      let mut read_commitment = |c, label| {
-        let commitment = &commitments[c .. (c + G_len::<C>())];
+        let commitment = &serialized[c .. (c + G_len::<C>())];
        transcript.append_message(label, commitment);
        G_from_slice::<C::G>(commitment).map_err(|_| FrostError::InvalidCommitment(*l))
      };

+      // While this doesn't note which nonce/basepoint this is for, those are expected to be
+      // static. Beyond that, they're committed to in the DLEq proof transcripts, ensuring
+      // consistency. While this is suboptimal, it maintains IETF compliance, and Algorithm is
+      // documented accordingly
      #[allow(non_snake_case)]
-      let mut read_D_E = || Ok(
-        [read_commitment(0, b"commitment_D")?, read_commitment(G_len::<C>(), b"commitment_E")?]
-      );
+      let mut read_D_E = |c| Ok([
+        read_commitment(c, b"commitment_D")?,
+        read_commitment(c + G_len::<C>(), b"commitment_E")?
+      ]);

-      B.insert(*l, read_D_E()?);
-      addendums.insert(*l, commitments[(G_len::<C>() * 2) ..].to_vec());
+      let mut c = 0;
+      let mut commitments = Vec::with_capacity(nonces.len());
+      for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() {
+        commitments.push(Vec::with_capacity(nonce_generators.len()));
+
+        let first = nonce_generators.remove(0);
+        commitments[n].push(read_D_E(c)?);
+        c += 2 * G_len::<C>();
+
+        let mut c = 2 * G_len::<C>();
+        for generator in nonce_generators {
+          commitments[n].push(read_D_E(c)?);
+          c += 2 * G_len::<C>();
+          for de in 0 .. 2 {
+            DLEqProof::deserialize(
+              &mut std::io::Cursor::new(&serialized[c .. (c + (2 * F_len::<C>()))])
+            ).map_err(|_| FrostError::InvalidCommitment(*l))?.verify(
+              &mut nonce_transcript::<A::Transcript>(),
+              Generators::new(first, *generator),
+              (commitments[n][0][de], commitments[n][commitments[n].len() - 1][de])
+            ).map_err(|_| FrostError::InvalidCommitment(*l))?;
+            c += 2 * F_len::<C>();
+          }
+        }
+
+        addendums.insert(*l, serialized[c ..].to_vec());
+      }
+      B.insert(*l, commitments);
    }

    // Append the message to the transcript

@@ -169,22 +230,32 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
  // Process the addendums
  for l in &params.view.included {
-    params.algorithm.process_addendum(&params.view, *l, &B[l], &addendums[l])?;
+    params.algorithm.process_addendum(&params.view, *l, &addendums[l])?;
  }

  #[allow(non_snake_case)]
-  let R = {
-    B.values().map(|B| B[0]).sum::<C::G>() + (B.values().map(|B| B[1]).sum::<C::G>() * binding)
-  };
+  let mut Rs = Vec::with_capacity(nonces.len());
+  for n in 0 .. nonces.len() {
+    Rs.push(vec![C::G::identity(); nonces[n].len()]);
+    #[allow(non_snake_case)]
+    for g in 0 .. nonces[n].len() {
+      Rs[n][g] = {
+        B.values().map(|B| B[n][g][0]).sum::<C::G>() +
+          (B.values().map(|B| B[n][g][1]).sum::<C::G>() * binding)
+      };
+    }
+  }

  let share = params.algorithm.sign_share(
    &params.view,
-    R,
-    binding,
-    our_preprocess.nonces[0] + (our_preprocess.nonces[1] * binding),
+    &Rs,
+    &our_preprocess.nonces.iter().map(
+      |nonces| nonces[0] + (nonces[1] * binding)
+    ).collect::<Vec<_>>(),
    msg
  ).to_repr().as_ref().to_vec();

-  Ok((Package { B, binding, R, share: share.clone() }, share))
+  Ok((Package { B, binding, Rs, share: share.clone() }, share))
}

fn complete<C: Curve, A: Algorithm<C>>(

@@ -206,7 +277,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
  // Perform signature validation instead of individual share validation
  // For the success route, which should be much more frequent, this should be faster
  // It also acts as an integrity check of this library's signing function
-  let res = sign_params.algorithm.verify(sign_params.view.group_key, sign.R, sum);
+  let res = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum);
  if let Some(res) = res {
    return Ok(res);
  }

@@ -216,7 +287,11 @@ fn complete<C: Curve, A: Algorithm<C>>(
  for l in &sign_params.view.included {
    if !sign_params.algorithm.verify_share(
      sign_params.view.verification_share(*l),
-      sign.B[l][0] + (sign.B[l][1] * sign.binding),
+      &sign.B[l].iter().map(
+        |nonces| nonces.iter().map(
+          |commitments| commitments[0] + (commitments[1] * sign.binding)
+        ).collect()
+      ).collect::<Vec<_>>(),
      responses[l]
    ) {
      Err(FrostError::InvalidShare(*l))?;
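
To spell out what the per-generator DLEqs above establish (a restatement, not part of the diff): for each nonce with scalars (d, e), first generator G0, and an additional generator Gj, the preprocess commits

  D0 = d * G0,  Dj = d * Gj,  with a proof that log_G0(D0) = log_Gj(Dj)
  E0 = e * G0,  Ej = e * Gj,  with a proof that log_G0(E0) = log_Gj(Ej)

so a signer cannot use different nonces across basepoints. The verification accordingly pairs (commitments[n][0][de], commitments[n][last][de]): the commitment over the first generator against the one just read for Gj.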

View file

@@ -105,7 +105,7 @@ pub fn test_with_vectors<
      serialized.extend((C::GENERATOR * nonces[1]).to_bytes().as_ref());

      let (machine, serialized) = machine.unsafe_override_preprocess(
-        PreprocessPackage { nonces, serialized: serialized.clone() }
+        PreprocessPackage { nonces: vec![nonces], serialized: serialized.clone() }
      );

      commitments.insert(i, serialized);

View file

@@ -10,6 +10,9 @@ use digest::{typenum::type_operators::IsGreaterOrEqual, consts::U256, Digest, Ou
pub trait Transcript {
  type Challenge: Clone + Send + Sync + AsRef<[u8]>;

+  /// Create a new transcript with the specified name
+  fn new(name: &'static [u8]) -> Self;
+
  /// Apply a domain separator to the transcript
  fn domain_separate(&mut self, label: &'static [u8]);

@@ -62,17 +65,17 @@ impl<D: SecureDigest> DigestTranscript<D> {
    self.0.update(u64::try_from(value.len()).unwrap().to_le_bytes());
    self.0.update(value);
  }
-
-  pub fn new(name: &'static [u8]) -> Self {
-    let mut res = DigestTranscript(D::new());
-    res.append(DigestTranscriptMember::Name, name);
-    res
-  }
}

impl<D: SecureDigest> Transcript for DigestTranscript<D> {
  type Challenge = Output<D>;

+  fn new(name: &'static [u8]) -> Self {
+    let mut res = DigestTranscript(D::new());
+    res.append(DigestTranscriptMember::Name, name);
+    res
+  }
+
  fn domain_separate(&mut self, label: &[u8]) {
    self.append(DigestTranscriptMember::Domain, label);
  }

View file

@@ -17,6 +17,10 @@ impl Transcript for MerlinTranscript {
  // this wrapper should be secure with this setting
  type Challenge = [u8; 64];

+  fn new(name: &'static [u8]) -> Self {
+    MerlinTranscript(merlin::Transcript::new(name))
+  }
+
  fn domain_separate(&mut self, label: &'static [u8]) {
    self.append_message(b"dom-sep", label);
  }