Update the DLEq proof for any amount of generators

The two-generator limit was neither required nor beneficial. This does 
theoretically optimize FROST, yet not for any current constructions. A 
follow-up proof which would optimize current constructions has been 
noted in #38.

Adds explicit no_std support to the core DLEq proof.

Closes #34.
This commit is contained in:
Luke Parker 2022-07-13 23:29:48 -04:00
parent 46975812c3
commit 5ede5b9e8f
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
9 changed files with 110 additions and 105 deletions

View file

@ -9,7 +9,7 @@ use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg; use dalek_ff_group as dfg;
use dleq::{Generators, DLEqProof}; use dleq::DLEqProof;
#[derive(Clone, Error, Debug)] #[derive(Clone, Error, Debug)]
pub enum MultisigError { pub enum MultisigError {
@ -40,7 +40,7 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
// merge later in some form, when it should instead just merge xH (as it does) // merge later in some form, when it should instead just merge xH (as it does)
&mut transcript(), &mut transcript(),
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
dfg::Scalar(x) dfg::Scalar(x)
).serialize(&mut res).unwrap(); ).serialize(&mut res).unwrap();
res res
@ -68,8 +68,8 @@ pub(crate) fn read_dleq<Re: Read>(
serialized serialized
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify( ).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
&mut transcript(), &mut transcript(),
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
(xG, xH) &[xG, xH]
).map_err(|_| MultisigError::InvalidDLEqProof(l))?; ).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH) Ok(xH)

View file

@ -30,9 +30,10 @@ dalek-ff-group = { path = "../dalek-ff-group" }
transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] } transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] }
[features] [features]
serialize = [] std = []
experimental = ["multiexp"] serialize = ["std"]
experimental = ["std", "multiexp"]
secure_capacity_difference = [] secure_capacity_difference = []
# Only applies to cross_group, yet is default to ensure security # Only applies to experimental, yet is default to ensure security
default = ["secure_capacity_difference"] default = ["secure_capacity_difference"]

View file

@ -6,9 +6,8 @@ use group::{ff::{Field, PrimeFieldBits}, prime::PrimeGroup};
use multiexp::BatchVerifier; use multiexp::BatchVerifier;
use crate::{ use crate::cross_group::{
Generators, Generators, DLEqError, scalar::{scalar_convert, mutual_scalar_from_bytes}
cross_group::{DLEqError, scalar::{scalar_convert, mutual_scalar_from_bytes}}
}; };
#[cfg(feature = "serialize")] #[cfg(feature = "serialize")]

View file

@ -5,7 +5,7 @@ use transcript::Transcript;
use group::{ff::PrimeFieldBits, prime::PrimeGroup}; use group::{ff::PrimeFieldBits, prime::PrimeGroup};
use multiexp::BatchVerifier; use multiexp::BatchVerifier;
use crate::{Generators, cross_group::{DLEqError, aos::{Re, Aos}}}; use crate::cross_group::{Generators, DLEqError, aos::{Re, Aos}};
#[cfg(feature = "serialize")] #[cfg(feature = "serialize")]
use std::io::{Read, Write}; use std::io::{Read, Write};

View file

@ -8,8 +8,6 @@ use transcript::Transcript;
use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup}; use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
use multiexp::BatchVerifier; use multiexp::BatchVerifier;
use crate::Generators;
pub mod scalar; pub mod scalar;
use scalar::{scalar_convert, mutual_scalar_from_bytes}; use scalar::{scalar_convert, mutual_scalar_from_bytes};
@ -35,6 +33,24 @@ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G
Ok(point.unwrap()) Ok(point.unwrap())
} }
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Generators<G: PrimeGroup> {
pub primary: G,
pub alt: G
}
impl<G: PrimeGroup> Generators<G> {
pub fn new(primary: G, alt: G) -> Generators<G> {
Generators { primary, alt }
}
fn transcript<T: Transcript>(&self, transcript: &mut T) {
transcript.domain_separate(b"generators");
transcript.append_message(b"primary", self.primary.to_bytes().as_ref());
transcript.append_message(b"alternate", self.alt.to_bytes().as_ref());
}
}
#[derive(Error, PartialEq, Eq, Debug)] #[derive(Error, PartialEq, Eq, Debug)]
pub enum DLEqError { pub enum DLEqError {
#[error("invalid proof of knowledge")] #[error("invalid proof of knowledge")]

View file

@ -1,4 +1,5 @@
use thiserror::Error; #![cfg_attr(not(feature = "std"), no_std)]
use rand_core::{RngCore, CryptoRng}; use rand_core::{RngCore, CryptoRng};
use transcript::Transcript; use transcript::Transcript;
@ -15,24 +16,6 @@ pub mod cross_group;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Generators<G: PrimeGroup> {
primary: G,
alt: G
}
impl<G: PrimeGroup> Generators<G> {
pub fn new(primary: G, alt: G) -> Generators<G> {
Generators { primary, alt }
}
fn transcript<T: Transcript>(&self, transcript: &mut T) {
transcript.domain_separate(b"generators");
transcript.append_message(b"primary", self.primary.to_bytes().as_ref());
transcript.append_message(b"alternate", self.alt.to_bytes().as_ref());
}
}
pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F { pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F {
// From here, there are three ways to get a scalar under the ff/group API // From here, there are three ways to get a scalar under the ff/group API
// 1: Scalar::random(ChaCha12Rng::from_seed(self.transcript.rng_seed(b"challenge"))) // 1: Scalar::random(ChaCha12Rng::from_seed(self.transcript.rng_seed(b"challenge")))
@ -70,9 +53,8 @@ fn read_scalar<R: Read, F: PrimeField>(r: &mut R) -> io::Result<F> {
Ok(scalar.unwrap()) Ok(scalar.unwrap())
} }
#[derive(Error, Debug)] #[derive(Debug)]
pub enum DLEqError { pub enum DLEqError {
#[error("invalid proof")]
InvalidProof InvalidProof
} }
@ -84,34 +66,26 @@ pub struct DLEqProof<G: PrimeGroup> {
#[allow(non_snake_case)] #[allow(non_snake_case)]
impl<G: PrimeGroup> DLEqProof<G> { impl<G: PrimeGroup> DLEqProof<G> {
fn challenge<T: Transcript>( fn transcript<T: Transcript>(transcript: &mut T, generator: G, nonce: G, point: G) {
transcript: &mut T, transcript.append_message(b"generator", generator.to_bytes().as_ref());
generators: Generators<G>, transcript.append_message(b"nonce", nonce.to_bytes().as_ref());
nonces: (G, G), transcript.append_message(b"point", point.to_bytes().as_ref());
points: (G, G)
) -> G::Scalar {
generators.transcript(transcript);
transcript.domain_separate(b"dleq");
transcript.append_message(b"nonce_primary", nonces.0.to_bytes().as_ref());
transcript.append_message(b"nonce_alternate", nonces.1.to_bytes().as_ref());
transcript.append_message(b"point_primary", points.0.to_bytes().as_ref());
transcript.append_message(b"point_alternate", points.1.to_bytes().as_ref());
challenge(transcript)
} }
pub fn prove<R: RngCore + CryptoRng, T: Transcript>( pub fn prove<R: RngCore + CryptoRng, T: Transcript>(
rng: &mut R, rng: &mut R,
transcript: &mut T, transcript: &mut T,
generators: Generators<G>, generators: &[G],
scalar: G::Scalar scalar: G::Scalar
) -> DLEqProof<G> { ) -> DLEqProof<G> {
let r = G::Scalar::random(rng); let r = G::Scalar::random(rng);
let c = Self::challenge(
transcript, transcript.domain_separate(b"dleq");
generators, for generator in generators {
(generators.primary * r, generators.alt * r), Self::transcript(transcript, *generator, *generator * r, *generator * scalar);
(generators.primary * scalar, generators.alt * scalar) }
);
let c = challenge(transcript);
let s = r + (c * scalar); let s = r + (c * scalar);
DLEqProof { c, s } DLEqProof { c, s }
@ -120,18 +94,19 @@ impl<G: PrimeGroup> DLEqProof<G> {
pub fn verify<T: Transcript>( pub fn verify<T: Transcript>(
&self, &self,
transcript: &mut T, transcript: &mut T,
generators: Generators<G>, generators: &[G],
points: (G, G) points: &[G]
) -> Result<(), DLEqError> { ) -> Result<(), DLEqError> {
if self.c != Self::challenge( if generators.len() != points.len() {
transcript, Err(DLEqError::InvalidProof)?;
generators, }
(
(generators.primary * self.s) - (points.0 * self.c), transcript.domain_separate(b"dleq");
(generators.alt * self.s) - (points.1 * self.c) for (generator, point) in generators.iter().zip(points) {
), Self::transcript(transcript, *generator, (*generator * self.s) - (*point * self.c), *point);
points }
) {
if self.c != challenge(transcript) {
Err(DLEqError::InvalidProof)?; Err(DLEqError::InvalidProof)?;
} }

View file

@ -12,10 +12,9 @@ use dalek_ff_group::{self as dfg, EdwardsPoint};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use crate::{ use crate::{
Generators,
cross_group::{ cross_group::{
scalar::mutual_scalar_from_bytes, scalar::mutual_scalar_from_bytes,
ClassicLinearDLEq, EfficientLinearDLEq, ConciseLinearDLEq, CompromiseLinearDLEq Generators, ClassicLinearDLEq, EfficientLinearDLEq, ConciseLinearDLEq, CompromiseLinearDLEq
} }
}; };

View file

@ -11,24 +11,38 @@ use k256::{Scalar, ProjectivePoint};
use transcript::{Transcript, RecommendedTranscript}; use transcript::{Transcript, RecommendedTranscript};
use crate::{Generators, DLEqProof}; use crate::DLEqProof;
#[test] #[test]
fn test_dleq() { fn test_dleq() {
let transcript = || RecommendedTranscript::new(b"DLEq Proof Test"); let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
let generators = Generators::new( let generators = [
ProjectivePoint::GENERATOR, ProjectivePoint::GENERATOR,
ProjectivePoint::from_bytes( ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()) &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
).unwrap(),
// Just an increment of the last byte from the previous, where the previous two are valid
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac4").into())
).unwrap(),
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803aca").into())
).unwrap(),
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into())
).unwrap() ).unwrap()
); ];
for i in 0 .. 5 {
let key = Scalar::random(&mut OsRng); let key = Scalar::random(&mut OsRng);
let proof = DLEqProof::prove(&mut OsRng, &mut transcript(), generators, key); let proof = DLEqProof::prove(&mut OsRng, &mut transcript(), &generators[.. i], key);
let keys = (generators.primary * key, generators.alt * key); let mut keys = [ProjectivePoint::GENERATOR; 5];
proof.verify(&mut transcript(), generators, keys).unwrap(); for k in 0 .. 5 {
keys[k] = generators[k] * key;
}
proof.verify(&mut transcript(), &generators[.. i], &keys[.. i]).unwrap();
#[cfg(feature = "serialize")] #[cfg(feature = "serialize")]
{ {
@ -38,6 +52,6 @@ fn test_dleq() {
&mut std::io::Cursor::new(&buf) &mut std::io::Cursor::new(&buf)
).unwrap(); ).unwrap();
assert_eq!(proof, deserialized); assert_eq!(proof, deserialized);
deserialized.verify(&mut transcript(), generators, keys).unwrap(); }
} }
} }

View file

@ -8,7 +8,7 @@ use transcript::Transcript;
use group::{ff::{Field, PrimeField}, Group, GroupEncoding}; use group::{ff::{Field, PrimeField}, Group, GroupEncoding};
use multiexp::multiexp_vartime; use multiexp::multiexp_vartime;
use dleq::{Generators, DLEqProof}; use dleq::DLEqProof;
use crate::{ use crate::{
curve::Curve, curve::Curve,
@ -88,8 +88,8 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
params: &mut Params<C, A>, params: &mut Params<C, A>,
) -> (PreprocessPackage<C>, Vec<u8>) { ) -> (PreprocessPackage<C>, Vec<u8>) {
let mut serialized = Vec::with_capacity(2 * C::G_len()); let mut serialized = Vec::with_capacity(2 * C::G_len());
let (nonces, commitments) = params.algorithm.nonces().iter().cloned().map( let (nonces, commitments) = params.algorithm.nonces().iter().map(
|mut generators| { |generators| {
let nonces = [ let nonces = [
C::random_nonce(params.view().secret_share(), &mut *rng), C::random_nonce(params.view().secret_share(), &mut *rng),
C::random_nonce(params.view().secret_share(), &mut *rng) C::random_nonce(params.view().secret_share(), &mut *rng)
@ -103,21 +103,23 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
}; };
let mut commitments = Vec::with_capacity(generators.len()); let mut commitments = Vec::with_capacity(generators.len());
let first = generators.remove(0);
commitments.push(commit(first, &mut serialized));
// Iterate over the rest
for generator in generators.iter() { for generator in generators.iter() {
commitments.push(commit(*generator, &mut serialized)); commitments.push(commit(*generator, &mut serialized));
// Provide a DLEq to verify these commitments are for the same nonce }
// TODO: Provide a single DLEq. See https://github.com/serai-dex/serai/issues/34
// Provide a DLEq proof to verify these commitments are for the same nonce
if generators.len() >= 2 {
// Uses an independent transcript as each signer must do this now, yet we validate them
// sequentially by the global order. Avoids needing to clone and fork the transcript around
let mut transcript = nonce_transcript::<A::Transcript>();
// This could be further optimized with a multi-nonce proof.
// See https://github.com/serai-dex/serai/issues/38
for nonce in nonces { for nonce in nonces {
DLEqProof::prove( DLEqProof::prove(
&mut *rng, &mut *rng,
// Uses an independent transcript as each signer must do this now, yet we validate them &mut transcript,
// sequentially by the global order. Avoids needing to clone the transcript around &generators,
&mut nonce_transcript::<A::Transcript>(),
Generators::new(first, *generator),
nonce nonce
).serialize(&mut serialized).unwrap(); ).serialize(&mut serialized).unwrap();
} }
@ -203,21 +205,20 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
let mut commitments = Vec::with_capacity(nonces.len()); let mut commitments = Vec::with_capacity(nonces.len());
for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() { for (n, nonce_generators) in nonces.clone().iter_mut().enumerate() {
commitments.push(Vec::with_capacity(nonce_generators.len())); commitments.push(Vec::with_capacity(nonce_generators.len()));
for _ in 0 .. nonce_generators.len() {
let first = nonce_generators.remove(0);
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
transcript(params.algorithm.transcript(), commitments[n][0]);
for generator in nonce_generators {
commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?); commitments[n].push(read_D_E::<_, C>(&mut cursor, *l)?);
transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1]); transcript(params.algorithm.transcript(), commitments[n][commitments[n].len() - 1]);
}
if nonce_generators.len() >= 2 {
let mut transcript = nonce_transcript::<A::Transcript>();
for de in 0 .. 2 { for de in 0 .. 2 {
DLEqProof::deserialize( DLEqProof::deserialize(
&mut cursor &mut cursor
).map_err(|_| FrostError::InvalidCommitment(*l))?.verify( ).map_err(|_| FrostError::InvalidCommitment(*l))?.verify(
&mut nonce_transcript::<A::Transcript>(), &mut transcript,
Generators::new(first, *generator), &nonce_generators,
(commitments[n][0][de], commitments[n][commitments[n].len() - 1][de]) &commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
).map_err(|_| FrostError::InvalidCommitment(*l))?; ).map_err(|_| FrostError::InvalidCommitment(*l))?;
} }
} }