Mirror of https://github.com/serai-dex/serai.git, synced 2024-12-23 03:59:22 +00:00
Offer a multi-DLEq proof which simply merges challenges for n underlying proofs
This converts proofs from 2n elements to 1+n, and moves FROST over to it. Additionally, for FROST's binomial nonces, it provides a single DLEq proof (2 elements, not 1+2) by proving the discrete log equality of their aggregate (formed with an appropriate binding factor). This may be split back up depending on later commentary...
parent 49c4acffbb
commit eeca440fa7
6 changed files with 291 additions and 86 deletions
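To make the element counts in the commit message concrete, here is an editorial sketch of the merged proof in informal notation (the symbols below are mine, not identifiers from the codebase). For discrete logs x_1, ..., x_n, each with its own set of generators, the prover transcripts one nonce commitment per generator, derives a single shared challenge, and answers with one response per discrete log:

$$
\begin{aligned}
R_{i,j} &= r_i G_{i,j} && \text{(transcripted per generator, never included in the proof)} \\
c &= \mathcal{H}(\text{transcript}) \\
s_i &= r_i + c \cdot x_i \\
\pi &= (c, s_1, \dots, s_n) && 1 + n \text{ elements, versus } 2n \text{ for } n \text{ independent proofs}
\end{aligned}
$$

For FROST's binomial nonce (d, e), the aggregate d + be, with a binding factor b derived from the nonce's commitments, reduces that nonce's proof to a plain two-element DLEq, which is the "(2 elements, not 1+2)" noted above.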
@@ -36,7 +36,7 @@ type FrostError<C> = DkgError<EncryptionKeyProof<C>>;
 #[allow(non_snake_case)]
 fn challenge<C: Ciphersuite>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
   let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2");
-  transcript.domain_separate(b"Schnorr Proof of Knowledge");
+  transcript.domain_separate(b"schnorr_proof_of_knowledge");
   transcript.append_message(b"context", context.as_bytes());
   transcript.append_message(b"participant", l.to_le_bytes());
   transcript.append_message(b"nonce", R);
@@ -177,3 +177,115 @@ impl<G: PrimeGroup> DLEqProof<G> {
     res
   }
 }
+
+#[cfg(feature = "std")]
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
+pub struct MultiDLEqProof<G: PrimeGroup> {
+  c: G::Scalar,
+  s: Vec<G::Scalar>,
+}
+
+#[cfg(feature = "std")]
+#[allow(non_snake_case)]
+impl<G: PrimeGroup> MultiDLEqProof<G> {
+  pub fn prove<R: RngCore + CryptoRng, T: Transcript>(
+    rng: &mut R,
+    transcript: &mut T,
+    generators: &[Vec<G>],
+    scalars: &[Zeroizing<G::Scalar>],
+  ) -> MultiDLEqProof<G>
+  where
+    G::Scalar: Zeroize,
+  {
+    transcript.domain_separate(b"multi-dleq");
+
+    let mut nonces = vec![];
+    for (i, (scalar, generators)) in scalars.iter().zip(generators).enumerate() {
+      // Delineate between discrete logarithms
+      transcript.append_message(b"discrete_logarithm", i.to_le_bytes());
+
+      let nonce = Zeroizing::new(G::Scalar::random(&mut *rng));
+      for generator in generators {
+        DLEqProof::transcript(
+          transcript,
+          *generator,
+          *generator * nonce.deref(),
+          *generator * scalar.deref(),
+        );
+      }
+      nonces.push(nonce);
+    }
+
+    let c = challenge(transcript);
+
+    let mut s = vec![];
+    for (scalar, nonce) in scalars.iter().zip(nonces) {
+      s.push((c * scalar.deref()) + nonce.deref());
+    }
+
+    MultiDLEqProof { c, s }
+  }
+
+  pub fn verify<T: Transcript>(
+    &self,
+    transcript: &mut T,
+    generators: &[Vec<G>],
+    points: &[Vec<G>],
+  ) -> Result<(), DLEqError> {
+    if points.len() != generators.len() {
+      Err(DLEqError::InvalidProof)?;
+    }
+    if self.s.len() != generators.len() {
+      Err(DLEqError::InvalidProof)?;
+    }
+
+    transcript.domain_separate(b"multi-dleq");
+    for (i, (generators, points)) in generators.iter().zip(points).enumerate() {
+      if points.len() != generators.len() {
+        Err(DLEqError::InvalidProof)?;
+      }
+
+      transcript.append_message(b"discrete_logarithm", i.to_le_bytes());
+      for (generator, point) in generators.iter().zip(points) {
+        DLEqProof::transcript(
+          transcript,
+          *generator,
+          (*generator * self.s[i]) - (*point * self.c),
+          *point,
+        );
+      }
+    }
+
+    if self.c != challenge(transcript) {
+      Err(DLEqError::InvalidProof)?;
+    }
+
+    Ok(())
+  }
+
+  #[cfg(feature = "serialize")]
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    w.write_all(self.c.to_repr().as_ref())?;
+    for s in &self.s {
+      w.write_all(s.to_repr().as_ref())?;
+    }
+    Ok(())
+  }
+
+  #[cfg(feature = "serialize")]
+  pub fn read<R: Read>(r: &mut R, discrete_logs: usize) -> io::Result<MultiDLEqProof<G>> {
+    let c = read_scalar(r)?;
+    let mut s = vec![];
+    for _ in 0 .. discrete_logs {
+      s.push(read_scalar(r)?);
+    }
+    Ok(MultiDLEqProof { c, s })
+  }
+
+  #[cfg(feature = "serialize")]
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut res = vec![];
+    self.write(&mut res).unwrap();
+    res
+  }
+}
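For context on the `verify` path above (an editorial note, not part of the commit; same informal notation as the earlier sketch): the expression `(*generator * self.s[i]) - (*point * self.c)` reconstructs the prover's transcripted nonce commitment, so re-deriving the challenge over the rebuilt transcript must reproduce `c`. With A_{i,j} denoting the claimed points,

$$
s_i G_{i,j} - c A_{i,j} = (r_i + c x_i) G_{i,j} - c x_i G_{i,j} = r_i G_{i,j} = R_{i,j}
$$

and since every R_{i,j} feeds the one shared challenge, a single c covers all n discrete logarithms.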
@@ -13,16 +13,13 @@ use k256::{Scalar, ProjectivePoint};
 use transcript::{Transcript, RecommendedTranscript};
 
-use crate::DLEqProof;
+use crate::{DLEqProof, MultiDLEqProof};
 
 #[cfg(feature = "experimental")]
 mod cross_group;
 
-#[test]
-fn test_dleq() {
-  let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
-
-  let generators = [
+fn generators() -> [k256::ProjectivePoint; 5] {
+  [
     ProjectivePoint::GENERATOR,
     ProjectivePoint::from_bytes(
       &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()),
@@ -41,7 +38,13 @@ fn test_dleq() {
      &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into()),
    )
    .unwrap(),
-  ];
+  ]
+}
+
+#[test]
+fn test_dleq() {
+  let generators = generators();
+  let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
 
   for i in 0 .. 5 {
     let key = Zeroizing::new(Scalar::random(&mut OsRng));
@@ -61,6 +64,9 @@ fn test_dleq() {
      )
      .is_err());
 
+    // All of these following tests should effectively be a different challenge and accordingly
+    // pointless. They're still nice to have though
+
     // We could edit these tests to always test with at least two generators
     // Then we don't test proofs with zero/one generator(s)
     // While those are stupid, and pointless, and potentially point to a failure in the caller,
@@ -94,3 +100,53 @@ fn test_dleq() {
     }
   }
 }
+
+#[test]
+fn test_multi_dleq() {
+  let generators = generators();
+  let transcript = || RecommendedTranscript::new(b"MultiDLEq Proof Test");
+
+  // Test up to 3 keys
+  for k in 0 ..= 3 {
+    let mut keys = vec![];
+    let mut these_generators = vec![];
+    let mut pub_keys = vec![];
+    for i in 0 .. k {
+      let key = Zeroizing::new(Scalar::random(&mut OsRng));
+      // For each key, test a variable set of generators
+      // 0: 0
+      // 1: 1, 2
+      // 2: 2, 3, 4
+      let key_generators = generators[i .. (i + i + 1)].to_vec();
+      let mut these_pub_keys = vec![];
+      for generator in &key_generators {
+        these_pub_keys.push(generator * key.deref());
+      }
+      keys.push(key);
+      these_generators.push(key_generators);
+      pub_keys.push(these_pub_keys);
+    }
+
+    let proof = MultiDLEqProof::prove(&mut OsRng, &mut transcript(), &these_generators, &keys);
+
+    proof.verify(&mut transcript(), &these_generators, &pub_keys).unwrap();
+    // Different challenge
+    assert!(proof
+      .verify(&mut RecommendedTranscript::new(b"different challenge"), &these_generators, &pub_keys)
+      .is_err());
+
+    // Test verifying for a different amount of keys fail
+    if k > 0 {
+      assert!(proof.verify(&mut transcript(), &these_generators, &pub_keys[.. k - 1]).is_err());
+    }
+
+    #[cfg(feature = "serialize")]
+    {
+      let mut buf = vec![];
+      proof.write(&mut buf).unwrap();
+      let deserialized =
+        MultiDLEqProof::<ProjectivePoint>::read::<&[u8]>(&mut buf.as_ref(), k).unwrap();
+      assert_eq!(proof, deserialized);
+    }
+  }
+}
@@ -23,10 +23,17 @@ use transcript::Transcript;
 use group::{ff::PrimeField, Group, GroupEncoding};
 use multiexp::multiexp_vartime;
 
-use dleq::DLEqProof;
+use dleq::MultiDLEqProof;
 
 use crate::curve::Curve;
 
+// Transcript used to aggregate binomial nonces for usage within a single DLEq proof.
+fn aggregation_transcript<T: Transcript>(context: &[u8]) -> T {
+  let mut transcript = T::new(b"FROST DLEq Aggregation v0.5");
+  transcript.append_message(b"context", context);
+  transcript
+}
+
 // Every participant proves for their commitments at the start of the protocol
 // These proofs are verified sequentially, requiring independent transcripts
 // In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is
@@ -37,7 +44,7 @@ use crate::curve::Curve;
 // constructed). For higher level protocols, the transcript may have contextual info these proofs
 // will then be bound to
 fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
-  let mut transcript = T::new(b"FROST_commitments");
+  let mut transcript = T::new(b"FROST Commitments DLEq v0.5");
   transcript.append_message(b"context", context);
   transcript
 }
@@ -47,7 +54,7 @@ fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
 #[derive(Clone, Zeroize)]
 pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);
 
-// Commitments to a specific generator for this nonce
+// Commitments to a specific generator for this binomial nonce
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
 impl<C: Curve> GeneratorCommitments<C> {
@@ -64,13 +71,8 @@ impl<C: Curve> GeneratorCommitments<C> {
 // A single nonce's commitments and relevant proofs
 #[derive(Clone, PartialEq, Eq)]
 pub(crate) struct NonceCommitments<C: Curve> {
-  // Called generators as these commitments are indexed by generator
+  // Called generators as these commitments are indexed by generator later on
   pub(crate) generators: Vec<GeneratorCommitments<C>>,
-  // DLEq Proofs proving that these commitments are generated using the same scalar pair
-  // This could be further optimized with a multi-nonce proof, offering just one proof for all
-  // nonces. See https://github.com/serai-dex/serai/issues/38
-  // TODO
-  pub(crate) dleqs: Option<[DLEqProof<C::G>; 2]>,
 }
 
 impl<C: Curve> NonceCommitments<C> {
@@ -78,7 +80,6 @@ impl<C: Curve> NonceCommitments<C> {
     rng: &mut R,
     secret_share: &Zeroizing<C::F>,
     generators: &[C::G],
-    context: &[u8],
   ) -> (Nonce<C>, NonceCommitments<C>) {
     let nonce = Nonce::<C>([
      C::random_nonce(secret_share, &mut *rng),
@@ -93,64 +94,49 @@ impl<C: Curve> NonceCommitments<C> {
      ]));
    }
 
-    let mut dleqs = None;
-    if generators.len() >= 2 {
-      let mut dleq = |nonce| {
-        // Uses an independent transcript as each signer must prove this with their commitments,
-        // yet they're validated while processing everyone's data sequentially, by the global order
-        // This avoids needing to clone and fork the transcript around
-        DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(context), generators, nonce)
-      };
-      dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]);
-    }
-
-    (nonce, NonceCommitments { generators: commitments, dleqs })
+    (nonce, NonceCommitments { generators: commitments })
   }
 
   fn read<R: Read, T: Transcript>(
     reader: &mut R,
     generators: &[C::G],
-    context: &[u8],
   ) -> io::Result<NonceCommitments<C>> {
-    let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
-      .map(|_| GeneratorCommitments::read(reader))
-      .collect::<Result<_, _>>()?;
-
-    let mut dleqs = None;
-    if generators.len() >= 2 {
-      let mut verify = |i| -> io::Result<_> {
-        let dleq = DLEqProof::read(reader)?;
-        dleq
-          .verify(
-            &mut dleq_transcript::<T>(context),
-            generators,
-            &commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
-          )
-          .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
-        Ok(dleq)
-      };
-      dleqs = Some([verify(0)?, verify(1)?]);
-    }
-
-    Ok(NonceCommitments { generators: commitments, dleqs })
+    Ok(NonceCommitments {
+      generators: (0 .. generators.len())
+        .map(|_| GeneratorCommitments::read(reader))
+        .collect::<Result<_, _>>()?,
+    })
   }
 
   fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
     for generator in &self.generators {
       generator.write(writer)?;
     }
-    if let Some(dleqs) = &self.dleqs {
-      dleqs[0].write(writer)?;
-      dleqs[1].write(writer)?;
-    }
     Ok(())
   }
+
+  fn transcript<T: Transcript>(&self, t: &mut T) {
+    t.domain_separate(b"nonce");
+    for commitments in &self.generators {
+      t.append_message(b"commitment_D", commitments.0[0].to_bytes());
+      t.append_message(b"commitment_E", commitments.0[1].to_bytes());
+    }
+  }
+
+  fn aggregation_factor<T: Transcript>(&self, context: &[u8]) -> C::F {
+    let mut transcript = aggregation_transcript::<T>(context);
+    self.transcript(&mut transcript);
+    <C as Curve>::hash_to_F(b"dleq_aggregation", transcript.challenge(b"binding").as_ref())
+  }
 }
 
 #[derive(Clone, PartialEq, Eq)]
 pub(crate) struct Commitments<C: Curve> {
   // Called nonces as these commitments are indexed by nonce
   pub(crate) nonces: Vec<NonceCommitments<C>>,
+  // DLEq Proof proving that each set of commitments were generated using a single pair of discrete
+  // logarithms
+  pub(crate) dleq: Option<MultiDLEqProof<C::G>>,
 }
 
 impl<C: Curve> Commitments<C> {
@@ -162,53 +148,96 @@ impl<C: Curve> Commitments<C> {
  ) -> (Vec<Nonce<C>>, Commitments<C>) {
    let mut nonces = vec![];
    let mut commitments = vec![];
 
+    let mut dleq_generators = vec![];
+    let mut dleq_nonces = vec![];
    for generators in planned_nonces {
-      let (nonce, these_commitments) =
-        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators, context);
+      let (nonce, these_commitments): (Nonce<C>, _) =
+        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators);
+
+      if generators.len() > 1 {
+        dleq_generators.push(generators.clone());
+        dleq_nonces.push(Zeroizing::new(
+          (these_commitments.aggregation_factor::<T>(context) * nonce.0[1].deref()) +
+            nonce.0[0].deref(),
+        ));
+      }
+
      nonces.push(nonce);
      commitments.push(these_commitments);
    }
-    (nonces, Commitments { nonces: commitments })
+
+    let dleq = if !dleq_generators.is_empty() {
+      Some(MultiDLEqProof::prove(
+        rng,
+        &mut dleq_transcript::<T>(context),
+        &dleq_generators,
+        &dleq_nonces,
+      ))
+    } else {
+      None
+    };
+
+    (nonces, Commitments { nonces: commitments, dleq })
  }
 
  pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
+    t.domain_separate(b"commitments");
    for nonce in &self.nonces {
-      for commitments in &nonce.generators {
-        t.append_message(b"commitment_D", commitments.0[0].to_bytes());
-        t.append_message(b"commitment_E", commitments.0[1].to_bytes());
-      }
+      nonce.transcript(t);
+    }
 
-      // Transcripting the DLEqs implicitly transcripts the exact generators used for this nonce
-      // This means it shouldn't be possible for variadic generators to cause conflicts as they're
-      // committed to as their entire series per-nonce, not as isolates
-      if let Some(dleqs) = &nonce.dleqs {
-        let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
-          let mut buf = vec![];
-          dleq.write(&mut buf).unwrap();
-          t.append_message(label, &buf);
-        };
-        transcript_dleq(b"dleq_D", &dleqs[0]);
-        transcript_dleq(b"dleq_E", &dleqs[1]);
-      }
+    // Transcripting the DLEqs implicitly transcripts the exact generators used for the nonces in
+    // an exact order
+    // This means it shouldn't be possible for variadic generators to cause conflicts
+    if let Some(dleq) = &self.dleq {
+      t.append_message(b"dleq", dleq.serialize());
    }
  }
 
  pub(crate) fn read<R: Read, T: Transcript>(
    reader: &mut R,
-    nonces: &[Vec<C::G>],
+    generators: &[Vec<C::G>],
    context: &[u8],
  ) -> io::Result<Self> {
-    Ok(Commitments {
-      nonces: (0 .. nonces.len())
-        .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i], context))
-        .collect::<Result<_, _>>()?,
-    })
+    let nonces = (0 .. generators.len())
+      .map(|i| NonceCommitments::read::<_, T>(reader, &generators[i]))
+      .collect::<Result<Vec<NonceCommitments<C>>, _>>()?;
+
+    let mut dleq_generators = vec![];
+    let mut dleq_nonces = vec![];
+    for (generators, nonce) in generators.iter().cloned().zip(&nonces) {
+      if generators.len() > 1 {
+        let binding = nonce.aggregation_factor::<T>(context);
+        let mut aggregated = vec![];
+        for commitments in &nonce.generators {
+          aggregated.push(commitments.0[0] + (commitments.0[1] * binding));
+        }
+        dleq_generators.push(generators);
+        dleq_nonces.push(aggregated);
+      }
+    }
+
+    let dleq = if !dleq_generators.is_empty() {
+      let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?;
+      dleq
+        .verify(&mut dleq_transcript::<T>(context), &dleq_generators, &dleq_nonces)
+        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
+      Some(dleq)
+    } else {
+      None
+    };
+
+    Ok(Commitments { nonces, dleq })
  }
 
  pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    for nonce in &self.nonces {
      nonce.write(writer)?;
    }
+    if let Some(dleq) = &self.dleq {
+      dleq.write(writer)?;
+    }
    Ok(())
  }
 }
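To spell out the aggregation performed by `aggregation_factor` and `Commitments::read` above (an editorial sketch in informal notation, not code identifiers): with binomial nonce components d and e, per-generator commitments D_j = d·G_j and E_j = e·G_j, and a binding factor hashed from the context and those commitments, the single statement proven per multi-generator nonce is

$$
b = \mathcal{H}(\text{context}, D_1, E_1, \dots), \qquad (d + b e)\, G_j = D_j + b E_j \quad \text{for all } j.
$$

The prover-side scalar `b * e + d` and the verifier-side points `D_j + b * E_j` (as rebuilt in `read`) are the two sides of this equation, so each such nonce contributes exactly one entry to the MultiDLEq proof.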
@@ -182,8 +182,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
         commitments: Commitments {
           nonces: vec![NonceCommitments {
             generators: vec![GeneratorCommitments(these_commitments)],
-            dleqs: None,
           }],
+          dleq: None,
         },
         addendum: (),
       },
@@ -18,9 +18,12 @@ multiple generators, FROST supports providing a nonce's commitments across
 multiple generators. In order to ensure their correctness, an extended
 [CP93's Discrete Log Equality Proof](https://chaum.com/wp-content/uploads/2021/12/Wallet_Databases.pdf)
 is used. The extension is simply to transcript `n` generators, instead of just
-two, enabling proving for all of them at once. Since FROST nonces are binomial,
-two DLEq proofs are provided, one for each nonce component. In the future, a
-modified proof proving for both components simultaneously may be used.
+two, enabling proving for all of them at once.
+
+Since FROST nonces are binomial, every nonce would require two DLEq proofs. To
+make this more efficient, we hash their commitments to obtain a binding factor,
+before doing a single DLEq proof for `d + be`, similar to how FROST calculates
+its nonces (as well as MuSig's key aggregation).
 
 As some algorithms require multiple nonces, effectively including multiple
 Schnorr signatures within one signature, the library also supports providing
@@ -29,12 +32,17 @@ multiplied by a per-participant binding factor to ensure the security of FROST.
 When additional nonces are used, this is actually a per-nonce per-participant
 binding factor.
 
+When multiple nonces are used, with multiple generators, we use a single DLEq
+proof for all nonces, merging their challenges. This provides a proof of `1 + n`
+elements instead of `2n`.
+
 Finally, to support additive offset signing schemes (accounts, stealth
 addresses, randomization), it's possible to specify a scalar offset for keys.
 The public key signed for is also offset by this value. During the signing
 process, the offset is explicitly transcripted. Then, the offset is divided by
 `p`, the amount of participating signers, and each signer adds it to their
-post-interpolation key share.
+post-interpolation key share. This maintains a leaderless protocol while still
+being correct.
 
 # Caching
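As a usage illustration (an editorial addition, not part of the commit): a minimal, self-contained sketch modeled on the k256 test added above. The generator choice and transcript label are illustrative only.

use core::ops::Deref;

use rand_core::OsRng;
use zeroize::Zeroizing;

use group::ff::Field;
use k256::{Scalar, ProjectivePoint};

use transcript::RecommendedTranscript;
use dleq::MultiDLEqProof;

fn main() {
  // Two discrete logs, each proven over its own set of generators.
  // G and 2G are stand-ins; any generators of interest work.
  let generators = vec![
    vec![ProjectivePoint::GENERATOR],
    vec![ProjectivePoint::GENERATOR, ProjectivePoint::GENERATOR * Scalar::from(2u64)],
  ];

  let scalars =
    vec![Zeroizing::new(Scalar::random(&mut OsRng)), Zeroizing::new(Scalar::random(&mut OsRng))];

  // The points whose discrete logs, with respect to each generator, are being proven equal.
  let points = generators
    .iter()
    .zip(&scalars)
    .map(|(gens, scalar)| gens.iter().map(|g| g * scalar.deref()).collect::<Vec<_>>())
    .collect::<Vec<_>>();

  let transcript = || RecommendedTranscript::new(b"MultiDLEq usage sketch");

  // One challenge is shared across both statements, so the proof is 1 + 2 scalars.
  let proof = MultiDLEqProof::prove(&mut OsRng, &mut transcript(), &generators, &scalars);
  proof.verify(&mut transcript(), &generators, &points).unwrap();
}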