Use a challenge from the FROST transcript as context in the DLEq proofs

parent ace7506172
commit 783a445a3e

3 changed files with 46 additions and 11 deletions
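The commit replaces a fixed, context-free transcript for the nonce DLEq proofs with one seeded by a challenge taken from the main FROST transcript at preprocess time; the hunks below touch Cargo.lock, the nonce-commitment code (NonceCommitments, Commitments), and the signing machine (AlgorithmMachine, AlgorithmSignMachine). A minimal sketch of the fork-by-challenge pattern follows, using a hypothetical SimpleTranscript built on std's DefaultHasher purely for illustration (the real code uses the transcript crate's Transcript trait and a cryptographic hash):

// Hypothetical, minimal transcript used only to illustrate the pattern; not
// the transcript crate's type and not cryptographically sound.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct SimpleTranscript(Vec<u8>);

impl SimpleTranscript {
  fn new(label: &[u8]) -> Self {
    SimpleTranscript(label.to_vec())
  }

  fn append_message(&mut self, label: &[u8], message: &[u8]) {
    self.0.extend_from_slice(label);
    self.0.extend_from_slice(message);
  }

  fn challenge(&mut self, label: &[u8]) -> u64 {
    self.0.extend_from_slice(label);
    let mut h = DefaultHasher::new();
    self.0.hash(&mut h);
    h.finish()
  }
}

fn main() {
  // The main protocol transcript; a higher-level protocol may already have
  // written contextual data into it before preprocessing
  let mut main_transcript = SimpleTranscript::new(b"FROST");
  main_transcript.append_message(b"context", b"higher-level protocol data");

  // Challenge the main transcript once, at preprocess time...
  let commitments_challenge = main_transcript.challenge(b"commitments");

  // ...and seed each independent DLEq transcript with that challenge,
  // effectively forking the main transcript without cloning it per proof
  let mut dleq_transcript = SimpleTranscript::new(b"FROST_commitments");
  dleq_transcript.append_message(b"context", &commitments_challenge.to_le_bytes());
  println!("DLEq challenge bound to the main transcript: {:x}", dleq_transcript.challenge(b"dleq"));
}

Because the challenge commits to everything already written into the main transcript, any contextual data a higher-level protocol appended beforehand is carried into every independent DLEq transcript without having to clone the main transcript for each proof.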
Cargo.lock (generated): 2 lines changed

@@ -4631,8 +4631,8 @@ dependencies = [
  "hkdf",
  "minimal-ed448",
  "multiexp",
  "rand 0.8.5",
  "rand_chacha 0.3.1",
  "rand_core 0.6.4",
  "schnorr-signatures",
  "serde_json",
  "subtle",
@@ -27,8 +27,19 @@ use dleq::DLEqProof;
 
 use crate::curve::Curve;
 
-fn dleq_transcript<T: Transcript>() -> T {
-  T::new(b"FROST_nonce_dleq")
+// Every participant proves for their commitments at the start of the protocol
+// These proofs are verified sequentially, requiring independent transcripts
+// In order to make these transcripts more robust, the FROST transcript (at time of preprocess) is
+// challenged in order to create a commitment to it, carried in each independent transcript
+// (effectively forking the original transcript)
+//
+// For FROST, as defined by the IETF, this will do nothing (and this transcript will never even be
+// constructed). For higher level protocols, the transcript may have contextual info these proofs
+// will then be bound to
+fn dleq_transcript<T: Transcript>(context: &[u8]) -> T {
+  let mut transcript = T::new(b"FROST_commitments");
+  transcript.append_message(b"context", context);
+  transcript
 }
 
 // Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
@@ -67,6 +78,7 @@ impl<C: Curve> NonceCommitments<C> {
     rng: &mut R,
     secret_share: &Zeroizing<C::F>,
     generators: &[C::G],
+    context: &[u8],
   ) -> (Nonce<C>, NonceCommitments<C>) {
     let nonce = Nonce::<C>([
       C::random_nonce(secret_share, &mut *rng),
@@ -87,8 +99,7 @@ impl<C: Curve> NonceCommitments<C> {
         // Uses an independent transcript as each signer must prove this with their commitments,
         // yet they're validated while processing everyone's data sequentially, by the global order
         // This avoids needing to clone and fork the transcript around
-        // TODO: At least include a challenge from the existing transcript
-        DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(), generators, nonce)
+        DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(context), generators, nonce)
       };
       dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]);
     }
@@ -99,6 +110,7 @@ impl<C: Curve> NonceCommitments<C> {
   fn read<R: Read, T: Transcript>(
     reader: &mut R,
     generators: &[C::G],
+    context: &[u8],
   ) -> io::Result<NonceCommitments<C>> {
     let commitments: Vec<GeneratorCommitments<C>> = (0 .. generators.len())
       .map(|_| GeneratorCommitments::read(reader))
@@ -110,7 +122,7 @@ impl<C: Curve> NonceCommitments<C> {
       let dleq = DLEqProof::deserialize(reader)?;
       dleq
         .verify(
-          &mut dleq_transcript::<T>(),
+          &mut dleq_transcript::<T>(context),
           generators,
           &commitments.iter().map(|commitments| commitments.0[i]).collect::<Vec<_>>(),
         )
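To illustrate why prove and verify must both derive their transcript from the same context, here is a toy Chaum-Pedersen style discrete-log-equality proof over a tiny multiplicative group, with the challenge bound to a caller-supplied context plus the proof's nonce commitments. The group parameters, std hashing, and fixed proof nonce are purely illustrative; this is not the dleq crate's implementation:

// Toy DLEq proof: shows that a challenge derived from a context value binds
// the proof to that context. Not secure; for illustration only.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

const P: u64 = 2039; // toy group modulus (a safe prime)
const Q: u64 = 1019; // prime order of the subgroup generated by G1 and G2
const G1: u64 = 4;
const G2: u64 = 9;

fn pow(base: u64, exp: u64) -> u64 {
  let (mut b, mut e, mut r) = (base % P, exp, 1u64);
  while e > 0 {
    if e & 1 == 1 {
      r = r * b % P;
    }
    b = b * b % P;
    e >>= 1;
  }
  r
}

// The challenge commits to the caller-supplied context and the nonce commitments
fn challenge(context: &[u8], r1: u64, r2: u64) -> u64 {
  let mut h = DefaultHasher::new();
  context.hash(&mut h);
  r1.hash(&mut h);
  r2.hash(&mut h);
  h.finish() % Q
}

// Prove that a1 = G1^x and a2 = G2^x share the same exponent x
fn prove(context: &[u8], x: u64, k: u64) -> ((u64, u64), (u64, u64, u64)) {
  let (a1, a2) = (pow(G1, x), pow(G2, x));
  let (r1, r2) = (pow(G1, k), pow(G2, k));
  let c = challenge(context, r1, r2);
  let s = (k + c * x) % Q;
  ((a1, a2), (r1, r2, s))
}

fn verify(context: &[u8], (a1, a2): (u64, u64), (r1, r2, s): (u64, u64, u64)) -> bool {
  let c = challenge(context, r1, r2);
  (pow(G1, s) == r1 * pow(a1, c) % P) && (pow(G2, s) == r2 * pow(a2, c) % P)
}

fn main() {
  let x = 123; // the shared exponent (a nonce's scalar, in FROST terms)
  let k = 456; // fixed proof nonce for the toy; real code draws this from a CSPRNG
  let context = b"challenge from the main transcript";

  let (commitments, proof) = prove(context, x, k);
  assert!(verify(context, commitments, proof));

  // A verifier whose context differs (a diverged main transcript) derives a
  // different challenge, so the same proof is rejected
  println!("verifies under a different context: {}", verify(b"diverged", commitments, proof));
}

If a verifier's main transcript diverged, the challenge it derives differs and verification fails (barring a challenge collision in this toy group), which is exactly the binding the commit adds to the real proofs.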
@@ -146,12 +158,13 @@ impl<C: Curve> Commitments<C> {
     rng: &mut R,
     secret_share: &Zeroizing<C::F>,
     planned_nonces: &[Vec<C::G>],
+    context: &[u8],
   ) -> (Vec<Nonce<C>>, Commitments<C>) {
     let mut nonces = vec![];
     let mut commitments = vec![];
     for generators in planned_nonces {
       let (nonce, these_commitments) =
-        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators);
+        NonceCommitments::new::<_, T>(&mut *rng, secret_share, generators, context);
       nonces.push(nonce);
       commitments.push(these_commitments);
     }
@@ -183,10 +196,11 @@ impl<C: Curve> Commitments<C> {
   pub(crate) fn read<R: Read, T: Transcript>(
     reader: &mut R,
     nonces: &[Vec<C::G>],
+    context: &[u8],
   ) -> io::Result<Self> {
     Ok(Commitments {
       nonces: (0 .. nonces.len())
-        .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i]))
+        .map(|i| NonceCommitments::read::<_, T>(reader, &nonces[i], context))
         .collect::<Result<_, _>>()?,
     })
   }
@@ -115,10 +115,13 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
     let mut params = self.params;
 
     let mut rng = ChaCha20Rng::from_seed(seed.0);
+    // Get a challenge to the existing transcript for use when proving for the commitments
+    let commitments_challenge = params.algorithm.transcript().challenge(b"commitments");
     let (nonces, commitments) = Commitments::new::<_, A::Transcript>(
       &mut rng,
       params.keys.secret_share(),
       &params.algorithm.nonces(),
+      commitments_challenge.as_ref(),
     );
     let addendum = params.algorithm.preprocess_addendum(&mut rng, &params.keys);
@@ -128,22 +131,35 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
     let mut blame_entropy = [0; 32];
     rng.fill_bytes(&mut blame_entropy);
     (
-      AlgorithmSignMachine { params, seed, nonces, preprocess: preprocess.clone(), blame_entropy },
+      AlgorithmSignMachine {
+        params,
+        seed,
+        commitments_challenge,
+        nonces,
+        preprocess: preprocess.clone(),
+        blame_entropy,
+      },
       preprocess,
     )
   }
 
   #[cfg(any(test, feature = "tests"))]
   pub(crate) fn unsafe_override_preprocess(
-    self,
+    mut self,
     nonces: Vec<Nonce<C>>,
     preprocess: Preprocess<C, A::Addendum>,
   ) -> AlgorithmSignMachine<C, A> {
     AlgorithmSignMachine {
+      commitments_challenge: self.params.algorithm.transcript().challenge(b"commitments"),
+
       params: self.params,
       seed: Zeroizing::new(CachedPreprocess([0; 32])),
 
       nonces,
       preprocess,
       // Uses 0s since this is just used to protect against a malicious participant from
       // deliberately increasing the amount of time needed to identify them (and is accordingly
       // not necessary to function)
       blame_entropy: [0; 32],
     }
   }
@@ -221,6 +237,7 @@ pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
   params: Params<C, A>,
   seed: Zeroizing<CachedPreprocess>,
 
+  commitments_challenge: <A::Transcript as Transcript>::Challenge,
   pub(crate) nonces: Vec<Nonce<C>>,
   #[zeroize(skip)]
   pub(crate) preprocess: Preprocess<C, A::Addendum>,
@@ -249,7 +266,11 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
 
   fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
     Ok(Preprocess {
-      commitments: Commitments::read::<_, A::Transcript>(reader, &self.params.algorithm.nonces())?,
+      commitments: Commitments::read::<_, A::Transcript>(
+        reader,
+        &self.params.algorithm.nonces(),
+        self.commitments_challenge.as_ref(),
+      )?,
       addendum: self.params.algorithm.read_addendum(reader)?,
     })
   }
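The challenge is taken once, in preprocess, and stored on the machine so read_preprocess rebuilds identical DLEq transcripts when verifying other participants' commitments. A sketch of that storage pattern, reusing the hypothetical SimpleTranscript from the first sketch above (this struct and these methods are illustrative, not the crate's AlgorithmSignMachine):

// Illustrative only: mirrors the commitments_challenge field added by the diff.
struct SignMachine {
  // Challenge taken from the algorithm transcript at preprocess time
  commitments_challenge: u64,
}

impl SignMachine {
  fn preprocess(algorithm_transcript: &mut SimpleTranscript) -> SignMachine {
    // Taken once, then carried so later reads use the exact same context
    SignMachine { commitments_challenge: algorithm_transcript.challenge(b"commitments") }
  }

  fn read_preprocess(&self) -> SimpleTranscript {
    // Every participant's DLEq proofs are verified against a transcript
    // derived from this same stored challenge
    let mut dleq = SimpleTranscript::new(b"FROST_commitments");
    dleq.append_message(b"context", &self.commitments_challenge.to_le_bytes());
    dleq
  }
}

Storing the challenge, rather than re-challenging the transcript later, keeps the write and read paths on the same context even if the algorithm transcript advances in between.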