mirror of https://github.com/serai-dex/serai.git (synced 2025-01-03 17:40:34 +00:00)
Support transcripts with 32-byte challenges in the DLEq crate
parent f8760ae021
commit 5942492519

2 changed files with 12 additions and 7 deletions
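Previously, challenge() asserted that the transcript's challenge output was exactly 64 bytes, which ruled out transcripts backed by 32-byte-output hashes. The patch below drops that requirement: it computes how many bytes an unbiased reduction needs and keeps requesting "challenge_extension" challenges until it has them. A minimal sketch of that extension loop, assuming a hypothetical MockTranscript (the struct and its counter are illustrative stand-ins for the crate's Transcript trait; only the loop mirrors the patched code):

// Hypothetical transcript whose challenges are only 32 bytes; a real
// Transcript hashes its accumulated state, so each challenge is fresh.
struct MockTranscript {
  counter: u8,
}

impl MockTranscript {
  fn challenge(&mut self, _label: &[u8]) -> [u8; 32] {
    // Challenges update the transcript state, so repeated calls differ.
    self.counter += 1;
    [self.counter; 32]
  }
}

// Mirrors the patched loop: request "challenge_extension" challenges until
// `target` bytes are gathered, then truncate to exactly `target`.
fn widen_challenge(t: &mut MockTranscript, target: usize) -> Vec<u8> {
  let mut bytes = t.challenge(b"challenge").to_vec();
  while bytes.len() < target {
    bytes.extend(t.challenge(b"challenge_extension"));
  }
  bytes.truncate(target);
  bytes
}

fn main() {
  // A 256-bit field wants 64 bytes of material; a 32-byte transcript now
  // satisfies that with a single extension challenge.
  let mut t = MockTranscript { counter: 0 };
  assert_eq!(widen_challenge(&mut t, 64).len(), 64);
}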
@@ -34,22 +34,27 @@ impl<G: PrimeGroup> Generators<G> {
 }
 
 pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F {
-  assert!(F::NUM_BITS <= 384);
-
   // From here, there are three ways to get a scalar under the ff/group API
   // 1: Scalar::random(ChaCha12Rng::from_seed(self.transcript.rng_seed(b"challenge")))
   // 2: Grabbing a UInt library to perform reduction by the modulus, then determining endianess
   // and loading it in
   // 3: Iterating over each byte and manually doubling/adding. This is simplest
-  let challenge_bytes = transcript.challenge(b"challenge");
-  assert!(challenge_bytes.as_ref().len() == 64);
+
+  // Get a wide amount of bytes to safely reduce without bias
+  let target = ((usize::try_from(F::NUM_BITS).unwrap() + 7) / 8) * 2;
+  let mut challenge_bytes = transcript.challenge(b"challenge").as_ref().to_vec();
+  while challenge_bytes.len() < target {
+    // Secure given transcripts updating on challenge
+    challenge_bytes.extend(transcript.challenge(b"challenge_extension").as_ref());
+  }
+  challenge_bytes.truncate(target);
 
   let mut challenge = F::zero();
-  for b in challenge_bytes.as_ref() {
+  for b in challenge_bytes {
     for _ in 0 .. 8 {
       challenge = challenge.double();
     }
-    challenge += F::from(u64::from(*b));
+    challenge += F::from(u64::from(b));
   }
   challenge
 }
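The tail of the function folds the widened bytes into a field element by double-and-add: eight doublings shift the accumulator left one byte, then the byte is added, so the whole string is interpreted as a big-endian integer reduced modulo the field order. Since target is twice the field's byte length, that implicit reduction is statistically unbiased. A self-contained sketch of the same technique over a small illustrative modulus (P is hypothetical, chosen so the results are checkable by hand):

// Double-and-add reduction of big-endian bytes modulo a (hypothetical,
// illustration-only) prime P, mirroring the F::double()/F::from loop above.
const P: u64 = 65537;

fn reduce(bytes: &[u8]) -> u64 {
  let mut acc = 0u64;
  for b in bytes {
    for _ in 0 .. 8 {
      acc = (acc * 2) % P; // challenge = challenge.double();
    }
    acc = (acc + u64::from(*b)) % P; // challenge += F::from(u64::from(b));
  }
  acc
}

fn main() {
  // [0x01, 0x02] is 258 big-endian; below P, it survives reduction intact.
  assert_eq!(reduce(&[0x01, 0x02]), 258);
  // [0xff; 4] is 0xffffffff, which reduces to 0 since 2^32 ≡ 1 (mod 2^16 + 1).
  assert_eq!(reduce(&[0xff; 4]), 0xffff_ffff % P);
}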
@@ -158,7 +158,7 @@ fn test_rejection_sampling() {
     // Either would work
     EfficientLinearDLEq::prove_without_bias(
       &mut OsRng,
-      &mut RecommendedTranscript::new(b""),
+      &mut transcript(),
       generators(),
       pow_2
     ).is_none()
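The accompanying test change swaps the inline RecommendedTranscript::new(b"") for the test module's transcript() helper, so test_rejection_sampling exercises prove_without_bias through the same transcript constructor as the rest of the suite. The helper's body isn't part of this diff; a hypothetical shape, assuming it only fixes a domain-separation label:

// Hypothetical: the actual transcript() helper in the test module may use a
// different label (or a different transcript type entirely).
fn transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"DLEq Test")
}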