use thiserror::Error;

use rand_core::{RngCore, CryptoRng};

use digest::Digest;

use transcript::Transcript;

use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
use multiexp::BatchVerifier;

use crate::Generators;

pub mod scalar;
use scalar::{scalar_convert, mutual_scalar_from_bytes};

pub(crate) mod schnorr;
use schnorr::SchnorrPoK;

pub(crate) mod aos;

mod bits;
use bits::{BitSignature, Bits};

#[cfg(feature = "serialize")]
use std::io::{Read, Write};
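
// Reads a point from its canonical byte representation, erroring on any encoding the group
// implementation considers invalid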
#[cfg(feature = "serialize")]
pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G> {
  let mut repr = G::Repr::default();
  r.read_exact(repr.as_mut())?;
  let point = G::from_bytes(&repr);
  if point.is_none().into() {
    Err(std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))?;
  }
  Ok(point.unwrap())
}

#[derive(Error, PartialEq, Eq, Debug)]
pub enum DLEqError {
  #[error("invalid proof of knowledge")]
  InvalidProofOfKnowledge,
  #[error("invalid proof length")]
  InvalidProofLength,
  #[error("invalid challenge")]
  InvalidChallenge,
  #[error("invalid proof")]
  InvalidProof
}

// Debug would be such a dump of data that it likely isn't helpful, but at least it's available
// to anyone who wants it
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<
  G0: PrimeGroup,
  G1: PrimeGroup,
  const SIGNATURE: u8,
  const RING_LEN: usize,
  const REMAINDER_RING_LEN: usize
> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
  bits: Vec<Bits<G0, G1, SIGNATURE, RING_LEN>>,
  remainder: Option<Bits<G0, G1, SIGNATURE, REMAINDER_RING_LEN>>,
  poks: (SchnorrPoK<G0>, SchnorrPoK<G1>)
}

macro_rules! dleq {
  ($name: ident, $signature: expr, $remainder: literal) => {
    pub type $name<G0, G1> = DLEqProof<
      G0,
      G1,
      { $signature.to_u8() },
      { $signature.ring_len() },
      // There may not be a remainder, yet if there is one, it'll be just one bit
      // A ring for one bit has a RING_LEN of 2
      { if $remainder { 2 } else { 0 } }
    >;
  }
}

// Proves for 1 bit at a time with the signature form (e, s), as originally described in MRL-0010.
// Uses a merged challenge, unlike MRL-0010, for the ring signature, saving an element from each
// bit and removing a hash while slightly reducing challenge security. This security reduction is
// already applied to the scalar being proven for, a result of the requirement that it's mutually
// valid over both scalar fields, hence its application here as well. This is mainly here as a
// point of reference for the following DLEq proofs, all of which use merged challenges, and isn't
// performant in comparison to the others
dleq!(ClassicLinearDLEq, BitSignature::ClassicLinear, false);

// Proves for 2 bits at a time to save 3/7 of the elements of every other bit
// <9% smaller than CompromiseLinear, yet ~12% slower
dleq!(ConciseLinearDLEq, BitSignature::ConciseLinear, true);

// Uses AOS signatures of the form (R, s), enabling the final step of the ring signature to be
// batch verified, at the cost of adding an additional element per bit
dleq!(EfficientLinearDLEq, BitSignature::EfficientLinear, false);

// Proves for 2 bits at a time while using the (R, s) form. This saves 3/7 of the elements of
// every other bit, while adding 1 element to every bit, and is more efficient than ConciseLinear
// yet less efficient than EfficientLinear due to having more ring signature steps which aren't
// batched
// >25% smaller than EfficientLinear and just 11% slower, making it the recommended option
dleq!(CompromiseLinearDLEq, BitSignature::CompromiseLinear, true);
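
// A minimal usage sketch for these aliases. The RNG, transcript, digest, and `generators()`
// helper below (OsRng, RecommendedTranscript, Blake2b512) are illustrative assumptions, not part
// of this module:
//
//   let (proof, (scalar0, scalar1)) = CompromiseLinearDLEq::prove(
//     &mut OsRng,
//     &mut RecommendedTranscript::new(b"DLEq Example"),
//     generators(),
//     Blake2b512::new().chain_update(b"seed")
//   );
//   let keys = proof.verify(
//     &mut OsRng,
//     &mut RecommendedTranscript::new(b"DLEq Example"),
//     generators()
//   ).unwrap();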

impl<
  G0: PrimeGroup,
  G1: PrimeGroup,
  const SIGNATURE: u8,
  const RING_LEN: usize,
  const REMAINDER_RING_LEN: usize
> DLEqProof<G0, G1, SIGNATURE, RING_LEN, REMAINDER_RING_LEN> where
  G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
  pub(crate) fn transcript<T: Transcript>(
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    keys: (G0, G1)
  ) {
    transcript.domain_separate(b"cross_group_dleq");
    generators.0.transcript(transcript);
    generators.1.transcript(transcript);
    transcript.domain_separate(b"points");
    transcript.append_message(b"point_0", keys.0.to_bytes().as_ref());
    transcript.append_message(b"point_1", keys.1.to_bytes().as_ref());
  }
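
  // Every blinding key except the last is random; the last is the negation of the running total,
  // so all blinding keys sum to zero and the bit commitments sum to the actual keys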
  pub(crate) fn blinding_key<R: RngCore + CryptoRng, F: PrimeField>(
    rng: &mut R,
    total: &mut F,
    last: bool
  ) -> F {
    let blinding_key = if last {
      -*total
    } else {
      F::random(&mut *rng)
    };
    *total += blinding_key;
    blinding_key
  }
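
  // Sum the bit commitments (and the remainder's, if present). Since the blinding keys sum to
  // zero, this recovers the keys the proof is for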
  fn reconstruct_keys(&self) -> (G0, G1) {
    let mut res = (
      self.bits.iter().map(|bit| bit.commitments.0).sum::<G0>(),
      self.bits.iter().map(|bit| bit.commitments.1).sum::<G1>()
    );

    if let Some(bit) = &self.remainder {
      res.0 += bit.commitments.0;
      res.1 += bit.commitments.1;
    }

    res
  }

  fn prove_internal<R: RngCore + CryptoRng, T: Clone + Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    f: (G0::Scalar, G1::Scalar)
  ) -> (Self, (G0::Scalar, G1::Scalar)) {
    Self::transcript(
      transcript,
      generators,
      ((generators.0.primary * f.0), (generators.1.primary * f.1))
    );

    let poks = (
      SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, f.0),
      SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1)
    );

    let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
    let mut blinding_key = |rng: &mut R, last| {
      let blinding_key = (
        Self::blinding_key(&mut *rng, &mut blinding_key_total.0, last),
        Self::blinding_key(&mut *rng, &mut blinding_key_total.1, last)
      );
      if last {
        debug_assert_eq!(blinding_key_total.0, G0::Scalar::zero());
        debug_assert_eq!(blinding_key_total.1, G1::Scalar::zero());
      }
      blinding_key
    };

    let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
    let bits_per_group = BitSignature::from(SIGNATURE).bits();
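
    // pow_2 starts at the primary generators and, as each chunk is proven, is advanced to the
    // generators scaled by the power of two for the next chunk of bits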
    let mut pow_2 = (generators.0.primary, generators.1.primary);

    let raw_bits = f.0.to_le_bits();
    let mut bits = Vec::with_capacity(capacity);
    let mut these_bits: u8 = 0;
    for (i, bit) in raw_bits.iter().enumerate() {
      if i == capacity {
        break;
      }

      let bit = *bit as u8;
      debug_assert_eq!(bit | 1, 1);

      // Accumulate this bit
      these_bits |= bit << (i % bits_per_group);
      if (i % bits_per_group) == (bits_per_group - 1) {
        let last = i == (capacity - 1);
        let blinding_key = blinding_key(&mut *rng, last);
        bits.push(
          Bits::prove(
            &mut *rng,
            transcript,
            generators,
            i / bits_per_group,
            &mut pow_2,
            these_bits,
            blinding_key
          )
        );
        these_bits = 0;
      }
    }
    debug_assert_eq!(bits.len(), capacity / bits_per_group);
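
    // If the capacity isn't a multiple of bits_per_group, the leftover bits accumulated above are
    // proven separately as the remainder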
    let mut remainder = None;
    if capacity != ((capacity / bits_per_group) * bits_per_group) {
      let blinding_key = blinding_key(&mut *rng, true);
      remainder = Some(
        Bits::prove(
          &mut *rng,
          transcript,
          generators,
          capacity / bits_per_group,
          &mut pow_2,
          these_bits,
          blinding_key
        )
      );
    }

    let proof = DLEqProof { bits, remainder, poks };
    debug_assert_eq!(
      proof.reconstruct_keys(),
      (generators.0.primary * f.0, generators.1.primary * f.1)
    );
    (proof, f)
  }

  /// Prove the cross-Group Discrete Log Equality for the points derived from the scalar created
  /// as the output of the passed-in Digest. Given the non-standard requirements to achieve
  /// uniformity, needing to be < 2^x instead of less than a prime modulus, this is the simplest
  /// way to safely and securely generate a Scalar, without risk of failure or bias.
  /// It also ensures a lack of determinable relation between keys, guaranteeing security in the
  /// currently expected use case for this, atomic swaps, where each swap leaks the key. Knowing
  /// the relationship between keys would allow breaking all swaps after just one.
  pub fn prove<R: RngCore + CryptoRng, T: Clone + Transcript, D: Digest>(
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    digest: D
  ) -> (Self, (G0::Scalar, G1::Scalar)) {
    Self::prove_internal(
      rng,
      transcript,
      generators,
      mutual_scalar_from_bytes(digest.finalize().as_ref())
    )
  }

  /// Prove the cross-Group Discrete Log Equality for the points derived from the scalar passed
  /// in, failing if it's not mutually valid. This allows for rejection sampling externally
  /// derived scalars until they're safely usable, as needed.
  pub fn prove_without_bias<R: RngCore + CryptoRng, T: Clone + Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    f0: G0::Scalar
  ) -> Option<(Self, (G0::Scalar, G1::Scalar))> {
    scalar_convert(f0).map(|f1| Self::prove_internal(rng, transcript, generators, (f0, f1)))
  }
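
  // A hypothetical rejection-sampling loop using prove_without_bias (illustrative only; the RNG,
  // transcript handling, and scalar source are assumptions):
  //
  //   let (proof, scalars) = loop {
  //     if let Some(res) = DLEqProof::prove_without_bias(
  //       &mut OsRng,
  //       &mut transcript.clone(),
  //       generators,
  //       G0::Scalar::random(&mut OsRng)
  //     ) {
  //       break res;
  //     }
  //   };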

  /// Verify a cross-Group Discrete Log Equality statement, returning the points proven for.
  pub fn verify<R: RngCore + CryptoRng, T: Clone + Transcript>(
    &self,
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>)
  ) -> Result<(G0, G1), DLEqError> {
    let capacity = usize::try_from(
      G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)
    ).unwrap();
    let bits_per_group = BitSignature::from(SIGNATURE).bits();
    let has_remainder = (capacity % bits_per_group) != 0;

    // These shouldn't be possible, as locally created and deserialized proofs should be properly
    // formed in these regards, yet it doesn't hurt to check and would be problematic if true
    if (self.bits.len() != (capacity / bits_per_group)) || (
      (self.remainder.is_none() && has_remainder) || (self.remainder.is_some() && !has_remainder)
    ) {
      return Err(DLEqError::InvalidProofLength);
    }

    let keys = self.reconstruct_keys();
    Self::transcript(transcript, generators, keys);

    let batch_capacity = match BitSignature::from(SIGNATURE) {
      BitSignature::ClassicLinear => 3,
      BitSignature::ConciseLinear => 3,
      BitSignature::EfficientLinear => (self.bits.len() + 1) * 3,
      BitSignature::CompromiseLinear => (self.bits.len() + 1) * 3
    };
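
    // The (R, s) form signatures (EfficientLinear and CompromiseLinear) queue the final step of
    // their ring signatures into these batches per bit, hence the larger capacities above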
    let mut batch = (BatchVerifier::new(batch_capacity), BatchVerifier::new(batch_capacity));

    self.poks.0.verify(&mut *rng, transcript, generators.0.primary, keys.0, &mut batch.0);
    self.poks.1.verify(&mut *rng, transcript, generators.1.primary, keys.1, &mut batch.1);

    let mut pow_2 = (generators.0.primary, generators.1.primary);
    for (i, bits) in self.bits.iter().enumerate() {
      bits.verify(&mut *rng, transcript, generators, &mut batch, i, &mut pow_2)?;
    }
    if let Some(bit) = &self.remainder {
      bit.verify(&mut *rng, transcript, generators, &mut batch, self.bits.len(), &mut pow_2)?;
    }

    if (!batch.0.verify_vartime()) || (!batch.1.verify_vartime()) {
      Err(DLEqError::InvalidProof)?;
    }

    Ok(keys)
  }

  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    for bit in &self.bits {
      bit.serialize(w)?;
    }
    if let Some(bit) = &self.remainder {
      bit.serialize(w)?;
    }
    self.poks.0.serialize(w)?;
    self.poks.1.serialize(w)
  }
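
  // The proof's length is fully determined by the groups' capacities and the signature kind, so
  // no length prefix needs to be serialized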
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Self> {
    let capacity = usize::try_from(
      G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)
    ).unwrap();
    let bits_per_group = BitSignature::from(SIGNATURE).bits();

    let mut bits = Vec::with_capacity(capacity / bits_per_group);
    for _ in 0 .. (capacity / bits_per_group) {
      bits.push(Bits::deserialize(r)?);
    }

    let mut remainder = None;
    if (capacity % bits_per_group) != 0 {
      remainder = Some(Bits::deserialize(r)?);
    }

    Ok(
      DLEqProof {
        bits,
        remainder,
        poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?)
      }
    )
  }
}