Implement a DLEq library

While Serai only needs the simple DLEq which was already present under 
monero, this migrates the implementation of the cross-group DLEq I 
maintain into Serai. This was to have full access to the ecosystem of 
libraries built under Serai while also ensuring support for it.

The cross_group DLEq, which is extremely experimental, is feature
flagged off. So is the built-in serialization functionality, as this
library should be possible to make no-std once const generics are fully
featured, yet the implemented serialization adds the additional barrier
of std::io.
This commit is contained in:
Luke Parker 2022-06-30 05:42:29 -04:00
parent 2e168204f0
commit 5d115f1e1c
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
15 changed files with 854 additions and 111 deletions

View file

@ -6,6 +6,7 @@ members = [
"crypto/dalek-ff-group",
"crypto/multiexp",
"crypto/dleq",
"crypto/frost",
"coins/monero",

View file

@ -26,6 +26,7 @@ group = { version = "0.12", optional = true }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", optional = true }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", features = ["serialize"], optional = true }
base58-monero = "1"
monero = "0.16"
@ -38,7 +39,7 @@ reqwest = { version = "0.11", features = ["json"] }
[features]
experimental = []
multisig = ["rand_chacha", "blake2", "group", "dalek-ff-group", "transcript", "frost"]
multisig = ["rand_chacha", "blake2", "group", "dalek-ff-group", "transcript", "frost", "dleq"]
[dev-dependencies]
sha2 = "0.10"

View file

@ -1,20 +1,15 @@
use core::convert::TryInto;
use std::{convert::TryInto, io::Cursor};
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use group::GroupEncoding;
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE as DTable,
scalar::Scalar as DScalar,
edwards::EdwardsPoint as DPoint
};
use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript};
use transcript::RecommendedTranscript;
use dalek_ff_group as dfg;
use crate::random_scalar;
use dleq::{Generators, DLEqProof};
#[derive(Clone, Error, Debug)]
pub enum MultisigError {
@ -26,105 +21,34 @@ pub enum MultisigError {
InvalidKeyImage(u16)
}
// Used to prove legitimacy of key images and nonces which both involve other basepoints
#[derive(Clone)]
pub struct DLEqProof {
s: DScalar,
c: DScalar
}
#[allow(non_snake_case)]
impl DLEqProof {
fn challenge(H: &DPoint, xG: &DPoint, xH: &DPoint, rG: &DPoint, rH: &DPoint) -> DScalar {
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
x: Scalar
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
// merge later in some form, when it should instead just merge xH (as it does)
let mut transcript = RecommendedTranscript::new(b"DLEq Proof");
// Bit redundant, keeps things consistent
transcript.domain_separate(b"DLEq");
// Doesn't include G which is constant, does include H which isn't, even though H manipulation
// shouldn't be possible in practice as it's independently calculated as a product of known data
transcript.append_message(b"H", &H.compress().to_bytes());
transcript.append_message(b"xG", &xG.compress().to_bytes());
transcript.append_message(b"xH", &xH.compress().to_bytes());
transcript.append_message(b"rG", &rG.compress().to_bytes());
transcript.append_message(b"rH", &rH.compress().to_bytes());
DScalar::from_bytes_mod_order_wide(
&transcript.challenge(b"challenge").try_into().expect("Blake2b512 output wasn't 64 bytes")
)
}
pub fn prove<R: RngCore + CryptoRng>(
rng: &mut R,
H: &DPoint,
secret: &DScalar
) -> DLEqProof {
let r = random_scalar(rng);
let rG = &DTable * &r;
let rH = r * H;
// We can frequently (always?) save a scalar mul if we accept xH as an arg, yet it opens room
// for incorrect data to be passed, and therefore faults, making it not worth having
// We could also return xH but... it's really micro-optimizing
let c = DLEqProof::challenge(H, &(secret * &DTable), &(secret * H), &rG, &rH);
let s = r + (c * secret);
DLEqProof { s, c }
}
pub fn verify(
&self,
H: &DPoint,
l: u16,
xG: &DPoint,
xH: &DPoint
) -> Result<(), MultisigError> {
let s = self.s;
let c = self.c;
let rG = (&s * &DTable) - (c * xG);
let rH = (s * H) - (c * xH);
if c != DLEqProof::challenge(H, &xG, &xH, &rG, &rH) {
Err(MultisigError::InvalidDLEqProof(l))?;
}
Ok(())
}
pub fn serialize(
&self
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
res.extend(self.s.to_bytes());
res.extend(self.c.to_bytes());
res
}
pub fn deserialize(
serialized: &[u8]
) -> Option<DLEqProof> {
if serialized.len() != 64 {
return None;
}
DScalar::from_canonical_bytes(serialized[0 .. 32].try_into().unwrap()).and_then(
|s| DScalar::from_canonical_bytes(serialized[32 .. 64].try_into().unwrap()).and_then(
|c| Some(DLEqProof { s, c })
)
)
}
&mut RecommendedTranscript::new(b"DLEq Proof"),
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
dfg::Scalar(x)
).serialize(&mut res).unwrap();
res
}
#[allow(non_snake_case)]
pub(crate) fn read_dleq(
serialized: &[u8],
start: usize,
H: &DPoint,
H: EdwardsPoint,
l: u16,
xG: &DPoint
xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
if serialized.len() < start + 96 {
Err(MultisigError::InvalidDLEqProof(l))?;
@ -132,17 +56,21 @@ pub(crate) fn read_dleq(
let bytes = (&serialized[(start + 0) .. (start + 32)]).try_into().unwrap();
// dfg ensures the point is torsion free
let other = Option::<dfg::EdwardsPoint>::from(
let xH = Option::<dfg::EdwardsPoint>::from(
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
)?;
// Ensure this is a canonical point
if other.to_bytes() != bytes {
if xH.to_bytes() != bytes {
Err(MultisigError::InvalidDLEqProof(l))?;
}
DLEqProof::deserialize(&serialized[(start + 32) .. (start + 96)])
.ok_or(MultisigError::InvalidDLEqProof(l))?
.verify(H, l, xG, &other).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
let proof = DLEqProof::<dfg::EdwardsPoint>::deserialize(
&mut Cursor::new(&serialized[(start + 32) .. (start + 96)])
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(other)
let mut transcript = RecommendedTranscript::new(b"DLEq Proof");
proof.verify(&mut transcript, Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)), (xG, xH))
.map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH)
}

View file

@ -19,7 +19,7 @@ use dalek_ff_group as dfg;
use crate::{
hash_to_point,
frost::{MultisigError, DLEqProof, read_dleq},
frost::{MultisigError, write_dleq, read_dleq},
ringct::clsag::{ClsagInput, Clsag}
};
@ -133,12 +133,12 @@ impl Algorithm<Ed25519> for ClsagMultisig {
let mut serialized = Vec::with_capacity(ClsagMultisig::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
serialized.extend(DLEqProof::prove(rng, &self.H, &view.secret_share().0).serialize());
serialized.extend(write_dleq(rng, self.H, view.secret_share().0));
serialized.extend((nonces[0].0 * self.H).compress().to_bytes());
serialized.extend(&DLEqProof::prove(rng, &self.H, &nonces[0].0).serialize());
serialized.extend(write_dleq(rng, self.H, nonces[0].0));
serialized.extend((nonces[1].0 * self.H).compress().to_bytes());
serialized.extend(&DLEqProof::prove(rng, &self.H, &nonces[1].0).serialize());
serialized.extend(write_dleq(rng, self.H, nonces[1].0));
serialized
}
@ -170,18 +170,18 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.image += read_dleq(
serialized,
cursor,
&self.H,
self.H,
l,
&view.verification_share(l).0
view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
cursor += 96;
self.transcript.append_message(b"commitment_D_H", &serialized[cursor .. (cursor + 32)]);
self.AH.0 += read_dleq(serialized, cursor, &self.H, l, &commitments[0]).map_err(|_| FrostError::InvalidCommitment(l))?;
self.AH.0 += read_dleq(serialized, cursor, self.H, l, commitments[0]).map_err(|_| FrostError::InvalidCommitment(l))?;
cursor += 96;
self.transcript.append_message(b"commitment_E_H", &serialized[cursor .. (cursor + 32)]);
self.AH.1 += read_dleq(serialized, cursor, &self.H, l, &commitments[1]).map_err(|_| FrostError::InvalidCommitment(l))?;
self.AH.1 += read_dleq(serialized, cursor, self.H, l, commitments[1]).map_err(|_| FrostError::InvalidCommitment(l))?;
Ok(())
}

29
crypto/dleq/Cargo.toml Normal file
View file

@ -0,0 +1,29 @@
[package]
name = "dleq"
version = "0.1.0"
description = "Implementation of single and cross-curve Discrete Log Equality proofs"
license = "MIT"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[dependencies]
thiserror = "1"
rand_core = "0.6"
ff = "0.12"
group = "0.12"
transcript = { package = "flexible-transcript", path = "../transcript", version = "0.1" }
[dev-dependencies]
hex-literal = "0.3"
k256 = { version = "0.11", features = ["arithmetic", "bits"] }
dalek-ff-group = { path = "../dalek-ff-group" }
transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] }
[features]
serialize = []
cross_group = []
secure_capacity_difference = []
default = ["secure_capacity_difference"]

21
crypto/dleq/LICENSE Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020-2022 Luke Parker, Lee Bousfield
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

10
crypto/dleq/README.md Normal file
View file

@ -0,0 +1,10 @@
# Discrete Log Equality
Implementation of discrete log equality both within a group and across groups,
the latter being extremely experimental, for curves implementing the ff/group
APIs. This library has not undergone auditing.
The cross-group DLEq is the one described in
https://web.getmonero.org/resources/research-lab/pubs/MRL-0010.pdf, augmented
with a pair of Schnorr Proofs of Knowledge in order to correct a mistake
present in the paper.

View file

@ -0,0 +1,324 @@
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
use crate::{Generators, challenge};
pub mod scalar;
use scalar::scalar_normalize;
pub(crate) mod schnorr;
use schnorr::SchnorrPoK;
#[cfg(feature = "serialize")]
use std::io::{Read, Write};
#[cfg(feature = "serialize")]
use crate::read_scalar;
#[cfg(feature = "serialize")]
pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G> {
  // Read the group's fixed-size encoding, then attempt decoding
  let mut encoding = G::Repr::default();
  r.read_exact(encoding.as_mut())?;
  // from_bytes yields a CtOption; surface a decoding failure as an I/O error
  // NOTE(review): this does not check the encoding was canonical — callers wanting
  // canonicity must compare the re-encoding themselves; confirm whether that matters here
  Option::<G>::from(G::from_bytes(&encoding))
    .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))
}
// A single bit of the discrete logarithm, committed to over both groups, alongside a
// two-member ring signature proving both commitments open to the same bit value (0 or 1)
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Bit<G0: PrimeGroup, G1: PrimeGroup> {
  commitments: (G0, G1),            // Commitment to this bit in each group
  e: (G0::Scalar, G1::Scalar),      // Ring challenge pair
  s: [(G0::Scalar, G1::Scalar); 2]  // Ring responses, one pair per ring member
}
impl<G0: PrimeGroup, G1: PrimeGroup> Bit<G0, G1> {
  /// Write this bit's data as fixed-length encodings: both commitments, the challenge
  /// pair, then both response pairs.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    w.write_all(self.commitments.0.to_bytes().as_ref())?;
    w.write_all(self.commitments.1.to_bytes().as_ref())?;
    w.write_all(self.e.0.to_repr().as_ref())?;
    w.write_all(self.e.1.to_repr().as_ref())?;
    for (s0, s1) in &self.s {
      w.write_all(s0.to_repr().as_ref())?;
      w.write_all(s1.to_repr().as_ref())?;
    }
    Ok(())
  }

  /// Read a bit's data in the exact layout `serialize` emits.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Bit<G0, G1>> {
    let commitments = (read_point(r)?, read_point(r)?);
    let e = (read_scalar(r)?, read_scalar(r)?);
    let s = [
      (read_scalar(r)?, read_scalar(r)?),
      (read_scalar(r)?, read_scalar(r)?)
    ];
    Ok(Bit { commitments, e, s })
  }
}
// Errors returned when verifying a cross-group DLEq proof
#[derive(Error, PartialEq, Eq, Debug)]
pub enum DLEqError {
  // One of the two Schnorr PoKs over the reconstructed keys failed
  #[error("invalid proof of knowledge")]
  InvalidProofOfKnowledge,
  // The number of bit proofs didn't match the groups' mutual capacity
  #[error("invalid proof length")]
  InvalidProofLength,
  // A bit's ring signature failed to verify
  #[error("invalid proof")]
  InvalidProof
}
// A cross-group DLEq proof: one Bit per mutual-capacity bit of the discrete log, plus a
// Schnorr proof of knowledge of the key in each group.
// Debug would be such a dump of data this likely isn't helpful, but at least it's available to
// anyone who wants it
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<G0: PrimeGroup, G1: PrimeGroup> {
  bits: Vec<Bit<G0, G1>>,
  poks: (SchnorrPoK<G0>, SchnorrPoK<G1>)
}
impl<G0: PrimeGroup, G1: PrimeGroup> DLEqProof<G0, G1> {
  // Bind the generators and both claimed public keys into the transcript before any
  // per-bit data is appended
  fn initialize_transcript<T: Transcript>(
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    keys: (G0, G1)
  ) {
    generators.0.transcript(transcript);
    generators.1.transcript(transcript);
    transcript.domain_separate(b"points");
    transcript.append_message(b"point_0", keys.0.to_bytes().as_ref());
    transcript.append_message(b"point_1", keys.1.to_bytes().as_ref());
  }

  // Sample one bit's blinding key, tracking the running power-of-two-weighted sum. The
  // final bit's key is solved for so the weighted sum of all blinding keys is zero,
  // making the commitments sum (weighted) to the public key alone
  fn blinding_key<R: RngCore + CryptoRng, F: PrimeField>(
    rng: &mut R,
    total: &mut F,
    pow_2: &mut F,
    last: bool
  ) -> F {
    let blinding_key = if last {
      // Chosen so total + (blinding_key * pow_2) == 0
      -*total * pow_2.invert().unwrap()
    } else {
      F::random(&mut *rng)
    };
    *total += blinding_key * *pow_2;
    *pow_2 = pow_2.double();
    blinding_key
  }

  // Append a nonce pair to a forked copy of the transcript and produce a challenge for
  // each group. Takes the transcript by value: each ring layer works on its own fork
  #[allow(non_snake_case)]
  fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
    transcript.append_message(b"nonce_0", nonces.0.to_bytes().as_ref());
    transcript.append_message(b"nonce_1", nonces.1.to_bytes().as_ref());
    (challenge(&mut transcript, b"challenge_G"), challenge(&mut transcript, b"challenge_H"))
  }

  // Reconstruct the nonces from a (response, challenge) pair as R = s*H - e*A, then
  // derive the next ring challenge from them
  #[allow(non_snake_case)]
  fn R_nonces<T: Transcript>(
    transcript: T,
    generators: (Generators<G0>, Generators<G1>),
    s: (G0::Scalar, G1::Scalar),
    A: (G0, G1),
    e: (G0::Scalar, G1::Scalar)
  ) -> (G0::Scalar, G1::Scalar) {
    Self::nonces(
      transcript,
      (((generators.0.alt * s.0) - (A.0 * e.0)), ((generators.1.alt * s.1) - (A.1 * e.1)))
    )
  }

  // Sum the per-bit commitments, weighted by successive powers of two, back into the key
  // TODO: Use multiexp here after https://github.com/serai-dex/serai/issues/17
  fn reconstruct_key<G: PrimeGroup>(commitments: impl Iterator<Item = G>) -> G {
    let mut pow_2 = G::Scalar::one();
    commitments.fold(G::identity(), |key, commitment| {
      let res = key + (commitment * pow_2);
      pow_2 = pow_2.double();
      res
    })
  }

  // Reconstruct both public keys from this proof's bit commitments
  fn reconstruct_keys(&self) -> (G0, G1) {
    (
      Self::reconstruct_key(self.bits.iter().map(|bit| bit.commitments.0)),
      Self::reconstruct_key(self.bits.iter().map(|bit| bit.commitments.1))
    )
  }

  // Append one bit's index and commitments to the shared transcript
  fn transcript_bit<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
    if i == 0 {
      transcript.domain_separate(b"cross_group_dleq");
    }
    transcript.append_message(b"bit", &u16::try_from(i).unwrap().to_le_bytes());
    transcript.append_message(b"commitment_0", commitments.0.to_bytes().as_ref());
    transcript.append_message(b"commitment_1", commitments.1.to_bytes().as_ref());
  }

  /// Prove the cross-Group Discrete Log Equality for the points derived from the provided Scalar.
  /// Since DLEq is proven for the same Scalar in both fields, and the provided Scalar may not be
  /// valid in the other Scalar field, the Scalar is normalized as needed and the normalized forms
  /// are returned. These are the actually equal discrete logarithms. The passed in Scalar is
  /// solely to enable various forms of Scalar generation, such as deterministic schemes
  pub fn prove<R: RngCore + CryptoRng, T: Clone + Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>),
    f: G0::Scalar
  ) -> (
    Self,
    (G0::Scalar, G1::Scalar)
  ) where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
    // At least one bit will be dropped from either field element, making it irrelevant which one
    // we get a random element in
    let f = scalar_normalize::<_, G1::Scalar>(f);

    Self::initialize_transcript(
      transcript,
      generators,
      ((generators.0.primary * f.0), (generators.1.primary * f.1))
    );

    // Proofs of knowledge of the discrete log in each group, correcting for the flaw in
    // the MRL-0010 construction (see README)
    let poks = (
      SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, f.0),
      SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1)
    );

    let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
    let mut pow_2 = (G0::Scalar::one(), G1::Scalar::one());

    let raw_bits = f.0.to_le_bits();
    // Only the mutually-valid low `capacity` bits are proven for
    let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
    let mut bits = Vec::with_capacity(capacity);
    for (i, bit) in raw_bits.iter().enumerate() {
      let last = i == (capacity - 1);

      let blinding_key = (
        Self::blinding_key(&mut *rng, &mut blinding_key_total.0, &mut pow_2.0, last),
        Self::blinding_key(&mut *rng, &mut blinding_key_total.1, &mut pow_2.1, last)
      );
      if last {
        // The final blinding keys cancel the accumulated totals
        debug_assert_eq!(blinding_key_total.0, G0::Scalar::zero());
        debug_assert_eq!(blinding_key_total.1, G1::Scalar::zero());
      }

      // Commit to the bit: blinding_key * alt, plus primary if the bit is set
      let mut commitments = (
        (generators.0.alt * blinding_key.0),
        (generators.1.alt * blinding_key.1)
      );
      // TODO: Not constant time
      if *bit {
        commitments.0 += generators.0.primary;
        commitments.1 += generators.1.primary;
      }
      Self::transcript_bit(transcript, i, commitments);

      // Two-member ring signature over {commitment, commitment - primary}: start at the
      // actual member with real nonces, simulate the other with random responses
      let nonces = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
      let e_0 = Self::nonces(
        transcript.clone(),
        ((generators.0.alt * nonces.0), (generators.1.alt * nonces.1))
      );

      let s_0 = (G0::Scalar::random(&mut *rng), G1::Scalar::random(&mut *rng));
      let e_1 = Self::R_nonces(
        transcript.clone(),
        generators,
        (s_0.0, s_0.1),
        if *bit {
          commitments
        } else {
          ((commitments.0 - generators.0.primary), (commitments.1 - generators.1.primary))
        },
        e_0
      );
      // Close the ring with the real responses
      let s_1 = (nonces.0 + (e_1.0 * blinding_key.0), nonces.1 + (e_1.1 * blinding_key.1));

      bits.push(
        if *bit {
          Bit { commitments, e: e_0, s: [s_1, s_0] }
        } else {
          Bit { commitments, e: e_1, s: [s_0, s_1] }
        }
      );

      if last {
        break;
      }
    }

    let proof = DLEqProof { bits, poks };
    debug_assert_eq!(
      proof.reconstruct_keys(),
      (generators.0.primary * f.0, generators.1.primary * f.1)
    );
    (proof, f)
  }

  /// Verify a cross-Group Discrete Log Equality statement, returning the points proven for
  pub fn verify<T: Clone + Transcript>(
    &self,
    transcript: &mut T,
    generators: (Generators<G0>, Generators<G1>)
  ) -> Result<(G0, G1), DLEqError> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
    let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
    if self.bits.len() != capacity.try_into().unwrap() {
      return Err(DLEqError::InvalidProofLength);
    }

    let keys = self.reconstruct_keys();
    Self::initialize_transcript(transcript, generators, keys);
    if !(
      self.poks.0.verify(transcript, generators.0.primary, keys.0) &&
      self.poks.1.verify(transcript, generators.1.primary, keys.1)
    ) {
      Err(DLEqError::InvalidProofOfKnowledge)?;
    }

    for (i, bit) in self.bits.iter().enumerate() {
      Self::transcript_bit(transcript, i, bit.commitments);

      // Walk the ring: recovering nonces from (s[1], e) must yield the challenge which,
      // with s[0] over the shifted commitment, closes back to e
      if bit.e != Self::R_nonces(
        transcript.clone(),
        generators,
        bit.s[0],
        (
          bit.commitments.0 - generators.0.primary,
          bit.commitments.1 - generators.1.primary
        ),
        Self::R_nonces(
          transcript.clone(),
          generators,
          bit.s[1],
          bit.commitments,
          bit.e
        )
      ) {
        return Err(DLEqError::InvalidProof);
      }
    }

    Ok(keys)
  }

  // Serialize all bits, then both proofs of knowledge
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    for bit in &self.bits {
      bit.serialize(w)?;
    }
    self.poks.0.serialize(w)?;
    self.poks.1.serialize(w)
  }

  // Deserialize a proof; the bit count is fixed by the groups' mutual capacity
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<DLEqProof<G0, G1>> {
    let capacity = G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY);
    let mut bits = Vec::with_capacity(capacity.try_into().unwrap());
    for _ in 0 .. capacity {
      bits.push(Bit::deserialize(r)?);
    }
    Ok(DLEqProof { bits, poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?) })
  }
}

View file

@ -0,0 +1,34 @@
use ff::PrimeFieldBits;
/// Convert a uniform scalar into one usable on both fields, clearing the top bits as needed
pub fn scalar_normalize<F0: PrimeFieldBits, F1: PrimeFieldBits>(scalar: F0) -> (F0, F1) {
  let mutual_capacity = F0::CAPACITY.min(F1::CAPACITY);
  // The security of a mutual key is the security of the lower field. Accordingly, this
  // asserts the capacities differ by at most 3 bits
  #[cfg(feature = "secure_capacity_difference")]
  assert!((F0::CAPACITY.max(F1::CAPACITY) - mutual_capacity) < 4);

  // Uses the bit view API to ensure a consistent endianess
  let bits = scalar.to_le_bits();

  // Rebuild the scalar in both fields from its mutual_capacity low bits, most significant
  // first, via repeated doubling and addition
  let mut res0 = F0::zero();
  let mut res1 = F1::zero();
  for i in (0 .. usize::try_from(mutual_capacity).unwrap()).rev() {
    res0 = res0.double();
    res1 = res1.double();
    if bits[i] {
      res0 += F0::one();
      res1 += F1::one();
    }
  }
  (res0, res1)
}
/// Helper to convert a scalar between fields. Returns None if the scalar isn't mutually valid
pub fn scalar_convert<F0: PrimeFieldBits, F1: PrimeFieldBits>(scalar: F0) -> Option<F1> {
  let (normalized, converted) = scalar_normalize::<F0, F1>(scalar);
  // Only mutually-valid scalars survive normalization unchanged
  if normalized == scalar { Some(converted) } else { None }
}

View file

@ -0,0 +1,71 @@
use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use group::{ff::Field, prime::PrimeGroup};
use crate::challenge;
#[cfg(feature = "serialize")]
use std::io::{Read, Write};
#[cfg(feature = "serialize")]
use ff::PrimeField;
#[cfg(feature = "serialize")]
use crate::{read_scalar, cross_group::read_point};
// Schnorr proof of knowledge of the discrete log of a public key A = x * generator
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) struct SchnorrPoK<G: PrimeGroup> {
  R: G,         // Nonce commitment
  s: G::Scalar  // Response
}
impl<G: PrimeGroup> SchnorrPoK<G> {
  // Challenge derivation. Not hram due to the lack of a message m
  #[allow(non_snake_case)]
  fn hra<T: Transcript>(transcript: &mut T, generator: G, R: G, A: G) -> G::Scalar {
    transcript.domain_separate(b"schnorr_proof_of_knowledge");
    transcript.append_message(b"generator", generator.to_bytes().as_ref());
    transcript.append_message(b"nonce", R.to_bytes().as_ref());
    transcript.append_message(b"public_key", A.to_bytes().as_ref());
    challenge(transcript, b"challenge")
  }

  /// Prove knowledge of private_key for the public key generator * private_key.
  #[allow(non_snake_case)]
  pub(crate) fn prove<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generator: G,
    private_key: G::Scalar
  ) -> SchnorrPoK<G> {
    let r = G::Scalar::random(rng);
    let R = generator * r;
    let A = generator * private_key;
    let c = SchnorrPoK::hra(transcript, generator, R, A);
    SchnorrPoK { R, s: r + (private_key * c) }
  }

  /// Verify this proof against the claimed public key.
  #[must_use]
  pub(crate) fn verify<T: Transcript>(
    &self,
    transcript: &mut T,
    generator: G,
    public_key: G
  ) -> bool {
    // s * G == R + (c * A) iff s == r + (c * x)
    let c = Self::hra(transcript, generator, self.R, public_key);
    (generator * self.s) == (self.R + (public_key * c))
  }

  // Write R then s, both as fixed-length encodings
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
    w.write_all(self.R.to_bytes().as_ref())?;
    w.write_all(self.s.to_repr().as_ref())
  }

  // Read a proof in the layout serialize emits
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<SchnorrPoK<G>> {
    Ok(SchnorrPoK { R: read_point(r)?, s: read_scalar(r)? })
  }
}

149
crypto/dleq/src/lib.rs Normal file
View file

@ -0,0 +1,149 @@
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use ff::{Field, PrimeField};
use group::prime::PrimeGroup;
#[cfg(feature = "serialize")]
use std::io::{self, ErrorKind, Error, Read, Write};
#[cfg(feature = "cross_group")]
pub mod cross_group;
#[cfg(test)]
mod tests;
/// A pair of generators within a single group: the primary (conventional) generator and an
/// alternate one.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Generators<G: PrimeGroup> {
  primary: G,
  alt: G
}
impl<G: PrimeGroup> Generators<G> {
  /// Construct a new pair of generators. No relation between the two is checked here;
  /// presumably the discrete log of alt with respect to primary must be unknown for the
  /// proofs to be meaningful — confirm against the proof's security argument.
  pub fn new(primary: G, alt: G) -> Generators<G> {
    Generators { primary, alt }
  }

  // Commit both generators to the transcript under a dedicated domain
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    transcript.domain_separate(b"generators");
    transcript.append_message(b"primary", self.primary.to_bytes().as_ref());
    transcript.append_message(b"alternate", self.alt.to_bytes().as_ref());
  }
}
// Produce a field element from a 64-byte transcript challenge.
// Of the three ways to get a scalar under the ff/group API (seeding an RNG, reducing via a
// UInt library, or manual double-and-add), this uses the simplest: interpreting the digest
// as a big-endian integer and folding it in byte by byte
pub(crate) fn challenge<T: Transcript, F: PrimeField>(
  transcript: &mut T,
  label: &'static [u8]
) -> F {
  // A 512-bit challenge reduced into a field of at most 384 bits keeps any bias negligible
  assert!(F::NUM_BITS <= 384);

  let digest = transcript.challenge(label);
  let digest_ref = digest.as_ref();
  assert!(digest_ref.len() == 64);

  digest_ref.iter().fold(F::zero(), |mut acc, byte| {
    // Shift the accumulator left by eight bits, then add the next byte
    for _ in 0 .. 8 {
      acc = acc.double();
    }
    acc + F::from(u64::from(*byte))
  })
}
#[cfg(feature = "serialize")]
fn read_scalar<R: Read, F: PrimeField>(r: &mut R) -> io::Result<F> {
  // Read the field's fixed-size canonical representation
  let mut encoding = F::Repr::default();
  r.read_exact(encoding.as_mut())?;
  // from_repr rejects non-canonical encodings; surface that as an I/O error
  Option::<F>::from(F::from_repr(encoding))
    .ok_or_else(|| Error::new(ErrorKind::Other, "invalid scalar"))
}
// Error returned when verifying a (single-group) DLEq proof
#[derive(Error, Debug)]
pub enum DLEqError {
  // The challenge didn't match the recomputed challenge
  #[error("invalid proof")]
  InvalidProof
}
/// A proof that two points, over a pair of generators in a single group, share the same
/// discrete logarithm.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<G: PrimeGroup> {
  c: G::Scalar,  // Challenge
  s: G::Scalar   // Response
}
#[allow(non_snake_case)]
impl<G: PrimeGroup> DLEqProof<G> {
  // Fiat-Shamir challenge binding the generators, both nonces, and both points
  fn challenge<T: Transcript>(
    transcript: &mut T,
    generators: Generators<G>,
    nonces: (G, G),
    points: (G, G)
  ) -> G::Scalar {
    generators.transcript(transcript);
    transcript.domain_separate(b"dleq");
    transcript.append_message(b"nonce_primary", nonces.0.to_bytes().as_ref());
    transcript.append_message(b"nonce_alternate", nonces.1.to_bytes().as_ref());
    transcript.append_message(b"point_primary", points.0.to_bytes().as_ref());
    transcript.append_message(b"point_alternate", points.1.to_bytes().as_ref());
    challenge(transcript, b"challenge")
  }

  /// Prove both `generators.primary * scalar` and `generators.alt * scalar` share the
  /// discrete logarithm `scalar`.
  pub fn prove<R: RngCore + CryptoRng, T: Transcript>(
    rng: &mut R,
    transcript: &mut T,
    generators: Generators<G>,
    scalar: G::Scalar
  ) -> DLEqProof<G> {
    // One nonce, committed to over both generators
    let nonce = G::Scalar::random(rng);
    let nonces = (generators.primary * nonce, generators.alt * nonce);
    let points = (generators.primary * scalar, generators.alt * scalar);
    let c = Self::challenge(transcript, generators, nonces, points);
    DLEqProof { c, s: nonce + (c * scalar) }
  }

  /// Verify the pair of points share a discrete logarithm over the pair of generators.
  pub fn verify<T: Transcript>(
    &self,
    transcript: &mut T,
    generators: Generators<G>,
    points: (G, G)
  ) -> Result<(), DLEqError> {
    // Recover the nonce commitments: R = s*G - c*P for each generator
    let recovered = (
      (generators.primary * self.s) - (points.0 * self.c),
      (generators.alt * self.s) - (points.1 * self.c)
    );
    if Self::challenge(transcript, generators, recovered, points) == self.c {
      Ok(())
    } else {
      Err(DLEqError::InvalidProof)
    }
  }

  /// Serialize the proof as c || s, using each scalar's canonical representation.
  #[cfg(feature = "serialize")]
  pub fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(self.c.to_repr().as_ref())?;
    w.write_all(self.s.to_repr().as_ref())
  }

  /// Deserialize a proof, rejecting non-canonical scalars.
  #[cfg(feature = "serialize")]
  pub fn deserialize<R: Read>(r: &mut R) -> io::Result<DLEqProof<G>> {
    let c = read_scalar(r)?;
    let s = read_scalar(r)?;
    Ok(DLEqProof { c, s })
  }
}

View file

@ -0,0 +1,54 @@
mod scalar;
mod schnorr;
use hex_literal::hex;
use rand_core::OsRng;
use ff::Field;
use group::{Group, GroupEncoding};
use k256::{Scalar, ProjectivePoint};
use dalek_ff_group::{EdwardsPoint, CompressedEdwardsY};
use transcript::RecommendedTranscript;
use crate::{Generators, cross_group::DLEqProof};
#[test]
fn test_dleq() {
  let new_transcript = || RecommendedTranscript::new(b"Cross-Group DLEq Proof Test");

  // Primary generators plus fixed alternate generators for each group
  let generators = (
    Generators::new(
      ProjectivePoint::GENERATOR,
      ProjectivePoint::from_bytes(
        &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
      ).unwrap()
    ),
    Generators::new(
      EdwardsPoint::generator(),
      CompressedEdwardsY::new(
        hex!("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94")
      ).decompress().unwrap()
    )
  );

  let key = Scalar::random(&mut OsRng);
  let (proof, keys) = DLEqProof::prove(&mut OsRng, &mut new_transcript(), generators, key);

  // Verification returns the public keys, which must match the normalized scalars
  let public_keys = proof.verify(&mut new_transcript(), generators).unwrap();
  assert_eq!(generators.0.primary * keys.0, public_keys.0);
  assert_eq!(generators.1.primary * keys.1, public_keys.1);

  // Round-trip the proof through serialization and verify again
  #[cfg(feature = "serialize")]
  {
    let mut buf = vec![];
    proof.serialize(&mut buf).unwrap();
    let deserialized = DLEqProof::<ProjectivePoint, EdwardsPoint>::deserialize(
      &mut std::io::Cursor::new(&buf)
    ).unwrap();
    assert_eq!(proof, deserialized);
    deserialized.verify(&mut new_transcript(), generators).unwrap();
  }
}

View file

@ -0,0 +1,47 @@
use rand_core::OsRng;
use ff::{Field, PrimeField};
use k256::Scalar as K256Scalar;
use dalek_ff_group::Scalar as DalekScalar;
use crate::cross_group::scalar::{scalar_normalize, scalar_convert};
#[test]
fn test_scalar() {
  // Fixed points: zero and one normalize to themselves
  assert_eq!(
    scalar_normalize::<_, DalekScalar>(K256Scalar::zero()),
    (K256Scalar::zero(), DalekScalar::zero())
  );
  assert_eq!(
    scalar_normalize::<_, DalekScalar>(K256Scalar::one()),
    (K256Scalar::one(), DalekScalar::one())
  );

  let mut initial;
  loop {
    initial = K256Scalar::random(&mut OsRng);
    let (k, ed) = scalar_normalize::<_, DalekScalar>(initial);

    // The initial scalar should equal the new scalar with Ed25519's capacity
    let mut initial_bytes = (&initial.to_repr()).to_vec();
    // Drop the first 4 bits to hit 252
    initial_bytes[0] = initial_bytes[0] & 0b00001111;
    let k_bytes = (&k.to_repr()).to_vec();
    assert_eq!(initial_bytes, k_bytes);

    // The Ed25519 scalar, reversed to big endian, should byte-match the k256 one
    let mut ed_bytes = ed.to_repr().as_ref().to_vec();
    ed_bytes.reverse();
    assert_eq!(k_bytes, ed_bytes);

    // Verify conversion works as expected
    assert_eq!(scalar_convert::<_, DalekScalar>(k), Some(ed));

    // Stop once this secp256k1 scalar actually had bits cleared
    if initial != k {
      break;
    }
  }

  // Verify conversion returns None when the scalar isn't mutually valid
  assert!(scalar_convert::<_, DalekScalar>(initial).is_none());
}

View file

@ -0,0 +1,31 @@
use rand_core::OsRng;
use group::{ff::Field, prime::PrimeGroup};
use transcript::RecommendedTranscript;
use crate::cross_group::schnorr::SchnorrPoK;
// Prove and verify a Schnorr PoK for a random key over the group's generator
fn test_schnorr<G: PrimeGroup>() {
  let key = G::Scalar::random(&mut OsRng);
  let transcript = RecommendedTranscript::new(b"Schnorr Test");
  let proof = SchnorrPoK::prove(&mut OsRng, &mut transcript.clone(), G::generator(), key);
  assert!(proof.verify(&mut transcript.clone(), G::generator(), G::generator() * key));
}
// Run the Schnorr PoK test against secp256k1 via k256
#[test]
fn test_secp256k1() {
  test_schnorr::<k256::ProjectivePoint>();
}
// Run the Schnorr PoK test against Ed25519 via dalek-ff-group
#[test]
fn test_ed25519() {
  test_schnorr::<dalek_ff_group::EdwardsPoint>();
}

View file

@ -0,0 +1,43 @@
#[cfg(feature = "cross_group")]
mod cross_group;
use hex_literal::hex;
use rand_core::OsRng;
use ff::Field;
use group::GroupEncoding;
use k256::{Scalar, ProjectivePoint};
use transcript::RecommendedTranscript;
use crate::{Generators, DLEqProof};
#[test]
fn test_dleq() {
  let new_transcript = || RecommendedTranscript::new(b"DLEq Proof Test");

  // Primary generator plus a fixed alternate generator
  let generators = Generators::new(
    ProjectivePoint::GENERATOR,
    ProjectivePoint::from_bytes(
      &(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
    ).unwrap()
  );

  let key = Scalar::random(&mut OsRng);
  let keys = (generators.primary * key, generators.alt * key);
  let proof = DLEqProof::prove(&mut OsRng, &mut new_transcript(), generators, key);
  proof.verify(&mut new_transcript(), generators, keys).unwrap();

  // Round-trip the proof through serialization and verify again
  #[cfg(feature = "serialize")]
  {
    let mut buf = vec![];
    proof.serialize(&mut buf).unwrap();
    let deserialized =
      DLEqProof::<ProjectivePoint>::deserialize(&mut std::io::Cursor::new(&buf)).unwrap();
    assert_eq!(proof, deserialized);
    deserialized.verify(&mut new_transcript(), generators, keys).unwrap();
  }
}