Luke Parker 2022-11-10 22:35:09 -05:00
parent d714f2202d
commit 84de427d72
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
32 changed files with 313 additions and 278 deletions

Cargo.lock generated

@ -1657,7 +1657,7 @@ dependencies = [
[[package]]
name = "dkg"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"chacha20 0.9.0",
"ciphersuite",
@ -1677,7 +1677,7 @@ dependencies = [
[[package]]
name = "dleq"
version = "0.1.2"
version = "0.2.0"
dependencies = [
"blake2",
"dalek-ff-group",
@ -4588,7 +4588,7 @@ dependencies = [
[[package]]
name = "modular-frost"
version = "0.4.1"
version = "0.5.0"
dependencies = [
"chacha20 0.9.0",
"ciphersuite",
@ -4634,7 +4634,7 @@ dependencies = [
[[package]]
name = "monero-serai"
version = "0.1.1-alpha"
version = "0.1.2-alpha"
dependencies = [
"base58-monero",
"blake2",
@ -7349,7 +7349,7 @@ dependencies = [
[[package]]
name = "schnorr-signatures"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"blake2",
"ciphersuite",
@ -7619,6 +7619,7 @@ dependencies = [
"serde_json",
"thiserror",
"tokio",
"zeroize",
]
[[package]]


@ -1,6 +1,6 @@
[package]
name = "monero-serai"
version = "0.1.1-alpha"
version = "0.1.2-alpha"
description = "A modern Monero transaction library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
@ -34,8 +34,8 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.2", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.4", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.1", features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.2", features = ["serialize"], optional = true }
monero-generators = { path = "generators", version = "0.1" }
@ -55,7 +55,7 @@ monero-generators = { path = "generators", version = "0.1" }
[dev-dependencies]
tokio = { version = "1", features = ["full"] }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.4", features = ["ed25519", "tests"] }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519", "tests"] }
[features]
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]


@ -1,10 +1,12 @@
#![allow(non_snake_case)]
use core::ops::Deref;
use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, Choice, CtOption};
use curve25519_dalek::{
@ -233,7 +235,7 @@ impl Clsag {
/// sum_outputs is for the sum of the outputs' commitment masks.
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
mut inputs: Vec<(Scalar, EdwardsPoint, ClsagInput)>,
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
sum_outputs: Scalar,
msg: [u8; 32],
) -> Vec<(Clsag, EdwardsPoint)> {
@ -247,17 +249,19 @@ impl Clsag {
sum_pseudo_outs += mask;
}
let mut nonce = random_scalar(rng);
let mut nonce = Zeroizing::new(random_scalar(rng));
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
rng,
&inputs[i].1,
&inputs[i].2,
mask,
&msg,
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
nonce.deref() * &ED25519_BASEPOINT_TABLE,
nonce.deref() *
hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
);
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
clsag.s[usize::from(inputs[i].2.decoys.i)] =
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
inputs[i].0.zeroize();
nonce.zeroize();
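
The change above is the pattern this commit applies throughout: secret scalars move into Zeroizing<_> and are only borrowed via Deref for the arithmetic, so the operations themselves never produce an un-wiped copy of the secret. A minimal self-contained sketch of the idea, not this crate's code (OsRng and wide reduction are stand-ins for whatever sampling the caller uses):

use core::ops::Deref;
use rand_core::{RngCore, OsRng};
use zeroize::{Zeroize, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

fn main() {
  // Sample a scalar directly into the wrapper; it's wiped when dropped.
  let mut wide = [0u8; 64];
  OsRng.fill_bytes(&mut wide);
  let nonce = Zeroizing::new(Scalar::from_bytes_mod_order_wide(&wide));
  wide.zeroize();

  // Borrow the scalar for arithmetic instead of copying it out of the wrapper,
  // mirroring `nonce.deref() * &ED25519_BASEPOINT_TABLE` above.
  let commitment = nonce.deref() * &ED25519_BASEPOINT_TABLE;
  let _ = commitment;
  // `nonce` falls out of scope here and is zeroized by the wrapper.
}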


@ -1,4 +1,4 @@
use core::fmt::Debug;
use core::{ops::Deref, fmt::Debug};
use std::{
io::{self, Read, Write},
sync::{Arc, RwLock},
@ -7,7 +7,7 @@ use std::{
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
@ -157,7 +157,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
view: &ThresholdView<Ed25519>,
) -> ClsagAddendum {
ClsagAddendum {
key_image: dfg::EdwardsPoint(self.H * view.secret_share().0),
key_image: dfg::EdwardsPoint(self.H) * view.secret_share().deref(),
dleq: DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
@ -167,7 +167,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
// try to merge later in some form, when it should instead just merge xH (as it does)
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
dfg::Scalar(view.secret_share().0),
view.secret_share(),
),
}
}
@ -223,7 +223,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&mut self,
view: &ThresholdView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: &[dfg::Scalar],
nonces: Vec<Zeroizing<dfg::Scalar>>,
msg: &[u8],
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
@ -247,7 +247,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
nonces[0] - (dfg::Scalar(p) * view.secret_share())
(-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
}
#[must_use]


@ -1,4 +1,6 @@
use zeroize::Zeroize;
use core::ops::Deref;
use zeroize::Zeroizing;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
@ -17,10 +19,8 @@ use crate::{
};
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(mut secret: Scalar) -> EdwardsPoint {
let res = secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE);
secret.zeroize();
res
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
}
#[derive(Clone, PartialEq, Eq, Debug)]
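
With the new signature, a caller keeps the spend key inside Zeroizing for its whole lifetime and only lends it to generate_key_image. A small illustrative sketch of the call site, reusing the crate's random_scalar helper as seen elsewhere in this diff:

use zeroize::Zeroizing;
use rand_core::OsRng;

// The secret never exists as a bare Scalar owned by the caller.
let secret = Zeroizing::new(random_scalar(&mut OsRng));
// Borrowed here; the old API took the Scalar by value and zeroized its own copy.
let image = generate_key_image(&secret);
// `secret` is wiped automatically when it goes out of scope.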


@ -1,6 +1,8 @@
use core::ops::Deref;
#[cfg(feature = "multisig")]
use std::sync::{Arc, RwLock};
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
@ -35,29 +37,30 @@ fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];
let mut secrets = [Scalar::zero(), Scalar::zero()];
let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
let mut ring = vec![];
for i in 0 .. RING_LEN {
let dest = random_scalar(&mut OsRng);
let dest = Zeroizing::new(random_scalar(&mut OsRng));
let mask = random_scalar(&mut OsRng);
let amount;
if i == u64::from(real) {
secrets = [dest, mask];
secrets = (dest.clone(), mask);
amount = AMOUNT;
} else {
amount = OsRng.next_u64();
}
ring.push([&dest * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
ring
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
}
let image = generate_key_image(secrets[0]);
let image = generate_key_image(&secrets.0);
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
vec![(
secrets[0],
secrets.0,
image,
ClsagInput::new(
Commitment::new(secrets[1], AMOUNT),
Commitment::new(secrets.1, AMOUNT),
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).into_iter().collect(),


@ -1,9 +1,11 @@
use core::ops::Deref;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
@ -108,9 +110,9 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
rpc: &Rpc,
ring_len: usize,
inputs: &[SpendableOutput],
spend: &Scalar,
spend: &Zeroizing<Scalar>,
tx: &mut Transaction,
) -> Result<Vec<(Scalar, EdwardsPoint, ClsagInput)>, TransactionError> {
) -> Result<Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
// Select decoys
@ -125,9 +127,11 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
.map_err(TransactionError::RpcError)?;
for (i, input) in inputs.iter().enumerate() {
let input_spend = Zeroizing::new(input.key_offset() + spend.deref());
let image = generate_key_image(&input_spend);
signable.push((
spend + input.key_offset(),
generate_key_image(spend + input.key_offset()),
input_spend,
image,
ClsagInput::new(input.commitment().clone(), decoys[i].clone())
.map_err(TransactionError::ClsagError)?,
));
@ -358,16 +362,16 @@ impl SignableTransaction {
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Scalar,
spend: &Zeroizing<Scalar>,
) -> Result<Transaction, TransactionError> {
let mut images = Vec::with_capacity(self.inputs.len());
for input in &self.inputs {
let mut offset = spend + input.key_offset();
if (&offset * &ED25519_BASEPOINT_TABLE) != input.key() {
let mut offset = Zeroizing::new(spend.deref() + input.key_offset());
if (offset.deref() * &ED25519_BASEPOINT_TABLE) != input.key() {
Err(TransactionError::WrongPrivateKey)?;
}
images.push(generate_key_image(offset));
images.push(generate_key_image(&offset));
offset.zeroize();
}
images.sort_by(key_image_sort);


@ -1,9 +1,10 @@
use core::ops::Deref;
use std::{sync::Mutex, collections::HashSet};
#[cfg(feature = "multisig")]
use std::collections::HashMap;
use lazy_static::lazy_static;
use zeroize::Zeroizing;
use rand_core::OsRng;
#[cfg(feature = "multisig")]
@ -55,11 +56,11 @@ async fn send_core(test: usize, multisig: bool) {
let rpc = rpc().await;
// Generate an address
let spend = random_scalar(&mut OsRng);
let spend = Zeroizing::new(random_scalar(&mut OsRng));
#[allow(unused_mut)]
let mut view = random_scalar(&mut OsRng);
#[allow(unused_mut)]
let mut spend_pub = &spend * &ED25519_BASEPOINT_TABLE;
let mut spend_pub = spend.deref() * &ED25519_BASEPOINT_TABLE;
#[cfg(feature = "multisig")]
let keys = key_gen::<_, Ed25519>(&mut OsRng);


@ -1,6 +1,6 @@
[package]
name = "dkg"
version = "0.1.0"
version = "0.2.0"
description = "Distributed key generation over ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
@ -35,8 +35,8 @@ transcript = { package = "flexible-transcript", path = "../transcript", version
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" }
dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" }
dleq = { path = "../dleq", version = "0.2", features = ["serialize"] }
[features]
tests = []


@ -1,12 +1,13 @@
use std::{
marker::PhantomData,
ops::Deref,
io::{self, Read, Write},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use digest::Digest;
use hkdf::{Hkdf, hmac::SimpleHmac};
@ -48,12 +49,6 @@ pub struct Commitments<C: Ciphersuite> {
cached_msg: Vec<u8>,
sig: SchnorrSignature<C>,
}
impl<C: Ciphersuite> Drop for Commitments<C> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<C: Ciphersuite> ZeroizeOnDrop for Commitments<C> {}
impl<C: Ciphersuite> Commitments<C> {
pub fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
@ -117,9 +112,9 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
for i in 0 .. t {
// Step 1: Generate t random values to form a polynomial with
coefficients.push(C::random_nonzero_F(&mut *rng));
coefficients.push(Zeroizing::new(C::random_nonzero_F(&mut *rng)));
// Step 3: Generate public commitments
commitments.push(C::generator() * coefficients[i]);
commitments.push(C::generator() * coefficients[i].deref());
cached_msg.extend(commitments[i].to_bytes().as_ref());
}
@ -127,27 +122,22 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
// It would probably be perfectly fine to use one of our polynomial elements, yet doing so
// puts the integrity of FROST at risk. While there's almost no way it could, as it's used in
// an ECDH with validated group elements, better to avoid any questions on it
let enc_key = C::random_nonzero_F(&mut *rng);
let pub_enc_key = C::generator() * enc_key;
let enc_key = Zeroizing::new(C::random_nonzero_F(&mut *rng));
let pub_enc_key = C::generator() * enc_key.deref();
cached_msg.extend(pub_enc_key.to_bytes().as_ref());
// Step 2: Provide a proof of knowledge
let mut r = C::random_nonzero_F(rng);
let r = Zeroizing::new(C::random_nonzero_F(rng));
let nonce = C::generator() * r.deref();
let sig = SchnorrSignature::<C>::sign(
coefficients[0],
&coefficients[0],
// This could be deterministic as the PoK is a singleton never opened up to cooperative
// discussion
// There's no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism though
r,
challenge::<C>(
&self.context,
self.params.i(),
(C::generator() * r).to_bytes().as_ref(),
&cached_msg,
),
challenge::<C>(&self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg),
);
r.zeroize();
// Step 4: Broadcast
(
@ -157,19 +147,20 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
coefficients,
our_commitments: commitments.clone(),
enc_key,
pub_enc_key,
},
Commitments { commitments, enc_key: pub_enc_key, cached_msg, sig },
)
}
}
fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
fn polynomial<F: PrimeField + Zeroize>(coefficients: &[Zeroizing<F>], l: u16) -> Zeroizing<F> {
let l = F::from(u64::from(l));
let mut share = F::zero();
let mut share = Zeroizing::new(F::zero());
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
share += coefficient;
*share += coefficient.deref();
if idx != (coefficients.len() - 1) {
share *= l;
*share *= l;
}
}
share
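
The polynomial helper above is Horner's rule over the (now Zeroizing-wrapped) coefficients, with coefficients[0] as the constant term. A quick standalone illustration over plain integers, for intuition only (no field arithmetic, no zeroizing):

fn horner(coefficients: &[u64], l: u64) -> u64 {
  let mut share = 0;
  for (idx, coefficient) in coefficients.iter().rev().enumerate() {
    share += *coefficient;
    if idx != (coefficients.len() - 1) {
      share *= l;
    }
  }
  share
}

fn main() {
  // f(x) = 7 + 3x + 2x^2 evaluated at x = 5: 7 + 15 + 50 = 72
  assert_eq!(horner(&[7, 3, 2], 5), 72);
}
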
@ -250,16 +241,11 @@ fn create_ciphers<C: Ciphersuite>(
pub struct SecretShareMachine<C: Ciphersuite> {
params: ThresholdParams,
context: String,
coefficients: Vec<C::F>,
coefficients: Vec<Zeroizing<C::F>>,
our_commitments: Vec<C::G>,
enc_key: C::F,
enc_key: Zeroizing<C::F>,
pub_enc_key: C::G,
}
impl<C: Ciphersuite> Drop for SecretShareMachine<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Ciphersuite> ZeroizeOnDrop for SecretShareMachine<C> {}
impl<C: Ciphersuite> SecretShareMachine<C> {
/// Verify the data from the previous round (canonicity, PoKs, message authenticity)
@ -276,7 +262,6 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
.drain()
.map(|(l, mut msg)| {
enc_keys.insert(l, msg.enc_key);
msg.enc_key.zeroize();
// Step 5: Validate each proof of knowledge
// This is solely the prep step for the latter batch verification
@ -309,7 +294,7 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
let (commitments, mut enc_keys) = self.verify_r1(&mut *rng, commitments)?;
// Step 1: Generate secret shares for all other parties
let mut sender = (C::generator() * self.enc_key).to_bytes();
let sender = self.pub_enc_key.to_bytes();
let mut ciphers = HashMap::new();
let mut res = HashMap::new();
for l in 1 ..= self.params.n() {
@ -321,7 +306,7 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
let (mut cipher_send, cipher_recv) = {
let receiver = enc_keys.get_mut(&l).unwrap();
let mut ecdh = (*receiver * self.enc_key).to_bytes();
let mut ecdh = (*receiver * self.enc_key.deref()).to_bytes();
create_ciphers::<C>(sender, &mut receiver.to_bytes(), &mut ecdh)
};
@ -338,11 +323,9 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
share_bytes.as_mut().zeroize();
}
self.enc_key.zeroize();
sender.as_mut().zeroize();
// Calculate our own share
let share = polynomial(&self.coefficients, self.params.i());
self.coefficients.zeroize();
Ok((KeyMachine { params: self.params, secret: share, commitments, ciphers }, res))
@ -352,7 +335,7 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
/// Final step of the key generation protocol.
pub struct KeyMachine<C: Ciphersuite> {
params: ThresholdParams,
secret: C::F,
secret: Zeroizing<C::F>,
ciphers: HashMap<u16, ChaCha20>,
commitments: HashMap<u16, Vec<C::G>>,
}
@ -390,9 +373,6 @@ impl<C: Ciphersuite> KeyMachine<C> {
rng: &mut R,
mut shares: HashMap<u16, SecretShare<C::F>>,
) -> Result<ThresholdCore<C>, DkgError> {
let mut secret_share = self.secret;
self.secret.zeroize();
validate_map(&shares, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;
// Calculate the exponent for a given participant and apply it to a series of commitments
@ -414,17 +394,19 @@ impl<C: Ciphersuite> KeyMachine<C> {
cipher.apply_keystream(share_bytes.0.as_mut());
drop(cipher);
let mut share: C::F =
Option::from(C::F::from_repr(share_bytes.0)).ok_or(DkgError::InvalidShare(l))?;
let mut share = Zeroizing::new(
Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or(DkgError::InvalidShare(l))?,
);
share_bytes.zeroize();
secret_share += share;
*self.secret += share.deref();
// This can be insecurely linearized from n * t to just n using the below sums for a given
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
// ensure that malleability isn't present is to use this n * t algorithm, which runs
// per sender and not as an aggregate of all senders, which also enables blame
let mut values = exponential(self.params.i, &self.commitments[&l]);
values.push((-share, C::generator()));
// multiexp will Zeroize this when it's done with it
values.push((-*share.deref(), C::generator()));
share.zeroize();
batch.queue(rng, l, values);
@ -443,14 +425,19 @@ impl<C: Ciphersuite> KeyMachine<C> {
// Calculate each user's verification share
let mut verification_shares = HashMap::new();
for i in 1 ..= self.params.n() {
verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes)));
verification_shares.insert(
i,
if i == self.params.i() {
C::generator() * self.secret.deref()
} else {
multiexp_vartime(&exponential(i, &stripes))
},
);
}
// Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t)
debug_assert_eq!(C::generator() * secret_share, verification_shares[&self.params.i()]);
Ok(ThresholdCore {
params: self.params,
secret_share,
secret_share: self.secret.clone(),
group_key: stripes[0],
verification_shares,
})


@ -6,12 +6,12 @@
//! Additional utilities around them, such as promotion from one generator to another, are also
//! provided.
use core::fmt::Debug;
use core::{fmt::Debug, ops::Deref};
use std::{io::Read, sync::Arc, collections::HashMap};
use thiserror::Error;
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, Zeroizing};
use group::{
ff::{Field, PrimeField},
@ -153,7 +153,7 @@ pub struct ThresholdCore<C: Ciphersuite> {
params: ThresholdParams,
/// Secret share key.
secret_share: C::F,
secret_share: Zeroizing<C::F>,
/// Group key.
group_key: C::G,
/// Verification shares.
@ -170,17 +170,11 @@ impl<C: Ciphersuite> Zeroize for ThresholdCore<C> {
}
}
}
impl<C: Ciphersuite> Drop for ThresholdCore<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdCore<C> {}
impl<C: Ciphersuite> ThresholdCore<C> {
pub(crate) fn new(
params: ThresholdParams,
secret_share: C::F,
secret_share: Zeroizing<C::F>,
verification_shares: HashMap<u16, C::G>,
) -> ThresholdCore<C> {
#[cfg(debug_assertions)]
@ -198,8 +192,8 @@ impl<C: Ciphersuite> ThresholdCore<C> {
self.params
}
pub fn secret_share(&self) -> C::F {
self.secret_share
pub fn secret_share(&self) -> &Zeroizing<C::F> {
&self.secret_share
}
pub fn group_key(&self) -> C::G {
@ -253,8 +247,9 @@ impl<C: Ciphersuite> ThresholdCore<C> {
(read_u16()?, read_u16()?, read_u16()?)
};
let secret_share =
C::read_F(reader).map_err(|_| DkgError::InternalError("invalid secret share"))?;
let secret_share = Zeroizing::new(
C::read_F(reader).map_err(|_| DkgError::InternalError("invalid secret share"))?,
);
let mut verification_shares = HashMap::new();
for l in 1 ..= n {
@ -284,32 +279,17 @@ pub struct ThresholdKeys<C: Ciphersuite> {
pub(crate) offset: Option<C::F>,
}
// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786
impl<C: Ciphersuite> Drop for ThresholdKeys<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdKeys<C> {}
/// View of keys passed to algorithm implementations.
#[derive(Clone, Zeroize)]
pub struct ThresholdView<C: Ciphersuite> {
group_key: C::G,
#[zeroize(skip)]
included: Vec<u16>,
secret_share: C::F,
secret_share: Zeroizing<C::F>,
#[zeroize(skip)]
verification_shares: HashMap<u16, C::G>,
}
impl<C: Ciphersuite> Drop for ThresholdView<C> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdView<C> {}
impl<C: Ciphersuite> ThresholdKeys<C> {
pub fn new(core: ThresholdCore<C>) -> ThresholdKeys<C> {
ThresholdKeys { core: Arc::new(core), offset: None }
@ -336,8 +316,8 @@ impl<C: Ciphersuite> ThresholdKeys<C> {
self.core.params
}
pub fn secret_share(&self) -> C::F {
self.core.secret_share
pub fn secret_share(&self) -> &Zeroizing<C::F> {
&self.core.secret_share
}
/// Returns the group key with any offset applied.
@ -366,8 +346,9 @@ impl<C: Ciphersuite> ThresholdKeys<C> {
Ok(ThresholdView {
group_key: self.group_key(),
secret_share: (self.secret_share() * lagrange::<C::F>(self.params().i, included)) +
offset_share,
secret_share: Zeroizing::new(
(lagrange::<C::F>(self.params().i, included) * self.secret_share().deref()) + offset_share,
),
verification_shares: self
.verification_shares()
.iter()
@ -389,8 +370,8 @@ impl<C: Ciphersuite> ThresholdView<C> {
self.included.clone()
}
pub fn secret_share(&self) -> C::F {
self.secret_share
pub fn secret_share(&self) -> &Zeroizing<C::F> {
&self.secret_share
}
pub fn verification_share(&self, l: u16) -> C::G {


@ -1,5 +1,5 @@
use core::{marker::PhantomData, ops::Deref};
use std::{
marker::PhantomData,
io::{self, Read, Write},
sync::Arc,
collections::HashMap,
@ -82,7 +82,7 @@ where
) -> (GeneratorPromotion<C1, C2>, GeneratorProof<C1>) {
// Do a DLEqProof for the new generator
let proof = GeneratorProof {
share: C2::generator() * base.secret_share(),
share: C2::generator() * base.secret_share().deref(),
proof: DLEqProof::prove(
rng,
&mut transcript(base.core.group_key(), base.params().i),
@ -120,7 +120,11 @@ where
}
Ok(ThresholdKeys {
core: Arc::new(ThresholdCore::new(params, self.base.secret_share(), verification_shares)),
core: Arc::new(ThresholdCore::new(
params,
self.base.secret_share().clone(),
verification_shares,
)),
offset: None,
})
}


@ -1,3 +1,4 @@
use core::ops::Deref;
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
@ -38,7 +39,7 @@ pub fn recover_key<C: Ciphersuite>(keys: &HashMap<u16, ThresholdKeys<C>>) -> C::
let included = keys.keys().cloned().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| {
accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
accum + (lagrange::<C::F>(*i, &included) * keys.secret_share().deref())
});
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
group_private


@ -1,4 +1,5 @@
use std::{marker::PhantomData, collections::HashMap};
use core::{marker::PhantomData, ops::Deref};
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
@ -54,7 +55,10 @@ pub(crate) fn test_generator_promotion<R: RngCore + CryptoRng, C: Ciphersuite>(r
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
assert_eq!(new_group_key, promoted.group_key());
for (l, verification_share) in promoted.verification_shares() {
assert_eq!(AltGenerator::<C>::generator() * keys[&l].secret_share(), verification_share);
assert_eq!(
AltGenerator::<C>::generator() * keys[&l].secret_share().deref(),
verification_share
);
}
}
}


@ -1,6 +1,6 @@
[package]
name = "dleq"
version = "0.1.2"
version = "0.2.0"
description = "Implementation of single and cross-curve Discrete Log Equality proofs"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dleq"


@ -1,8 +1,10 @@
use core::ops::Deref;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, Zeroizing};
use digest::{Digest, HashMarker};
@ -18,7 +20,7 @@ pub mod scalar;
use scalar::{scalar_convert, mutual_scalar_from_bytes};
pub(crate) mod schnorr;
use schnorr::SchnorrPoK;
use self::schnorr::SchnorrPoK;
pub(crate) mod aos;
@ -185,17 +187,17 @@ where
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
f: (G0::Scalar, G1::Scalar),
) -> (Self, (G0::Scalar, G1::Scalar)) {
f: (Zeroizing<G0::Scalar>, Zeroizing<G1::Scalar>),
) -> (Self, (Zeroizing<G0::Scalar>, Zeroizing<G1::Scalar>)) {
Self::transcript(
transcript,
generators,
((generators.0.primary * f.0), (generators.1.primary * f.1)),
((generators.0.primary * f.0.deref()), (generators.1.primary * f.1.deref())),
);
let poks = (
SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, f.0),
SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1),
SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, &f.0),
SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, &f.1),
);
let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
@ -269,7 +271,7 @@ where
let proof = __DLEqProof { bits, remainder, poks };
debug_assert_eq!(
proof.reconstruct_keys(),
(generators.0.primary * f.0, generators.1.primary * f.1)
(generators.0.primary * f.0.deref(), generators.1.primary * f.1.deref())
);
(proof, f)
}
@ -286,13 +288,17 @@ where
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
digest: D,
) -> (Self, (G0::Scalar, G1::Scalar)) {
Self::prove_internal(
rng,
transcript,
generators,
mutual_scalar_from_bytes(digest.finalize().as_ref()),
)
) -> (Self, (Zeroizing<G0::Scalar>, Zeroizing<G1::Scalar>)) {
// This pattern theoretically prevents the compiler from moving it, so our protection against
// a copy remaining un-zeroized is actually what's causing a copy. There's still a feeling of
// safety granted by it, even if there's a loss in performance.
let (mut f0, mut f1) =
mutual_scalar_from_bytes::<G0::Scalar, G1::Scalar>(digest.finalize().as_ref());
let f = (Zeroizing::new(f0), Zeroizing::new(f1));
f0.zeroize();
f1.zeroize();
Self::prove_internal(rng, transcript, generators, f)
}
/// Prove the cross-Group Discrete Log Equality for the points derived from the scalar passed in,
@ -302,9 +308,10 @@ where
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
f0: G0::Scalar,
) -> Option<(Self, (G0::Scalar, G1::Scalar))> {
scalar_convert(f0).map(|f1| Self::prove_internal(rng, transcript, generators, (f0, f1)))
f0: Zeroizing<G0::Scalar>,
) -> Option<(Self, (Zeroizing<G0::Scalar>, Zeroizing<G1::Scalar>))> {
scalar_convert(*f0.deref()) // scalar_convert will zeroize it, though this is unfortunate
.map(|f1| Self::prove_internal(rng, transcript, generators, (f0, Zeroizing::new(f1))))
}
/// Verify a cross-Group Discrete Log Equality statement, returning the points proven for.


@ -1,6 +1,8 @@
use core::ops::Deref;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, Zeroizing};
use transcript::Transcript;
@ -44,18 +46,17 @@ where
rng: &mut R,
transcript: &mut T,
generator: G,
mut private_key: G::Scalar,
private_key: &Zeroizing<G::Scalar>,
) -> SchnorrPoK<G> {
let mut nonce = G::Scalar::random(rng);
let nonce = Zeroizing::new(G::Scalar::random(rng));
#[allow(non_snake_case)]
let R = generator * nonce;
let res = SchnorrPoK {
let R = generator * nonce.deref();
SchnorrPoK {
R,
s: nonce + (private_key * SchnorrPoK::hra(transcript, generator, R, generator * private_key)),
};
private_key.zeroize();
nonce.zeroize();
res
s: (SchnorrPoK::hra(transcript, generator, R, generator * private_key.deref()) *
private_key.deref()) +
nonce.deref(),
}
}
pub(crate) fn verify<R: RngCore + CryptoRng, T: Transcript>(


@ -1,9 +1,11 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
use core::ops::Deref;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, Zeroizing};
use transcript::Transcript;
@ -79,23 +81,20 @@ impl<G: PrimeGroup> DLEqProof<G> {
rng: &mut R,
transcript: &mut T,
generators: &[G],
mut scalar: G::Scalar,
scalar: &Zeroizing<G::Scalar>,
) -> DLEqProof<G>
where
G::Scalar: Zeroize,
{
let mut r = G::Scalar::random(rng);
let r = Zeroizing::new(G::Scalar::random(rng));
transcript.domain_separate(b"dleq");
for generator in generators {
Self::transcript(transcript, *generator, *generator * r, *generator * scalar);
Self::transcript(transcript, *generator, *generator * r.deref(), *generator * scalar.deref());
}
let c = challenge(transcript);
let s = r + (c * scalar);
scalar.zeroize();
r.zeroize();
let s = (c * scalar.deref()) + r.deref();
DLEqProof { c, s }
}
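
For context, the proof built above is the standard multi-generator DLEq shape: transcript (G_i, G_i * r, G_i * x) for every generator, derive a challenge c, and respond with s = (c * x) + r, written with the owned non-secret operand on the left so the secret is only a borrowed right-hand side. A verifier reconstructs each nonce commitment as (G_i * s) - (K_i * c) and recomputes the challenge. A self-contained sketch of that shape over Ristretto, with a plain hash standing in for this crate's transcript (names and signatures below are illustrative, not this crate's API):

use curve25519_dalek::{ristretto::RistrettoPoint, scalar::Scalar};
use sha2::{Digest, Sha512};

// Hash a list of points into a challenge scalar (stand-in for the real transcript).
fn challenge(points: &[RistrettoPoint]) -> Scalar {
  let mut hasher = Sha512::new();
  for point in points {
    hasher.update(point.compress().as_bytes());
  }
  Scalar::from_hash(hasher)
}

// Prove every K_i = G_i * x shares the discrete log x. A real prover samples r
// randomly per proof and wipes it afterwards; it's a parameter here for brevity.
fn prove(generators: &[RistrettoPoint], x: &Scalar, r: &Scalar) -> (Scalar, Scalar) {
  let mut transcript = vec![];
  for g in generators {
    transcript.extend([*g, g * r, g * x]);
  }
  let c = challenge(&transcript);
  (c, (c * x) + r)
}

// Verify by reconstructing each R_i = (G_i * s) - (K_i * c) and recomputing c.
fn verify(generators: &[RistrettoPoint], keys: &[RistrettoPoint], c: Scalar, s: Scalar) -> bool {
  let mut transcript = vec![];
  for (g, k) in generators.iter().zip(keys) {
    transcript.extend([*g, (g * s) - (k * c), *k]);
  }
  challenge(&transcript) == c
}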


@ -1,4 +1,8 @@
use core::ops::Deref;
use hex_literal::hex;
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use ff::{Field, PrimeField};
@ -19,7 +23,6 @@ use crate::{
};
mod scalar;
mod schnorr;
mod aos;
type G0 = ProjectivePoint;
@ -51,8 +54,8 @@ pub(crate) fn generators() -> (Generators<G0>, Generators<G1>) {
macro_rules! verify_and_deserialize {
($type: ty, $proof: ident, $generators: ident, $keys: ident) => {
let public_keys = $proof.verify(&mut OsRng, &mut transcript(), $generators).unwrap();
assert_eq!($generators.0.primary * $keys.0, public_keys.0);
assert_eq!($generators.1.primary * $keys.1, public_keys.1);
assert_eq!($generators.0.primary * $keys.0.deref(), public_keys.0);
assert_eq!($generators.1.primary * $keys.1.deref(), public_keys.1);
#[cfg(feature = "serialize")]
{
@ -117,8 +120,8 @@ macro_rules! test_dleq {
let mut key;
let mut res;
while {
key = Scalar::random(&mut OsRng);
res = $type::prove_without_bias(&mut OsRng, &mut transcript(), generators, key);
key = Zeroizing::new(Scalar::random(&mut OsRng));
res = $type::prove_without_bias(&mut OsRng, &mut transcript(), generators, key.clone());
res.is_none()
} {}
let res = res.unwrap();
@ -156,8 +159,13 @@ fn test_rejection_sampling() {
assert!(
// Either would work
EfficientLinearDLEq::prove_without_bias(&mut OsRng, &mut transcript(), generators(), pow_2)
.is_none()
EfficientLinearDLEq::prove_without_bias(
&mut OsRng,
&mut transcript(),
generators(),
Zeroizing::new(pow_2)
)
.is_none()
);
}
@ -167,13 +175,18 @@ fn test_remainder() {
assert_eq!(Scalar::CAPACITY, 255);
let generators = (generators().0, generators().0);
// This will ignore any unused bits, ensuring every remaining one is set
let keys = mutual_scalar_from_bytes(&[0xFF; 32]);
assert_eq!(keys.0 + Scalar::one(), Scalar::from(2u64).pow_vartime(&[255]));
let keys = mutual_scalar_from_bytes::<Scalar, Scalar>(&[0xFF; 32]);
let keys = (Zeroizing::new(keys.0), Zeroizing::new(keys.1));
assert_eq!(Scalar::one() + keys.0.deref(), Scalar::from(2u64).pow_vartime(&[255]));
assert_eq!(keys.0, keys.1);
let (proof, res) =
ConciseLinearDLEq::prove_without_bias(&mut OsRng, &mut transcript(), generators, keys.0)
.unwrap();
let (proof, res) = ConciseLinearDLEq::prove_without_bias(
&mut OsRng,
&mut transcript(),
generators,
keys.0.clone(),
)
.unwrap();
assert_eq!(keys, res);
verify_and_deserialize!(


@ -1,3 +1,5 @@
use core::ops::Deref;
use rand_core::OsRng;
use zeroize::Zeroize;
@ -20,12 +22,12 @@ where
let mut batch = BatchVerifier::new(10);
for _ in 0 .. 10 {
let private = G::Scalar::random(&mut OsRng);
SchnorrPoK::prove(&mut OsRng, &mut transcript.clone(), G::generator(), private).verify(
let private = Zeroizing::new(G::Scalar::random(&mut OsRng));
SchnorrPoK::prove(&mut OsRng, &mut transcript.clone(), G::generator(), &private).verify(
&mut OsRng,
&mut transcript.clone(),
G::generator(),
G::generator() * private,
G::generator() * private.deref(),
&mut batch,
);
}


@ -1,9 +1,11 @@
#[cfg(feature = "experimental")]
mod cross_group;
use core::ops::Deref;
use hex_literal::hex;
use rand_core::OsRng;
use zeroize::Zeroizing;
use ff::Field;
use group::GroupEncoding;
@ -13,6 +15,9 @@ use transcript::{Transcript, RecommendedTranscript};
use crate::DLEqProof;
#[cfg(feature = "experimental")]
mod cross_group;
#[test]
fn test_dleq() {
let transcript = || RecommendedTranscript::new(b"DLEq Proof Test");
@ -39,12 +44,12 @@ fn test_dleq() {
];
for i in 0 .. 5 {
let key = Scalar::random(&mut OsRng);
let proof = DLEqProof::prove(&mut OsRng, &mut transcript(), &generators[.. i], key);
let key = Zeroizing::new(Scalar::random(&mut OsRng));
let proof = DLEqProof::prove(&mut OsRng, &mut transcript(), &generators[.. i], &key);
let mut keys = [ProjectivePoint::GENERATOR; 5];
for k in 0 .. 5 {
keys[k] = generators[k] * key;
keys[k] = generators[k] * key.deref();
}
proof.verify(&mut transcript(), &generators[.. i], &keys[.. i]).unwrap();


@ -1,6 +1,6 @@
[package]
name = "modular-frost"
version = "0.4.1"
version = "0.5.0"
description = "Modular implementation of FROST over ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/frost"
@ -38,10 +38,10 @@ transcript = { package = "flexible-transcript", path = "../transcript", version
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" }
dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" }
dleq = { path = "../dleq", version = "0.2", features = ["serialize"] }
dkg = { path = "../dkg", version = "0.1.0" }
dkg = { path = "../dkg", version = "0.2" }
[dev-dependencies]
serde_json = "1"


@ -1,6 +1,7 @@
use core::{marker::PhantomData, fmt::Debug};
use std::io::{self, Read, Write};
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
@ -66,7 +67,7 @@ pub trait Algorithm<C: Curve>: Clone {
&mut self,
params: &ThresholdView<C>,
nonce_sums: &[Vec<C::G>],
nonces: &[C::F],
nonces: Vec<Zeroizing<C::F>>,
msg: &[u8],
) -> C::F;
@ -161,12 +162,12 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
&mut self,
params: &ThresholdView<C>,
nonce_sums: &[Vec<C::G>],
nonces: &[C::F],
mut nonces: Vec<Zeroizing<C::F>>,
msg: &[u8],
) -> C::F {
let c = H::hram(&nonce_sums[0][0], &params.group_key(), msg);
self.c = Some(c);
SchnorrSignature::<C>::sign(params.secret_share(), nonces[0], c).s
SchnorrSignature::<C>::sign(params.secret_share(), nonces.swap_remove(0), c).s
}
#[must_use]


@ -1,8 +1,9 @@
use core::ops::Deref;
use std::io::{self, Read};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, Zeroizing};
use subtle::ConstantTimeEq;
use digest::Digest;
@ -67,26 +68,29 @@ pub trait Curve: Ciphersuite {
}
/// Securely generate a random nonce. H3 from the IETF draft.
fn random_nonce<R: RngCore + CryptoRng>(mut secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
fn random_nonce<R: RngCore + CryptoRng>(
secret: &Zeroizing<Self::F>,
rng: &mut R,
) -> Zeroizing<Self::F> {
let mut seed = Zeroizing::new(vec![0; 32]);
rng.fill_bytes(seed.as_mut());
let mut repr = secret.to_repr();
secret.zeroize();
let mut res;
while {
seed.extend(repr.as_ref());
res = <Self as Curve>::hash_to_F(b"nonce", &seed);
res = Zeroizing::new(<Self as Curve>::hash_to_F(b"nonce", seed.deref()));
res.ct_eq(&Self::F::zero()).into()
} {
seed = Zeroizing::new(vec![0; 32]);
rng.fill_bytes(&mut seed);
}
for i in repr.as_mut() {
i.zeroize();
}
seed.zeroize();
res
}


@ -8,6 +8,7 @@
// Each nonce remains of the form (d, e) and made into a proper nonce with d + (e * b)
// When multiple D, E pairs are provided, a DLEq proof is also provided to confirm their integrity
use core::ops::Deref;
use std::{
io::{self, Read, Write},
collections::HashMap,
@ -15,7 +16,7 @@ use std::{
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::{Zeroize, Zeroizing};
use transcript::Transcript;
@ -33,13 +34,7 @@ fn dleq_transcript<T: Transcript>() -> T {
// Each nonce is actually a pair of random scalars, notated as d, e under the FROST paper
// This is considered a single nonce as r = d + be
#[derive(Clone, Zeroize)]
pub(crate) struct Nonce<C: Curve>(pub(crate) [C::F; 2]);
impl<C: Curve> Drop for Nonce<C> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<C: Curve> ZeroizeOnDrop for Nonce<C> {}
pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);
// Commitments to a specific generator for this nonce
#[derive(Copy, Clone, PartialEq, Eq)]
@ -70,16 +65,20 @@ pub(crate) struct NonceCommitments<C: Curve> {
impl<C: Curve> NonceCommitments<C> {
pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
rng: &mut R,
mut secret_share: C::F,
secret_share: &Zeroizing<C::F>,
generators: &[C::G],
) -> (Nonce<C>, NonceCommitments<C>) {
let nonce =
Nonce([C::random_nonce(secret_share, &mut *rng), C::random_nonce(secret_share, &mut *rng)]);
secret_share.zeroize();
let nonce = Nonce::<C>([
C::random_nonce(secret_share, &mut *rng),
C::random_nonce(secret_share, &mut *rng),
]);
let mut commitments = Vec::with_capacity(generators.len());
for generator in generators {
commitments.push(GeneratorCommitments([*generator * nonce.0[0], *generator * nonce.0[1]]));
commitments.push(GeneratorCommitments([
*generator * nonce.0[0].deref(),
*generator * nonce.0[1].deref(),
]));
}
let mut dleqs = None;
@ -91,7 +90,7 @@ impl<C: Curve> NonceCommitments<C> {
// TODO: At least include a challenge from the existing transcript
DLEqProof::prove(&mut *rng, &mut dleq_transcript::<T>(), generators, nonce)
};
dleqs = Some([dleq(nonce.0[0]), dleq(nonce.0[1])]);
dleqs = Some([dleq(&nonce.0[0]), dleq(&nonce.0[1])]);
}
(nonce, NonceCommitments { generators: commitments, dleqs })
@ -145,7 +144,7 @@ pub(crate) struct Commitments<C: Curve> {
impl<C: Curve> Commitments<C> {
pub(crate) fn new<R: RngCore + CryptoRng, T: Transcript>(
rng: &mut R,
secret_share: C::F,
secret_share: &Zeroizing<C::F>,
planned_nonces: &[Vec<C::G>],
) -> (Vec<Nonce<C>>, Commitments<C>) {
let mut nonces = vec![];
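
The comment at the top of this file describes combining a nonce pair (d, e) into the single nonce r = d + b * e using the binding factor b; the sign machine later in this commit performs exactly that combination in place over the Zeroizing pair. A minimal sketch of the operation (function name illustrative):

use core::ops::Deref;
use zeroize::Zeroizing;
use curve25519_dalek::scalar::Scalar;

// Combine (d, e) into r = d + b * e without copying either scalar out of its wrapper.
fn bind_nonce(d: Zeroizing<Scalar>, mut e: Zeroizing<Scalar>, b: Scalar) -> Zeroizing<Scalar> {
  *e *= b;         // e := b * e, mutated inside the wrapper
  *e += d.deref(); // e := d + b * e
  e                // d is dropped, and therefore wiped, here
}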


@ -1,4 +1,4 @@
use core::fmt;
use core::{ops::Deref, fmt::Debug};
use std::{
io::{self, Read, Write},
collections::HashMap,
@ -6,7 +6,7 @@ use std::{
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use zeroize::Zeroize;
use transcript::Transcript;
@ -49,12 +49,6 @@ pub struct Params<C: Curve, A: Algorithm<C>> {
keys: ThresholdKeys<C>,
view: ThresholdView<C>,
}
impl<C: Curve, A: Algorithm<C>> Drop for Params<C, A> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve, A: Algorithm<C>> ZeroizeOnDrop for Params<C, A> {}
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
pub fn new(
@ -122,7 +116,7 @@ pub trait PreprocessMachine {
/// Preprocess message for this machine.
type Preprocess: Clone + PartialEq + Writable;
/// Signature produced by this machine.
type Signature: Clone + PartialEq + fmt::Debug;
type Signature: Clone + PartialEq + Debug;
/// SignMachine this PreprocessMachine turns into.
type SignMachine: SignMachine<Self::Signature, Preprocess = Self::Preprocess>;
@ -213,22 +207,13 @@ pub trait SignMachine<S> {
}
/// Next step of the state machine for the signing process.
#[derive(Zeroize)]
pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>,
pub(crate) nonces: Vec<Nonce<C>>,
#[zeroize(skip)]
pub(crate) preprocess: Preprocess<C, A::Addendum>,
}
impl<C: Curve, A: Algorithm<C>> Zeroize for AlgorithmSignMachine<C, A> {
fn zeroize(&mut self) {
self.nonces.zeroize()
}
}
impl<C: Curve, A: Algorithm<C>> Drop for AlgorithmSignMachine<C, A> {
fn drop(&mut self) {
self.zeroize()
}
}
impl<C: Curve, A: Algorithm<C>> ZeroizeOnDrop for AlgorithmSignMachine<C, A> {}
impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachine<C, A> {
type Preprocess = Preprocess<C, A::Addendum>;
@ -336,16 +321,19 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
let Rs = B.nonces(&nonces);
let our_binding_factors = B.binding_factors(multisig_params.i());
let mut nonces = self
let nonces = self
.nonces
.iter()
.drain(..)
.enumerate()
.map(|(n, nonces)| nonces.0[0] + (nonces.0[1] * our_binding_factors[n]))
.map(|(n, nonces)| {
let [base, mut actual] = nonces.0;
*actual *= our_binding_factors[n];
*actual += base.deref();
actual
})
.collect::<Vec<_>>();
self.nonces.zeroize();
let share = self.params.algorithm.sign_share(&self.params.view, &Rs, &nonces, msg);
nonces.zeroize();
let share = self.params.algorithm.sign_share(&self.params.view, &Rs, nonces, msg);
Ok((
AlgorithmSignatureMachine { params: self.params.clone(), B, Rs, share },


@ -1,7 +1,10 @@
use core::ops::Deref;
use std::collections::HashMap;
#[cfg(test)]
use std::str::FromStr;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use group::{ff::PrimeField, GroupEncoding};
@ -103,7 +106,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, Thresho
assert_eq!(these_keys.params().t(), vectors.threshold);
assert_eq!(usize::from(these_keys.params().n()), shares.len());
assert_eq!(these_keys.params().i(), i);
assert_eq!(these_keys.secret_share(), shares[usize::from(i - 1)]);
assert_eq!(these_keys.secret_share().deref(), &shares[usize::from(i - 1)]);
assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key);
keys.insert(i, ThresholdKeys::new(these_keys));
}
@ -148,12 +151,15 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
let mut machines = machines
.drain(..)
.map(|(i, machine)| {
let nonces = [
C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][0]).unwrap().as_ref()).unwrap(),
C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][1]).unwrap().as_ref()).unwrap(),
];
let nonce = |i| {
Zeroizing::new(
C::read_F::<&[u8]>(&mut hex::decode(&vectors.nonces[c][i]).unwrap().as_ref()).unwrap(),
)
};
let nonces = [nonce(0), nonce(1)];
c += 1;
let these_commitments = [C::generator() * nonces[0], C::generator() * nonces[1]];
let these_commitments =
[C::generator() * nonces[0].deref(), C::generator() * nonces[1].deref()];
let machine = machine.unsafe_override_preprocess(
vec![Nonce(nonces)],
Preprocess {


@ -1,6 +1,6 @@
[package]
name = "schnorr-signatures"
version = "0.1.0"
version = "0.2.0"
description = "Minimal Schnorr signatures crate hosting common code"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr"


@ -1,8 +1,9 @@
use core::ops::Deref;
use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use zeroize::{Zeroize, Zeroizing};
use group::{
ff::{Field, PrimeField},
@ -46,11 +47,16 @@ impl<C: Ciphersuite> SchnorrSignature<C> {
}
/// Sign a Schnorr signature with the given nonce for the specified challenge.
pub fn sign(mut private_key: C::F, mut nonce: C::F, challenge: C::F) -> SchnorrSignature<C> {
let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) };
private_key.zeroize();
nonce.zeroize();
res
pub fn sign(
private_key: &Zeroizing<C::F>,
nonce: Zeroizing<C::F>,
challenge: C::F,
) -> SchnorrSignature<C> {
SchnorrSignature {
// Uses deref instead of * as * returns C::F yet deref returns &C::F, preventing a copy
R: C::generator() * nonce.deref(),
s: (challenge * private_key.deref()) + nonce.deref(),
}
}
/// Verify a Schnorr signature for the given key with the specified challenge.


@ -1,5 +1,9 @@
use core::ops::Deref;
use rand_core::OsRng;
use zeroize::Zeroizing;
use blake2::{digest::typenum::U32, Blake2b};
type Blake2b256 = Blake2b<U32>;
@ -14,11 +18,11 @@ use crate::{
};
pub(crate) fn sign<C: Ciphersuite>() {
let private_key = C::random_nonzero_F(&mut OsRng);
let nonce = C::random_nonzero_F(&mut OsRng);
let private_key = Zeroizing::new(C::random_nonzero_F(&mut OsRng));
let nonce = Zeroizing::new(C::random_nonzero_F(&mut OsRng));
let challenge = C::random_nonzero_F(&mut OsRng); // Doesn't bother to craft an HRAm
assert!(SchnorrSignature::<C>::sign(private_key, nonce, challenge)
.verify(C::generator() * private_key, challenge));
assert!(SchnorrSignature::<C>::sign(&private_key, nonce, challenge)
.verify(C::generator() * private_key.deref(), challenge));
}
// The above sign function verifies signing works
@ -35,16 +39,20 @@ pub(crate) fn batch_verify<C: Ciphersuite>() {
let mut challenges = vec![];
let mut sigs = vec![];
for i in 0 .. 5 {
keys.push(C::random_nonzero_F(&mut OsRng));
keys.push(Zeroizing::new(C::random_nonzero_F(&mut OsRng)));
challenges.push(C::random_nonzero_F(&mut OsRng));
sigs.push(SchnorrSignature::<C>::sign(keys[i], C::random_nonzero_F(&mut OsRng), challenges[i]));
sigs.push(SchnorrSignature::<C>::sign(
&keys[i],
Zeroizing::new(C::random_nonzero_F(&mut OsRng)),
challenges[i],
));
}
// Batch verify
{
let mut batch = BatchVerifier::new(5);
for (i, sig) in sigs.iter().enumerate() {
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]);
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i].deref(), challenges[i]);
}
batch.verify_with_vartime_blame().unwrap();
}
@ -60,7 +68,7 @@ pub(crate) fn batch_verify<C: Ciphersuite>() {
if i == 2 {
sig.s -= C::F::one();
}
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]);
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i].deref(), challenges[i]);
}
if let Err(blame) = batch.verify_with_vartime_blame() {
assert!((blame == 1) || (blame == 2));
@ -76,12 +84,16 @@ pub(crate) fn aggregate<C: Ciphersuite>() {
let mut challenges = vec![];
let mut aggregator = SchnorrAggregator::<Blake2b256, C>::new();
for i in 0 .. 5 {
keys.push(C::random_nonzero_F(&mut OsRng));
keys.push(Zeroizing::new(C::random_nonzero_F(&mut OsRng)));
challenges.push(C::random_nonzero_F(&mut OsRng));
aggregator.aggregate(
C::generator() * keys[i],
C::generator() * keys[i].deref(),
challenges[i],
SchnorrSignature::<C>::sign(keys[i], C::random_nonzero_F(&mut OsRng), challenges[i]),
SchnorrSignature::<C>::sign(
&keys[i],
Zeroizing::new(C::random_nonzero_F(&mut OsRng)),
challenges[i],
),
);
}
@ -91,7 +103,7 @@ pub(crate) fn aggregate<C: Ciphersuite>() {
assert!(aggregate.verify::<Blake2b256>(
keys
.iter()
.map(|key| C::generator() * key)
.map(|key| C::generator() * key.deref())
.zip(challenges.iter().cloned())
.collect::<Vec<_>>()
.as_ref()


@ -15,8 +15,9 @@ rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
async-trait = "0.1"
rand_core = "0.6"
zeroize = "1.5"
thiserror = "1"
rand_core = "0.6"
group = "0.12"


@ -236,6 +236,7 @@ impl Coin for Monero {
#[cfg(test)]
async fn test_send(&self, address: Self::Address) {
use zeroize::Zeroizing;
use rand_core::OsRng;
let new_block = self.get_latest_block_number().await.unwrap() + 1;
@ -263,7 +264,7 @@ impl Coin for Monero {
self.rpc.get_fee().await.unwrap(),
)
.unwrap()
.sign(&mut OsRng, &self.rpc, &Scalar::one())
.sign(&mut OsRng, &self.rpc, &Zeroizing::new(Scalar::one()))
.await
.unwrap();
self.rpc.publish_transaction(&tx).await.unwrap();