Finish updating crypto to new clippy

Luke Parker 2023-07-07 23:08:14 -04:00
parent 3a626cc51e
commit dd5fb0df47
19 changed files with 230 additions and 209 deletions

View file

@ -5,18 +5,22 @@
-D clippy::nursery
-D clippy::pedantic
# Not worth the effort
-A clippy::implicit_hasher
# Stylistic preference
-A clippy::option-if-let-else
-A clippy::option_if_let_else
# Too many false/irrelevant positives
-A clippy::redundant-pub-crate
-A clippy::redundant_pub_crate
-A clippy::similar_names
# Frequently used
-A clippy::wildcard-imports
-A clippy::wildcard_imports
-A clippy::too_many_lines
# Used to avoid doing &* on copy-able items, with the * being the concern
-A clippy::explicit-deref-methods
-A clippy::explicit_deref_methods
# Lints from clippy::restrictions
@ -39,7 +43,6 @@
-D clippy::get_unwrap
-D clippy::if_then_some_else_none
-D clippy::rest_pat_in_fully_bound_structs
-D clippy::self_named_module_files
-D clippy::semicolon_inside_block
-D clippy::tests_outside_test_module
@ -48,18 +51,18 @@
-D clippy::string_to_string
# Flagged on tests being named test_
-A clippy::module-name-repetitions
-A clippy::module_name_repetitions
# Flagged on items passed by value which implemented Copy
-A clippy::needless-pass-by-value
-A clippy::needless_pass_by_value
# Flagged on embedded functions defined when needed/relevant
-A clippy::items_after_statements
# These potentially should be enabled in the future
-A clippy::missing-errors-doc
-A clippy::missing-panics-doc
-A clippy::doc-markdown
-A clippy::missing_errors_doc
-A clippy::missing_panics_doc
-A clippy::doc_markdown
# TODO: Enable this
# -D clippy::cargo
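For reference, the kebab-case spellings above are being normalized to the snake_case identifiers that the equivalent crate attributes require; a minimal sketch of the attribute form (lint selection illustrative, not the full list above):

#![deny(clippy::pedantic, clippy::nursery)]
#![allow(clippy::option_if_let_else, clippy::redundant_pub_crate)]

fn main() {}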

View file

@ -1,4 +1,3 @@
#![allow(clippy::self_named_module_files)] // False positive?
#![allow(clippy::tests_outside_test_module)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("lib.md")]

View file

@ -1,8 +1,6 @@
use core::{ops::Deref, fmt};
use std::{io, collections::HashMap};
use thiserror::Error;
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
@ -70,7 +68,7 @@ impl<C: Ciphersuite, M: Message> EncryptionKeyMessage<C, M> {
}
#[cfg(any(test, feature = "tests"))]
pub(crate) fn enc_key(&self) -> C::G {
pub(crate) const fn enc_key(&self) -> C::G {
self.enc_key
}
}
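The enc_key change is the recurring getter-to-`const fn` rewrite seen throughout this commit, presumably driven by clippy::missing_const_for_fn from the nursery group. A minimal standalone sketch with a hypothetical Wrapper type:

struct Wrapper {
  inner: u64,
}

impl Wrapper {
  // Returning a Copy field by value has no side effects, so the fn can be const.
  const fn inner(&self) -> u64 {
    self.inner
  }
}

fn main() {
  let wrapper = Wrapper { inner: 7 };
  assert_eq!(wrapper.inner(), 7);
}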
@ -110,6 +108,7 @@ fn cipher<C: Ciphersuite>(context: &str, ecdh: &Zeroizing<C::G>) -> ChaCha20 {
transcript.append_message(b"shared_key", ecdh.as_ref());
ecdh.as_mut().zeroize();
#[allow(clippy::redundant_closure_for_method_calls)] // Not redundant due to typing
let zeroize = |buf: &mut [u8]| buf.zeroize();
let mut key = Cc20Key::default();
@ -329,13 +328,19 @@ fn encryption_key_transcript(context: &str) -> RecommendedTranscript {
transcript
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub(crate) enum DecryptionError {
#[error("accused provided an invalid signature")]
InvalidSignature,
#[error("accuser provided an invalid decryption key")]
InvalidProof,
#[allow(clippy::std_instead_of_core)]
mod decryption_error {
use thiserror::Error;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub(crate) enum DecryptionError {
#[error("accused provided an invalid signature")]
InvalidSignature,
#[error("accuser provided an invalid decryption key")]
InvalidProof,
}
}
pub(crate) use decryption_error::DecryptionError;
// A simple box for managing encryption.
#[derive(Clone)]
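This is the pattern used for DecryptionError above (and repeated below for DkgError and FrostError): the thiserror derive is moved into a small module so the std_instead_of_core allow covers only the derive's expansion, which presumably mentions std paths. A sketch of the same shape, assuming thiserror as a dependency and a hypothetical MyError:

// Assumes thiserror as a dependency; MyError is a stand-in, not the crate's type.
#[allow(clippy::std_instead_of_core)]
mod my_error {
  use thiserror::Error;

  #[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
  pub(crate) enum MyError {
    #[error("something went wrong")]
    Failed,
  }
}
pub(crate) use my_error::MyError;

fn main() {
  println!("{}", MyError::Failed);
}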
@ -381,7 +386,7 @@ impl<C: Ciphersuite> Encryption<C> {
}
}
pub(crate) fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
pub(crate) const fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
EncryptionKeyMessage { msg, enc_key: self.enc_pub_key }
}
@ -390,9 +395,10 @@ impl<C: Ciphersuite> Encryption<C> {
participant: Participant,
msg: EncryptionKeyMessage<C, M>,
) -> M {
if self.enc_keys.contains_key(&participant) {
panic!("Re-registering encryption key for a participant");
}
assert!(
!self.enc_keys.contains_key(&participant),
"Re-registering encryption key for a participant"
);
self.enc_keys.insert(participant, msg.enc_key);
msg.msg
}
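The registration change swaps an if/panic! pair for assert!, which is what clippy::manual_assert suggests; note the condition is negated in the rewrite. A small runnable sketch with hypothetical names:

use std::collections::HashMap;

// Hypothetical registration helper mirroring the hunk above.
fn register(keys: &mut HashMap<u16, u64>, participant: u16, key: u64) {
  // assert! replaces `if keys.contains_key(..) { panic!(..) }`; the condition is negated.
  assert!(
    !keys.contains_key(&participant),
    "Re-registering encryption key for a participant"
  );
  keys.insert(participant, key);
}

fn main() {
  let mut keys = HashMap::new();
  register(&mut keys, 1, 42);
  assert_eq!(keys[&1], 42);
}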

View file

@ -73,7 +73,7 @@ impl<C: Ciphersuite> ReadWrite for Commitments<C> {
commitments.push(read_G()?);
}
Ok(Commitments { commitments, cached_msg, sig: SchnorrSignature::read(reader)? })
Ok(Self { commitments, cached_msg, sig: SchnorrSignature::read(reader)? })
}
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@ -87,14 +87,16 @@ impl<C: Ciphersuite> ReadWrite for Commitments<C> {
pub struct KeyGenMachine<C: Ciphersuite> {
params: ThresholdParams,
context: String,
_curve: PhantomData<C>,
curve: PhantomData<C>,
}
impl<C: Ciphersuite> KeyGenMachine<C> {
/// Create a new machine to generate a key.
// The context string should be unique among multisigs.
pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine<C> {
KeyGenMachine { params, context, _curve: PhantomData }
///
/// The context string should be unique among multisigs.
#[must_use]
pub const fn new(params: ThresholdParams, context: String) -> Self {
Self { params, context, curve: PhantomData }
}
/// Start generating a key according to the FROST DKG spec.
@ -171,7 +173,6 @@ fn polynomial<F: PrimeField + Zeroize>(
/// channel.
///
/// If any participant sends multiple secret shares to another participant, they are faulty.
// This should presumably be written as SecretShare(Zeroizing<F::Repr>).
// It's unfortunately not possible as F::Repr doesn't have Zeroize as a bound.
// The encryption system also explicitly uses Zeroizing<M> so it can ensure anything being
@ -195,7 +196,7 @@ impl<F: PrimeField> fmt::Debug for SecretShare<F> {
}
impl<F: PrimeField> Zeroize for SecretShare<F> {
fn zeroize(&mut self) {
self.0.as_mut().zeroize()
self.0.as_mut().zeroize();
}
}
// Still manually implement ZeroizeOnDrop to ensure these don't stick around.
@ -213,7 +214,7 @@ impl<F: PrimeField> ReadWrite for SecretShare<F> {
fn read<R: Read>(reader: &mut R, _: ThresholdParams) -> io::Result<Self> {
let mut repr = F::Repr::default();
reader.read_exact(repr.as_mut())?;
Ok(SecretShare(repr))
Ok(Self(repr))
}
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@ -353,7 +354,7 @@ impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
fn zeroize(&mut self) {
self.params.zeroize();
self.secret.zeroize();
for (_, commitments) in self.commitments.iter_mut() {
for commitments in self.commitments.values_mut() {
commitments.zeroize();
}
self.encryption.zeroize();
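Several Zeroize impls in this commit switch from destructuring (_, v) out of iter_mut() to values_mut(); clippy flags the unused key (likely the for_kv_map lint). A standalone sketch:

use std::collections::HashMap;

fn zero_all(map: &mut HashMap<u16, u64>) {
  // The key is unused, so iterate the values directly instead of `for (_, v) in map.iter_mut()`.
  for value in map.values_mut() {
    *value = 0;
  }
}

fn main() {
  let mut map = HashMap::from([(1u16, 5u64), (2, 7)]);
  zero_all(&mut map);
  assert!(map.values().all(|value| *value == 0));
}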
@ -466,7 +467,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
);
}
let KeyMachine { commitments, encryption, params, secret } = self;
let Self { commitments, encryption, params, secret } = self;
Ok(BlameMachine {
commitments,
encryption,
@ -499,7 +500,7 @@ impl<C: Ciphersuite> fmt::Debug for BlameMachine<C> {
impl<C: Ciphersuite> Zeroize for BlameMachine<C> {
fn zeroize(&mut self) {
for (_, commitments) in self.commitments.iter_mut() {
for commitments in self.commitments.values_mut() {
commitments.zeroize();
}
self.encryption.zeroize();
@ -517,6 +518,7 @@ impl<C: Ciphersuite> BlameMachine<C> {
/// territory of consensus protocols. This library does not handle that nor does it provide any
/// tooling to do so. This function is solely intended to force users to acknowledge they're
/// completing the protocol, not processing any blame.
#[allow(clippy::missing_const_for_fn)] // False positive
pub fn complete(self) -> ThresholdCore<C> {
self.result
}
@ -536,10 +538,9 @@ impl<C: Ciphersuite> BlameMachine<C> {
Err(DecryptionError::InvalidProof) => return recipient,
};
let share = match Option::<C::F>::from(C::F::from_repr(share_bytes.0)) {
Some(share) => share,
let Some(share) = Option::<C::F>::from(C::F::from_repr(share_bytes.0)) else {
// If this isn't a valid scalar, the sender is faulty
None => return sender,
return sender;
};
// If this isn't a valid share, the sender is faulty
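The blame path above is rewritten with let-else (presumably clippy::manual_let_else): the failure arm becomes the else block and must diverge. A simplified sketch using a Result in place of the crate's field decoding:

// `sender` stands in for the Participant returned on blame.
fn share_or_blame(encoded: &str, sender: u16) -> u16 {
  // The failure arm moves into the else block, which must diverge (here, `return`).
  let Ok(share) = encoded.parse::<u64>() else {
    // If this isn't a valid value, the sender is faulty
    return sender;
  };
  // ... continue using `share` ...
  u16::try_from(share % 2).unwrap()
}

fn main() {
  assert_eq!(share_or_blame("not a number", 3), 3);
  assert_eq!(share_or_blame("7", 3), 1);
}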

View file

@ -3,9 +3,7 @@
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::{self, Debug};
#[cfg(feature = "std")]
use thiserror::Error;
extern crate alloc;
use zeroize::Zeroize;
@ -35,23 +33,25 @@ pub mod tests;
pub struct Participant(pub(crate) u16);
impl Participant {
/// Create a new Participant identifier from a u16.
pub fn new(i: u16) -> Option<Participant> {
#[must_use]
pub const fn new(i: u16) -> Option<Self> {
if i == 0 {
None
} else {
Some(Participant(i))
Some(Self(i))
}
}
/// Convert a Participant identifier to bytes.
#[allow(clippy::wrong_self_convention)]
pub fn to_bytes(&self) -> [u8; 2] {
#[must_use]
pub const fn to_bytes(&self) -> [u8; 2] {
self.0.to_le_bytes()
}
}
impl From<Participant> for u16 {
fn from(participant: Participant) -> u16 {
fn from(participant: Participant) -> Self {
participant.0
}
}
@ -63,49 +63,58 @@ impl fmt::Display for Participant {
}
/// Various errors possible during key generation.
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(Error))]
pub enum DkgError<B: Clone + PartialEq + Eq + Debug> {
/// A parameter was zero.
#[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))]
ZeroParameter(u16, u16),
/// The threshold exceeded the amount of participants.
#[cfg_attr(feature = "std", error("invalid threshold (max {1}, got {0})"))]
InvalidThreshold(u16, u16),
/// Invalid participant identifier.
#[cfg_attr(
feature = "std",
error("invalid participant (0 < participant <= {0}, yet participant is {1})")
)]
InvalidParticipant(u16, Participant),
#[allow(clippy::std_instead_of_core)]
mod dkg_error {
use core::fmt::Debug;
use thiserror::Error;
use super::Participant;
/// Invalid signing set.
#[cfg_attr(feature = "std", error("invalid signing set"))]
InvalidSigningSet,
/// Invalid amount of participants.
#[cfg_attr(feature = "std", error("invalid participant quantity (expected {0}, got {1})"))]
InvalidParticipantQuantity(usize, usize),
/// A participant was duplicated.
#[cfg_attr(feature = "std", error("duplicated participant ({0})"))]
DuplicatedParticipant(Participant),
/// A participant was missing.
#[cfg_attr(feature = "std", error("missing participant {0}"))]
MissingParticipant(Participant),
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(Error))]
pub enum DkgError<B: Clone + PartialEq + Eq + Debug> {
/// A parameter was zero.
#[cfg_attr(feature = "std", error("a parameter was 0 (threshold {0}, participants {1})"))]
ZeroParameter(u16, u16),
/// The threshold exceeded the amount of participants.
#[cfg_attr(feature = "std", error("invalid threshold (max {1}, got {0})"))]
InvalidThreshold(u16, u16),
/// Invalid participant identifier.
#[cfg_attr(
feature = "std",
error("invalid participant (0 < participant <= {0}, yet participant is {1})")
)]
InvalidParticipant(u16, Participant),
/// An invalid proof of knowledge was provided.
#[cfg_attr(feature = "std", error("invalid proof of knowledge (participant {0})"))]
InvalidProofOfKnowledge(Participant),
/// An invalid DKG share was provided.
#[cfg_attr(feature = "std", error("invalid share (participant {participant}, blame {blame})"))]
InvalidShare { participant: Participant, blame: Option<B> },
/// Invalid signing set.
#[cfg_attr(feature = "std", error("invalid signing set"))]
InvalidSigningSet,
/// Invalid amount of participants.
#[cfg_attr(feature = "std", error("invalid participant quantity (expected {0}, got {1})"))]
InvalidParticipantQuantity(usize, usize),
/// A participant was duplicated.
#[cfg_attr(feature = "std", error("duplicated participant ({0})"))]
DuplicatedParticipant(Participant),
/// A participant was missing.
#[cfg_attr(feature = "std", error("missing participant {0}"))]
MissingParticipant(Participant),
/// An invalid proof of knowledge was provided.
#[cfg_attr(feature = "std", error("invalid proof of knowledge (participant {0})"))]
InvalidProofOfKnowledge(Participant),
/// An invalid DKG share was provided.
#[cfg_attr(feature = "std", error("invalid share (participant {participant}, blame {blame})"))]
InvalidShare { participant: Participant, blame: Option<B> },
}
}
pub use dkg_error::DkgError;
#[cfg(feature = "std")]
mod lib {
pub use super::*;
use core::ops::Deref;
use std::{io, sync::Arc, collections::HashMap};
use alloc::sync::Arc;
use std::{io, collections::HashMap};
use zeroize::Zeroizing;
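Arc is now pulled from alloc rather than std here and again in promote.rs, in line with the std_instead_of_alloc style of the restriction set; std::sync::Arc is only a re-export of alloc::sync::Arc. A minimal sketch:

extern crate alloc;

use alloc::sync::Arc;

fn main() {
  let shared: Arc<[u8; 2]> = Arc::new([1, 2]);
  assert_eq!(shared[0], 1);
}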
@ -158,7 +167,7 @@ mod lib {
impl ThresholdParams {
/// Create a new set of parameters.
pub fn new(t: u16, n: u16, i: Participant) -> Result<ThresholdParams, DkgError<()>> {
pub fn new(t: u16, n: u16, i: Participant) -> Result<Self, DkgError<()>> {
if (t == 0) || (n == 0) {
Err(DkgError::ZeroParameter(t, n))?;
}
@ -170,24 +179,28 @@ mod lib {
Err(DkgError::InvalidParticipant(n, i))?;
}
Ok(ThresholdParams { t, n, i })
Ok(Self { t, n, i })
}
/// Return the threshold for a multisig with these parameters.
pub fn t(&self) -> u16 {
#[must_use]
pub const fn t(&self) -> u16 {
self.t
}
/// Return the amount of participants for a multisig with these parameters.
pub fn n(&self) -> u16 {
#[must_use]
pub const fn n(&self) -> u16 {
self.n
}
/// Return the participant index of the share with these parameters.
pub fn i(&self) -> Participant {
#[must_use]
pub const fn i(&self) -> Participant {
self.i
}
}
/// Calculate the lagrange coefficient for a signing set.
#[must_use]
pub fn lagrange<F: PrimeField>(i: Participant, included: &[Participant]) -> F {
let i_f = F::from(u64::from(u16::from(i)));
@ -239,20 +252,21 @@ mod lib {
self.params.zeroize();
self.secret_share.zeroize();
self.group_key.zeroize();
for (_, share) in self.verification_shares.iter_mut() {
for share in self.verification_shares.values_mut() {
share.zeroize();
}
}
}
impl<C: Ciphersuite> ThresholdCore<C> {
#[must_use]
pub(crate) fn new(
params: ThresholdParams,
secret_share: Zeroizing<C::F>,
verification_shares: HashMap<Participant, C::G>,
) -> ThresholdCore<C> {
) -> Self {
let t = (1 ..= params.t()).map(Participant).collect::<Vec<_>>();
ThresholdCore {
Self {
params,
secret_share,
group_key: t.iter().map(|i| verification_shares[i] * lagrange::<C::F>(*i, &t)).sum(),
@ -261,17 +275,17 @@ mod lib {
}
/// Parameters for these keys.
pub fn params(&self) -> ThresholdParams {
pub const fn params(&self) -> ThresholdParams {
self.params
}
/// Secret share for these keys.
pub fn secret_share(&self) -> &Zeroizing<C::F> {
pub const fn secret_share(&self) -> &Zeroizing<C::F> {
&self.secret_share
}
/// Group key for these keys.
pub fn group_key(&self) -> C::G {
pub const fn group_key(&self) -> C::G {
self.group_key
}
@ -304,7 +318,7 @@ mod lib {
}
/// Read keys from a type satisfying std::io::Read.
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<ThresholdCore<C>> {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
{
let different =
|| io::Error::new(io::ErrorKind::Other, "deserializing ThresholdCore for another curve");
@ -332,7 +346,7 @@ mod lib {
read_u16()?,
read_u16()?,
Participant::new(read_u16()?)
.ok_or(io::Error::new(io::ErrorKind::Other, "invalid participant index"))?,
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid participant index"))?,
)
};
@ -343,7 +357,7 @@ mod lib {
verification_shares.insert(l, <C as Ciphersuite>::read_G(reader)?);
}
Ok(ThresholdCore::new(
Ok(Self::new(
ThresholdParams::new(t, n, i)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid parameters"))?,
secret_share,
@ -395,10 +409,10 @@ mod lib {
self.group_key.zeroize();
self.included.zeroize();
self.secret_share.zeroize();
for (_, share) in self.original_verification_shares.iter_mut() {
for share in self.original_verification_shares.values_mut() {
share.zeroize();
}
for (_, share) in self.verification_shares.iter_mut() {
for share in self.verification_shares.values_mut() {
share.zeroize();
}
}
@ -406,8 +420,9 @@ mod lib {
impl<C: Ciphersuite> ThresholdKeys<C> {
/// Create a new set of ThresholdKeys from a ThresholdCore.
pub fn new(core: ThresholdCore<C>) -> ThresholdKeys<C> {
ThresholdKeys { core: Arc::new(core), offset: None }
#[must_use]
pub fn new(core: ThresholdCore<C>) -> Self {
Self { core: Arc::new(core), offset: None }
}
/// Offset the keys by a given scalar to allow for various account and privacy schemes.
@ -415,7 +430,7 @@ mod lib {
/// This offset is ephemeral and will not be included when these keys are serialized. It also
/// accumulates, so calling offset multiple times will produce an offset of the offsets' sum.
#[must_use]
pub fn offset(&self, offset: C::F) -> ThresholdKeys<C> {
pub fn offset(&self, offset: C::F) -> Self {
let mut res = self.clone();
// Carry any existing offset
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
@ -425,7 +440,7 @@ mod lib {
}
/// Return the current offset in-use for these keys.
pub fn current_offset(&self) -> Option<C::F> {
pub const fn current_offset(&self) -> Option<C::F> {
self.offset
}
@ -469,7 +484,7 @@ mod lib {
);
let mut verification_shares = self.verification_shares();
for (i, share) in verification_shares.iter_mut() {
for (i, share) in &mut verification_shares {
*share *= lagrange::<C::F>(*i, &included);
}
@ -492,19 +507,19 @@ mod lib {
}
impl<C: Ciphersuite> From<ThresholdCore<C>> for ThresholdKeys<C> {
fn from(keys: ThresholdCore<C>) -> ThresholdKeys<C> {
ThresholdKeys::new(keys)
fn from(keys: ThresholdCore<C>) -> Self {
Self::new(keys)
}
}
impl<C: Ciphersuite> ThresholdView<C> {
/// Return the offset for this view.
pub fn offset(&self) -> C::F {
pub const fn offset(&self) -> C::F {
self.offset
}
/// Return the group key.
pub fn group_key(&self) -> C::G {
pub const fn group_key(&self) -> C::G {
self.group_key
}
@ -514,7 +529,7 @@ mod lib {
}
/// Return the interpolated, offset secret share.
pub fn secret_share(&self) -> &Zeroizing<C::F> {
pub const fn secret_share(&self) -> &Zeroizing<C::F> {
&self.secret_share
}

View file

@ -1,7 +1,7 @@
use core::{marker::PhantomData, ops::Deref};
use alloc::sync::Arc;
use std::{
io::{self, Read, Write},
sync::Arc,
collections::HashMap,
};
@ -45,11 +45,8 @@ impl<C: Ciphersuite> GeneratorProof<C> {
self.proof.write(writer)
}
pub fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
Ok(GeneratorProof {
share: <C as Ciphersuite>::read_G(reader)?,
proof: DLEqProof::read(reader)?,
})
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
Ok(Self { share: <C as Ciphersuite>::read_G(reader)?, proof: DLEqProof::read(reader)? })
}
pub fn serialize(&self) -> Vec<u8> {
@ -70,16 +67,13 @@ pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
_c2: PhantomData<C2>,
}
impl<C1: Ciphersuite, C2: Ciphersuite> GeneratorPromotion<C1, C2>
where
C2: Ciphersuite<F = C1::F, G = C1::G>,
{
impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> {
/// Begin promoting keys from one generator to another. Returns a proof this share was properly
/// promoted.
pub fn promote<R: RngCore + CryptoRng>(
rng: &mut R,
base: ThresholdKeys<C1>,
) -> (GeneratorPromotion<C1, C2>, GeneratorProof<C1>) {
) -> (Self, GeneratorProof<C1>) {
// Do a DLEqProof for the new generator
let proof = GeneratorProof {
share: C2::generator() * base.secret_share().deref(),
@ -91,7 +85,7 @@ where
),
};
(GeneratorPromotion { base, proof, _c2: PhantomData::<C2> }, proof)
(Self { base, proof, _c2: PhantomData::<C2> }, proof)
}
/// Complete promotion by taking in the proofs from all other participants.

View file

@ -25,7 +25,7 @@ pub const PARTICIPANTS: u16 = 5;
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
@ -40,7 +40,7 @@ pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
pub fn recover_key<C: Ciphersuite>(keys: &HashMap<Participant, ThresholdKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
let included = keys.keys().cloned().collect::<Vec<_>>();
let included = keys.keys().copied().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| {
accum + (lagrange::<C::F>(*i, &included) * keys.secret_share().deref())
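keys().cloned() becomes keys().copied() since Participant is Copy; clippy's cloned_instead_of_copied (pedantic) prefers making the cheap copy explicit. A sketch with u16 keys standing in for Participant:

use std::collections::HashMap;

// u16 keys stand in for Participant, which is Copy.
fn participant_ids(map: &HashMap<u16, u64>) -> Vec<u16> {
  // For Copy items, .copied() makes the cheap copy explicit where .cloned() would also compile.
  map.keys().copied().collect()
}

fn main() {
  let map = HashMap::from([(1u16, 10u64), (2, 20)]);
  let mut ids = participant_ids(&map);
  ids.sort_unstable();
  assert_eq!(ids, vec![1, 2]);
}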
@ -95,6 +95,7 @@ pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
test_generator_promotion::<_, C>(rng);
}
#[allow(clippy::tests_outside_test_module)]
#[test]
fn test_with_ristretto() {
test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng);

View file

@ -24,9 +24,9 @@ pub fn test_musig<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
const CONTEXT: &[u8] = b"MuSig Test";
// Empty signing set
assert!(musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).is_err());
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err();
// Signing set we're not part of
assert!(musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).is_err());
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err();
// Test with n keys
{
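Replacing assert!(x.is_err()) with unwrap_err() is likely driven by the assertions_on_result_states restriction lint; unwrap_err() also surfaces the unexpected Ok value in the panic message. A tiny standalone example:

fn parse(s: &str) -> Result<u64, std::num::ParseIntError> {
  s.parse()
}

fn main() {
  // unwrap_err() asserts the Err case and includes the unexpected Ok value in the panic message.
  parse("not a number").unwrap_err();
  assert_eq!(parse("7").unwrap(), 7);
}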
@ -55,7 +55,8 @@ pub fn test_musig<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
}
}
#[allow(clippy::tests_outside_test_module)]
#[test]
fn musig_literal() {
test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng)
test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng);
}

View file

@ -14,7 +14,7 @@ use crate::{
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Ciphersuite> {
_curve: PhantomData<C>,
curve: PhantomData<C>,
}
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {

View file

@ -97,8 +97,8 @@ mod sealed {
impl Transcript for IetfTranscript {
type Challenge = Vec<u8>;
fn new(_: &'static [u8]) -> IetfTranscript {
IetfTranscript(vec![])
fn new(_: &'static [u8]) -> Self {
Self(vec![])
}
fn domain_separate(&mut self, _: &[u8]) {}
@ -147,8 +147,9 @@ pub type IetfSchnorr<C, H> = Schnorr<C, IetfTranscript, H>;
impl<C: Curve, T: Sync + Clone + Debug + Transcript, H: Hram<C>> Schnorr<C, T, H> {
/// Construct a Schnorr algorithm continuing the specified transcript.
pub fn new(transcript: T) -> Schnorr<C, T, H> {
Schnorr { transcript, c: None, _hram: PhantomData }
#[must_use]
pub const fn new(transcript: T) -> Self {
Self { transcript, c: None, _hram: PhantomData }
}
}
@ -156,8 +157,9 @@ impl<C: Curve, H: Hram<C>> IetfSchnorr<C, H> {
/// Construct an IETF-compatible Schnorr algorithm.
///
/// Please see the `IetfSchnorr` documentation for the full details of this.
pub fn ietf() -> IetfSchnorr<C, H> {
Schnorr::new(IetfTranscript(vec![]))
#[must_use]
pub const fn ietf() -> Self {
Self::new(IetfTranscript(vec![]))
}
}

View file

@ -46,6 +46,7 @@ pub trait Curve: Ciphersuite {
const CONTEXT: &'static [u8];
/// Hash the given dst and data to a byte vector. Used to instantiate H4 and H5.
#[must_use]
fn hash(dst: &[u8], data: &[u8]) -> Output<Self::H> {
Self::H::digest([Self::CONTEXT, dst, data].concat())
}
@ -53,26 +54,31 @@ pub trait Curve: Ciphersuite {
/// Field element from hash. Used during key gen and by other crates under Serai as a general
/// utility. Used to instantiate H1 and H3.
#[allow(non_snake_case)]
#[must_use]
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
<Self as Ciphersuite>::hash_to_F(&[Self::CONTEXT, dst].concat(), msg)
}
/// Hash the message for the binding factor. H4 from the IETF draft.
#[must_use]
fn hash_msg(msg: &[u8]) -> Output<Self::H> {
Self::hash(b"msg", msg)
}
/// Hash the commitments for the binding factor. H5 from the IETF draft.
#[must_use]
fn hash_commitments(commitments: &[u8]) -> Output<Self::H> {
Self::hash(b"com", commitments)
}
/// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft.
#[must_use]
fn hash_binding_factor(binding: &[u8]) -> Self::F {
<Self as Curve>::hash_to_F(b"rho", binding)
}
/// Securely generate a random nonce. H3 from the IETF draft.
#[must_use]
fn random_nonce<R: RngCore + CryptoRng>(
secret: &Zeroizing<Self::F>,
rng: &mut R,

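The #[must_use] attributes added throughout this trait (and on constructors elsewhere in the commit) correspond to clippy::must_use_candidate from the pedantic group: pure functions whose only effect is their return value get flagged. A minimal sketch:

// A pure function whose only effect is its return value; #[must_use] stops callers
// from silently dropping the result.
#[must_use]
fn double(x: u64) -> u64 {
  x * 2
}

fn main() {
  assert_eq!(double(21), 42);
}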
View file

@ -1,11 +1,8 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
use core::fmt::Debug;
use std::collections::HashMap;
use thiserror::Error;
/// Distributed key generation protocol.
pub use dkg::{self, Participant, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView};
@ -23,25 +20,32 @@ pub mod sign;
#[cfg(any(test, feature = "tests"))]
pub mod tests;
/// Various errors possible during signing.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum FrostError {
#[error("invalid participant (0 < participant <= {0}, yet participant is {1})")]
InvalidParticipant(u16, Participant),
#[error("invalid signing set ({0})")]
InvalidSigningSet(&'static str),
#[error("invalid participant quantity (expected {0}, got {1})")]
InvalidParticipantQuantity(usize, usize),
#[error("duplicated participant ({0})")]
DuplicatedParticipant(Participant),
#[error("missing participant {0}")]
MissingParticipant(Participant),
#[allow(clippy::std_instead_of_core)]
mod frost_error {
use core::fmt::Debug;
use thiserror::Error;
use dkg::Participant;
/// Various errors possible during signing.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum FrostError {
#[error("invalid participant (0 < participant <= {0}, yet participant is {1})")]
InvalidParticipant(u16, Participant),
#[error("invalid signing set ({0})")]
InvalidSigningSet(&'static str),
#[error("invalid participant quantity (expected {0}, got {1})")]
InvalidParticipantQuantity(usize, usize),
#[error("duplicated participant ({0})")]
DuplicatedParticipant(Participant),
#[error("missing participant {0}")]
MissingParticipant(Participant),
#[error("invalid preprocess (participant {0})")]
InvalidPreprocess(Participant),
#[error("invalid share (participant {0})")]
InvalidShare(Participant),
#[error("invalid preprocess (participant {0})")]
InvalidPreprocess(Participant),
#[error("invalid share (participant {0})")]
InvalidShare(Participant),
}
}
pub use frost_error::FrostError;
/// Validate a map of values to have the expected participants.
pub fn validate_map<T>(

View file

@ -59,8 +59,8 @@ pub(crate) struct Nonce<C: Curve>(pub(crate) [Zeroizing<C::F>; 2]);
#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
impl<C: Curve> GeneratorCommitments<C> {
fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
Ok(GeneratorCommitments([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
Ok(Self([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
}
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@ -82,7 +82,7 @@ impl<C: Curve> NonceCommitments<C> {
rng: &mut R,
secret_share: &Zeroizing<C::F>,
generators: &[C::G],
) -> (Nonce<C>, NonceCommitments<C>) {
) -> (Nonce<C>, Self) {
let nonce = Nonce::<C>([
C::random_nonce(secret_share, &mut *rng),
C::random_nonce(secret_share, &mut *rng),
@ -96,11 +96,11 @@ impl<C: Curve> NonceCommitments<C> {
]));
}
(nonce, NonceCommitments { generators: commitments })
(nonce, Self { generators: commitments })
}
fn read<R: Read>(reader: &mut R, generators: &[C::G]) -> io::Result<NonceCommitments<C>> {
Ok(NonceCommitments {
fn read<R: Read>(reader: &mut R, generators: &[C::G]) -> io::Result<Self> {
Ok(Self {
generators: (0 .. generators.len())
.map(|_| GeneratorCommitments::read(reader))
.collect::<Result<_, _>>()?,
@ -146,7 +146,7 @@ impl<C: Curve> Commitments<C> {
secret_share: &Zeroizing<C::F>,
planned_nonces: &[Vec<C::G>],
context: &[u8],
) -> (Vec<Nonce<C>>, Commitments<C>) {
) -> (Vec<Nonce<C>>, Self) {
let mut nonces = vec![];
let mut commitments = vec![];
@ -168,18 +168,18 @@ impl<C: Curve> Commitments<C> {
commitments.push(these_commitments);
}
let dleq = if !dleq_generators.is_empty() {
let dleq = if dleq_generators.is_empty() {
None
} else {
Some(MultiDLEqProof::prove(
rng,
&mut dleq_transcript::<T>(context),
&dleq_generators,
&dleq_nonces,
))
} else {
None
};
(nonces, Commitments { nonces: commitments, dleq })
(nonces, Self { nonces: commitments, dleq })
}
pub(crate) fn transcript<T: Transcript>(&self, t: &mut T) {
@ -219,17 +219,17 @@ impl<C: Curve> Commitments<C> {
}
}
let dleq = if !dleq_generators.is_empty() {
let dleq = if dleq_generators.is_empty() {
None
} else {
let dleq = MultiDLEqProof::read(reader, dleq_generators.len())?;
dleq
.verify(&mut dleq_transcript::<T>(context), &dleq_generators, &dleq_nonces)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid DLEq proof"))?;
Some(dleq)
} else {
None
};
Ok(Commitments { nonces, dleq })
Ok(Self { nonces, dleq })
}
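Both the prove and read paths flip the dleq conditional so the positive is_empty() check comes first, per clippy::if_not_else. A reduced sketch of the same flip:

fn proof_len(generators: &[u64]) -> Option<usize> {
  // The positive `is_empty()` check comes first, instead of `if !generators.is_empty() { .. } else { None }`.
  if generators.is_empty() {
    None
  } else {
    Some(generators.len())
  }
}

fn main() {
  assert_eq!(proof_len(&[]), None);
  assert_eq!(proof_len(&[1, 2, 3]), Some(3));
}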
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
@ -256,7 +256,7 @@ impl<C: Curve> BindingFactor<C> {
}
pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
for (l, binding) in self.0.iter_mut() {
for (l, binding) in &mut self.0 {
let mut transcript = transcript.clone();
transcript.append_message(b"participant", C::F::from(u64::from(u16::from(*l))).to_repr());
// It *should* be perfectly fine to reuse a binding factor for multiple nonces

View file

@ -53,8 +53,8 @@ struct Params<C: Curve, A: Algorithm<C>> {
}
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
fn new(algorithm: A, keys: ThresholdKeys<C>) -> Params<C, A> {
Params { algorithm, keys }
const fn new(algorithm: A, keys: ThresholdKeys<C>) -> Self {
Self { algorithm, keys }
}
fn multisig_params(&self) -> ThresholdParams {
@ -111,8 +111,8 @@ pub struct AlgorithmMachine<C: Curve, A: Algorithm<C>> {
impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
/// Creates a new machine to generate a signature with the specified keys.
pub fn new(algorithm: A, keys: ThresholdKeys<C>) -> AlgorithmMachine<C, A> {
AlgorithmMachine { params: Params::new(algorithm, keys) }
pub const fn new(algorithm: A, keys: ThresholdKeys<C>) -> Self {
Self { params: Params::new(algorithm, keys) }
}
fn seeded_preprocess(

View file

@ -27,7 +27,7 @@ pub const PARTICIPANTS: u16 = 5;
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
@ -57,11 +57,7 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
keys
.iter()
.filter_map(|(i, keys)| {
if included.contains(i) {
Some((*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
} else {
None
}
included.contains(i).then(|| (*i, AlgorithmMachine::new(algorithm.clone(), keys.clone())))
})
.collect()
}
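The filter_map body now uses bool::then, which is what the if_then_some_else_none restriction lint (denied in the clippy args above) asks for in place of if cond { Some(x) } else { None }. A standalone sketch with plain u16/u64 maps:

use std::collections::HashMap;

// u16/u64 stand in for Participant and the machines being filtered.
fn subset(map: &HashMap<u16, u64>, included: &[u16]) -> HashMap<u16, u64> {
  map
    .iter()
    .filter_map(|(i, v)| {
      // bool::then replaces `if included.contains(i) { Some((*i, *v)) } else { None }`.
      included.contains(i).then(|| (*i, *v))
    })
    .collect()
}

fn main() {
  let map = HashMap::from([(1u16, 10u64), (2, 20), (3, 30)]);
  let picked = subset(&map, &[1, 3]);
  assert_eq!(picked.len(), 2);
  assert_eq!(picked[&3], 30);
}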
@ -177,8 +173,8 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
machines,
|rng, machines| {
// Cache and rebuild half of the machines
let mut included = machines.keys().cloned().collect::<Vec<_>>();
for i in included.drain(..) {
let included = machines.keys().copied().collect::<Vec<_>>();
for i in included {
if (rng.next_u64() % 2) == 0 {
let cache = machines.remove(&i).unwrap().cache();
machines.insert(
@ -208,13 +204,13 @@ pub fn test_schnorr_with_keys<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
/// Test a basic Schnorr signature.
pub fn test_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &mut R) {
let keys = key_gen(&mut *rng);
test_schnorr_with_keys::<_, _, H>(&mut *rng, keys)
test_schnorr_with_keys::<_, _, H>(&mut *rng, keys);
}
/// Test a basic Schnorr signature, yet with MuSig.
pub fn test_musig_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &mut R) {
let keys = musig_key_gen(&mut *rng);
test_schnorr_with_keys::<_, _, H>(&mut *rng, keys)
test_schnorr_with_keys::<_, _, H>(&mut *rng, keys);
}
/// Test an offset Schnorr signature.
@ -226,7 +222,7 @@ pub fn test_offset_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &m
let offset = C::F::from(5);
let offset_key = group_key + (C::generator() * offset);
for (_, keys) in keys.iter_mut() {
for keys in keys.values_mut() {
*keys = keys.offset(offset);
assert_eq!(keys.group_key(), offset_key);
}

View file

@ -26,8 +26,8 @@ struct MultiNonce<C: Curve> {
}
impl<C: Curve> MultiNonce<C> {
fn new() -> MultiNonce<C> {
MultiNonce {
fn new() -> Self {
Self {
transcript: RecommendedTranscript::new(b"FROST MultiNonce Algorithm Test"),
nonces: None,
}
@ -173,16 +173,10 @@ pub fn test_invalid_commitment<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let mut preprocess = preprocesses.remove(&faulty).unwrap();
// Mutate one of the commitments
let nonce =
preprocess.commitments.nonces.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2).unwrap();
let nonce = &mut preprocess.commitments.nonces[usize::try_from(rng.next_u64()).unwrap() % 2];
let generators_len = nonce.generators.len();
*nonce
.generators
.get_mut(usize::try_from(rng.next_u64()).unwrap() % generators_len)
.unwrap()
.0
.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2)
.unwrap() = C::G::random(&mut *rng);
nonce.generators[usize::try_from(rng.next_u64()).unwrap() % generators_len].0
[usize::try_from(rng.next_u64()).unwrap() % 2] = C::G::random(&mut *rng);
// The commitments are validated at time of deserialization (read_preprocess)
// Accordingly, serialize it and read it again to make sure that errors
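The chained get_mut(..).unwrap() calls are replaced with direct indexing, matching the get_unwrap deny in the clippy args above; both panic identically on a bad index. A minimal sketch:

fn main() {
  let mut commitments = vec![[0u8; 2], [1, 1], [2, 2]];
  // Direct indexing replaces .get_mut(i).unwrap() chains; both panic identically on a bad index.
  commitments[1][0] = 9;
  assert_eq!(commitments[1], [9, 1]);
}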

View file

@ -1,8 +1,8 @@
use core::ops::Deref;
use std::collections::HashMap;
#[cfg(test)]
use std::str::FromStr;
use core::str::FromStr;
use std::collections::HashMap;
use zeroize::Zeroizing;
@ -45,11 +45,12 @@ pub struct Vectors {
// Vectors are expected to be formatted per the IETF proof of concept
// The included vectors are directly from
// https://github.com/cfrg/draft-irtf-cfrg-frost/tree/draft-irtf-cfrg-frost-11/poc
#[allow(clippy::fallible_impl_from)]
#[cfg(test)]
impl From<serde_json::Value> for Vectors {
fn from(value: serde_json::Value) -> Vectors {
fn from(value: serde_json::Value) -> Self {
let to_str = |value: &serde_json::Value| value.as_str().unwrap().to_string();
Vectors {
Self {
threshold: u16::from_str(value["config"]["NUM_PARTICIPANTS"].as_str().unwrap()).unwrap(),
group_secret: to_str(&value["inputs"]["group_secret_key"]),
@ -166,8 +167,9 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
}
let mut commitments = HashMap::new();
let mut machines = machines
.drain(..)
#[allow(clippy::needless_collect)] // Fails to compile without it due to borrow checking
let machines = machines
.into_iter()
.enumerate()
.map(|(c, (i, machine))| {
let nonce = |i| {
@ -224,8 +226,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
.collect::<Vec<_>>();
let mut shares = HashMap::new();
let mut machines = machines
.drain(..)
let machines = machines
.into_iter()
.enumerate()
.map(|(c, (i, machine))| {
let (machine, share) = machine
@ -244,7 +246,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
})
.collect::<HashMap<_, _>>();
for (i, machine) in machines.drain() {
for (i, machine) in machines {
let sig = machine.complete(clone_without(&shares, i)).unwrap();
let mut serialized = sig.R.to_bytes().as_ref().to_vec();
serialized.extend(sig.s.to_repr().as_ref());
@ -265,7 +267,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
unimplemented!()
}
fn fill_bytes(&mut self, dest: &mut [u8]) {
dest.copy_from_slice(&self.0.remove(0))
dest.copy_from_slice(&self.0.remove(0));
}
fn try_fill_bytes(&mut self, _: &mut [u8]) -> Result<(), rand_core::Error> {
unimplemented!()
@ -347,7 +349,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
machines.push((i, AlgorithmMachine::new(IetfSchnorr::<C, H>::ietf(), keys[i].clone())));
}
for (i, machine) in machines.drain(..) {
for (i, machine) in machines {
let (_, preprocess) = machine.preprocess(&mut frosts.clone());
// Calculate the expected nonces

View file

@ -62,12 +62,9 @@ impl Schnorrkel {
/// Create a new algorithm with the specified context.
///
/// If the context is greater than or equal to 4 GB in size, this will panic.
pub fn new(context: &'static [u8]) -> Schnorrkel {
Schnorrkel {
context,
schnorr: Schnorr::new(MerlinTranscript::new(b"FROST Schnorrkel")),
msg: None,
}
#[must_use]
pub fn new(context: &'static [u8]) -> Self {
Self { context, schnorr: Schnorr::new(MerlinTranscript::new(b"FROST Schnorrkel")), msg: None }
}
}

View file

@ -21,5 +21,5 @@ fn test() {
let signature = sign(&mut OsRng, Schnorrkel::new(CONTEXT), keys, machines, MSG);
let key = PublicKey::from_bytes(key.to_bytes().as_ref()).unwrap();
key.verify(&mut SigningContext::new(CONTEXT).bytes(MSG), &signature).unwrap()
key.verify(&mut SigningContext::new(CONTEXT).bytes(MSG), &signature).unwrap();
}