Meaningful changes from aggressive-clippy

I do want to enable a few specific lints, yet aggressive-clippy as a whole
isn't worthwhile.
Luke Parker 2023-07-08 11:29:05 -04:00
parent 3c6cc42c23
commit 93b1656f86
39 changed files with 127 additions and 143 deletions
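The commit body doesn't name the lints, but the mechanical edits below line up with well-known clippy lints. A sketch of how such lints could be enabled at the crate level (the lint names are inferred from the edits; this commit's actual lint configuration isn't shown here):

#![warn(
  clippy::cloned_instead_of_copied,           // .iter().cloned() -> .iter().copied()
  clippy::explicit_iter_loop,                 // for x in v.iter() -> for x in &v
  clippy::redundant_closure_for_method_calls, // .map(|c| c.calculate()) -> .map(Commitment::calculate)
  clippy::manual_let_else,                    // if let ... { } else { return } -> let ... else
  clippy::manual_assert,                      // if !cond { panic!(..) } -> assert!(cond, ..)
  clippy::or_fun_call,                        // .ok_or(f()) -> .ok_or_else(|| f())
  clippy::wildcard_enum_match_arm,            // _ => ... -> explicit variants
)]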


@@ -140,7 +140,7 @@ pub struct Commitment {
}
impl Commitment {
- /// The zero commitment, defined as a mask of 1 (as to not be the identity) and a 0 amount.
+ /// A commitment to zero, defined with a mask of 1 (as to not be the identity).
pub fn zero() -> Commitment {
Commitment { mask: Scalar::one(), amount: 0 }
}
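The reworded doc matters: this is a commitment to an amount of zero, not a zero commitment. Writing the Pedersen commitment as $C = xG + aH$ (mask $x$, amount $a$; notation assumed from context), zero() yields $C = 1 \cdot G + 0 \cdot H = G$, which carries no value yet isn't the identity point a mask of 0 would produce.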


@@ -2,7 +2,7 @@ use std_shims::vec::Vec;
use crate::hash;
- pub fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
+ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
match leafs.len() {
0 => root,
1 => hash(&[root, leafs[0]].concat()),


@@ -35,10 +35,10 @@ impl BorromeanSignatures {
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
- for s0 in self.s0.iter() {
+ for s0 in &self.s0 {
w.write_all(s0)?;
}
- for s1 in self.s1.iter() {
+ for s1 in &self.s1 {
w.write_all(s1)?;
}
w.write_all(&self.ee)


@@ -50,7 +50,7 @@ pub(crate) fn vector_exponent(
pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
let slice =
- &[cache.to_bytes().as_ref(), mash.iter().cloned().flatten().collect::<Vec<_>>().as_ref()]
+ &[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
.concat();
*cache = hash_to_scalar(slice);
*cache
@@ -118,9 +118,9 @@ pub(crate) fn LR_statements(
let mut res = a
.0
.iter()
- .cloned()
- .zip(G_i.iter().cloned())
- .chain(b.0.iter().cloned().zip(H_i.iter().cloned()))
+ .copied()
+ .zip(G_i.iter().copied())
+ .chain(b.0.iter().copied().zip(H_i.iter().copied()))
.collect::<Vec<_>>();
res.push((cL, U));
res


@@ -190,7 +190,7 @@ impl OriginalStruct {
}
// Rebuild all challenges
- let (mut cache, commitments) = hash_commitments(commitments.iter().cloned());
+ let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);
let z = hash_to_scalar(&y.to_bytes());


@@ -196,7 +196,7 @@ impl PlusStruct {
}
// Rebuild all challenges
- let (mut cache, commitments) = hash_plus(commitments.iter().cloned());
+ let (mut cache, commitments) = hash_plus(commitments.iter().copied());
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
let yinv = y.invert().unwrap();
let z = hash_to_scalar(&y.to_bytes());
@@ -220,8 +220,6 @@ impl PlusStruct {
let A1 = normalize(&self.A1);
let B = normalize(&self.B);
- let mut commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();
// Verify it
let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));
@@ -237,7 +235,7 @@ impl PlusStruct {
let esq = e * e;
let minus_esq = -esq;
let commitment_weight = minus_esq * yMNy;
- for (i, commitment) in commitments.drain(..).enumerate() {
+ for (i, commitment) in commitments.iter().map(EdwardsPoint::mul_by_cofactor).enumerate() {
proof.push((commitment_weight * zpow[i], commitment));
}


@@ -119,7 +119,7 @@ impl Mul<&[EdwardsPoint]> for &ScalarVector {
type Output = EdwardsPoint;
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
debug_assert_eq!(self.len(), b.len());
- multiexp(&self.0.iter().cloned().zip(b.iter().cloned()).collect::<Vec<_>>())
+ multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
}
}


@@ -19,7 +19,7 @@ pub struct Mlsag {
impl Mlsag {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
- for ss in self.ss.iter() {
+ for ss in &self.ss {
write_raw_vec(write_scalar, ss, w)?;
}
write_scalar(&self.cc, w)


@@ -299,9 +299,9 @@ impl<R: RpcConnection> Rpc<R> {
match self.get_block(self.get_block_hash(number).await?).await {
Ok(block) => {
// Make sure this is actually the block for this number
- match block.miner_tx.prefix.inputs[0] {
-   Input::Gen(actual) => {
-     if usize::try_from(actual).unwrap() == number {
+ match block.miner_tx.prefix.inputs.get(0) {
+   Some(Input::Gen(actual)) => {
+     if usize::try_from(*actual).unwrap() == number {
Ok(block)
} else {
Err(RpcError::InvalidNode)
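Indexing inputs[0] would panic if a malicious or broken node returned a block whose miner transaction has no inputs; .get(0) turns that case into a fallible lookup the caller can map to RpcError::InvalidNode. A minimal illustration:

let inputs: Vec<u8> = vec![];
assert!(inputs.get(0).is_none()); // where inputs[0] would panic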


@@ -125,7 +125,7 @@ pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
- .filter(|point| point.is_torsion_free())
+ .filter(EdwardsPoint::is_torsion_free)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
}


@@ -345,14 +345,13 @@ impl Transaction {
hashes.extend(hash(&buf));
buf.clear();
- match self.rct_signatures.prunable {
-   RctPrunable::Null => buf.resize(32, 0),
+ hashes.extend(&match self.rct_signatures.prunable {
+   RctPrunable::Null => [0; 32],
_ => {
self.rct_signatures.prunable.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
-     buf = hash(&buf).to_vec();
+     hash(&buf)
}
- }
- hashes.extend(&buf);
+ });
hash(&hashes)
}
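For reference, the structure this computes (per the surrounding code and Monero's transaction format) is the three-part transaction hash $\mathrm{tx\_hash} = H\big(H(\mathrm{prefix}) \,\|\, H(\mathrm{rct\_base}) \,\|\, h_p\big)$, where $h_p = 0^{32}$ for RctPrunable::Null and $h_p = H(\mathrm{prunable})$ otherwise. The refactor feeds each component hash into hashes directly instead of round-tripping through buf.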


@@ -241,9 +241,13 @@ impl ZeroizeOnDrop for Scanner {}
impl Scanner {
/// Create a Scanner from a ViewPair.
///
/// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
///
+ /// When an output is successfully scanned, the output key MUST be saved to disk.
+ ///
+ /// When a new scanner is created, ALL saved output keys must be passed in to be secure.
+ ///
/// If None is passed, a modified shared key derivation is used which is immune to the burning
/// bug (specifically the Guaranteed feature from Featured Addresses).
pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
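A sketch of the documented contract (load_saved_output_keys and persist_output_key are hypothetical persistence helpers; building the ViewPair and obtaining the Transaction are elided):

let saved: HashSet<CompressedEdwardsY> = load_saved_output_keys(); // ALL keys saved so far
let mut scanner = Scanner::from_view(view_pair, Some(saved));
for output in scanner.scan_transaction(&tx).not_locked() {
  persist_output_key(&output); // per the docs, MUST be on disk before the output is treated as received
}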


@@ -263,6 +263,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
}
/// Return the outputs if they're not timelocked, or an empty vector if they are.
+ #[must_use]
pub fn not_locked(&self) -> Vec<O> {
if self.0 == Timelock::None {
return self.1.clone();
@@ -271,6 +272,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
}
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
+ #[must_use]
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
// If the Timelocks are comparable, return the outputs if they're now unlocked
if self.0 <= timelock {
@@ -280,6 +282,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
}
}
+ #[must_use]
pub fn ignore_timelock(&self) -> Vec<O> {
self.1.clone()
}
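The three accessors differ only in how they treat the timelock; a usage sketch per the docs above (Timelock::Block is assumed from the surrounding API):

let scanned = scanner.scan_transaction(&tx);
let now = scanned.not_locked();                        // empty if still locked
let later = scanned.unlocked(Timelock::Block(height)); // None if the timelocks aren't comparable
let all = scanned.ignore_timelock();                   // every output, timelock disregarded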
@@ -293,16 +296,11 @@ impl Scanner {
return Timelocked(tx.prefix.timelock, vec![]);
}
- let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
- let extra = if let Ok(extra) = extra {
-   extra
- } else {
+ let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()) else {
return Timelocked(tx.prefix.timelock, vec![]);
};
- let (tx_key, additional) = if let Some((tx_key, additional)) = extra.keys() {
-   (tx_key, additional)
- } else {
+ let Some((tx_key, additional)) = extra.keys() else {
return Timelocked(tx.prefix.timelock, vec![]);
};
@@ -453,7 +451,7 @@ impl Scanner {
};
let mut res = vec![];
- for tx in txs.drain(..) {
+ for tx in txs {
if let Some(timelock) = map(self.scan_transaction(&tx), index) {
res.push(timelock);
}


@@ -296,7 +296,7 @@ impl SignableTransaction {
protocol: Protocol,
r_seed: Option<Zeroizing<[u8; 32]>>,
inputs: Vec<SpendableOutput>,
- mut payments: Vec<(MoneroAddress, u64)>,
+ payments: Vec<(MoneroAddress, u64)>,
change_address: Option<Change>,
data: Vec<Vec<u8>>,
fee_rate: Fee,
@@ -382,7 +382,7 @@ impl SignableTransaction {
Err(TransactionError::TooManyOutputs)?;
}
- let mut payments = payments.drain(..).map(InternalPayment::Payment).collect::<Vec<_>>();
+ let mut payments = payments.into_iter().map(InternalPayment::Payment).collect::<Vec<_>>();
if let Some(change) = change_address {
payments.push(InternalPayment::Change(change, in_amount - out_amount));
}
@@ -562,11 +562,12 @@ impl SignableTransaction {
}
/// Returns the eventuality of this transaction.
+ ///
/// The eventuality is defined as the TX extra/outputs this transaction will create, if signed
/// with the specified seed. This eventuality can be compared to on-chain transactions to see
/// if the transaction has already been signed and published.
pub fn eventuality(&self) -> Option<Eventuality> {
- let inputs = self.inputs.iter().map(|input| input.key()).collect::<Vec<_>>();
+ let inputs = self.inputs.iter().map(SpendableOutput::key).collect::<Vec<_>>();
let (tx_key, additional, outputs, id) = Self::prepare_payments(
self.r_seed.as_ref()?,
&inputs,
@@ -606,7 +607,7 @@ impl SignableTransaction {
let (tx_key, additional, outputs, id) = Self::prepare_payments(
&r_seed,
- &self.inputs.iter().map(|input| input.key()).collect::<Vec<_>>(),
+ &self.inputs.iter().map(SpendableOutput::key).collect::<Vec<_>>(),
&mut self.payments,
uniqueness,
);
@@ -656,7 +657,7 @@ impl SignableTransaction {
fee,
encrypted_amounts,
pseudo_outs: vec![],
- commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
+ commitments: commitments.iter().map(Commitment::calculate).collect(),
},
prunable: RctPrunable::Clsag { bulletproofs: bp, clsags: vec![], pseudo_outs: vec![] },
},
@@ -713,13 +714,18 @@ impl SignableTransaction {
impl Eventuality {
/// Enables building a HashMap of Extra -> Eventuality for efficiently checking if an on-chain
/// transaction may match this eventuality.
+ ///
+ /// This extra is cryptographically bound to:
+ /// 1) A specific set of inputs (via their output key)
+ /// 2) A specific seed for the ephemeral keys
///
/// This extra may be used in a transaction with a distinct set of inputs, yet no honest
/// transaction which doesn't satisfy this Eventuality will contain it.
pub fn extra(&self) -> &[u8] {
&self.extra
}
+ #[must_use]
pub fn matches(&self, tx: &Transaction) -> bool {
if self.payments.len() != tx.prefix.outputs.len() {
return false;
@@ -752,9 +758,10 @@ impl Eventuality {
}
// TODO: Remove this when the following for loop is updated
- if !rct_type.compact_encrypted_amounts() {
-   panic!("created an Eventuality for a very old RctType we don't support proving for");
- }
+ assert!(
+   rct_type.compact_encrypted_amounts(),
+   "created an Eventuality for a very old RctType we don't support proving for"
+ );
for (o, (expected, actual)) in outputs.iter().zip(tx.prefix.outputs.iter()).enumerate() {
// Verify the output, commitment, and encrypted amount.
@@ -815,7 +822,7 @@ impl Eventuality {
String::from_utf8(read_vec(read_byte, r)?)
.ok()
.and_then(|str| MoneroAddress::from_str_raw(&str).ok())
- .ok_or(io::Error::new(io::ErrorKind::Other, "invalid address"))
+ .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid address"))
}
fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
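Per the doc on extra(), the intended pattern is to index planned transactions by their extra and confirm candidates with matches(); a sketch (signable is a SignableTransaction assumed to have been built with an explicit r_seed, since eventuality() otherwise returns None):

let mut pending: HashMap<Vec<u8>, Eventuality> = HashMap::new();
let eventuality = signable.eventuality().unwrap();
pending.insert(eventuality.extra().to_vec(), eventuality);

// For each transaction observed on-chain:
if let Some(ev) = pending.get(tx.prefix.extra.as_slice()) {
  if ev.matches(&tx) {
    // the planned transaction was signed and published
  }
}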


@@ -274,7 +274,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Find out who's included
// This may not be a valid set of signers yet the algorithm machine will error if it's not
commitments.remove(&self.i); // Remove, if it was included for some reason
- let mut included = commitments.keys().cloned().collect::<Vec<_>>();
+ let mut included = commitments.keys().copied().collect::<Vec<_>>();
included.push(self.i);
included.sort_unstable();
@@ -325,7 +325,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
// above
- for map in commitments.iter_mut() {
+ for map in &mut commitments {
map.remove(&self.i);
}
@@ -430,7 +430,9 @@ impl SignatureMachine<Transaction> for TransactionSignatureMachine {
pseudo_outs.push(pseudo_out);
}
}
- _ => unreachable!("attempted to sign a multisig TX which wasn't CLSAG"),
+ RctPrunable::MlsagBorromean { .. } | RctPrunable::MlsagBulletproofs { .. } => {
+   unreachable!("attempted to sign a multisig TX which wasn't CLSAG")
+ }
}
Ok(tx)
}


@@ -12,8 +12,7 @@ test!(
let arbitrary_data = vec![b'\0'; MAX_ARBITRARY_DATA_SIZE - 1];
// make sure we can add to tx
- let result = builder.add_data(arbitrary_data.clone());
- assert!(result.is_ok());
+ builder.add_data(arbitrary_data.clone()).unwrap();
builder.add_payment(addr, 5);
(builder.build().unwrap(), (arbitrary_data,))
@@ -37,8 +36,7 @@ test!(
// Add data multiple times
for data in &data {
- let result = builder.add_data(data.clone());
- assert!(result.is_ok());
+ builder.add_data(data.clone()).unwrap();
}
builder.add_payment(addr, 5);
@@ -65,7 +63,7 @@ test!(
// Reduce data size and retry. The data will now be 255 bytes long (including the added
// marker), exactly
data.pop();
- assert!(builder.add_data(data.clone()).is_ok());
+ builder.add_data(data.clone()).unwrap();
builder.add_payment(addr, 5);
(builder.build().unwrap(), data)


@@ -37,8 +37,8 @@ test!(
},
),
(
- |rpc, mut builder: Builder, addr, mut outputs: Vec<ReceivedOutput>| async move {
-   for output in outputs.drain(..) {
+ |rpc, mut builder: Builder, addr, outputs: Vec<ReceivedOutput>| async move {
+   for output in outputs {
builder.add_input(SpendableOutput::from(&rpc, output).await.unwrap());
}
builder.add_payment(addr, 6);


@@ -70,7 +70,7 @@ async fn from_wallet_rpc_to_self(spec: AddressSpec) {
// make an addr
let (_, view_pair, _) = runner::random_address();
- let addr = Address::from_str(&view_pair.address(Network::Mainnet, spec).to_string()[..]).unwrap();
+ let addr = Address::from_str(&view_pair.address(Network::Mainnet, spec).to_string()).unwrap();
// refresh & make a tx
wallet_rpc.refresh(None).await.unwrap();
@@ -103,7 +103,9 @@ async fn from_wallet_rpc_to_self(spec: AddressSpec) {
assert_eq!(output.metadata.payment_id, payment_id);
assert_eq!(output.metadata.subaddress, None);
}
- _ => assert_eq!(output.metadata.subaddress, None),
+ AddressSpec::Standard | AddressSpec::Featured { .. } => {
+   assert_eq!(output.metadata.subaddress, None)
+ }
}
assert_eq!(output.commitment().amount, 1000000000000);
}
@@ -228,7 +230,7 @@ test!(
for _ in 0 .. 2 {
// Subtract 1 since we prefix data with 127
let data = vec![b'a'; MAX_TX_EXTRA_NONCE_SIZE - 1];
- assert!(builder.add_data(data).is_ok());
+ builder.add_data(data).unwrap();
}
(builder.build().unwrap(), (wallet_rpc,))


@@ -23,7 +23,7 @@ pub trait Db: 'static + Send + Sync + Clone + Debug + Get {
fn key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
let db_len = u8::try_from(db_dst.len()).unwrap();
let dst_len = u8::try_from(item_dst.len()).unwrap();
- [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat().to_vec()
+ [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
}
fn txn(&mut self) -> Self::Transaction<'_>;
}
@@ -38,7 +38,11 @@ impl<'a> Get for MemDbTxn<'a> {
if self.2.contains(key.as_ref()) {
return None;
}
- self.1.get(key.as_ref()).cloned().or(self.0 .0.read().unwrap().get(key.as_ref()).cloned())
+ self
+   .1
+   .get(key.as_ref())
+   .cloned()
+   .or_else(|| self.0 .0.read().unwrap().get(key.as_ref()).cloned())
}
}
impl<'a> DbTxn for MemDbTxn<'a> {


@@ -53,10 +53,7 @@ mod shims {
impl Read for &[u8] {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
- let mut read = buf.len();
- if self.len() < buf.len() {
-   read = self.len();
- }
+ let read = buf.len().min(self.len());
buf[.. read].copy_from_slice(&self[.. read]);
*self = &self[read ..];
Ok(read)


@@ -2,33 +2,12 @@
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
- #[cfg(not(feature = "std"))]
+ #[allow(unused_imports)]
+ #[doc(hidden)]
#[macro_use]
pub extern crate alloc;
pub mod sync;
pub mod collections;
pub mod io;
- pub mod vec {
-   #[cfg(not(feature = "std"))]
-   pub use alloc::vec::*;
-   #[cfg(feature = "std")]
-   pub use std::vec::*;
- }
- pub mod str {
-   #[cfg(not(feature = "std"))]
-   pub use alloc::str::*;
-   #[cfg(feature = "std")]
-   pub use std::str::*;
- }
- pub mod string {
-   #[cfg(not(feature = "std"))]
-   pub use alloc::string::*;
-   #[cfg(feature = "std")]
-   pub use std::string::*;
- }
+ pub use alloc::vec;
+ pub use alloc::str;
+ pub use alloc::string;
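With extern crate alloc now unconditional, the per-module shims collapse into plain re-exports; alloc is also linkable under std, so consumers are unchanged in either mode. A sketch of downstream use (matching the import style seen earlier in this diff):

use std_shims::vec::Vec;       // alloc::vec::Vec under both std and no_std
use std_shims::string::String;

fn hi() -> String {
  let mut v: Vec<u8> = Vec::new();
  v.extend(b"hi");
  String::from_utf8(v).unwrap()
}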


@@ -1,4 +1,5 @@
pub use core::sync::*;
+ pub use alloc::sync::*;
mod mutex_shim {
#[cfg(feature = "std")]
@@ -57,7 +58,7 @@ mod oncelock_shim {
let mut lock = self.0.lock();
if !*lock {
unsafe {
- (core::ptr::addr_of!(self.1) as *mut Option<_>).write_unaligned(Some(f()));
+ core::ptr::addr_of!(self.1).cast_mut().write_unaligned(Some(f()));
}
}
*lock = true;
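The pointer change is behavior-preserving: <*const T>::cast_mut (stable since Rust 1.65) expresses the same const-to-mut cast without restating the pointee type. A minimal check:

let x = 5u8;
let p: *const u8 = core::ptr::addr_of!(x);
assert_eq!(p as *mut u8, p.cast_mut());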


@@ -134,7 +134,7 @@ fn test_secp256k1() {
)
.to_repr()
.iter()
- .cloned()
+ .copied()
.collect::<Vec<_>>(),
hex::decode("acc83278035223c1ba464e2d11bfacfc872b2b23e1041cf5f6130da21e4d8068").unwrap()
);
@@ -167,7 +167,7 @@ f4e8cf80aec3f888d997900ac7e3e349944b5a6b47649fc32186d2f1238103c6\
)
.to_repr()
.iter()
- .cloned()
+ .copied()
.collect::<Vec<_>>(),
hex::decode("f871dfcf6bcd199342651adc361b92c941cb6a0d8c8c1a3b91d79e2c1bf3722d").unwrap()
);


@@ -1,5 +1,5 @@
use core::{
- ops::{DerefMut, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
+ ops::{Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
iter::{Sum, Product},
};
@@ -234,7 +234,7 @@ impl FieldElement {
let mut bits = 0;
for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() {
bits <<= 1;
- let mut bit = u8_from_bool(bit.deref_mut());
+ let mut bit = u8_from_bool(&mut bit);
bits |= bit;
bit.zeroize();
@@ -300,7 +300,7 @@ impl Sum<FieldElement> for FieldElement {
impl<'a> Sum<&'a FieldElement> for FieldElement {
fn sum<I: Iterator<Item = &'a FieldElement>>(iter: I) -> FieldElement {
- iter.cloned().sum()
+ iter.copied().sum()
}
}
@@ -316,7 +316,7 @@ impl Product<FieldElement> for FieldElement {
impl<'a> Product<&'a FieldElement> for FieldElement {
fn product<I: Iterator<Item = &'a FieldElement>>(iter: I) -> FieldElement {
- iter.cloned().product()
+ iter.copied().product()
}
}


@@ -4,7 +4,7 @@
use core::{
borrow::Borrow,
- ops::{Deref, DerefMut, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
+ ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
iter::{Iterator, Sum, Product},
hash::{Hash, Hasher},
};
@@ -201,7 +201,7 @@ impl Scalar {
let mut bits = 0;
for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() {
bits <<= 1;
- let mut bit = u8_from_bool(bit.deref_mut());
+ let mut bit = u8_from_bool(&mut bit);
bits |= bit;
bit.zeroize();
@@ -337,7 +337,7 @@ impl PrimeField for Scalar {
// methods does not
// We do not use one of its methods to ensure we write via zeroize
for mut bit in bits.iter_mut() {
- bit.deref_mut().zeroize();
+ bit.zeroize();
}
res
}


@@ -390,9 +390,10 @@ impl<C: Ciphersuite> Encryption<C> {
participant: Participant,
msg: EncryptionKeyMessage<C, M>,
) -> M {
- if self.enc_keys.contains_key(&participant) {
-   panic!("Re-registering encryption key for a participant");
- }
+ assert!(
+   !self.enc_keys.contains_key(&participant),
+   "Re-registering encryption key for a participant"
+ );
self.enc_keys.insert(participant, msg.enc_key);
msg.msg
}


@@ -92,7 +92,8 @@ pub struct KeyGenMachine<C: Ciphersuite> {
impl<C: Ciphersuite> KeyGenMachine<C> {
/// Create a new machine to generate a key.
- // The context string should be unique among multisigs.
+ ///
+ /// The context string should be unique among multisigs.
pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine<C> {
KeyGenMachine { params, context, _curve: PhantomData }
}
@@ -171,7 +172,6 @@ fn polynomial<F: PrimeField + Zeroize>(
/// channel.
///
/// If any participant sends multiple secret shares to another participant, they are faulty.
// This should presumably be written as SecretShare(Zeroizing<F::Repr>).
// It's unfortunately not possible as F::Repr doesn't have Zeroize as a bound.
// The encryption system also explicitly uses Zeroizing<M> so it can ensure anything being
@@ -353,7 +353,7 @@ impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
fn zeroize(&mut self) {
self.params.zeroize();
self.secret.zeroize();
- for (_, commitments) in self.commitments.iter_mut() {
+ for commitments in self.commitments.values_mut() {
commitments.zeroize();
}
self.encryption.zeroize();
@@ -499,7 +499,7 @@ impl<C: Ciphersuite> fmt::Debug for BlameMachine<C> {
impl<C: Ciphersuite> Zeroize for BlameMachine<C> {
fn zeroize(&mut self) {
- for (_, commitments) in self.commitments.iter_mut() {
+ for commitments in self.commitments.values_mut() {
commitments.zeroize();
}
self.encryption.zeroize();
@@ -536,10 +536,9 @@ impl<C: Ciphersuite> BlameMachine<C> {
Err(DecryptionError::InvalidProof) => return recipient,
};
- let share = match Option::<C::F>::from(C::F::from_repr(share_bytes.0)) {
-   Some(share) => share,
+ let Some(share) = Option::<C::F>::from(C::F::from_repr(share_bytes.0)) else {
// If this isn't a valid scalar, the sender is faulty
-   None => return sender,
+   return sender;
};
// If this isn't a valid share, the sender is faulty


@@ -25,7 +25,7 @@ pub const PARTICIPANTS: u16 = 5;
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
- pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
+ pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
@@ -40,7 +40,7 @@ pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
pub fn recover_key<C: Ciphersuite>(keys: &HashMap<Participant, ThresholdKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
- let included = keys.keys().cloned().collect::<Vec<_>>();
+ let included = keys.keys().copied().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| {
accum + (lagrange::<C::F>(*i, &included) * keys.secret_share().deref())
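A quick usage sketch of the clone_without helper documented above:

use std::collections::HashMap;

let map = HashMap::from([(1, "a"), (2, "b")]);
assert_eq!(clone_without(&map, &1), HashMap::from([(2, "b")]));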


@@ -24,9 +24,9 @@ pub fn test_musig<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
const CONTEXT: &[u8] = b"MuSig Test";
// Empty signing set
- assert!(musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).is_err());
+ musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err();
// Signing set we're not part of
- assert!(musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).is_err());
+ musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err();
// Test with n keys
{


@@ -1,6 +1,6 @@
use core::ops::{Deref, DerefMut};
#[cfg(feature = "serialize")]
- use std::io::{Read, Write};
+ use std::io::{self, Read, Write};
use thiserror::Error;
@@ -51,15 +51,15 @@ fn u8_from_bool(bit_ref: &mut bool) -> u8 {
}
#[cfg(feature = "serialize")]
- pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G> {
+ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> io::Result<G> {
let mut repr = G::Repr::default();
r.read_exact(repr.as_mut())?;
let point = G::from_bytes(&repr);
let Some(point) = Option::<G>::from(point) else {
- Err(std::io::Error::new(std::io::ErrorKind::Other, "invalid point"))?
+ Err(io::Error::new(io::ErrorKind::Other, "invalid point"))?
};
if point.to_bytes().as_ref() != repr.as_ref() {
- Err(std::io::Error::new(std::io::ErrorKind::Other, "non-canonical point"))?;
+ Err(io::Error::new(io::ErrorKind::Other, "non-canonical point"))?;
}
Ok(point)
}
@@ -439,7 +439,7 @@ where
/// Write a Cross-Group Discrete Log Equality proof to a type satisfying std::io::Write.
#[cfg(feature = "serialize")]
- pub fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
+ pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
for bit in &self.bits {
bit.write(w)?;
}
@@ -452,7 +452,7 @@ where
/// Read a Cross-Group Discrete Log Equality proof from a type satisfying std::io::Read.
#[cfg(feature = "serialize")]
- pub fn read<R: Read>(r: &mut R) -> std::io::Result<Self> {
+ pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
let bits_per_group = BitSignature::from(SIGNATURE).bits();


@@ -77,7 +77,7 @@ fn test_dleq() {
assert!(proof
.verify(
&mut transcript(),
- generators[.. i].iter().cloned().rev().collect::<Vec<_>>().as_ref(),
+ generators[.. i].iter().copied().rev().collect::<Vec<_>>().as_ref(),
&keys[.. i]
)
.is_err());
@@ -86,7 +86,7 @@ fn test_dleq() {
.verify(
&mut transcript(),
&generators[.. i],
- keys[.. i].iter().cloned().rev().collect::<Vec<_>>().as_ref()
+ keys[.. i].iter().copied().rev().collect::<Vec<_>>().as_ref()
)
.is_err());
}


@@ -80,7 +80,7 @@ macro_rules! field {
$DELTA: expr,
) => {
use core::{
- ops::{DerefMut, Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign},
+ ops::{Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign},
iter::{Sum, Product},
};
@@ -150,7 +150,7 @@ macro_rules! field {
let mut bits = 0;
for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() {
bits <<= 1;
- let mut bit = u8_from_bool(bit.deref_mut());
+ let mut bit = u8_from_bool(&mut bit);
bits |= bit;
bit.zeroize();


@@ -1,5 +1,5 @@
use core::{
- ops::{DerefMut, Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign},
+ ops::{Add, AddAssign, Neg, Sub, SubAssign, Mul, MulAssign},
iter::Sum,
};
@@ -232,7 +232,7 @@ impl Mul<Scalar> for Point {
let mut bits = 0;
for (i, mut bit) in other.to_le_bits().iter_mut().rev().enumerate() {
bits <<= 1;
- let mut bit = u8_from_bool(bit.deref_mut());
+ let mut bit = u8_from_bool(&mut bit);
bits |= bit;
bit.zeroize();


@@ -27,7 +27,7 @@ pub const PARTICIPANTS: u16 = 5;
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
- pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
+ pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
@@ -177,8 +177,8 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
machines,
|rng, machines| {
// Cache and rebuild half of the machines
- let mut included = machines.keys().cloned().collect::<Vec<_>>();
- for i in included.drain(..) {
+ let included = machines.keys().cloned().collect::<Vec<_>>();
+ for i in included {
if (rng.next_u64() % 2) == 0 {
let cache = machines.remove(&i).unwrap().cache();
machines.insert(
@@ -226,7 +226,7 @@ pub fn test_offset_schnorr<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(rng: &m
let offset = C::F::from(5);
let offset_key = group_key + (C::generator() * offset);
- for (_, keys) in keys.iter_mut() {
+ for keys in keys.values_mut() {
*keys = keys.offset(offset);
assert_eq!(keys.group_key(), offset_key);
}


@@ -176,13 +176,8 @@ pub fn test_invalid_commitment<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let nonce =
preprocess.commitments.nonces.get_mut(usize::try_from(rng.next_u64()).unwrap() % 2).unwrap();
let generators_len = nonce.generators.len();
- *nonce
-   .generators
-   .get_mut(usize::try_from(rng.next_u64()).unwrap() % generators_len)
-   .unwrap()
-   .0
-   .get_mut(usize::try_from(rng.next_u64()).unwrap() % 2)
-   .unwrap() = C::G::random(&mut *rng);
+ nonce.generators[usize::try_from(rng.next_u64()).unwrap() % generators_len].0
+   [usize::try_from(rng.next_u64()).unwrap() % 2] = C::G::random(&mut *rng);
// The commitments are validated at time of deserialization (read_preprocess)
// Accordingly, serialize it and read it again to make sure that errors


@@ -166,8 +166,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
}
let mut commitments = HashMap::new();
- let mut machines = machines
-   .drain(..)
+ let machines = machines
+   .into_iter()
.enumerate()
.map(|(c, (i, machine))| {
let nonce = |i| {
@@ -224,8 +224,8 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
.collect::<Vec<_>>();
let mut shares = HashMap::new();
- let mut machines = machines
-   .drain(..)
+ let machines = machines
+   .into_iter()
.enumerate()
.map(|(c, (i, machine))| {
let (machine, share) = machine
@@ -242,9 +242,9 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
shares.insert(*i, machine.read_share::<&[u8]>(&mut share.as_ref()).unwrap());
(i, machine)
})
- .collect::<HashMap<_, _>>();
+ .collect::<Vec<_>>();
- for (i, machine) in machines.drain() {
+ for (i, machine) in machines {
let sig = machine.complete(clone_without(&shares, i)).unwrap();
let mut serialized = sig.R.to_bytes().as_ref().to_vec();
serialized.extend(sig.s.to_repr().as_ref());
@@ -347,7 +347,7 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
machines.push((i, AlgorithmMachine::new(IetfSchnorr::<C, H>::ietf(), keys[i].clone())));
}
- for (i, machine) in machines.drain(..) {
+ for (i, machine) in machines {
let (_, preprocess) = machine.preprocess(&mut frosts.clone());
// Calculate the expected nonces


@@ -18,7 +18,7 @@ fn flat<Id: Copy + Zeroize, G: Group + Zeroize>(
where
<G as Group>::Scalar: PrimeFieldBits + Zeroize,
{
- Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>())
+ Zeroizing::new(slice.iter().flat_map(|pairs| pairs.1.iter()).copied().collect::<Vec<_>>())
}
/// A batch verifier intended to verify a series of statements are each equivalent to zero.
@@ -35,7 +35,8 @@ where
<G as Group>::Scalar: PrimeFieldBits + Zeroize,
{
/// Create a new batch verifier, expected to verify the following amount of statements.
- /// This is a size hint and is not required to be accurate.
+ ///
+ /// `capacity` is a size hint and is not required to be accurate.
pub fn new(capacity: usize) -> BatchVerifier<Id, G> {
BatchVerifier(Zeroizing::new(Vec::with_capacity(capacity)))
}


@@ -2,7 +2,6 @@
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
- use core::ops::DerefMut;
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
@@ -62,7 +61,7 @@ where
groupings.push(vec![0; (bits.len() + (w_usize - 1)) / w_usize]);
for (i, mut bit) in bits.iter_mut().enumerate() {
- let mut bit = u8_from_bool(bit.deref_mut());
+ let mut bit = u8_from_bool(&mut bit);
groupings[p][i / w_usize] |= bit << (i % w_usize);
bit.zeroize();
}


@@ -106,7 +106,7 @@ pub(crate) fn aggregate<C: Ciphersuite>() {
keys
.iter()
.map(|key| C::generator() * key.deref())
- .zip(challenges.iter().cloned())
+ .zip(challenges.iter().copied())
.collect::<Vec<_>>()
.as_ref(),
));