Latest clippy

Commit 53d86e2a29 (parent c338b92067) in serai-dex/serai
(https://github.com/serai-dex/serai.git)
12 changed files with 21 additions and 13 deletions
@@ -95,7 +95,7 @@ async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
   }

   let mut batch = BatchVerifier::new(block.txs.len());
-  for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs.into_iter()) {
+  for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
     assert_eq!(
       tx_res.tx_hash,
       hex::encode(tx_hash),
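
The change above drops a redundant `.into_iter()`: `Iterator::zip` takes any `IntoIterator`, so the conversion is a no-op (presumably clippy's `useless_conversion` lint, which was extended to cover such arguments). A minimal standalone sketch, not serai code:

```rust
// Minimal sketch, not serai code: Iterator::zip accepts any IntoIterator,
// so an explicit .into_iter() on its argument is redundant.
fn main() {
  let hashes = [b'a', b'b'];
  let txs = vec!["tx1", "tx2"];

  // Lint-triggering form:
  // for (hash, tx) in hashes.iter().zip(txs.into_iter()) { .. }

  // Form used by the fix: pass the collection directly.
  for (hash, tx) in hashes.iter().zip(txs) {
    println!("{hash}: {tx}");
  }
}
```
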
@@ -12,6 +12,7 @@ use multiexp::multiexp;
 pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
 macro_rules! math_op {
   ($Op: ident, $op: ident, $f: expr) => {
+    #[allow(clippy::redundant_closure_call)]
     impl $Op<Scalar> for ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: Scalar) -> ScalarVector {
@@ -19,6 +20,7 @@ macro_rules! math_op {
       }
     }

+    #[allow(clippy::redundant_closure_call)]
     impl $Op<Scalar> for &ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: Scalar) -> ScalarVector {
@@ -26,6 +28,7 @@ macro_rules! math_op {
       }
     }

+    #[allow(clippy::redundant_closure_call)]
     impl $Op<ScalarVector> for ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: ScalarVector) -> ScalarVector {
@@ -34,6 +37,7 @@ macro_rules! math_op {
       }
     }

+    #[allow(clippy::redundant_closure_call)]
     impl $Op<&ScalarVector> for &ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: &ScalarVector) -> ScalarVector {
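
The allows added above target `clippy::redundant_closure_call`, which fires on this macro's expansion: `$f` is passed as a closure literal and invoked on the spot, i.e. `(|a, b| ..)(self, b)`. A minimal sketch of the pattern with assumed stand-in types (the real macro operates on curve scalars):

```rust
use core::ops::Add;

// Stand-in for the crate's scalar vector; the real one wraps curve scalars.
struct ScalarVector(Vec<u64>);

macro_rules! math_op {
  ($Op: ident, $op: ident, $f: expr) => {
    #[allow(clippy::redundant_closure_call)]
    impl $Op<ScalarVector> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: ScalarVector) -> ScalarVector {
        // $f is substituted here as a closure literal and called immediately,
        // which is exactly what the lint flags.
        $f(self, b)
      }
    }
  };
}

math_op!(Add, add, |a: ScalarVector, b: ScalarVector| ScalarVector(
  a.0.iter().zip(&b.0).map(|(a, b)| a + b).collect()
));

fn main() {
  let sum = ScalarVector(vec![1, 2]) + ScalarVector(vec![10, 20]);
  assert_eq!(sum.0, vec![11, 22]);
}
```
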
@@ -34,7 +34,7 @@ async fn add_inputs(
     .await
     .unwrap();

-  let inputs = spendable_outputs.into_iter().zip(decoys.into_iter()).collect::<Vec<_>>();
+  let inputs = spendable_outputs.into_iter().zip(decoys).collect::<Vec<_>>();

   builder.add_inputs(&inputs);
 }
@@ -84,8 +84,7 @@ async fn handle_new_set<
   Ok(())
 }

-async fn handle_key_gen<D: Db, Pro: Processors>(
-  db: &mut D,
+async fn handle_key_gen<Pro: Processors>(
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
   processors: &Pro,
   serai: &Serai,
@@ -215,6 +214,7 @@ async fn handle_batch_and_burns<Pro: Processors>(

 // Handle a specific Substrate block, returning an error when it fails to get data
 // (not blocking / holding)
+#[allow(clippy::needless_pass_by_ref_mut)] // False positive?
 async fn handle_block<
   D: Db,
   Fut: Future<Output = ()>,
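
The allow added above suppresses `clippy::needless_pass_by_ref_mut`, which flags `&mut` parameters that are only ever read; the "False positive?" note is plausible, as the lint was new at the time and reportedly prone to misfiring, especially around async fns. A minimal sketch, not serai code, of the case the lint is actually meant to catch:

```rust
// Minimal sketch, not serai code: `totals` is never mutated, so clippy's
// needless_pass_by_ref_mut suggests taking `&[u64]` (or `&Vec<u64>`) instead.
fn sum(totals: &mut Vec<u64>) -> u64 {
  totals.iter().sum()
}

fn main() {
  let mut v = vec![1, 2, 3];
  assert_eq!(sum(&mut v), 6);
}
```
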
@@ -265,7 +265,7 @@ async fn handle_block<
   for key_gen in serai.get_key_gen_events(hash).await? {
     if !SubstrateDb::<D>::handled_event(&db.0, hash, event_id) {
       if let ValidatorSetsEvent::KeyGen { set, key_pair } = key_gen {
-        handle_key_gen(&mut db.0, key, processors, serai, &block, set, key_pair).await?;
+        handle_key_gen(key, processors, serai, &block, set, key_pair).await?;
       } else {
         panic!("KeyGen event wasn't KeyGen: {key_gen:?}");
       }
@@ -30,6 +30,7 @@ pub enum RecognizedIdType {
 }

 // Handle a specific Tributary block
+#[allow(clippy::needless_pass_by_ref_mut)] // False positive?
 async fn handle_block<D: Db, Pro: Processors>(
   db: &mut TributaryDb<D>,
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
@@ -2,6 +2,8 @@
 #![no_std] // Prevents writing new code, in what should be a simple wrapper, which requires std
 #![doc = include_str!("../README.md")]

+#![allow(clippy::redundant_closure_call)]
+
 use core::{
   borrow::Borrow,
   ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
@@ -2,6 +2,8 @@
 #![doc = include_str!("../README.md")]
 #![no_std]

+#![allow(clippy::redundant_closure_call)]
+
 #[macro_use]
 mod backend;

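
A side note on scope: this commit handles the same lint two ways. The two wrapper crates above allow it crate-wide, while the monero `ScalarVector` macro allows it per generated impl. A sketch of the two placements (the `(|| 42)()` body is an illustrative stand-in):

```rust
// Crate-level inner attribute, as added to the two crates above:
#![allow(clippy::redundant_closure_call)]

// Item-level outer attribute, as added inside the monero math_op! macro:
#[allow(clippy::redundant_closure_call)]
fn item_scoped() -> u64 {
  // An immediately invoked closure: the pattern the lint flags.
  (|| 42)()
}

fn main() {
  assert_eq!(item_scoped(), 42);
}
```
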
@@ -255,7 +255,7 @@ impl<C: Curve> BindingFactor<C> {
     self.0.insert(i, IndividualBinding { commitments, binding_factors: None });
   }

-  pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &mut T) {
+  pub(crate) fn calculate_binding_factors<T: Clone + Transcript>(&mut self, transcript: &T) {
     for (l, binding) in self.0.iter_mut() {
       let mut transcript = transcript.clone();
       transcript.append_message(b"participant", C::F::from(u64::from(u16::from(*l))).to_repr());
@@ -388,7 +388,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
     );

     // Generate the per-signer binding factors
-    B.calculate_binding_factors(&mut rho_transcript);
+    B.calculate_binding_factors(&rho_transcript);

     // Merge the rho transcript back into the global one to ensure its advanced, while
     // simultaneously committing to everything
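
These two hunks are a fix rather than a suppression: `calculate_binding_factors` only clones the transcript per participant, so a shared reference suffices and the call site drops its `&mut`. A minimal sketch of the idea, with a hypothetical `Transcript` stand-in rather than the real trait:

```rust
// Minimal sketch with a hypothetical Transcript type, not the FROST code:
// cloning needs only a shared reference, so &T is enough even though each
// per-participant copy is then mutated locally.
#[derive(Clone)]
struct Transcript(Vec<u8>);

fn per_participant(base: &Transcript, participant: u16) -> Transcript {
  let mut forked = base.clone();
  forked.0.extend(participant.to_le_bytes());
  forked
}

fn main() {
  let base = Transcript(b"rho".to_vec());
  let forked = per_participant(&base, 1);
  assert_eq!(&forked.0[..3], b"rho");
}
```
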
@@ -159,7 +159,7 @@ where
   // Attempt to get them marked as read

   #[rustversion::since(1.66)]
-  fn mark_read<D: Send + Clone + SecureDigest>(transcript: &mut DigestTranscript<D>) {
+  fn mark_read<D: Send + Clone + SecureDigest>(transcript: &DigestTranscript<D>) {
     // Just get a challenge from the state
     let mut challenge = core::hint::black_box(transcript.0.clone().finalize());
     challenge.as_mut().zeroize();
@@ -410,7 +410,7 @@ impl Network for Bitcoin {
     }

     let this_block_hash = block.id();
-    let this_block_num = (|| async {
+    let this_block_num = (async {
       loop {
         match self.rpc.get_block_number(&this_block_hash).await {
           Ok(number) => return number,
@@ -420,8 +420,7 @@ impl Network for Bitcoin {
         }
         sleep(Duration::from_secs(60)).await;
       }
-    })()
-    .await;
+    }).await;

     for block_num in (eventualities.block_number + 1) .. this_block_num {
       let block = {
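
The change across these two hunks removes an immediately invoked closure around an async block: `(|| async { .. })().await` and `(async { .. }).await` are equivalent, and the former trips `clippy::redundant_closure_call`. A minimal sketch (assumes the `futures` crate for an executor; not the Bitcoin networking code):

```rust
// Minimal sketch, not serai code; assumes the `futures` crate for block_on.
use futures::executor::block_on;

fn main() {
  // Lint-triggering form:
  // let n = block_on((|| async { 42 })());

  // Equivalent form used by the fix. `return` inside the async block exits
  // the block (not main), so loop-and-retry patterns keep working.
  let n = block_on(async {
    loop {
      // Stand-in for a fallible RPC call that eventually succeeds.
      let result: Result<u32, ()> = Ok(42);
      match result {
        Ok(number) => return number,
        Err(()) => continue,
      }
    }
  });
  assert_eq!(n, 42);
}
```
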
@@ -419,7 +419,7 @@ impl Network for Monero {
       .map_err(|_| NetworkError::ConnectionError)
       .unwrap();

-    let inputs = spendable_outputs.into_iter().zip(decoys.into_iter()).collect::<Vec<_>>();
+    let inputs = spendable_outputs.into_iter().zip(decoys).collect::<Vec<_>>();

     let signable = |mut plan: Plan<Self>, tx_fee: Option<_>| {
       // Monero requires at least two outputs
@@ -617,7 +617,7 @@ impl Network for Monero {
       .await
       .unwrap();

-    let inputs = outputs.into_iter().zip(decoys.into_iter()).collect::<Vec<_>>();
+    let inputs = outputs.into_iter().zip(decoys).collect::<Vec<_>>();

     let tx = MSignableTransaction::new(
       protocol,