Lint previous commit

Luke Parker 2022-08-22 13:35:49 -04:00
parent 5c106cecf6
commit 5b2940e161
GPG key ID: F9F1386DB1E119B6
6 changed files with 40 additions and 22 deletions

File 1 of 6

@@ -165,7 +165,7 @@ impl Rpc {
       )
       .await?;
-    if txs.missed_tx.len() != 0 {
+    if !txs.missed_tx.is_empty() {
       Err(RpcError::TransactionsNotFound(
         txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect(),
       ))?;
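This hunk is the clippy len_zero-style lint: is_empty states the intent directly instead of comparing len against zero. A minimal sketch of the pattern, with a hypothetical helper name:

// Hypothetical helper illustrating the lint fixed above.
fn any_transactions_missing(missed_tx: &[String]) -> bool {
  // Preferred over `missed_tx.len() != 0`.
  !missed_tx.is_empty()
}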

File 2 of 6

@@ -141,7 +141,7 @@ impl Decoys {
     let mut outputs = Vec::with_capacity(inputs.len());
     for input in inputs {
       real.push(input.global_index);
-      outputs.push((real[real.len() - 1], [input.output.data.key, input.commitment().calculate()]));
+      outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
     }
     let distribution_len = {

File 3 of 6

@@ -94,6 +94,14 @@ pub struct ReceivedOutput {
 }
 
 impl ReceivedOutput {
+  pub fn key(&self) -> EdwardsPoint {
+    self.data.key
+  }
+
+  pub fn key_offset(&self) -> Scalar {
+    self.data.key_offset
+  }
+
   pub fn commitment(&self) -> Commitment {
     self.data.commitment.clone()
   }
@@ -133,6 +141,14 @@ impl SpendableOutput {
     Ok(output)
   }
 
+  pub fn key(&self) -> EdwardsPoint {
+    self.output.key()
+  }
+
+  pub fn key_offset(&self) -> Scalar {
+    self.output.key_offset()
+  }
+
   pub fn commitment(&self) -> Commitment {
     self.output.commitment()
   }
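These accessors let callers stop reaching through output.data directly, which is exactly what the later hunks in this commit do. A minimal sketch of a caller, assuming the crate exposes SpendableOutput at the path shown (an assumption here) and curve25519-dalek's Scalar:

use curve25519_dalek::scalar::Scalar;
// `SpendableOutput` is the wallet type extended above; this import path is assumed.
use monero_serai::wallet::SpendableOutput;

// Derive the one-time private key for an owned output from the wallet's spend key.
fn one_time_private_key(spend: Scalar, input: &SpendableOutput) -> Scalar {
  // Previously written as `spend + input.output.data.key_offset`.
  spend + input.key_offset()
}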
@@ -182,7 +198,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
 }
 
 impl Scanner {
-  pub fn scan_stateless(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
+  pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
     let extra = Extra::deserialize(&mut Cursor::new(&tx.prefix.extra));
     let keys;
     let extra = if let Ok(extra) = extra {
@@ -204,7 +220,7 @@ impl Scanner {
     }
 
     for key in &keys {
-      let (view_tag, key_offset, payment_id_xor) = shared_key(
+      let (view_tag, shared_key, payment_id_xor) = shared_key(
         if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
         &self.pair.view,
         key,
@@ -227,11 +243,17 @@ impl Scanner {
         // P - shared == spend
         let subaddress = self
           .subaddresses
-          .get(&(output.key - (&key_offset * &ED25519_BASEPOINT_TABLE)).compress());
+          .get(&(output.key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
         if subaddress.is_none() {
           continue;
         }
+        // If the output key has torsion, subtracting the non-torsioned shared key yields a
+        // torsioned key. Our HashMap of keys only holds torsion-free keys, so we wouldn't
+        // identify such an output as ours.
+        // If we did, it'd enable bypassing the included burning bug protection, however.
+        debug_assert!(output.key.is_torsion_free());
+        let key_offset = shared_key + self.pair.subaddress(*subaddress.unwrap());
 
         // Since we've found an output to us, get its amount
         let mut commitment = Commitment::zero();
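For context, the lookup above derives the candidate spend key as P - shared*G and only accepts outputs whose key lands on a known subaddress spend key; since the map only holds torsion-free keys, a torsioned output key can never match, which is what keeps the burning bug protection intact. A standalone sketch of that flow (not the crate's actual scanner), with subaddress_scalar standing in for self.pair.subaddress(...):

use std::collections::HashMap;
use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  edwards::{CompressedEdwardsY, EdwardsPoint},
  scalar::Scalar,
};

// Returns the full key offset for an owned output, or None if the output isn't ours.
fn scan_output_key(
  output_key: EdwardsPoint,
  shared_key: Scalar,
  subaddresses: &HashMap<CompressedEdwardsY, (u32, u32)>,
  subaddress_scalar: impl Fn((u32, u32)) -> Scalar,
) -> Option<Scalar> {
  // P - shared * G should be one of our subaddress spend keys.
  let spend = output_key - (&shared_key * &ED25519_BASEPOINT_TABLE);
  let index = *subaddresses.get(&spend.compress())?;
  // A torsioned output key can't have matched a torsion-free map entry.
  debug_assert!(output_key.is_torsion_free());
  // Fold the subaddress scalar in now, so downstream code only sees one offset.
  Some(shared_key + subaddress_scalar(index))
}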
@@ -241,14 +263,14 @@ impl Scanner {
           // Regular transaction
         } else {
           let amount = match tx.rct_signatures.base.ecdh_info.get(o) {
-            Some(amount) => amount_decryption(*amount, key_offset),
+            Some(amount) => amount_decryption(*amount, shared_key),
             // This should never happen, yet it may be possible with miner transactions?
             // Using get just decreases the possibility of a panic and lets us move on in that case
             None => break,
           };
 
           // Rebuild the commitment to verify it
-          commitment = Commitment::new(commitment_mask(key_offset), amount);
+          commitment = Commitment::new(commitment_mask(shared_key), amount);
           // If this is a malicious commitment, move to the next output
           // Any other R value will calculate to a different spend key and are therefore ignorable
           if Some(&commitment.calculate()) != tx.rct_signatures.base.commitments.get(o) {
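The rebuild above is a Pedersen commitment check: recompute C = mask*G + amount*H from the decrypted amount and the mask derived from the shared key, and skip the output if it doesn't equal the commitment stated in the transaction. A minimal sketch of just that identity, with h standing in for Monero's fixed second generator H (passed in here rather than hardcoded):

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::EdwardsPoint, scalar::Scalar};

// Rebuild C = mask * G + amount * H and compare it to the commitment in the transaction.
fn commitment_matches(mask: Scalar, amount: u64, h: EdwardsPoint, stated: EdwardsPoint) -> bool {
  let rebuilt = (&mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(amount) * h);
  rebuilt == stated
}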
@@ -260,11 +282,7 @@ impl Scanner {
 
         res.push(ReceivedOutput {
           absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },
-          data: OutputData {
-            key: output.key,
-            key_offset: key_offset + self.pair.subaddress(*subaddress.unwrap()),
-            commitment,
-          },
+          data: OutputData { key: output.key, key_offset, commitment },
           metadata: Metadata { subaddress: (0, 0), payment_id },
         });
@@ -311,7 +329,7 @@ impl Scanner {
     let mut res = vec![];
     for tx in txs {
-      if let Some(timelock) = map(self.scan_stateless(&tx), index) {
+      if let Some(timelock) = map(self.scan_transaction(&tx), index) {
         res.push(timelock);
       }
       index += u64::try_from(tx.prefix.outputs.len()).unwrap();

File 4 of 6

@@ -129,8 +129,8 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
   for (i, input) in inputs.iter().enumerate() {
     signable.push((
-      spend + input.output.data.key_offset,
-      generate_key_image(spend + input.output.data.key_offset),
+      spend + input.key_offset(),
+      generate_key_image(spend + input.key_offset()),
       ClsagInput::new(input.commitment().clone(), decoys[i].clone())
         .map_err(TransactionError::ClsagError)?,
     ));
@@ -345,8 +345,8 @@ impl SignableTransaction {
   ) -> Result<Transaction, TransactionError> {
     let mut images = Vec::with_capacity(self.inputs.len());
     for input in &self.inputs {
-      let mut offset = spend + input.output.data.key_offset;
-      if (&offset * &ED25519_BASEPOINT_TABLE) != input.output.data.key {
+      let mut offset = spend + input.key_offset();
+      if (&offset * &ED25519_BASEPOINT_TABLE) != input.key() {
         Err(TransactionError::WrongPrivateKey)?;
       }
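The check above (and its sibling in prepare_inputs) is a sanity guard: the recomputed one-time private key, spend + key_offset, must map back onto the output's public key under the basepoint, otherwise the caller supplied the wrong spend key. As a standalone sketch:

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::EdwardsPoint, scalar::Scalar};

// True if spend + key_offset is actually the private key for this output.
fn owns_output(spend: Scalar, key_offset: Scalar, output_key: EdwardsPoint) -> bool {
  (&(spend + key_offset) * &ED25519_BASEPOINT_TABLE) == output_key
}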

File 5 of 6

@@ -104,7 +104,7 @@ impl SignableTransaction {
       transcript.append_message(b"input_output_index", &[input.output.absolute.o]);
       // Not including this, with a doxxed list of payments, would allow brute forcing the inputs
       // to determine RNG seeds and therefore the true spends
-      transcript.append_message(b"input_shared_key", &input.output.data.key_offset.to_bytes());
+      transcript.append_message(b"input_shared_key", &input.key_offset().to_bytes());
     }
     for payment in &self.payments {
       transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
@@ -116,14 +116,14 @@ impl SignableTransaction {
     for (i, input) in self.inputs.iter().enumerate() {
       // Check this the right set of keys
-      let offset = keys.offset(dalek_ff_group::Scalar(input.output.data.key_offset));
-      if offset.group_key().0 != input.output.data.key {
+      let offset = keys.offset(dalek_ff_group::Scalar(input.key_offset()));
+      if offset.group_key().0 != input.key() {
         Err(TransactionError::WrongPrivateKey)?;
       }
 
       clsags.push(
         AlgorithmMachine::new(
-          ClsagMultisig::new(transcript.clone(), input.output.data.key, inputs[i].clone())
+          ClsagMultisig::new(transcript.clone(), input.key(), inputs[i].clone())
             .map_err(TransactionError::MultisigError)?,
           offset,
           &included,

File 6 of 6

@@ -98,7 +98,7 @@ async fn send_core(test: usize, multisig: bool) {
   // Grab the largest output available
   let output = {
-    let mut outputs = scanner.scan_stateless(tx.as_ref().unwrap()).ignore_timelock();
+    let mut outputs = scanner.scan_transaction(tx.as_ref().unwrap()).ignore_timelock();
     outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount).reverse());
     outputs.swap_remove(0)
   };