Cache the cost to aggregate

Luke Parker 2024-08-28 23:45:17 -04:00
parent 04a971a024
commit 612c67c537
2 changed files with 21 additions and 5 deletions

View file

@@ -141,7 +141,7 @@ pub trait ScannerFeed: 'static + Send + Sync + Clone {
   async fn cost_to_aggregate(
     &self,
     coin: Coin,
-    block_number: u64,
+    reference_block: &Self::Block,
   ) -> Result<Amount, Self::EphemeralError>;
 
   /// The dust threshold for the specified coin.

View file

@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use scale::Decode;
 
 use serai_db::{Get, DbTxn, Db};
@@ -129,14 +131,17 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ScanTask<D, S> {
       let keys = ScannerGlobalDb::<S>::active_keys_as_of_next_to_scan_for_outputs_block(&txn)
         .expect("scanning for a blockchain without any keys set");
 
+      // The scan data for this block
       let mut scan_data = SenderScanData {
         block_number: b,
         received_external_outputs: vec![],
         forwards: vec![],
         returns: vec![],
       };
+      // The InInstructions for this block
       let mut in_instructions = vec![];
 
+      // The outputs queued for this block
       let queued_outputs = {
         let mut queued_outputs = ScanDb::<S>::take_queued_outputs(&mut txn, b);
         // Sort the queued outputs in case they weren't queued in a deterministic fashion
@@ -148,6 +153,11 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ScanTask<D, S> {
         in_instructions.push(queued_output.in_instruction);
       }
 
+      // We subtract the cost to aggregate from some outputs we scan
+      // This cost is fetched with an asynchronous function which may be non-trivial
+      // We cache the result of this function here to avoid calling it multiple times
+      let mut costs_to_aggregate = HashMap::with_capacity(1);
+
       // Scan for each key
       for key in keys {
         for output in block.scan_for_outputs(key.key) {
@@ -207,13 +217,19 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ScanTask<D, S> {
           // Check this isn't dust
           let balance_to_use = {
             let mut balance = output.balance();
             // First, subtract 2 * the cost to aggregate, as detailed in
             // `spec/processor/UTXO Management.md`
-            // TODO: Cache this
-            let cost_to_aggregate =
-              self.feed.cost_to_aggregate(balance.coin, b).await.map_err(|e| {
+            // We cache this, so if it isn't yet cached, insert it into the cache
+            if let std::collections::hash_map::Entry::Vacant(e) =
+              costs_to_aggregate.entry(balance.coin)
+            {
+              e.insert(self.feed.cost_to_aggregate(balance.coin, &block).await.map_err(|e| {
                format!("couldn't fetch cost to aggregate {:?} at {b}: {e:?}", balance.coin)
-              })?;
+              })?);
+            }
+            let cost_to_aggregate = costs_to_aggregate[&balance.coin];
             balance.amount.0 -= 2 * cost_to_aggregate.0;
 
             // Now, check it's still past the dust threshold
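
The Entry::Vacant form, rather than `costs_to_aggregate.entry(coin).or_insert_with(...)`, is what makes this cache workable: the fetch is async and fallible, so it can't run inside the closure `or_insert_with` expects. A minimal, synchronous sketch of the caching pattern; the `Coin`/`Amount` types and the fetch here are simplified placeholders, not the scanner's:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Coin { Bitcoin, Ether }

#[derive(Clone, Copy, Debug)]
struct Amount(u64);

// Stands in for the expensive, asynchronous cost_to_aggregate call.
fn fetch_cost(coin: Coin) -> Amount {
  println!("fetching cost for {coin:?}");
  Amount(100)
}

fn main() {
  // Capacity of 1, as in the commit: a block usually involves a single coin.
  let mut costs = HashMap::with_capacity(1);

  for coin in [Coin::Bitcoin, Coin::Bitcoin, Coin::Ether] {
    // Only perform the fetch if this coin's cost isn't cached yet.
    if let std::collections::hash_map::Entry::Vacant(e) = costs.entry(coin) {
      e.insert(fetch_cost(coin));
    }
    // Indexing can't panic here: the entry above guarantees the key exists.
    let cost = costs[&coin];
    println!("{coin:?} costs {cost:?} to aggregate");
  }
}

Running this fetches twice (once per coin) across three iterations, which is exactly the saving the commit buys when a block holds many outputs of the same coin.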