Handle dust

Luke Parker 2024-08-20 18:20:28 -04:00
parent 951872b026
commit 155ad48f4c
5 changed files with 55 additions and 19 deletions

Cargo.lock

@@ -8669,6 +8669,7 @@ dependencies = [
  "log",
  "parity-scale-codec",
  "serai-db",
+ "serai-primitives",
  "serai-processor-messages",
  "serai-processor-primitives",
  "thiserror",


@@ -34,6 +34,24 @@ pub trait Id:
 }
 impl<const N: usize> Id for [u8; N] where [u8; N]: Default {}
 
+/// A wrapper for a group element which implements the borsh traits.
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub struct BorshG<G: GroupEncoding>(pub G);
+impl<G: GroupEncoding> BorshSerialize for BorshG<G> {
+  fn serialize<W: borsh::io::Write>(&self, writer: &mut W) -> borsh::io::Result<()> {
+    writer.write_all(self.0.to_bytes().as_ref())
+  }
+}
+impl<G: GroupEncoding> BorshDeserialize for BorshG<G> {
+  fn deserialize_reader<R: borsh::io::Read>(reader: &mut R) -> borsh::io::Result<Self> {
+    let mut repr = G::Repr::default();
+    reader.read_exact(repr.as_mut())?;
+    Ok(Self(
+      Option::<G>::from(G::from_bytes(&repr)).ok_or(borsh::io::Error::other("invalid point"))?,
+    ))
+  }
+}
+
 /// The type of the output.
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub enum OutputType {
@@ -171,21 +189,3 @@ pub trait Block: Send + Sync + Sized + Clone + Debug {
   /// Scan all outputs within this block to find the outputs spendable by this key.
   fn scan_for_outputs(&self, key: Self::Key) -> Vec<Self::Output>;
 }
-
-/// A wrapper for a group element which implements the borsh traits.
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
-pub struct BorshG<G: GroupEncoding>(pub G);
-impl<G: GroupEncoding> BorshSerialize for BorshG<G> {
-  fn serialize<W: borsh::io::Write>(&self, writer: &mut W) -> borsh::io::Result<()> {
-    writer.write_all(self.0.to_bytes().as_ref())
-  }
-}
-impl<G: GroupEncoding> BorshDeserialize for BorshG<G> {
-  fn deserialize_reader<R: borsh::io::Read>(reader: &mut R) -> borsh::io::Result<Self> {
-    let mut repr = G::Repr::default();
-    reader.read_exact(repr.as_mut())?;
-    Ok(Self(
-      Option::<G>::from(G::from_bytes(&repr)).ok_or(borsh::io::Error::other("invalid point"))?,
-    ))
-  }
-}
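
The two `BorshG` hunks above are a straight move: the wrapper is lifted, unchanged, from the bottom of the file to sit with the other primitive definitions. As a reference for how it behaves, here is a minimal round-trip sketch; the `round_trip` helper is illustrative only (it assumes the `group` and `borsh` crates plus `BorshG` in scope) and is not part of the commit.

```rust
use borsh::BorshDeserialize;
use group::GroupEncoding;

// Serialize a group element via its canonical `to_bytes` encoding, then
// deserialize it again, erroring on bytes which aren't a valid point.
fn round_trip<G: GroupEncoding>(point: G) -> borsh::io::Result<G> {
  let bytes = borsh::to_vec(&BorshG(point))?;
  let BorshG(decoded) = BorshG::<G>::try_from_slice(&bytes)?;
  Ok(decoded)
}
```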


@@ -35,5 +35,7 @@ tokio = { version = "1", default-features = false, features = ["rt-multi-thread"
 serai-db = { path = "../../common/db" }
+serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
+
 messages = { package = "serai-processor-messages", path = "../messages" }
 primitives = { package = "serai-processor-primitives", path = "../primitives" }


@@ -2,6 +2,7 @@ use core::{fmt::Debug, time::Duration};
 
 use tokio::sync::mpsc;
 
+use serai_primitives::{Coin, Amount};
 use primitives::{ReceivedOutput, BlockHeader, Block};
 
 mod db;
@@ -57,6 +58,20 @@
 
   /// Fetch a block by its number.
   async fn block_by_number(&self, number: u64) -> Result<Self::Block, Self::EphemeralError>;
+
+  /// The cost to aggregate an input as of the specified block.
+  ///
+  /// This is defined as the transaction fee for a 2-input, 1-output transaction.
+  async fn cost_to_aggregate(
+    &self,
+    coin: Coin,
+    block_number: u64,
+  ) -> Result<Amount, Self::EphemeralError>;
+
+  /// The dust threshold for the specified coin.
+  ///
+  /// This should be a value worth handling at a human level.
+  fn dust(&self, coin: Coin) -> Amount;
 }
 
 type BlockIdFor<S> = <<<S as ScannerFeed>::Block as Block>::Header as BlockHeader>::Id;
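
To ground the two new `ScannerFeed` requirements: `cost_to_aggregate` prices the fee of a 2-input, 1-output transaction as of a given block, and `dust` is a per-coin floor chosen at a human-meaningful value. A hedged implementor sketch follows; the fee-rate input and the weight constants are placeholder assumptions, not values from this commit.

```rust
use serai_primitives::Amount;

// Sketch: under a Bitcoin-style fee model, the cost to aggregate is the fee
// rate multiplied by the weight of a 2-input, 1-output transaction.
fn aggregate_cost_estimate(fee_per_weight_unit: u64) -> Amount {
  const INPUT_WEIGHT: u64 = 68; // assumed weight per input
  const OUTPUT_WEIGHT: u64 = 31; // assumed weight per output
  const TX_OVERHEAD: u64 = 11; // assumed fixed transaction overhead
  Amount(fee_per_weight_unit * ((2 * INPUT_WEIGHT) + OUTPUT_WEIGHT + TX_OVERHEAD))
}
```

An implementor's async `cost_to_aggregate` would fetch the fee rate as of `block_number` and feed it through a function like this, while `dust` can simply return a per-coin constant.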


@@ -62,7 +62,25 @@ impl<D: Db, S: ScannerFeed> ContinuallyRan for ScanForOutputsTask<D, S> {
         for output in block.scan_for_outputs(key.key.0) {
           assert_eq!(output.key(), key.key.0);
-          // TODO: Check for dust
+
+          // Check this isn't dust
+          {
+            let mut balance = output.balance();
+            // First, subtract 2 * the cost to aggregate, as detailed in
+            // `spec/processor/UTXO Management.md`
+            // TODO: Cache this
+            let cost_to_aggregate =
+              self.feed.cost_to_aggregate(balance.coin, b).await.map_err(|e| {
+                format!("couldn't fetch cost to aggregate {:?} at {b}: {e:?}", balance.coin)
+              })?;
+            balance.amount.0 -= 2 * cost_to_aggregate.0;
+
+            // Now, check it's still past the dust threshold
+            if balance.amount.0 < self.feed.dust(balance.coin).0 {
+              continue;
+            }
+          }
+
           outputs.push(output);
         }
       }
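
The block added above can be read as a single predicate: an output is kept only if, after paying twice the cost to aggregate it (as specified in `spec/processor/UTXO Management.md`), it still clears the coin's dust threshold. A standalone restatement follows, written here with saturating subtraction so that outputs worth less than the aggregation cost classify as dust instead of underflowing; that choice belongs to this sketch, not to the committed code.

```rust
use serai_primitives::{Amount, Balance};

// Sketch of the dust predicate: `Balance` pairs a Coin with an Amount, and the
// two thresholds come from `ScannerFeed::cost_to_aggregate` and `ScannerFeed::dust`.
fn is_dust(balance: &Balance, cost_to_aggregate: Amount, dust_threshold: Amount) -> bool {
  let after_aggregation = balance.amount.0.saturating_sub(2 * cost_to_aggregate.0);
  after_aggregation < dust_threshold.0
}
```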