Fill in various DB functions

This commit is contained in:
Luke Parker 2024-08-28 23:31:31 -04:00
parent 738636c238
commit 04a971a024
7 changed files with 77 additions and 19 deletions

View file

@ -23,9 +23,9 @@ pub trait Eventuality: Sized + Send + Sync {
fn forwarded_output(&self) -> Option<Self::OutputId>;
/// Read an Eventuality.
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self>;
/// Serialize an Eventuality to a `Vec<u8>`.
fn serialize(&self) -> Vec<u8>;
fn read(reader: &mut impl io::Read) -> io::Result<Self>;
/// Write an Eventuality.
fn write(&self, writer: &mut impl io::Write) -> io::Result<()>;
}
/// A tracker of unresolved Eventualities.
@ -36,3 +36,16 @@ pub struct EventualityTracker<E: Eventuality> {
/// These are keyed by their lookups.
pub active_eventualities: HashMap<Vec<u8>, E>,
}
impl<E: Eventuality> Default for EventualityTracker<E> {
  // A fresh tracker starts with no active Eventualities.
  fn default() -> Self {
    Self { active_eventualities: HashMap::default() }
  }
}
impl<E: Eventuality> EventualityTracker<E> {
  /// Insert an Eventuality into the tracker.
  ///
  /// The Eventuality is keyed by its lookup; an existing entry under the same
  /// lookup is replaced.
  pub fn insert(&mut self, eventuality: E) {
    let lookup = eventuality.lookup();
    self.active_eventualities.insert(lookup, eventuality);
  }
}

View file

@ -8,7 +8,12 @@ use serai_primitives::{ExternalAddress, Balance};
use crate::Id;
/// An address on the external network.
pub trait Address: Send + Sync + TryFrom<ExternalAddress> {}
pub trait Address: Send + Sync + TryFrom<ExternalAddress> {
  /// Write this address.
  ///
  /// NOTE(review): presumably paired with `read` so the serialization
  /// round-trips — confirm implementors uphold this.
  fn write(&self, writer: &mut impl io::Write) -> io::Result<()>;
  /// Read an address.
  fn read(reader: &mut impl io::Read) -> io::Result<Self>;
}
/// The type of the output.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]

View file

@ -1,13 +1,13 @@
use core::marker::PhantomData;
use std::io;
use scale::Encode;
use scale::{Encode, Decode, IoReader};
use borsh::{BorshSerialize, BorshDeserialize};
use serai_db::{Get, DbTxn, create_db, db_channel};
use serai_in_instructions_primitives::InInstructionWithBalance;
use primitives::{EncodableG, ReceivedOutput};
use primitives::{EncodableG, Address, ReceivedOutput};
use crate::{
lifetime::LifetimeStage, ScannerFeed, KeyFor, AddressFor, OutputFor, Return,
@ -38,9 +38,30 @@ pub(crate) struct OutputWithInInstruction<S: ScannerFeed> {
}
impl<S: ScannerFeed> OutputWithInInstruction<S> {
/// Read an `OutputWithInInstruction` from the reader.
///
/// Returns an `io::Error` if the reader runs dry or contains malformed data.
pub(crate) fn read(reader: &mut impl io::Read) -> io::Result<Self> {
  let output = OutputFor::<S>::read(reader)?;
  let return_address = {
    // Option discriminant: 0 = None, 1 = Some
    let mut opt = [0xff];
    reader.read_exact(&mut opt)?;
    match opt[0] {
      0 => None,
      1 => Some(AddressFor::<S>::read(reader)?),
      // Malformed input is an error to surface, not a reason to panic
      _ => return Err(io::Error::other("invalid Option<return address> discriminant")),
    }
  };
  let in_instruction =
    InInstructionWithBalance::decode(&mut IoReader(reader)).map_err(io::Error::other)?;
  Ok(Self { output, return_address, in_instruction })
}
/// Write this `OutputWithInInstruction` to the writer.
///
/// The format matches `read`: the output, a one-byte Option discriminant
/// (1 = Some, 0 = None) optionally followed by the return address, then the
/// SCALE-encoded InInstruction.
pub(crate) fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
  self.output.write(writer)?;
  if let Some(return_address) = &self.return_address {
    writer.write_all(&[1])?;
    return_address.write(writer)?;
  } else {
    writer.write_all(&[0])?;
  }
  self.in_instruction.encode_to(writer);
  Ok(())
}

View file

@ -1,22 +1,18 @@
use core::marker::PhantomData;
use borsh::{BorshSerialize, BorshDeserialize};
use scale::Encode;
use serai_db::{Get, DbTxn, create_db};
use primitives::EventualityTracker;
use primitives::{EncodableG, Eventuality, EventualityTracker};
use crate::{ScannerFeed, KeyFor, EventualityFor};
// The DB macro doesn't support `BorshSerialize + BorshDeserialize` as a bound, hence this.
trait Borshy: BorshSerialize + BorshDeserialize {}
impl<T: BorshSerialize + BorshDeserialize> Borshy for T {}
create_db!(
  ScannerEventuality {
    // The next block to check for resolving eventualities
    NextToCheckForEventualitiesBlock: () -> u64,
    // Serialized Eventualities, keyed by the (encodable) key they're under
    SerializedEventualities: <K: Encode>(key: K) -> Vec<u8>,
  }
);
@ -41,13 +37,25 @@ impl<S: ScannerFeed> EventualityDb<S> {
key: KeyFor<S>,
eventualities: &EventualityTracker<EventualityFor<S>>,
) {
todo!("TODO")
let mut serialized = Vec::with_capacity(eventualities.active_eventualities.len() * 128);
for eventuality in eventualities.active_eventualities.values() {
eventuality.write(&mut serialized).unwrap();
}
SerializedEventualities::set(txn, EncodableG(key), &serialized);
}
/// Read the EventualityTracker for a key.
///
/// Returns an empty tracker if nothing was saved under this key.
pub(crate) fn eventualities(
  getter: &impl Get,
  key: KeyFor<S>,
) -> EventualityTracker<EventualityFor<S>> {
  let serialized = SerializedEventualities::get(getter, EncodableG(key)).unwrap_or_default();
  let mut serialized = serialized.as_slice();

  let mut res = EventualityTracker::default();
  // Entries are concatenated back-to-back by `set_eventualities`
  while !serialized.is_empty() {
    let eventuality = EventualityFor::<S>::read(&mut serialized)
      .expect("invalid Eventuality serialization in DB");
    res.insert(eventuality);
  }
  res
}
}

View file

@ -263,7 +263,7 @@ impl<D: Db, S: ScannerFeed, Sch: Scheduler<S>> ContinuallyRan for EventualityTas
let mut eventualities = EventualityDb::<S>::eventualities(&txn, key);
for new_eventuality in new_eventualities {
eventualities.active_eventualities.insert(new_eventuality.lookup(), new_eventuality);
eventualities.insert(new_eventuality);
}
EventualityDb::<S>::set_eventualities(&mut txn, key, &eventualities);
}

View file

@ -4,6 +4,8 @@ create_db!(
ScannerReport {
// The next block to potentially report
NextToPotentiallyReportBlock: () -> u64,
// The next Batch ID to use
NextBatchId: () -> u32,
}
);
@ -20,6 +22,8 @@ impl ReportDb {
}
/// Acquire the next Batch ID, advancing the stored counter.
///
/// IDs are allocated sequentially starting at 0; each call within committed
/// transactions yields a distinct ID.
pub(crate) fn acquire_batch_id(txn: &mut impl DbTxn) -> u32 {
  let id = NextBatchId::get(txn).unwrap_or(0);
  NextBatchId::set(txn, &(id + 1));
  id
}
}

View file

@ -29,7 +29,14 @@ impl<S: ScannerFeed> ScanDb<S> {
txn: &mut impl DbTxn,
block_number: u64,
) -> Vec<OutputWithInInstruction<S>> {
todo!("TODO")
let serialized = SerializedQueuedOutputs::get(txn, block_number).unwrap_or(vec![]);
let mut serialized = serialized.as_slice();
let mut res = Vec::with_capacity(serialized.len() / 128);
while !serialized.is_empty() {
res.push(OutputWithInInstruction::<S>::read(&mut serialized).unwrap());
}
res
}
pub(crate) fn queue_output_until_block(