Update processor/ to correct prior commit

Luke Parker 2023-03-25 04:06:25 -04:00
parent 839734354a
commit 9157f8d0a0
11 changed files with 64 additions and 45 deletions


@@ -140,9 +140,9 @@ impl Metadata {
 /// A received output, defined as its absolute ID, data, and metadata.
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
 pub struct ReceivedOutput {
-  pub(crate) absolute: AbsoluteId,
-  pub(crate) data: OutputData,
-  pub(crate) metadata: Metadata,
+  pub absolute: AbsoluteId,
+  pub data: OutputData,
+  pub metadata: Metadata,
 }

 impl ReceivedOutput {
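With these fields public, downstream code such as the processor can read a scanned output's parts directly. A minimal caller-side sketch, assuming a ReceivedOutput value named `received` is in scope:

  // Borrow the now-public fields; no crate-internal accessors are needed.
  let absolute = &received.absolute;
  let data = &received.data;
  let metadata = &received.metadata;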


@@ -550,7 +550,7 @@ impl SignableTransaction {
     let mut serialized = Vec::with_capacity(extra_len);
     extra.write(&mut serialized).unwrap();
-    debug_assert_eq!(extra_len, extra);
+    debug_assert_eq!(extra_len, serialized.len());
     serialized
   }


@@ -10,10 +10,9 @@ use serde::{Serialize, Deserialize};
 use dkg::{Participant, ThresholdParams};

-use serai_primitives::WithAmount;
-use in_instructions_primitives::InInstruction;
-use tokens_primitives::OutInstruction;
-use validator_sets_primitives::ValidatorSetInstance;
+use in_instructions_primitives::InInstructionWithBalance;
+use tokens_primitives::OutInstructionWithBalance;
+use validator_sets_primitives::ValidatorSet;

 #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize, Serialize, Deserialize)]
 pub struct SubstrateContext {
@@ -26,7 +25,7 @@ pub mod key_gen {
   #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Zeroize, Serialize, Deserialize)]
   pub struct KeyGenId {
-    pub set: ValidatorSetInstance,
+    pub set: ValidatorSet,
     pub attempt: u32,
   }
@@ -123,12 +122,12 @@ pub mod substrate {
   #[derive(Clone, PartialEq, Eq, Debug, Zeroize, Serialize, Deserialize)]
   pub enum CoordinatorMessage {
     BlockAcknowledged { context: SubstrateContext, key: Vec<u8>, block: Vec<u8> },
-    Burns { context: SubstrateContext, burns: Vec<WithAmount<OutInstruction>> },
+    Burns { context: SubstrateContext, burns: Vec<OutInstructionWithBalance> },
   }

   #[derive(Clone, PartialEq, Eq, Debug, Zeroize, Serialize, Deserialize)]
   pub enum ProcessorMessage {
-    Update { key: Vec<u8>, block: Vec<u8>, instructions: Vec<WithAmount<InInstruction>> },
+    Update { key: Vec<u8>, block: Vec<u8>, instructions: Vec<InInstructionWithBalance> },
   }
 }
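Both payloads now carry the full Balance-bearing types rather than WithAmount wrappers. A minimal sketch of consuming the new Burns shape, assuming a `msg: substrate::CoordinatorMessage` binding; the field access mirrors how the processor unpacks these types later in this commit:

  match msg {
    substrate::CoordinatorMessage::Burns { context: _, burns } => {
      for burn in &burns {
        // Each burn exposes the OutInstruction plus a Balance { coin, amount }.
        let _amount_units = burn.balance.amount.0;
      }
    }
    substrate::CoordinatorMessage::BlockAcknowledged { .. } => {}
  }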


@@ -36,7 +36,10 @@ use bitcoin_serai::bitcoin::{
   PackedLockTime, Sequence, Script, Witness, TxIn, TxOut, Address as BAddress,
 };

-use serai_client::{primitives::MAX_DATA_LEN, coins::bitcoin::Address};
+use serai_client::{
+  primitives::{MAX_DATA_LEN, BITCOIN, Amount, Balance},
+  coins::bitcoin::Address,
+};

 use crate::{
   coins::{
@@ -93,8 +96,8 @@ impl OutputTrait for Output {
     res
   }

-  fn amount(&self) -> u64 {
-    self.output.value()
+  fn balance(&self) -> Balance {
+    Balance { coin: BITCOIN, amount: Amount(self.output.value()) }
   }

   fn data(&self) -> &[u8] {
@@ -342,7 +345,7 @@ impl Coin for Bitcoin {
       let offset_repr_ref: &[u8] = offset_repr.as_ref();
       let kind = kinds[offset_repr_ref];

-      let data = if kind == OutputType::External {
+      let mut data = if kind == OutputType::External {
        (|| {
          for output in &tx.output {
            if output.script_pubkey.is_op_return() {


@@ -10,6 +10,8 @@ use frost::{
   sign::PreprocessMachine,
 };

+use serai_client::primitives::Balance;
+
 #[cfg(feature = "bitcoin")]
 pub mod bitcoin;
 #[cfg(feature = "bitcoin")]
@@ -94,8 +96,11 @@ pub trait Output: Send + Sync + Sized + Clone + PartialEq + Eq + Debug {
   fn kind(&self) -> OutputType;

   fn id(&self) -> Self::Id;
-  fn amount(&self) -> u64;
+  fn balance(&self) -> Balance;
+  fn amount(&self) -> u64 {
+    self.balance().amount.0
+  }
   fn data(&self) -> &[u8];

   fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()>;
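balance() is now the sole required accessor; amount() becomes a provided method derived from it, so existing call sites in the scanner and scheduler below keep compiling unchanged. A minimal caller-side sketch, assuming `output` implements this trait:

  // The default amount() simply unwraps the Amount inside balance().
  let balance = output.balance();
  assert_eq!(output.amount(), balance.amount.0);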


@@ -25,7 +25,10 @@ use monero_serai::{
 use tokio::time::sleep;

-pub use serai_client::{primitives::MAX_DATA_LEN, coins::monero::Address};
+pub use serai_client::{
+  primitives::{MAX_DATA_LEN, MONERO, Amount, Balance},
+  coins::monero::Address,
+};

 use crate::{
   Payment, Plan, additional_key,
@@ -62,8 +65,8 @@ impl OutputTrait for Output {
     self.0.output.data.key.compress().to_bytes()
   }

-  fn amount(&self) -> u64 {
-    self.0.commitment().amount
+  fn balance(&self) -> Balance {
+    Balance { coin: MONERO, amount: Amount(self.0.commitment().amount) }
   }

   fn data(&self) -> &[u8] {


@@ -15,7 +15,7 @@ use frost::{
 use log::info;

-use serai_client::validator_sets::primitives::ValidatorSetInstance;
+use serai_client::validator_sets::primitives::ValidatorSet;

 use messages::key_gen::*;

 use crate::{DbTxn, Db, coins::Coin};
@@ -33,18 +33,18 @@ impl<C: Coin, D: Db> KeyGenDb<C, D> {
     D::key(b"KEY_GEN", dst, key)
   }
-  fn params_key(set: &ValidatorSetInstance) -> Vec<u8> {
+  fn params_key(set: &ValidatorSet) -> Vec<u8> {
     Self::key_gen_key(b"params", bincode::serialize(set).unwrap())
   }
   fn save_params(
     &mut self,
     txn: &mut D::Transaction,
-    set: &ValidatorSetInstance,
+    set: &ValidatorSet,
     params: &ThresholdParams,
   ) {
     txn.put(Self::params_key(set), bincode::serialize(params).unwrap());
   }
-  fn params(&self, set: &ValidatorSetInstance) -> ThresholdParams {
+  fn params(&self, set: &ValidatorSet) -> ThresholdParams {
     // Directly unwraps the .get() as this will only be called after being set
     bincode::deserialize(&self.0.get(Self::params_key(set)).unwrap()).unwrap()
   }
@@ -121,8 +121,8 @@ pub struct KeyGen<C: Coin, D: Db> {
   db: KeyGenDb<C, D>,
   entropy: Zeroizing<[u8; 32]>,

-  active_commit: HashMap<ValidatorSetInstance, SecretShareMachine<C::Curve>>,
-  active_share: HashMap<ValidatorSetInstance, KeyMachine<C::Curve>>,
+  active_commit: HashMap<ValidatorSet, SecretShareMachine<C::Curve>>,
+  active_share: HashMap<ValidatorSet, KeyMachine<C::Curve>>,
 }

 impl<C: Coin, D: Db> KeyGen<C, D> {
@@ -145,8 +145,8 @@ impl<C: Coin, D: Db> KeyGen<C, D> {
     let context = |id: &KeyGenId| {
       // TODO2: Also embed the chain ID/genesis block
       format!(
-        "Serai Key Gen. Session: {}, Index: {}, Attempt: {}",
-        id.set.session.0, id.set.index.0, id.attempt
+        "Serai Key Gen. Session: {:?}, Network: {:?}, Attempt: {}",
+        id.set.session, id.set.network, id.attempt
       )
     };


@@ -19,9 +19,9 @@ use tokio::time::sleep;
 use scale::Decode;
 use serai_client::{
-  primitives::{MAX_DATA_LEN, Amount, WithAmount},
-  tokens::primitives::OutInstruction,
-  in_instructions::primitives::{Shorthand, RefundableInInstruction},
+  primitives::MAX_DATA_LEN,
+  tokens::primitives::{OutInstruction, OutInstructionWithBalance},
+  in_instructions::primitives::{Shorthand, RefundableInInstruction, InInstructionWithBalance},
 };

 use messages::{SubstrateContext, sign, substrate, CoordinatorMessage, ProcessorMessage};
@@ -383,12 +383,15 @@ async fn run<C: Coin, D: Db, Co: Coordinator>(raw_db: D, coin: C, mut coordinato
          let mut payments = vec![];
          for out in burns.clone() {
-            let WithAmount { data: OutInstruction { address, data }, amount } = out;
+            let OutInstructionWithBalance {
+              instruction: OutInstruction { address, data },
+              balance,
+            } = out;
            if let Ok(address) = C::Address::try_from(address.consume()) {
              payments.push(Payment {
                address,
                data: data.map(|data| data.consume()),
-                amount: amount.0,
+                amount: balance.amount.0,
              });
            }
          }
@@ -425,20 +428,24 @@ async fn run<C: Coin, D: Db, Co: Coordinator>(raw_db: D, coin: C, mut coordinato
                return None;
              }

-              let data = output.data();
-              if data.len() > MAX_DATA_LEN {
+              let mut data = output.data();
+              let max_data_len = MAX_DATA_LEN.try_into().unwrap();
+              if data.len() > max_data_len {
                error!(
                  "data in output {} exceeded MAX_DATA_LEN ({MAX_DATA_LEN}): {}",
                  hex::encode(output.id()),
                  data.len(),
                );
-                data = data[.. MAX_DATA_LEN];
+                data = &data[.. max_data_len];
              }

              let shorthand = Shorthand::decode(&mut data).ok()?;
              let instruction = RefundableInInstruction::try_from(shorthand).ok()?;
              // TODO2: Set instruction.origin if not set (and handle refunds in general)
-              Some(WithAmount { data: instruction.instruction, amount: Amount(output.amount()) })
+              Some(InInstructionWithBalance {
+                instruction: instruction.instruction,
+                balance: output.balance(),
+              })
            }).collect(),
          })).await;
        },
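The prior `data = data[.. MAX_DATA_LEN];` did not compile: range-indexing a `&[u8]` yields an unsized `[u8]` without a re-borrow, and MAX_DATA_LEN is not a usize (hence the added try_into). A standalone sketch of the corrected pattern, with `raw` as a hypothetical byte slice:

  // Minimal sketch, assuming MAX_DATA_LEN converts to usize without overflow.
  let max_data_len: usize = MAX_DATA_LEN.try_into().unwrap();
  let mut data: &[u8] = raw;
  if data.len() > max_data_len {
    // Re-borrow the truncated prefix so `&mut data` can still be handed to Shorthand::decode.
    data = &data[.. max_data_len];
  }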


@@ -377,7 +377,7 @@ impl<C: Coin, D: Db> Scanner<C, D> {
              "block {} had output {} worth {}",
              hex::encode(&block_id),
              hex::encode(&id),
-              output.amount()
+              output.amount(),
            );

            // On Bitcoin, the output ID should be unique for a given chain


@@ -117,15 +117,16 @@ impl<C: Coin> Scheduler<C> {
      // If we can fulfill planned TXs with this output, do so
      // We could limit this to UTXOs where `utxo.kind() == OutputType::Branch`, yet there's no
      // practical benefit in doing so
-      if let Some(plans) = self.plans.get_mut(&utxo.amount()) {
+      let amount = utxo.amount();
+      if let Some(plans) = self.plans.get_mut(&amount) {
        // Execute the first set of payments possible with an output of this amount
        let payments = plans.pop_front().unwrap();
        // They won't be equal if we dropped payments due to being dust
-        assert!(utxo.amount() >= payments.iter().map(|payment| payment.amount).sum::<u64>());
+        assert!(amount >= payments.iter().map(|payment| payment.amount).sum::<u64>());

        // If we've grabbed the last plan for this output amount, remove it from the map
        if plans.is_empty() {
-          self.plans.remove(&utxo.amount());
+          self.plans.remove(&amount);
        }

        // Create a TX for these payments


@@ -7,7 +7,10 @@ use rand_core::{RngCore, OsRng};
 use group::GroupEncoding;
 use frost::{Participant, ThresholdParams, tests::clone_without};

-use serai_client::validator_sets::primitives::{Session, ValidatorSetIndex, ValidatorSetInstance};
+use serai_client::{
+  primitives::MONERO_NET_ID,
+  validator_sets::primitives::{Session, ValidatorSet},
+};

 use messages::{SubstrateContext, key_gen::*};

 use crate::{
@@ -16,10 +19,8 @@ use crate::{
   tests::util::db::MemDb,
 };

-const ID: KeyGenId = KeyGenId {
-  set: ValidatorSetInstance { session: Session(1), index: ValidatorSetIndex(2) },
-  attempt: 3,
-};
+const ID: KeyGenId =
+  KeyGenId { set: ValidatorSet { session: Session(1), network: MONERO_NET_ID }, attempt: 3 };

 pub async fn test_key_gen<C: Coin>() {
   let mut entropies = HashMap::new();