Smash out the router library

commit cc75a92641 (parent a7d5640642)
13 changed files with 749 additions and 445 deletions
.github/workflows/tests.yml (vendored, 1 line changed)

@@ -55,6 +55,7 @@ jobs:
           -p serai-processor-ethereum-contracts \
           -p serai-processor-ethereum-primitives \
           -p serai-processor-ethereum-deployer \
+          -p serai-processor-ethereum-router \
           -p serai-processor-ethereum-erc20 \
           -p ethereum-serai \
           -p serai-ethereum-processor \
Cargo.lock (generated, 26 lines changed)

@@ -8760,6 +8760,32 @@ dependencies = [
  "k256",
 ]
 
+[[package]]
+name = "serai-processor-ethereum-router"
+version = "0.1.0"
+dependencies = [
+ "alloy-consensus",
+ "alloy-core",
+ "alloy-provider",
+ "alloy-rpc-types-eth",
+ "alloy-simple-request-transport",
+ "alloy-sol-macro",
+ "alloy-sol-macro-expander",
+ "alloy-sol-macro-input",
+ "alloy-sol-types",
+ "alloy-transport",
+ "build-solidity-contracts",
+ "ethereum-schnorr-contract",
+ "group",
+ "k256",
+ "serai-client",
+ "serai-processor-ethereum-deployer",
+ "serai-processor-ethereum-erc20",
+ "serai-processor-ethereum-primitives",
+ "syn 2.0.77",
+ "syn-solidity",
+]
+
 [[package]]
 name = "serai-processor-frost-attempt-manager"
 version = "0.1.0"
@@ -90,6 +90,7 @@ members = [
   "processor/ethereum/contracts",
   "processor/ethereum/primitives",
   "processor/ethereum/deployer",
+  "processor/ethereum/router",
   "processor/ethereum/erc20",
   "processor/ethereum/ethereum-serai",
   "processor/ethereum",
@@ -62,6 +62,7 @@ exceptions = [
   { allow = ["AGPL-3.0"], name = "serai-processor-ethereum-contracts" },
   { allow = ["AGPL-3.0"], name = "serai-processor-ethereum-primitives" },
   { allow = ["AGPL-3.0"], name = "serai-processor-ethereum-deployer" },
+  { allow = ["AGPL-3.0"], name = "serai-processor-ethereum-router" },
   { allow = ["AGPL-3.0"], name = "serai-processor-ethereum-erc20" },
   { allow = ["AGPL-3.0"], name = "ethereum-serai" },
   { allow = ["AGPL-3.0"], name = "serai-ethereum-processor" },
Deleted file (434 lines):

@@ -1,434 +0,0 @@
use std::{sync::Arc, io, collections::HashSet};

use k256::{
  elliptic_curve::{group::GroupEncoding, sec1},
  ProjectivePoint,
};

use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
#[cfg(test)]
use alloy_core::primitives::B256;
use alloy_consensus::TxLegacy;

use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};

use alloy_rpc_types_eth::Filter;
#[cfg(test)]
use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};

pub use crate::{
  Error,
  crypto::{PublicKey, Signature},
  abi::{erc20::Transfer, router as abi},
};
use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent};

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
  Ether,
  Erc20([u8; 20]),
}

impl Coin {
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let mut kind = [0xff];
    reader.read_exact(&mut kind)?;
    Ok(match kind[0] {
      0 => Coin::Ether,
      1 => {
        let mut address = [0; 20];
        reader.read_exact(&mut address)?;
        Coin::Erc20(address)
      }
      _ => Err(io::Error::other("unrecognized Coin type"))?,
    })
  }

  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    match self {
      Coin::Ether => writer.write_all(&[0]),
      Coin::Erc20(token) => {
        writer.write_all(&[1])?;
        writer.write_all(token)
      }
    }
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InInstruction {
  pub id: ([u8; 32], u64),
  pub from: [u8; 20],
  pub coin: Coin,
  pub amount: U256,
  pub data: Vec<u8>,
  pub key_at_end_of_block: ProjectivePoint,
}

impl InInstruction {
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let id = {
      let mut id_hash = [0; 32];
      reader.read_exact(&mut id_hash)?;
      let mut id_pos = [0; 8];
      reader.read_exact(&mut id_pos)?;
      let id_pos = u64::from_le_bytes(id_pos);
      (id_hash, id_pos)
    };

    let mut from = [0; 20];
    reader.read_exact(&mut from)?;

    let coin = Coin::read(reader)?;
    let mut amount = [0; 32];
    reader.read_exact(&mut amount)?;
    let amount = U256::from_le_slice(&amount);

    let mut data_len = [0; 4];
    reader.read_exact(&mut data_len)?;
    let data_len = usize::try_from(u32::from_le_bytes(data_len))
      .map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
    let mut data = vec![0; data_len];
    reader.read_exact(&mut data)?;

    let mut key_at_end_of_block = <ProjectivePoint as GroupEncoding>::Repr::default();
    reader.read_exact(&mut key_at_end_of_block)?;
    let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block))
      .ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?;

    Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block })
  }

  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&self.id.0)?;
    writer.write_all(&self.id.1.to_le_bytes())?;

    writer.write_all(&self.from)?;

    self.coin.write(writer)?;
    writer.write_all(&self.amount.as_le_bytes())?;

    writer.write_all(
      &u32::try_from(self.data.len())
        .map_err(|_| {
          io::Error::other("InInstruction being written had data exceeding 2**32 in length")
        })?
        .to_le_bytes(),
    )?;
    writer.write_all(&self.data)?;

    writer.write_all(&self.key_at_end_of_block.to_bytes())
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Executed {
  pub tx_id: [u8; 32],
  pub nonce: u64,
}

/// The contract Serai uses to manage its state.
#[derive(Clone, Debug)]
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
impl Router {
  pub(crate) fn code() -> Vec<u8> {
    let bytecode = contracts::router::BYTECODE;
    Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
  }

  pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
    let mut bytecode = Self::code();
    // Append the constructor arguments
    bytecode.extend((abi::constructorCall { initialSeraiKey: key.eth_repr().into() }).abi_encode());
    bytecode
  }

  // This isn't pub in order to force users to use `Deployer::find_router`.
  pub(crate) fn new(provider: Arc<RootProvider<SimpleRequest>>, address: Address) -> Self {
    Self(provider, address)
  }

  pub fn address(&self) -> [u8; 20] {
    **self.1
  }

  /// Get the key for Serai at the specified block.
  #[cfg(test)]
  pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, Error> {
    let call = TransactionRequest::default()
      .to(self.1)
      .input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
    let bytes = self
      .0
      .call(&call)
      .block(BlockId::Hash(B256::from(at).into()))
      .await
      .map_err(|_| Error::ConnectionError)?;
    let res =
      abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
    PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError)
  }

  /// Get the message to be signed in order to update the key for Serai.
  pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
    let mut buffer = b"updateSeraiKey".to_vec();
    buffer.extend(&chain_id.to_be_bytes::<32>());
    buffer.extend(&nonce.to_be_bytes::<32>());
    buffer.extend(&key.eth_repr());
    buffer
  }

  /// Update the key representing Serai.
  pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
    // TODO: Set a more accurate gas
    TxLegacy {
      to: TxKind::Call(self.1),
      input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
        .abi_encode()
        .into(),
      gas_limit: 100_000,
      ..Default::default()
    }
  }

  /// Get the current nonce for the published batches.
  #[cfg(test)]
  pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, Error> {
    let call = TransactionRequest::default()
      .to(self.1)
      .input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
    let bytes = self
      .0
      .call(&call)
      .block(BlockId::Hash(B256::from(at).into()))
      .await
      .map_err(|_| Error::ConnectionError)?;
    let res =
      abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
    Ok(res._0)
  }

  /// Get the message to be signed in order to update the key for Serai.
  pub(crate) fn execute_message(
    chain_id: U256,
    nonce: U256,
    outs: Vec<abi::OutInstruction>,
  ) -> Vec<u8> {
    ("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
  }

  /// Execute a batch of `OutInstruction`s.
  pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy {
    TxLegacy {
      to: TxKind::Call(self.1),
      input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
      // TODO
      gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
      ..Default::default()
    }
  }

  pub async fn key_at_end_of_block(&self, block: u64) -> Result<Option<ProjectivePoint>, Error> {
    let filter = Filter::new().from_block(0).to_block(block).address(self.1);
    let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
    let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
    if all_keys.is_empty() {
      return Ok(None);
    };

    let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?;
    let last_key_x_coordinate = last_key_x_coordinate_log
      .log_decode::<SeraiKeyUpdated>()
      .map_err(|_| Error::ConnectionError)?
      .inner
      .data
      .key;

    let mut compressed_point = <ProjectivePoint as GroupEncoding>::Repr::default();
    compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY);
    compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice());

    let key =
      Option::from(ProjectivePoint::from_bytes(&compressed_point)).ok_or(Error::ConnectionError)?;
    Ok(Some(key))
  }

  pub async fn in_instructions(
    &self,
    block: u64,
    allowed_tokens: &HashSet<[u8; 20]>,
  ) -> Result<Vec<InInstruction>, Error> {
    let Some(key_at_end_of_block) = self.key_at_end_of_block(block).await? else {
      return Ok(vec![]);
    };

    let filter = Filter::new().from_block(block).to_block(block).address(self.1);
    let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
    let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;

    let mut transfer_check = HashSet::new();
    let mut in_instructions = vec![];
    for log in logs {
      // Double check the address which emitted this log
      if log.address() != self.1 {
        Err(Error::ConnectionError)?;
      }

      let id = (
        log.block_hash.ok_or(Error::ConnectionError)?.into(),
        log.log_index.ok_or(Error::ConnectionError)?,
      );

      let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?;
      let tx = self
        .0
        .get_transaction_by_hash(tx_hash)
        .await
        .ok()
        .flatten()
        .ok_or(Error::ConnectionError)?;

      let log =
        log.log_decode::<InInstructionEvent>().map_err(|_| Error::ConnectionError)?.inner.data;

      let coin = if log.coin.0 == [0; 20] {
        Coin::Ether
      } else {
        let token = *log.coin.0;

        if !allowed_tokens.contains(&token) {
          continue;
        }

        // If this also counts as a top-level transfer via the token, drop it
        //
        // Necessary in order to handle a potential edge case with some theoretical token
        // implementations
        //
        // This will either let it be handled by the top-level transfer hook or will drop it
        // entirely on the side of caution
        if tx.to == Some(token.into()) {
          continue;
        }

        // Get all logs for this TX
        let receipt = self
          .0
          .get_transaction_receipt(tx_hash)
          .await
          .map_err(|_| Error::ConnectionError)?
          .ok_or(Error::ConnectionError)?;
        let tx_logs = receipt.inner.logs();

        // Find a matching transfer log
        let mut found_transfer = false;
        for tx_log in tx_logs {
          let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?;
          // Ensure we didn't already use this transfer to check a distinct InInstruction event
          if transfer_check.contains(&log_index) {
            continue;
          }

          // Check if this log is from the token we expected to be transferred
          if tx_log.address().0 != token {
            continue;
          }
          // Check if this is a transfer log
          // https://github.com/alloy-rs/core/issues/589
          if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
            continue;
          }
          let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
          // Check if this is a transfer to us for the expected amount
          if (transfer.to == self.1) && (transfer.value == log.amount) {
            transfer_check.insert(log_index);
            found_transfer = true;
            break;
          }
        }
        if !found_transfer {
          // This shouldn't be a ConnectionError
          // This is an exploit, a non-conforming ERC20, or an invalid connection
          // This should halt the process which is sufficient, yet this is sub-optimal
          // TODO
          Err(Error::ConnectionError)?;
        }

        Coin::Erc20(token)
      };

      in_instructions.push(InInstruction {
        id,
        from: *log.from.0,
        coin,
        amount: log.amount,
        data: log.instruction.as_ref().to_vec(),
        key_at_end_of_block,
      });
    }

    Ok(in_instructions)
  }

  pub async fn executed_commands(&self, block: u64) -> Result<Vec<Executed>, Error> {
    let mut res = vec![];

    {
      let filter = Filter::new().from_block(block).to_block(block).address(self.1);
      let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
      let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;

      for log in logs {
        // Double check the address which emitted this log
        if log.address() != self.1 {
          Err(Error::ConnectionError)?;
        }

        let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();

        let log =
          log.log_decode::<SeraiKeyUpdated>().map_err(|_| Error::ConnectionError)?.inner.data;

        res.push(Executed {
          tx_id,
          nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
        });
      }
    }

    {
      let filter = Filter::new().from_block(block).to_block(block).address(self.1);
      let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
      let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;

      for log in logs {
        // Double check the address which emitted this log
        if log.address() != self.1 {
          Err(Error::ConnectionError)?;
        }

        let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();

        let log = log.log_decode::<ExecutedEvent>().map_err(|_| Error::ConnectionError)?.inner.data;

        res.push(Executed {
          tx_id,
          nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
        });
      }
    }

    Ok(res)
  }

  #[cfg(feature = "tests")]
  pub fn key_updated_filter(&self) -> Filter {
    Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
  }
  #[cfg(feature = "tests")]
  pub fn executed_filter(&self) -> Filter {
    Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
  }
}
processor/ethereum/router/Cargo.toml (new file, 49 lines)

@@ -0,0 +1,49 @@
[package]
name = "serai-processor-ethereum-router"
version = "0.1.0"
description = "The Router used by the Serai Processor for Ethereum"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/processor/ethereum/router"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
publish = false
rust-version = "1.79"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] }

alloy-core = { version = "0.8", default-features = false }
alloy-consensus = { version = "0.3", default-features = false }

alloy-sol-types = { version = "0.8", default-features = false }
alloy-sol-macro = { version = "0.8", default-features = false }

alloy-rpc-types-eth = { version = "0.3", default-features = false }
alloy-transport = { version = "0.3", default-features = false }
alloy-simple-request-transport = { path = "../../../networks/ethereum/alloy-simple-request-transport", default-features = false }
alloy-provider = { version = "0.3", default-features = false }

ethereum-schnorr = { package = "ethereum-schnorr-contract", path = "../../../networks/ethereum/schnorr", default-features = false }

ethereum-primitives = { package = "serai-processor-ethereum-primitives", path = "../primitives", default-features = false }
ethereum-deployer = { package = "serai-processor-ethereum-deployer", path = "../deployer", default-features = false }
erc20 = { package = "serai-processor-ethereum-erc20", path = "../erc20", default-features = false }

serai-client = { path = "../../../substrate/client", default-features = false, features = ["ethereum"] }

[build-dependencies]
build-solidity-contracts = { path = "../../../networks/ethereum/build-contracts", default-features = false }

syn = { version = "2", default-features = false, features = ["proc-macro"] }

syn-solidity = { version = "0.8", default-features = false }
alloy-sol-macro-input = { version = "0.8", default-features = false }
alloy-sol-macro-expander = { version = "0.8", default-features = false }
processor/ethereum/router/LICENSE (new file, 15 lines)

@@ -0,0 +1,15 @@
AGPL-3.0-only license

Copyright (c) 2022-2024 Luke Parker

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
processor/ethereum/router/README.md (new file, 1 line)

@@ -0,0 +1 @@
# Ethereum Router
processor/ethereum/router/build.rs (new file, 42 lines)

@@ -0,0 +1,42 @@
use std::{env, fs};

use alloy_sol_macro_input::SolInputKind;

fn write(sol: syn_solidity::File, file: &str) {
  let sol = alloy_sol_macro_expander::expand::expand(sol).unwrap();
  fs::write(file, sol.to_string()).unwrap();
}

fn sol(sol_files: &[&str], file: &str) {
  let mut sol = String::new();
  for sol_file in sol_files {
    sol += &fs::read_to_string(sol_file).unwrap();
  }
  let SolInputKind::Sol(sol) = syn::parse_str(&sol).unwrap() else {
    panic!("parsed .sols file wasn't SolInputKind::Sol");
  };
  write(sol, file);
}

fn main() {
  let artifacts_path =
    env::var("OUT_DIR").unwrap().to_string() + "/serai-processor-ethereum-router";

  if !fs::exists(&artifacts_path).unwrap() {
    fs::create_dir(&artifacts_path).unwrap();
  }

  build_solidity_contracts::build(
    &["../../../networks/ethereum/schnorr/contracts", "../erc20/contracts"],
    "contracts",
    &artifacts_path,
  )
  .unwrap();

  // This cannot be handled with the sol! macro. The Solidity requires an import
  // https://github.com/alloy-rs/core/issues/602
  sol(
    &["../../../networks/ethereum/schnorr/contracts/Schnorr.sol", "contracts/Router.sol"],
    &(artifacts_path + "/router.rs"),
  );
}
@@ -1,7 +1,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 pragma solidity ^0.8.26;
 
-import "./IERC20.sol";
+import "IERC20.sol";
 
 import "Schnorr.sol";
 

@@ -22,6 +22,15 @@ contract Router {
     Code
   }
 
+  struct AddressDestination {
+    address destination;
+  }
+
+  struct CodeDestination {
+    uint32 gas;
+    bytes code;
+  }
+
   struct OutInstruction {
     DestinationType destinationType;
     bytes destination;

@@ -38,7 +47,7 @@ contract Router {
   event InInstruction(
     address indexed from, address indexed coin, uint256 amount, bytes instruction
   );
-  event Executed(uint256 indexed nonce, bytes32 indexed batch);
+  event Executed(uint256 indexed nonce, bytes32 indexed message_hash);
 
   error InvalidSignature();
   error InvalidAmount();

@@ -68,7 +77,7 @@ contract Router {
     external
     _updateSeraiKeyAtEndOfFn(_nonce, newSeraiKey)
   {
-    bytes memory message = abi.encodePacked("updateSeraiKey", block.chainid, _nonce, newSeraiKey);
+    bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", block.chainid, _nonce, newSeraiKey));
     _nonce++;
 
     if (!Schnorr.verify(_seraiKey, message, signature.c, signature.s)) {

@@ -132,6 +141,7 @@ contract Router {
     */
     if (coin == address(0)) {
       // Enough gas to service the transfer and a minimal amount of logic
+      // TODO: If we're constructing a contract, we can do this at the same time as construction
      to.call{ value: value, gas: 5_000 }("");
     } else {
       coin.call{ gas: 100_000 }(abi.encodeWithSelector(IERC20.transfer.selector, msg.sender, value));

@@ -156,13 +166,16 @@ contract Router {
   // Execute a list of transactions if they were signed by the current key with the current nonce
   function execute(OutInstruction[] calldata transactions, Signature calldata signature) external {
     // Verify the signature
-    bytes memory message = abi.encode("execute", block.chainid, _nonce, transactions);
+    // We hash the message here as we need the message's hash for the Executed event
+    // Since we're already going to hash it, hashing it prior to verifying the signature reduces the
+    // amount of words hashed by its challenge function (reducing our gas costs)
+    bytes32 message = keccak256(abi.encode("execute", block.chainid, _nonce, transactions));
     if (!Schnorr.verify(_seraiKey, message, signature.c, signature.s)) {
       revert InvalidSignature();
     }
 
     // Since the signature was verified, perform execution
-    emit Executed(_nonce, keccak256(message));
+    emit Executed(_nonce, message);
     // While this is sufficient to prevent replays, it's still technically possible for instructions
     // from later batches to be executed before these instructions upon re-entrancy
     _nonce++;

@@ -172,8 +185,8 @@ contract Router {
       if (transactions[i].destinationType == DestinationType.Address) {
         // This may cause a panic and the contract to become stuck if the destination isn't actually
         // 20 bytes. Serai is trusted to not pass a malformed destination
-        (address destination) = abi.decode(transactions[i].destination, (address));
-        _transferOut(destination, transactions[i].coin, transactions[i].value);
+        (AddressDestination memory destination) = abi.decode(transactions[i].destination, (AddressDestination));
+        _transferOut(destination.destination, transactions[i].coin, transactions[i].value);
       } else {
         // The destination is a piece of initcode. We calculate the hash of the will-be contract,
         // transfer to it, and then run the initcode

@@ -184,9 +197,9 @@ contract Router {
         _transferOut(nextAddress, transactions[i].coin, transactions[i].value);
 
         // Perform the calls with a set gas budget
-        (uint32 gas, bytes memory code) = abi.decode(transactions[i].destination, (uint32, bytes));
-        address(this).call{ gas: gas }(
-          abi.encodeWithSelector(Router.arbitaryCallOut.selector, code)
+        (CodeDestination memory destination) = abi.decode(transactions[i].destination, (CodeDestination));
+        address(this).call{ gas: destination.gas }(
+          abi.encodeWithSelector(Router.arbitaryCallOut.selector, destination.code)
         );
       }
     }
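Editor's note (not part of the commit): with this change the contract verifies and emits keccak256(abi.encode("execute", block.chainid, _nonce, transactions)) rather than the raw encoding, so an off-chain signer has to reproduce the exact same hash. The following is a rough Rust sketch of that mirror, based on the commented-out execute_message helper in the new library; keccak256 is from alloy_core::primitives, and OutInstruction stands in for the sol!-generated abi::OutInstruction binding. Whether alloy's abi_encode_params matches Solidity's abi.encode byte-for-byte here is an assumption to verify against the contract.

use alloy_core::primitives::{keccak256, B256, U256};
use alloy_sol_types::SolValue;

// Hypothetical off-chain mirror of the on-chain message hashing.
// `OutInstruction` stands in for the generated `abi::OutInstruction` binding.
fn execute_message_hash(chain_id: U256, nonce: U256, outs: Vec<OutInstruction>) -> B256 {
  // Mirrors `keccak256(abi.encode("execute", block.chainid, _nonce, transactions))`
  keccak256(("execute".to_string(), chain_id, nonce, outs).abi_encode_params())
}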
processor/ethereum/router/src/lib.rs (new file, 582 lines)

@@ -0,0 +1,582 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]

use std::{sync::Arc, io, collections::HashSet};

use group::ff::PrimeField;

/*
use k256::{
  elliptic_curve::{group::GroupEncoding, sec1},
  ProjectivePoint,
};
*/

use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
use alloy_consensus::TxLegacy;

use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};

use alloy_rpc_types_eth::Filter;
use alloy_transport::{TransportErrorKind, RpcError};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};

use ethereum_schnorr::{PublicKey, Signature};
use ethereum_deployer::Deployer;
use erc20::Transfer;

use serai_client::{primitives::Amount, networks::ethereum::Address as SeraiAddress};

#[rustfmt::skip]
#[expect(warnings)]
#[expect(needless_pass_by_value)]
#[expect(clippy::all)]
#[expect(clippy::ignored_unit_patterns)]
#[expect(clippy::redundant_closure_for_method_calls)]
mod _abi {
  include!(concat!(env!("OUT_DIR"), "/serai-processor-ethereum-router/router.rs"));
}
use _abi::Router as abi;
use abi::{
  SeraiKeyUpdated as SeraiKeyUpdatedEvent, InInstruction as InInstructionEvent,
  Executed as ExecutedEvent,
};

impl From<&Signature> for abi::Signature {
  fn from(signature: &Signature) -> Self {
    Self {
      c: <[u8; 32]>::from(signature.c().to_repr()).into(),
      s: <[u8; 32]>::from(signature.s().to_repr()).into(),
    }
  }
}

/// A coin on Ethereum.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
  /// Ether, the native coin of Ethereum.
  Ether,
  /// An ERC20 token.
  Erc20([u8; 20]),
}

impl Coin {
  /// Read a `Coin`.
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let mut kind = [0xff];
    reader.read_exact(&mut kind)?;
    Ok(match kind[0] {
      0 => Coin::Ether,
      1 => {
        let mut address = [0; 20];
        reader.read_exact(&mut address)?;
        Coin::Erc20(address)
      }
      _ => Err(io::Error::other("unrecognized Coin type"))?,
    })
  }

  /// Write the `Coin`.
  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    match self {
      Coin::Ether => writer.write_all(&[0]),
      Coin::Erc20(token) => {
        writer.write_all(&[1])?;
        writer.write_all(token)
      }
    }
  }
}

/// An InInstruction from the Router.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InInstruction {
  /// The ID for this `InInstruction`.
  pub id: ([u8; 32], u64),
  /// The address which transferred these coins to Serai.
  pub from: [u8; 20],
  /// The coin transferred.
  pub coin: Coin,
  /// The amount transferred.
  pub amount: U256,
  /// The data associated with the transfer.
  pub data: Vec<u8>,
}

impl InInstruction {
  /// Read an `InInstruction`.
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let id = {
      let mut id_hash = [0; 32];
      reader.read_exact(&mut id_hash)?;
      let mut id_pos = [0; 8];
      reader.read_exact(&mut id_pos)?;
      let id_pos = u64::from_le_bytes(id_pos);
      (id_hash, id_pos)
    };

    let mut from = [0; 20];
    reader.read_exact(&mut from)?;

    let coin = Coin::read(reader)?;
    let mut amount = [0; 32];
    reader.read_exact(&mut amount)?;
    let amount = U256::from_le_slice(&amount);

    let mut data_len = [0; 4];
    reader.read_exact(&mut data_len)?;
    let data_len = usize::try_from(u32::from_le_bytes(data_len))
      .map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
    let mut data = vec![0; data_len];
    reader.read_exact(&mut data)?;

    Ok(InInstruction { id, from, coin, amount, data })
  }

  /// Write the `InInstruction`.
  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&self.id.0)?;
    writer.write_all(&self.id.1.to_le_bytes())?;

    writer.write_all(&self.from)?;

    self.coin.write(writer)?;
    writer.write_all(&self.amount.as_le_bytes())?;

    writer.write_all(
      &u32::try_from(self.data.len())
        .map_err(|_| {
          io::Error::other("InInstruction being written had data exceeding 2**32 in length")
        })?
        .to_le_bytes(),
    )?;
    writer.write_all(&self.data)
  }
}

/// Executed an command.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Executed {
  /// Set a new key.
  SetKey {
    /// The nonce this was done with.
    nonce: u64,
    /// The key set.
    key: [u8; 32],
  },
  /// Executed Batch.
  Batch {
    /// The nonce this was done with.
    nonce: u64,
    /// The hash of the signed message for the Batch executed.
    message_hash: [u8; 32],
  },
}

impl Executed {
  /// The nonce consumed by this executed event.
  pub fn nonce(&self) -> u64 {
    match self {
      Executed::SetKey { nonce, .. } | Executed::Batch { nonce, .. } => *nonce,
    }
  }
}

/// A view of the Router for Serai.
#[derive(Clone, Debug)]
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
impl Router {
  pub(crate) fn code() -> Vec<u8> {
    const BYTECODE: &[u8] =
      include_bytes!(concat!(env!("OUT_DIR"), "/serai-processor-ethereum-router/Router.bin"));
    Bytes::from_hex(BYTECODE).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
  }

  pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
    let mut bytecode = Self::code();
    // Append the constructor arguments
    bytecode.extend((abi::constructorCall { initialSeraiKey: key.eth_repr().into() }).abi_encode());
    bytecode
  }

  /// Create a new view of the Router.
  ///
  /// This performs an on-chain lookup for the first deployed Router constructed with this public
  /// key. This lookup is of a constant amount of calls and does not read any logs.
  pub async fn new(
    provider: Arc<RootProvider<SimpleRequest>>,
    initial_serai_key: &PublicKey,
  ) -> Result<Option<Self>, RpcError<TransportErrorKind>> {
    let Some(deployer) = Deployer::new(provider.clone()).await? else {
      return Ok(None);
    };
    let Some(deployment) = deployer
      .find_deployment(ethereum_primitives::keccak256(Self::init_code(initial_serai_key)))
      .await?
    else {
      return Ok(None);
    };
    Ok(Some(Self(provider, deployment)))
  }

  /// The address of the router.
  pub fn address(&self) -> Address {
    self.1
  }

  /// Construct a transaction to update the key representing Serai.
  pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
    // TODO: Set a more accurate gas
    TxLegacy {
      to: TxKind::Call(self.1),
      input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
        .abi_encode()
        .into(),
      gas_limit: 100_000,
      ..Default::default()
    }
  }

  /// Construct a transaction to execute a batch of `OutInstruction`s.
  pub fn execute(&self, outs: &[(SeraiAddress, (Coin, Amount))], sig: &Signature) -> TxLegacy {
    TxLegacy {
      to: TxKind::Call(self.1),
      input: abi::executeCall::new((
        outs
          .iter()
          .map(|(address, (coin, amount))| {
            #[allow(non_snake_case)]
            let (destinationType, destination) = match address {
              SeraiAddress::Address(address) => (
                abi::DestinationType::Address,
                (abi::AddressDestination { destination: Address::from(address) }).abi_encode(),
              ),
              SeraiAddress::Contract(contract) => (
                abi::DestinationType::Code,
                (abi::CodeDestination {
                  gas: contract.gas(),
                  code: contract.code().to_vec().into(),
                })
                .abi_encode(),
              ),
            };
            abi::OutInstruction {
              destinationType,
              destination: destination.into(),
              coin: match coin {
                Coin::Ether => [0; 20].into(),
                Coin::Erc20(address) => address.into(),
              },
              value: amount.0.try_into().expect("couldn't convert u64 to u256"),
            }
          })
          .collect(),
        sig.into(),
      ))
      .abi_encode()
      .into(),
      // TODO
      gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
      ..Default::default()
    }
  }

  /*
  /// Get the key for Serai at the specified block.
  #[cfg(test)]
  pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, RpcError<TransportErrorKind>> {
    let call = TransactionRequest::default()
      .to(self.1)
      .input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
    let bytes = self
      .0
      .call(&call)
      .block(BlockId::Hash(B256::from(at).into()))
      .await
      ?;
    let res =
      abi::seraiKeyCall::abi_decode_returns(&bytes, true)?;
    PublicKey::from_eth_repr(res._0.0).ok_or_else(|| TransportErrorKind::Custom(
      "TODO".to_string().into()))
  }
  */

  /*
  /// Get the message to be signed in order to update the key for Serai.
  pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
    let mut buffer = b"updateSeraiKey".to_vec();
    buffer.extend(&chain_id.to_be_bytes::<32>());
    buffer.extend(&nonce.to_be_bytes::<32>());
    buffer.extend(&key.eth_repr());
    buffer
  }
  */

  /*
  /// Get the current nonce for the published batches.
  #[cfg(test)]
  pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, RpcError<TransportErrorKind>> {
    let call = TransactionRequest::default()
      .to(self.1)
      .input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
    let bytes = self
      .0
      .call(&call)
      .block(BlockId::Hash(B256::from(at).into()))
      .await
      ?;
    let res =
      abi::nonceCall::abi_decode_returns(&bytes, true)?;
    Ok(res._0)
  }
  */

  /*
  /// Get the message to be signed in order to update the key for Serai.
  pub(crate) fn execute_message(
    chain_id: U256,
    nonce: U256,
    outs: Vec<abi::OutInstruction>,
  ) -> Vec<u8> {
    ("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
  }
  */

  /// Fetch the `InInstruction`s emitted by the Router from this block.
  pub async fn in_instructions(
    &self,
    block: u64,
    allowed_tokens: &HashSet<[u8; 20]>,
  ) -> Result<Vec<InInstruction>, RpcError<TransportErrorKind>> {
    // The InInstruction events for this block
    let filter = Filter::new().from_block(block).to_block(block).address(self.1);
    let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
    let logs = self.0.get_logs(&filter).await?;

    /*
      We check that for all InInstructions for ERC20s emitted, a corresponding transfer occurred.
      In order to prevent a transfer from being used to justify multiple distinct InInstructions,
      we insert the transfer's log index into this HashSet.
    */
    let mut transfer_check = HashSet::new();

    let mut in_instructions = vec![];
    for log in logs {
      // Double check the address which emitted this log
      if log.address() != self.1 {
        Err(TransportErrorKind::Custom(
          "node returned a log from a different address than requested".to_string().into(),
        ))?;
      }

      let id = (
        log
          .block_hash
          .ok_or_else(|| {
            TransportErrorKind::Custom("log didn't have its block hash set".to_string().into())
          })?
          .into(),
        log.log_index.ok_or_else(|| {
          TransportErrorKind::Custom("log didn't have its index set".to_string().into())
        })?,
      );

      let tx_hash = log.transaction_hash.ok_or_else(|| {
        TransportErrorKind::Custom("log didn't have its transaction hash set".to_string().into())
      })?;
      let tx = self.0.get_transaction_by_hash(tx_hash).await?.ok_or_else(|| {
        TransportErrorKind::Custom(
          "node didn't have a transaction it had the logs of".to_string().into(),
        )
      })?;

      let log = log
        .log_decode::<InInstructionEvent>()
        .map_err(|e| {
          TransportErrorKind::Custom(
            format!("filtered to InInstructionEvent yet couldn't decode log: {e:?}").into(),
          )
        })?
        .inner
        .data;

      let coin = if log.coin.0 == [0; 20] {
        Coin::Ether
      } else {
        let token = *log.coin.0;

        if !allowed_tokens.contains(&token) {
          continue;
        }

        /*
          If this also counts as a top-level transfer of a token, drop it.

          This event will only exist if there's an ERC20 which has some form of programmability
          (`onTransferFrom`), and when a top-level transfer was made, that hook made its own call
          into the Serai router.

          If such an ERC20 exists, Serai would parse it as a top-level transfer and as a router
          InInstruction. While no such ERC20 is planned to be integrated, this enures we don't
          allow a double-spend on that premise.

          TODO: See below note.
        */
        if tx.to == Some(token.into()) {
          continue;
        }

        // Get all logs for this TX
        let receipt = self.0.get_transaction_receipt(tx_hash).await?.ok_or_else(|| {
          TransportErrorKind::Custom(
            "node didn't have the receipt for a transaction it had".to_string().into(),
          )
        })?;
        let tx_logs = receipt.inner.logs();

        /*
          TODO: If this is also a top-level transfer, drop the log from the top-level transfer and
          only iterate over the rest of the logs.
        */

        // Find a matching transfer log
        let mut found_transfer = false;
        for tx_log in tx_logs {
          let log_index = tx_log.log_index.ok_or_else(|| {
            TransportErrorKind::Custom(
              "log in transaction receipt didn't have its log index set".to_string().into(),
            )
          })?;
          // Ensure we didn't already use this transfer to check a distinct InInstruction event
          if transfer_check.contains(&log_index) {
            continue;
          }

          // Check if this log is from the token we expected to be transferred
          if tx_log.address().0 != token {
            continue;
          }
          // Check if this is a transfer log
          // https://github.com/alloy-rs/core/issues/589
          if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
            continue;
          }
          let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
          // Check if this is a transfer to us for the expected amount
          if (transfer.to == self.1) && (transfer.value == log.amount) {
            transfer_check.insert(log_index);
            found_transfer = true;
            break;
          }
        }
        if !found_transfer {
          // This shouldn't be a simple error
          // This is an exploit, a non-conforming ERC20, or a malicious connection
          // This should halt the process. While this is sufficient, it's sub-optimal
          // TODO
          Err(TransportErrorKind::Custom(
            "ERC20 InInstruction with no matching transfer log".to_string().into(),
          ))?;
        }

        Coin::Erc20(token)
      };

      in_instructions.push(InInstruction {
        id,
        from: *log.from.0,
        coin,
        amount: log.amount,
        data: log.instruction.as_ref().to_vec(),
      });
    }

    Ok(in_instructions)
  }

  /// Fetch the executed actions from this block.
  pub async fn executed(&self, block: u64) -> Result<Vec<Executed>, RpcError<TransportErrorKind>> {
    let mut res = vec![];

    {
      let filter = Filter::new().from_block(block).to_block(block).address(self.1);
      let filter = filter.event_signature(SeraiKeyUpdatedEvent::SIGNATURE_HASH);
      let logs = self.0.get_logs(&filter).await?;

      for log in logs {
        // Double check the address which emitted this log
        if log.address() != self.1 {
          Err(TransportErrorKind::Custom(
            "node returned a log from a different address than requested".to_string().into(),
          ))?;
        }

        let log = log
          .log_decode::<SeraiKeyUpdatedEvent>()
          .map_err(|e| {
            TransportErrorKind::Custom(
              format!("filtered to SeraiKeyUpdatedEvent yet couldn't decode log: {e:?}").into(),
            )
          })?
          .inner
          .data;

        res.push(Executed::SetKey {
          nonce: log.nonce.try_into().map_err(|e| {
            TransportErrorKind::Custom(format!("filtered to convert nonce to u64: {e:?}").into())
          })?,
          key: log.key.into(),
        });
      }
    }

    {
      let filter = Filter::new().from_block(block).to_block(block).address(self.1);
      let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
      let logs = self.0.get_logs(&filter).await?;

      for log in logs {
        // Double check the address which emitted this log
        if log.address() != self.1 {
          Err(TransportErrorKind::Custom(
            "node returned a log from a different address than requested".to_string().into(),
          ))?;
        }

        let log = log
          .log_decode::<ExecutedEvent>()
          .map_err(|e| {
            TransportErrorKind::Custom(
              format!("filtered to ExecutedEvent yet couldn't decode log: {e:?}").into(),
            )
          })?
          .inner
          .data;

        res.push(Executed::Batch {
          nonce: log.nonce.try_into().map_err(|e| {
            TransportErrorKind::Custom(format!("filtered to convert nonce to u64: {e:?}").into())
          })?,
          message_hash: log.message_hash.into(),
        });
      }
    }

    res.sort_by_key(Executed::nonce);

    Ok(res)
  }

  /*
  #[cfg(feature = "tests")]
  pub fn key_updated_filter(&self) -> Filter {
    Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
  }
  #[cfg(feature = "tests")]
  pub fn executed_filter(&self) -> Filter {
    Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
  }
  */
}
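Editor's note (not part of the commit): a minimal usage sketch of the new crate, assuming the caller already holds an Arc<RootProvider<SimpleRequest>> and the current Serai PublicKey; the serai_processor_ethereum_router import path and the printed fields are illustrative only.

use std::{sync::Arc, collections::HashSet};

use alloy_provider::RootProvider;
use alloy_simple_request_transport::SimpleRequest;
use alloy_transport::{RpcError, TransportErrorKind};

use ethereum_schnorr::PublicKey;
use serai_processor_ethereum_router::{Router, Executed};

async fn scan_block(
  provider: Arc<RootProvider<SimpleRequest>>,
  serai_key: &PublicKey,
  block: u64,
) -> Result<(), RpcError<TransportErrorKind>> {
  // Find the Router deployed for this key (None if it hasn't been deployed yet)
  let Some(router) = Router::new(provider, serai_key).await? else { return Ok(()) };

  // InInstructions for this block, allowing no ERC20s (Ether only)
  for instruction in router.in_instructions(block, &HashSet::new()).await? {
    println!("InInstruction {:?}: {:?} of {:?}", instruction.id, instruction.amount, instruction.coin);
  }

  // Actions executed by the Router in this block, sorted by nonce
  for action in router.executed(block).await? {
    match action {
      Executed::SetKey { nonce, .. } => println!("Serai key rotated at nonce {nonce}"),
      Executed::Batch { nonce, .. } => println!("Batch executed at nonce {nonce}"),
    }
  }

  Ok(())
}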
@@ -24,7 +24,7 @@ bitvec = { version = "1", default-features = false, features = ["alloc", "serde"
 
 hex = "0.4"
 scale = { package = "parity-scale-codec", version = "3" }
-borsh = { version = "1" }
+borsh = { version = "1", features = ["derive"] }
 serde = { version = "1", features = ["derive"], optional = true }
 serde_json = { version = "1", optional = true }
 
@@ -29,6 +29,13 @@ impl ContractDeployment {
     }
     Some(Self { gas, code })
   }
+
+  pub fn gas(&self) -> u32 {
+    self.gas
+  }
+  pub fn code(&self) -> &[u8] {
+    &self.code
+  }
 }
 
 /// A representation of an Ethereum address.