Mirror of https://github.com/serai-dex/serai.git (synced 2024-11-17 09:27:36 +00:00)
Implement shared key derivation according to https://github.com/monero-project/research-lab/issues/103

Currently used solely for single-signer change outputs; intended to also be used for funds sent into Serai and for multisig change outputs (dependent on #2). Also cleans up the file layout, makes scanning a bit more robust, stops returning outputs of amount 0, and shuffles outputs.
This commit is contained in:
parent 8945b50988
commit 573f847a9b

4 changed files with 229 additions and 142 deletions
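For orientation before the diff, here is a minimal, self-contained sketch of the derivation this commit introduces. This is not the crate's own code: the traditional Monero derivation is Hs(8aR || o), while the "unique" variant prepends a hash committing to the transaction's inputs, binding the derived key to one specific transaction. It assumes the curve25519-dalek and tiny-keccak crates the code already uses; derive, view_key, tx_key, and output_index are illustrative names, and the output index is encoded as plain little-endian bytes here rather than the VarInt the real code uses.

use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use tiny_keccak::{Hasher, Keccak};

// Keccak-256, as used for Monero's H and Hs
fn keccak256(data: &[u8]) -> [u8; 32] {
  let mut keccak = Keccak::v256();
  keccak.update(data);
  let mut res = [0; 32];
  keccak.finalize(&mut res);
  res
}

// Hash to a scalar by reducing the 32-byte hash mod the curve order
fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar::from_bytes_mod_order(keccak256(data))
}

// uniqueness = None reproduces the traditional Hs(8aR || o);
// uniqueness = Some(h) yields the issue-103 style Hs(h || 8aR || o)
fn derive(uniqueness: Option<[u8; 32]>, view_key: Scalar, tx_key: EdwardsPoint, output_index: u64) -> Scalar {
  let mut data = uniqueness.map_or(vec![], |u| u.to_vec());
  // 8aR: the ECDH shared point, multiplied by the cofactor
  data.extend((view_key * tx_key).mul_by_cofactor().compress().to_bytes().to_vec());
  // The output index; the real code encodes this as a Monero VarInt
  data.extend(output_index.to_le_bytes().to_vec());
  hash_to_scalar(&data)
}

A receiver checks a candidate derivation d by testing whether P - d·G equals their public spend key, which is exactly what the scan() added in the diff below does for both the traditional and the unique derivation.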
@@ -11,8 +11,9 @@ lazy_static = "1"
 thiserror = "1"

 rand_core = "0.6"
-rand_distr = "0.4"
 rand_chacha = { version = "0.3", optional = true }
+rand = "0.8"
+rand_distr = "0.4"

 tiny-keccak = { version = "2", features = ["keccak"] }
 blake2 = "0.10"
@@ -39,6 +40,4 @@ experimental = []
 multisig = ["ff", "group", "rand_chacha", "transcript", "frost", "dalek-ff-group"]

 [dev-dependencies]
-rand = "0.8"
-
 tokio = { version = "1", features = ["full"] }
@@ -1,6 +1,7 @@
 use thiserror::Error;

 use rand_core::{RngCore, CryptoRng};
+use rand::seq::SliceRandom;

 use curve25519_dalek::{
   constants::ED25519_BASEPOINT_TABLE,
@@ -42,6 +43,174 @@ pub mod decoys;
 #[cfg(feature = "multisig")]
 mod multisig;

+// https://github.com/monero-project/research-lab/issues/103
+fn uniqueness(inputs: &[TxIn]) -> [u8; 32] {
+  let mut u = b"domain_separator".to_vec();
+  for input in inputs {
+    match input {
+      // If Gen, this should be the only input, making this loop somewhat pointless
+      // This works and even if there were somehow multiple inputs, it'd be a false negative
+      TxIn::Gen { height } => { height.consensus_encode(&mut u).unwrap(); },
+      TxIn::ToKey { k_image, .. } => u.extend(&k_image.image.0)
+    }
+  }
+  hash(&u)
+}
+
+// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
+#[allow(non_snake_case)]
+fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
+  // uniqueness
+  let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
+  // || 8Ra
+  shared.extend((s * P).mul_by_cofactor().compress().to_bytes().to_vec());
+  // || o
+  VarInt(o.try_into().unwrap()).consensus_encode(&mut shared).unwrap();
+  // Hs()
+  hash_to_scalar(&shared)
+}
+
+fn commitment_mask(shared_key: Scalar) -> Scalar {
+  let mut mask = b"commitment_mask".to_vec();
+  mask.extend(shared_key.to_bytes());
+  hash_to_scalar(&mask)
+}
+
+fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
+  let mut amount_mask = b"amount".to_vec();
+  amount_mask.extend(key.to_bytes());
+  u64::from_le_bytes(amount) ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())
+}
+
+fn amount_encryption(amount: u64, key: Scalar) -> Hash8 {
+  Hash8(amount_decryption(amount.to_le_bytes(), key).to_le_bytes())
+}
+
+#[derive(Clone, Debug)]
+pub struct SpendableOutput {
+  pub tx: Hash,
+  pub o: usize,
+  pub key: EdwardsPoint,
+  pub key_offset: Scalar,
+  pub commitment: Commitment
+}
+
+// TODO: Enable disabling one of the shared key derivations and solely using one
+// Change outputs currently always use unique derivations, so that must also be corrected
+pub fn scan(
+  tx: &Transaction,
+  view: Scalar,
+  spend: EdwardsPoint
+) -> Vec<SpendableOutput> {
+  let mut pubkeys = vec![];
+  if let Some(key) = tx.tx_pubkey() {
+    pubkeys.push(key);
+  }
+  if let Some(keys) = tx.tx_additional_pubkeys() {
+    pubkeys.extend(&keys);
+  }
+  let pubkeys: Vec<EdwardsPoint> = pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
+
+  let rct_sig = tx.rct_signatures.sig.as_ref();
+  if rct_sig.is_none() {
+    return vec![];
+  }
+  let rct_sig = rct_sig.unwrap();
+
+  let mut res = vec![];
+  for (o, output, output_key) in tx.prefix.outputs.iter().enumerate().filter_map(
+    |(o, output)| if let TxOutTarget::ToKey { key } = output.target {
+      key.point.decompress().map(|output_key| (o, output, output_key))
+    } else { None }
+  ) {
+    // TODO: This may be replaceable by pubkeys[o]
+    for pubkey in &pubkeys {
+      let mut commitment = Commitment::zero();
+
+      // P - shared == spend
+      let matches = |shared_key| (output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)) == spend;
+      let test = |shared_key| Some(shared_key).filter(|shared_key| matches(*shared_key));
+
+      // Get the traditional shared key and unique shared key, testing if either matches for this output
+      let traditional = test(shared_key(None, view, pubkey, o));
+      let unique = test(shared_key(Some(uniqueness(&tx.prefix.inputs)), view, pubkey, o));
+
+      // If either matches, grab it and decode the amount
+      if let Some(key_offset) = traditional.or(unique) {
+        // Miner transaction
+        if output.amount.0 != 0 {
+          commitment.amount = output.amount.0;
+        // Regular transaction
+        } else {
+          let amount = match rct_sig.ecdh_info.get(o) {
+            // TODO: Support the legacy Monero amount encryption
+            Some(EcdhInfo::Standard { .. }) => continue,
+            Some(EcdhInfo::Bulletproof { amount }) => amount_decryption(amount.0, key_offset),
+            // This should never happen, yet it may be possible to get a miner transaction with a
+            // pointless 0 output, therefore not having EcdhInfo while this will expect it
+            // Using get just decreases the possibility of a panic and lets us move on in that case
+            None => continue
+          };
+
+          // Rebuild the commitment to verify it
+          commitment = Commitment::new(commitment_mask(key_offset), amount);
+          // If this is a malicious commitment, move to the next output
+          // Any other R value will calculate to a different spend key and are therefore ignorable
+          if commitment.calculate().compress().to_bytes() != rct_sig.out_pk[o].mask.key {
+            break;
+          }
+        }
+
+        if commitment.amount != 0 {
+          res.push(SpendableOutput { tx: tx.hash(), o, key: output_key, key_offset, commitment });
+        }
+        // Break to prevent public keys from being included multiple times, triggering multiple
+        // inclusions of the same output
+        break;
+      }
+    }
+  }
+  res
+}
+
+#[allow(non_snake_case)]
+#[derive(Clone, Debug)]
+struct Output {
+  R: EdwardsPoint,
+  dest: EdwardsPoint,
+  mask: Scalar,
+  amount: Hash8
+}
+
+impl Output {
+  pub fn new<R: RngCore + CryptoRng>(
+    rng: &mut R,
+    unique: Option<[u8; 32]>,
+    output: (Address, u64),
+    o: usize
+  ) -> Result<Output, TransactionError> {
+    let r = random_scalar(rng);
+    let shared_key = shared_key(
+      unique,
+      r,
+      &output.0.public_view.point.decompress().ok_or(TransactionError::InvalidAddress)?,
+      o
+    );
+
+    Ok(
+      Output {
+        R: &r * &ED25519_BASEPOINT_TABLE,
+        dest: (
+          (&shared_key * &ED25519_BASEPOINT_TABLE) +
+          output.0.public_spend.point.decompress().ok_or(TransactionError::InvalidAddress)?
+        ),
+        mask: commitment_mask(shared_key),
+        amount: amount_encryption(output.1, shared_key)
+      }
+    )
+  }
+}
+
 #[derive(Error, Debug)]
 pub enum TransactionError {
   #[error("no inputs")]
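A side note on the amount helpers added above: amount_encryption simply calls amount_decryption because the Bulletproof amount encoding is an XOR of the 8-byte amount against the first 8 bytes of Keccak(b"amount" || shared key), and XOR with a fixed keystream is its own inverse. A standalone sketch, not the crate's code, with xor_amount and key_bytes as illustrative names (the real code feeds in the shared-key scalar's bytes):

use tiny_keccak::{Hasher, Keccak};

// Encrypts when given a plaintext amount, decrypts when given an encrypted one
fn xor_amount(amount: u64, key_bytes: [u8; 32]) -> u64 {
  let mut data = b"amount".to_vec();
  data.extend(&key_bytes);
  let mut hash = [0; 32];
  let mut keccak = Keccak::v256();
  keccak.update(&data);
  keccak.finalize(&mut hash);
  amount ^ u64::from_le_bytes(hash[.. 8].try_into().unwrap())
}

Applying xor_amount twice with the same key returns the original amount, which is why one helper can serve both roles.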
@@ -68,126 +237,6 @@ pub enum TransactionError {
   MultisigError(MultisigError)
 }

-#[derive(Clone, Debug)]
-pub struct SpendableOutput {
-  pub tx: Hash,
-  pub o: usize,
-  pub key: EdwardsPoint,
-  pub key_offset: Scalar,
-  pub commitment: Commitment
-}
-
-pub fn scan(tx: &Transaction, view: Scalar, spend: EdwardsPoint) -> Vec<SpendableOutput> {
-  let mut pubkeys = vec![];
-  if let Some(key) = tx.tx_pubkey() {
-    pubkeys.push(key);
-  }
-  if let Some(keys) = tx.tx_additional_pubkeys() {
-    pubkeys.extend(&keys);
-  }
-  let pubkeys: Vec<EdwardsPoint> = pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
-
-  let rct_sig = tx.rct_signatures.sig.as_ref();
-  if rct_sig.is_none() {
-    return vec![];
-  }
-  let rct_sig = rct_sig.unwrap();
-
-  let mut res = vec![];
-  for (o, output_key) in tx.prefix.outputs.iter().enumerate().filter_map(
-    |(o, output)| if let TxOutTarget::ToKey { key } = output.target {
-      key.point.decompress().map(|output_key| (o, output_key))
-    } else { None }
-  ) {
-    // TODO: This may be replaceable by pubkeys[o]
-    for pubkey in &pubkeys {
-      // Hs(8Ra || o)
-      let key_offset = shared_key(view, pubkey, o);
-      let mut commitment = Commitment::zero();
-
-      // P - shared == spend
-      if output_key - (&key_offset * &ED25519_BASEPOINT_TABLE) == spend {
-        if tx.prefix.outputs[o].amount.0 != 0 {
-          commitment.amount = tx.prefix.outputs[o].amount.0;
-        } else {
-          let amount = match rct_sig.ecdh_info[o] {
-            EcdhInfo::Standard { .. } => continue,
-            EcdhInfo::Bulletproof { amount } => amount_decryption(amount.0, key_offset)
-          };
-
-          // Rebuild the commitment to verify it
-          commitment = Commitment::new(commitment_mask(key_offset), amount);
-          if commitment.calculate().compress().to_bytes() != rct_sig.out_pk[o].mask.key {
-            break;
-          }
-        }
-
-        res.push(SpendableOutput { tx: tx.hash(), o, key: output_key, key_offset, commitment });
-        break;
-      }
-    }
-  }
-  res
-}
-
-#[allow(non_snake_case)]
-fn shared_key(s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
-  let mut shared = (s * P).mul_by_cofactor().compress().to_bytes().to_vec();
-  VarInt(o.try_into().unwrap()).consensus_encode(&mut shared).unwrap();
-  hash_to_scalar(&shared)
-}
-
-fn commitment_mask(shared_key: Scalar) -> Scalar {
-  let mut mask = b"commitment_mask".to_vec();
-  mask.extend(shared_key.to_bytes());
-  hash_to_scalar(&mask)
-}
-
-fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
-  let mut amount_mask = b"amount".to_vec();
-  amount_mask.extend(key.to_bytes());
-  u64::from_le_bytes(amount) ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())
-}
-
-fn amount_encryption(amount: u64, key: Scalar) -> Hash8 {
-  Hash8(amount_decryption(amount.to_le_bytes(), key).to_le_bytes())
-}
-
-#[allow(non_snake_case)]
-#[derive(Clone, Debug)]
-struct Output {
-  R: EdwardsPoint,
-  dest: EdwardsPoint,
-  mask: Scalar,
-  amount: Hash8
-}
-
-impl Output {
-  pub fn new<R: RngCore + CryptoRng>(
-    rng: &mut R,
-    output: (Address, u64),
-    o: usize
-  ) -> Result<Output, TransactionError> {
-    let r = random_scalar(rng);
-    let shared_key = shared_key(
-      r,
-      &output.0.public_view.point.decompress().ok_or(TransactionError::InvalidAddress)?,
-      o
-    );
-    Ok(
-      Output {
-        R: &r * &ED25519_BASEPOINT_TABLE,
-        dest: (
-          (&shared_key * &ED25519_BASEPOINT_TABLE) +
-          output.0.public_spend.point.decompress().ok_or(TransactionError::InvalidAddress)?
-        ),
-        mask: commitment_mask(shared_key),
-        amount: amount_encryption(output.1, shared_key)
-      }
-    )
-  }
-}
-
 async fn prepare_inputs<R: RngCore + CryptoRng>(
   rng: &mut R,
   rpc: &Rpc,
@@ -224,8 +273,8 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(

   signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
   tx.prefix.inputs.sort_by(|x, y| if let (
-    TxIn::ToKey{ k_image: x, ..},
-    TxIn::ToKey{ k_image: y, ..}
+    TxIn::ToKey { k_image: x, ..},
+    TxIn::ToKey { k_image: y, ..}
   ) = (x, y) {
     x.image.cmp(&y.image).reverse()
   } else {
@@ -275,7 +324,8 @@ impl SignableTransaction {

   fn prepare_outputs<R: RngCore + CryptoRng>(
     &mut self,
-    rng: &mut R
+    rng: &mut R,
+    uniqueness: Option<[u8; 32]>
   ) -> Result<(Vec<Commitment>, Scalar), TransactionError> {
     self.fee = self.fee_per_byte * 2000; // TODO

@@ -288,21 +338,40 @@ impl SignableTransaction {
       Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
     }

-    // Add the change output
-    let mut payments = self.payments.clone();
-    payments.push((self.change, in_amount - out_amount));
+    let mut temp_outputs = Vec::with_capacity(self.payments.len() + 1);
+    // Add the payments to the outputs
+    for payment in &self.payments {
+      temp_outputs.push((None, (payment.0, payment.1)));
+    }
+    // Ideally, the change output would always have uniqueness, as we control this wallet software
+    // Unfortunately, if this is used with multisig, doing so would add an extra round due to the
+    // fact Bulletproofs use a leader protocol reliant on this shared key before the first round of
+    // communication. Making the change output unique would require Bulletproofs not be a leader
+    // protocol, using a seeded random
+    // There is a vector where the multisig participants leak the output key they're about to send
+    // to, and someone could use that key, forcing some funds to be burnt accordingly if they win
+    // the race. Any multisig wallet, with this current setup, must only keep change keys in context
+    // accordingly, preferably as soon as they are proposed, even before they appear as confirmed
+    // Using another source of uniqueness would also be possible, yet it'd make scanning a tri-key
+    // system (currently dual for the simpler API, yet would be dual even with a more complex API
+    // under this decision)
+    // TODO after https://github.com/serai-dex/serai/issues/2
+    temp_outputs.push((uniqueness, (self.change, in_amount - out_amount)));

-    // TODO randomly sort outputs
+    // Shuffle the outputs
+    temp_outputs.shuffle(rng);

-    self.outputs.clear();
-    self.outputs = Vec::with_capacity(payments.len());
-    let mut commitments = Vec::with_capacity(payments.len());
-    for o in 0 .. payments.len() {
-      self.outputs.push(Output::new(rng, payments[o], o)?);
-      commitments.push(Commitment::new(self.outputs[o].mask, payments[o].1));
+    // Actually create the outputs
+    self.outputs = Vec::with_capacity(temp_outputs.len());
+    let mut commitments = Vec::with_capacity(temp_outputs.len());
+    let mut mask_sum = Scalar::zero();
+    for (o, output) in temp_outputs.iter().enumerate() {
+      self.outputs.push(Output::new(rng, output.0, output.1, o)?);
+      commitments.push(Commitment::new(self.outputs[o].mask, output.1.1));
+      mask_sum += self.outputs[o].mask;
     }

-    Ok((commitments, self.outputs.iter().map(|output| output.mask).sum()))
+    Ok((commitments, mask_sum))
   }

   fn prepare_transaction(
@@ -367,7 +436,20 @@ impl SignableTransaction {
     rpc: &Rpc,
     spend: &Scalar
   ) -> Result<Transaction, TransactionError> {
-    let (commitments, mask_sum) = self.prepare_outputs(rng)?;
+    let (commitments, mask_sum) = self.prepare_outputs(
+      rng,
+      Some(
+        uniqueness(
+          &self.inputs.iter().map(|input| TxIn::ToKey {
+            amount: VarInt(0),
+            key_offsets: vec![],
+            k_image: KeyImage {
+              image: Hash(generate_key_image(&(spend + input.key_offset)).compress().to_bytes())
+            }
+          }).collect::<Vec<_>>()
+        )
+      )
+    )?;
     let mut tx = self.prepare_transaction(&commitments, bulletproofs::generate(&commitments)?);

     let signable = prepare_inputs(rng, rpc, &self.inputs, spend, &mut tx).await?;
@@ -106,7 +106,7 @@ impl SignableTransaction {
     }

     // Verify these outputs by a dummy prep
-    self.prepare_outputs(rng)?;
+    self.prepare_outputs(rng, None)?;

     Ok(TransactionMachine {
       leader: keys.params().i() == included[0],
@@ -152,7 +152,7 @@ impl StateMachine for TransactionMachine {

     let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys", Some(entropy)));
     // Safe to unwrap thanks to the dummy prepare
-    let (commitments, output_masks) = self.signable.prepare_outputs(&mut rng).unwrap();
+    let (commitments, output_masks) = self.signable.prepare_outputs(&mut rng, None).unwrap();
     self.output_masks = Some(output_masks);

     let bp = bulletproofs::generate(&commitments).unwrap();
@@ -194,7 +194,8 @@ impl StateMachine for TransactionMachine {
           b"tx_keys",
           Some(prep[clsag_lens .. (clsag_lens + 32)].try_into().map_err(|_| FrostError::InvalidShare(l))?)
         )
-      )
+      ),
+      None
     ).map_err(|_| FrostError::InvalidShare(l))?;
     self.output_masks.replace(output_masks);

@@ -84,7 +84,12 @@ pub async fn send_core(test: usize, multisig: bool) {
      tx = Some(rpc.get_block_transactions(start).await.unwrap().swap_remove(0));
    }

-   let output = transaction::scan(tx.as_ref().unwrap(), view, spend_pub).swap_remove(0);
+   // Grab the largest output available
+   let output = {
+     let mut outputs = transaction::scan(tx.as_ref().unwrap(), view, spend_pub);
+     outputs.sort_by(|x, y| x.commitment.amount.cmp(&y.commitment.amount).reverse());
+     outputs.swap_remove(0)
+   };
    // Test creating a zero change output and a non-zero change output
    amount = output.commitment.amount - u64::try_from(i).unwrap();
    outputs.push(output);