Mirror of https://github.com/serai-dex/serai.git
Changes meant for the previous commit

Commit aa5d95ef1d, parent 517db6448a
3 changed files with 90 additions and 409 deletions
@@ -1,23 +1,21 @@
 #[cfg(feature = "multisig")]
-use std::{rc::Rc, cell::RefCell};
+use std::{cell::RefCell, rc::Rc};

 use rand::{RngCore, rngs::OsRng};

 use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

-use monero_serai::{
+use crate::{
   Commitment,
   random_scalar, generate_key_image,
-  wallet::decoys::Decoys,
+  wallet::Decoys,
   clsag::{ClsagInput, Clsag}
 };
 #[cfg(feature = "multisig")]
-use monero_serai::{frost::{MultisigError, Transcript}, clsag::{ClsagDetails, ClsagMultisig}};
+use crate::{frost::{MultisigError, Transcript}, clsag::{ClsagDetails, ClsagMultisig}};

 #[cfg(feature = "multisig")]
-mod frost;
-#[cfg(feature = "multisig")]
-use crate::frost::{THRESHOLD, generate_keys, sign};
+use crate::tests::frost::{THRESHOLD, generate_keys, sign};

 const RING_LEN: u64 = 11;
 const AMOUNT: u64 = 1337;
@@ -62,7 +60,7 @@ fn clsag() {
     )],
     random_scalar(&mut OsRng),
     msg
-  ).unwrap().swap_remove(0);
+  ).swap_remove(0);
   clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
   #[cfg(feature = "experimental")]
   clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
@@ -98,7 +96,7 @@ fn clsag_multisig() -> Result<(), MultisigError> {
   for i in 1 ..= t {
     machines.push(
       sign::AlgorithmMachine::new(
-        clsag::Multisig::new(
+        ClsagMultisig::new(
           Transcript::new(b"Monero Serai CLSAG Test".to_vec()),
           Rc::new(RefCell::new(Some(
             ClsagDetails::new(
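The test constructs one AlgorithmMachine per participant and then drives them through the FROST-style rounds. As a rough illustration of that round structure only, the trait and driver below are simplified stand-ins, not the frost crate's actual API:

```rust
use std::collections::HashMap;

// Simplified stand-in for the signing state machine the tests exercise; the real
// AlgorithmMachine differs in its types and error handling.
trait SigningMachine {
  type Signature;
  fn preprocess(&mut self) -> Vec<u8>;
  fn sign(&mut self, commitments: &HashMap<u16, Vec<u8>>, msg: &[u8]) -> Vec<u8>;
  fn complete(&mut self, shares: &HashMap<u16, Vec<u8>>) -> Self::Signature;
}

// Drive every machine through the two communication rounds plus completion.
fn run_session<M: SigningMachine>(machines: &mut HashMap<u16, M>, msg: &[u8]) -> Vec<M::Signature> {
  // Round 1: each participant broadcasts its commitments
  let commitments: HashMap<u16, Vec<u8>> =
    machines.iter_mut().map(|(i, m)| (*i, m.preprocess())).collect();
  // Round 2: each participant produces a share over the same message and commitment set
  let shares: HashMap<u16, Vec<u8>> =
    machines.iter_mut().map(|(i, m)| (*i, m.sign(&commitments, msg))).collect();
  // Completion: every participant should derive the same signature
  machines.values_mut().map(|m| m.complete(&shares)).collect()
}
```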
@@ -99,100 +99,100 @@ impl Decoys {
  pub fn len(&self) -> usize {
    self.offsets.len()
  }
}

pub(crate) async fn select<R: RngCore + CryptoRng>(
  rng: &mut R,
  rpc: &Rpc,
  height: usize,
  inputs: &[SpendableOutput]
) -> Result<Vec<Decoys>, RpcError> {
  // Convert the inputs in question to the raw output data
  let mut outputs = Vec::with_capacity(inputs.len());
  for input in inputs {
    outputs.push((
      rpc.get_o_indexes(input.tx).await?[input.o],
      [input.key, input.commitment.calculate()]
    ));
  }

  let distribution = rpc.get_output_distribution(height).await?;
  let high = distribution[distribution.len() - 1];
  let per_second = {
    let blocks = distribution.len().min(BLOCKS_PER_YEAR);
    let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
    (outputs as f64) / ((blocks * BLOCK_TIME) as f64)
  };

  let mut used = HashSet::<u64>::new();
  for o in &outputs {
    used.insert(o.0);
  }

  let mut res = Vec::with_capacity(inputs.len());
  for (i, o) in outputs.iter().enumerate() {
    // If only the target amount of decoys is available, remove the index of the output we're
    // spending so we don't loop forever while ignoring it
    // TODO: If we're spending 2 outputs of a possible 11 outputs, this will still fail
    used.remove(&o.0);

    // Select the full amount of ring members in decoys, instead of just the actual decoys, in
    // order to increase the sample size
    let mut decoys = select_n(rng, rpc, height, &distribution, high, per_second, &mut used, DECOYS).await?;
    decoys.sort_by(|a, b| a.0.cmp(&b.0));

    // Add this output back
    used.insert(o.0);

    // Make sure the TX passes the sanity check that the median output is within the last 40%
    // This actually checks the median is within the last third, a slightly more aggressive
    // boundary, as the height used in this calculation will be slightly under the height this is
    // sanity checked against
    let target_median = high * 2 / 3;

    // Sanity checks are only run when 1000 outputs are available
    // We run this check whenever it's possible to satisfy
    // This means we need the middle possible decoy to be above the target_median
    // TODO: This will break on very small chains if timelocks other than maturity are used, or on
    // chains of any size which use timelocks extremely frequently, as it'll try to satisfy an
    // impossible condition
    // Reduce target_median by each timelocked output found?
    if (high - MATURITY) >= target_median {
      while decoys[DECOYS / 2].0 < target_median {
        // If it's not, update the bottom half with new values to ensure the median only moves up
        for m in 0 .. DECOYS / 2 {
          // We could leave this in the used set, saving CPU time and removing low values as
          // possibilities, yet it'd increase the amount of decoys required to create this
          // transaction, and some banned outputs may be the best options
          used.remove(&decoys[m].0);
        }

        decoys.splice(
          0 .. DECOYS / 2,
          select_n(rng, rpc, height, &distribution, high, per_second, &mut used, DECOYS / 2).await?
        );
        decoys.sort_by(|a, b| a.0.cmp(&b.0));
      }
    }

    // Replace the closest selected decoy with the actual output
    let mut replace = 0;
    let mut distance = u64::MAX;
    for m in 0 .. decoys.len() {
      let diff = decoys[m].0.abs_diff(o.0);
      if diff < distance {
        replace = m;
        distance = diff;
      }
    }

    decoys[replace] = outputs[i];
    res.push(Decoys {
      i: u8::try_from(replace).unwrap(),
      offsets: offset(&decoys.iter().map(|output| output.0).collect::<Vec<_>>()),
      ring: decoys.iter().map(|output| output.1).collect()
    });
  }

  Ok(res)
}
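For intuition on the median sanity check above: with `high` outputs on chain, the middle selected ring member has to land in the newest third of the global output indices, otherwise the bottom half of the ring is reselected. A self-contained sketch with made-up numbers; the `DECOYS` and `MATURITY` values here are illustrative, not necessarily the crate's constants:

```rust
// Illustrative constants; the real values live in the wallet's decoy module.
const DECOYS: usize = 11;
const MATURITY: u64 = 60;

fn median_is_recent(decoys: &[u64], high: u64) -> bool {
  // decoys is sorted by global output index, ascending
  let target_median = high * 2 / 3;
  // Only meaningful once enough mature outputs exist to satisfy the bound at all
  (high - MATURITY) < target_median || decoys[DECOYS / 2] >= target_median
}

fn main() {
  // 90_000 outputs exist; the median selected decoy sits at index 75_000,
  // comfortably inside the newest third (indices above 60_000)
  let recent = [10_000, 30_000, 50_000, 61_000, 65_000, 75_000, 80_000, 82_000, 85_000, 88_000, 89_500];
  assert!(median_is_recent(&recent, 90_000));

  // A median of 55_000 would fail the check and trigger reselection of the bottom half
  let stale = [10_000, 20_000, 30_000, 40_000, 50_000, 55_000, 80_000, 82_000, 85_000, 88_000, 89_500];
  assert!(!median_is_recent(&stale, 90_000));
}
```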
@@ -1,317 +0,0 @@
use std::{rc::Rc, cell::RefCell};

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;

use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};

use monero::{
  Hash, VarInt,
  util::ringct::Key,
  blockdata::transaction::{KeyImage, TxIn, Transaction}
};

use transcript::Transcript as TranscriptTrait;
use frost::{FrostError, MultisigKeys, MultisigParams, sign::{State, StateMachine, AlgorithmMachine}};

use crate::{
  frost::{Transcript, Ed25519},
  random_scalar, bulletproofs::Bulletproofs, clsag,
  rpc::Rpc,
  wallet::{TransactionError, SignableTransaction, decoys::{self, Decoys}}
};

pub struct TransactionMachine {
  leader: bool,
  signable: SignableTransaction,
  transcript: Transcript,

  decoys: Vec<Decoys>,

  images: Vec<EdwardsPoint>,
  output_masks: Option<Scalar>,
  inputs: Vec<Rc<RefCell<Option<ClsagDetails>>>>,
  clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,

  tx: Option<Transaction>
}
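The `inputs` field holds one `Rc<RefCell<Option<ClsagDetails>>>` per input, shared with the corresponding `ClsagMultisig` so the ring data and mask can be filled in later. The constructor below pushes a fresh `Rc` per slot rather than using `resize`; a minimal sketch of why, using plain std types rather than the crate's own:

```rust
use std::{rc::Rc, cell::RefCell};

fn main() {
  // One Rc cloned into every slot: all three slots alias the same cell
  let aliased = vec![Rc::new(RefCell::new(None::<u64>)); 3];
  *aliased[0].borrow_mut() = Some(1);
  assert!(aliased[2].borrow().is_some());

  // Independent cells require constructing a fresh Rc per slot
  let independent: Vec<_> = (0 .. 3).map(|_| Rc::new(RefCell::new(None::<u64>))).collect();
  *independent[0].borrow_mut() = Some(1);
  assert!(independent[2].borrow().is_none());
}
```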
impl SignableTransaction {
  pub async fn multisig<R: RngCore + CryptoRng>(
    mut self,
    label: Vec<u8>,
    rng: &mut R,
    rpc: &Rpc,
    height: usize,
    keys: MultisigKeys<Ed25519>,
    included: &[usize]
  ) -> Result<TransactionMachine, TransactionError> {
    let mut images = vec![];
    images.resize(self.inputs.len(), EdwardsPoint::identity());
    let mut inputs = vec![];
    for _ in 0 .. self.inputs.len() {
      // Doesn't resize, as that would clone a single Rc across the entire Vec
      inputs.push(Rc::new(RefCell::new(None)));
    }
    let mut clsags = vec![];

    // Create a RNG out of the input shared keys, which either requires the view key or being
    // every sender, and the payments (address and amount), which a passive adversary may be able
    // to know depending on how these transactions are coordinated

    let mut transcript = Transcript::new(label);
    // Also include the spend_key, as below only the key offset is included; this confirms the
    // sum product. Confirming the sum product confirms the key image, further guaranteeing the
    // one-time properties noted below
    transcript.append_message(b"spend_key", &keys.group_key().0.compress().to_bytes());
    for input in &self.inputs {
      // These outputs can only be spent once. Therefore, they force all RNGs derived from this
      // transcript (such as the one used to create one-time keys) to be unique
      transcript.append_message(b"input_hash", &input.tx.0);
      transcript.append_message(b"input_output_index", &u16::try_from(input.o).unwrap().to_le_bytes());
      // Not including this, with a doxxed list of payments, would allow brute forcing the inputs
      // to determine RNG seeds and therefore the true spends
      transcript.append_message(b"input_shared_key", &input.key_offset.to_bytes());
    }
    for payment in &self.payments {
      transcript.append_message(b"payment_address", &payment.0.as_bytes());
      transcript.append_message(b"payment_amount", &payment.1.to_le_bytes());
    }
    transcript.append_message(b"change", &self.change.as_bytes());

    // Select decoys
    // Ideally, this would be done post-entropy, instead of now, yet doing so would require sign
    // to be async, which isn't feasible. This should be suitably competent though
    // While this inability means we could immediately create the input, moving it out of the
    // Rc RefCell, keeping it within an Rc RefCell keeps our options flexible
    let decoys = decoys::select(
      &mut ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys", None)),
      rpc,
      height,
      &self.inputs
    ).await.map_err(|e| TransactionError::RpcError(e))?;

    for (i, input) in self.inputs.iter().enumerate() {
      clsags.push(
        AlgorithmMachine::new(
          ClsagMultisig::new(
            transcript.clone(),
            inputs[i].clone()
          ).map_err(|e| TransactionError::MultisigError(e))?,
          Rc::new(keys.offset(dalek_ff_group::Scalar(input.key_offset))),
          included
        ).map_err(|e| TransactionError::FrostError(e))?
      );
    }

    // Verify these outputs by a dummy prep
    self.prepare_outputs(rng, None)?;

    Ok(TransactionMachine {
      leader: keys.params().i() == included[0],
      signable: self,
      transcript,

      decoys,

      images,
      output_masks: None,
      inputs,
      clsags,

      tx: None
    })
  }
}
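Every piece of randomness that has to agree across signers (the decoy selection above, and the transaction keys later) is drawn from a ChaCha12 RNG seeded by the shared transcript. A minimal sketch of that idea, using a SHA-256 hash over the appended messages as a toy stand-in for the transcript's `rng_seed` (this is not the transcript crate's actual construction, and it assumes the `sha2`, `rand_core`, and `rand_chacha` crates):

```rust
use sha2::{Sha256, Digest};
use rand_core::{RngCore, SeedableRng};
use rand_chacha::ChaCha12Rng;

// Toy stand-in for Transcript::rng_seed: hash a domain label plus every appended message.
// Illustration only; the real transcript construction differs.
fn toy_rng_seed(label: &[u8], appended: &[&[u8]]) -> [u8; 32] {
  let mut hasher = Sha256::new();
  hasher.update(label);
  for msg in appended {
    hasher.update(msg);
  }
  hasher.finalize().into()
}

fn main() {
  let bound = [b"spend_key".as_slice(), b"input_hash".as_slice(), b"payment_address".as_slice()];
  // Two signers who appended the same messages derive the same seed...
  let mut a = ChaCha12Rng::from_seed(toy_rng_seed(b"decoys", &bound));
  let mut b = ChaCha12Rng::from_seed(toy_rng_seed(b"decoys", &bound));
  // ...and therefore make identical draws (for example, selecting the same decoys)
  // without an extra round of communication
  assert_eq!(a.next_u64(), b.next_u64());
}
```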
impl StateMachine for TransactionMachine {
  type Signature = Transaction;

  fn preprocess<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R
  ) -> Result<Vec<u8>, FrostError> {
    if self.state() != State::Fresh {
      Err(FrostError::InvalidSignTransition(State::Fresh, self.state()))?;
    }

    // Iterate over each CLSAG calling preprocess
    let mut serialized = vec![];
    for (i, clsag) in self.clsags.iter_mut().enumerate() {
      let preprocess = clsag.preprocess(rng)?;
      // First 64 bytes are FROST's commitments
      self.images[i] += CompressedEdwardsY(preprocess[64 .. 96].try_into().unwrap()).decompress().unwrap();
      serialized.extend(&preprocess);
    }

    if self.leader {
      let mut entropy = [0; 32];
      rng.fill_bytes(&mut entropy);
      serialized.extend(&entropy);

      let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys", Some(entropy)));
      // Safe to unwrap thanks to the dummy prepare
      let (commitments, output_masks) = self.signable.prepare_outputs(&mut rng, None).unwrap();
      self.output_masks = Some(output_masks);

      let bp = Bulletproofs::new(&commitments).unwrap();
      serialized.extend(&bp.serialize());

      let tx = self.signable.prepare_transaction(&commitments, bp);
      self.tx = Some(tx);
    }

    Ok(serialized)
  }
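Each preprocess blob starts with 64 bytes of FROST nonce commitments, followed by the participant's key image share at bytes 64..96, which is why both preprocess() and sign() slice that range before summing the decompressed points. A hedged sketch of that aggregation step; `sum_key_image_shares` is an illustrative helper, not part of the crate:

```rust
use curve25519_dalek::{traits::Identity, edwards::{CompressedEdwardsY, EdwardsPoint}};

// Sum each participant's key image share out of their preprocess blob, mirroring the
// slicing done above. Returns None on a short blob or an invalid point encoding.
fn sum_key_image_shares(preprocesses: &[Vec<u8>]) -> Option<EdwardsPoint> {
  let mut image = EdwardsPoint::identity();
  for blob in preprocesses {
    // Bytes 0 .. 64 are the FROST nonce commitments; 64 .. 96 is the key image share
    let share: [u8; 32] = blob.get(64 .. 96)?.try_into().ok()?;
    image += CompressedEdwardsY(share).decompress()?;
  }
  Some(image)
}

fn main() {
  use curve25519_dalek::constants::ED25519_BASEPOINT_COMPRESSED;
  // Two participants whose "shares" are both the basepoint, behind 64 dummy commitment bytes
  let blob = [vec![0; 64], ED25519_BASEPOINT_COMPRESSED.to_bytes().to_vec()].concat();
  let image = sum_key_image_shares(&[blob.clone(), blob]).unwrap();
  assert!(image != EdwardsPoint::identity());
}
```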
  fn sign(
    &mut self,
    commitments: &[Option<Vec<u8>>],
    _: &[u8]
  ) -> Result<Vec<u8>, FrostError> {
    if self.state() != State::Preprocessed {
      Err(FrostError::InvalidSignTransition(State::Preprocessed, self.state()))?;
    }

    // FROST commitments, the key image, commitments, and their proofs
    let clsag_len = 64 + ClsagMultisig::serialized_len();
    let clsag_lens = clsag_len * self.clsags.len();

    // Split out the prep and update the TX
    let mut tx;
    if self.leader {
      tx = self.tx.take().unwrap();
    } else {
      let (l, prep) = commitments.iter().enumerate().filter(|(_, prep)| prep.is_some()).next()
        .ok_or(FrostError::InternalError("no participants".to_string()))?;
      let prep = prep.as_ref().unwrap();

      // These outputs can't be invalid, thanks to the dummy prep
      let (commitments, output_masks) = self.signable.prepare_outputs(
        &mut ChaCha12Rng::from_seed(
          self.transcript.rng_seed(
            b"tx_keys",
            Some(prep[clsag_lens .. (clsag_lens + 32)].try_into().map_err(|_| FrostError::InvalidShare(l))?)
          )
        ),
        None
      ).map_err(|_| FrostError::InvalidShare(l))?;
      self.output_masks.replace(output_masks);

      // Verify the provided Bulletproofs, as we're not the leader
      let bp = Bulletproofs::deserialize(
        &mut std::io::Cursor::new(&prep[(clsag_lens + 32) .. prep.len()])
      ).map_err(|_| FrostError::InvalidShare(l))?;
      if !bp.verify(&commitments.iter().map(|c| c.calculate()).collect::<Vec<EdwardsPoint>>()) {
        Err(FrostError::InvalidShare(l))?;
      }

      tx = self.signable.prepare_transaction(&commitments, bp);
    }

    for c in 0 .. self.clsags.len() {
      // Calculate the key images in order to update the TX
      // Multisig will parse/calculate/validate this as needed, yet doing so here as well provides
      // the easiest API overall
      for (l, serialized) in commitments.iter().enumerate().filter(|(_, s)| s.is_some()) {
        self.images[c] += CompressedEdwardsY(
          serialized.as_ref().unwrap()[((c * clsag_len) + 64) .. ((c * clsag_len) + 96)]
            .try_into().map_err(|_| FrostError::InvalidCommitment(l))?
        ).decompress().ok_or(FrostError::InvalidCommitment(l))?;
      }
    }

    let mut commitments = (0 .. self.inputs.len()).map(|c| commitments.iter().map(
      |commitments| commitments.clone().map(
        |commitments| commitments[(c * clsag_len) .. ((c * clsag_len) + clsag_len)].to_vec()
      )
    ).collect::<Vec<_>>()).collect::<Vec<_>>();

    let mut sorted = Vec::with_capacity(self.decoys.len());
    while self.decoys.len() != 0 {
      sorted.push((
        self.signable.inputs.swap_remove(0),
        self.decoys.swap_remove(0),
        self.images.swap_remove(0),
        self.inputs.swap_remove(0),
        self.clsags.swap_remove(0),
        commitments.swap_remove(0)
      ));
    }
    sorted.sort_by(|x, y| x.2.compress().to_bytes().cmp(&y.2.compress().to_bytes()).reverse());

    let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks", None));
    let mut sum_pseudo_outs = Scalar::zero();
    while sorted.len() != 0 {
      let value = sorted.remove(0);

      let mut mask = random_scalar(&mut rng);
      if sorted.len() == 0 {
        mask = self.output_masks.unwrap() - sum_pseudo_outs;
      } else {
        sum_pseudo_outs += mask;
      }

      tx.prefix.inputs.push(
        TxIn::ToKey {
          amount: VarInt(0),
          key_offsets: value.1.offsets.clone().iter().map(|x| VarInt(*x)).collect(),
          k_image: KeyImage { image: Hash(value.2.compress().to_bytes()) }
        }
      );

      value.3.replace(
        Some(
          ClsagDetails::new(
            clsag::Input::new(
              value.0.commitment,
              value.1
            ).map_err(|_| panic!("Signing an input which isn't present in the ring we created for it"))?,
            mask
          )
        )
      );

      self.clsags.push(value.4);
      commitments.push(value.5);
    }

    let msg = tx.signature_hash().unwrap().0;
    self.tx = Some(tx);

    // Iterate over each CLSAG calling sign
    let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
    for (c, clsag) in self.clsags.iter_mut().enumerate() {
      serialized.extend(&clsag.sign(&commitments[c], &msg)?);
    }

    Ok(serialized)
  }
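The last input's pseudo-out mask is forced to `output_masks - sum_pseudo_outs`, so the masks behind the input pseudo-out commitments sum to the masks behind the output commitments and the RingCT balance check can pass. A small self-contained sketch of that bookkeeping; `balance_masks` is illustrative only, using `Scalar::random` in place of the crate's `random_scalar` and assuming compatible rand/curve25519-dalek versions:

```rust
use rand::rngs::OsRng;
use curve25519_dalek::scalar::Scalar;

// Pick one mask per input such that their sum equals the outputs' mask sum; this mirrors
// the "last iteration" branch above, but is not the crate's code.
fn balance_masks(inputs: usize, output_masks: Scalar) -> Vec<Scalar> {
  let mut masks = Vec::with_capacity(inputs);
  let mut sum_pseudo_outs = Scalar::zero();
  for i in 0 .. inputs {
    if (i + 1) == inputs {
      // Final input: no freedom left, the mask is whatever closes the sum
      masks.push(output_masks - sum_pseudo_outs);
    } else {
      let mask = Scalar::random(&mut OsRng);
      sum_pseudo_outs += mask;
      masks.push(mask);
    }
  }
  masks
}

fn main() {
  let output_masks = Scalar::random(&mut OsRng);
  let masks = balance_masks(3, output_masks);
  let sum = masks.iter().fold(Scalar::zero(), |a, b| a + b);
  assert_eq!(sum, output_masks);
}
```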
  fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<Transaction, FrostError> {
    if self.state() != State::Signed {
      Err(FrostError::InvalidSignTransition(State::Signed, self.state()))?;
    }

    let mut tx = self.tx.take().unwrap();
    let mut prunable = tx.rct_signatures.p.unwrap();
    for (c, clsag) in self.clsags.iter_mut().enumerate() {
      let (clsag, pseudo_out) = clsag.complete(&shares.iter().map(
        |share| share.clone().map(|share| share[(c * 32) .. ((c * 32) + 32)].to_vec())
      ).collect::<Vec<_>>())?;
      prunable.Clsags.push(clsag);
      prunable.pseudo_outs.push(Key { key: pseudo_out.compress().to_bytes() });
    }
    tx.rct_signatures.p = Some(prunable);

    Ok(tx)
  }

  fn multisig_params(&self) -> MultisigParams {
    self.clsags[0].multisig_params()
  }

  fn state(&self) -> State {
    self.clsags[0].state()
  }
}
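complete() consumes one 32-byte CLSAG share per input from every participant's concatenated blob, slicing column c out at offset c * 32. A minimal stand-alone sketch of that slicing; `shares_for_input` is illustrative, not the crate's API:

```rust
// Each participant concatenates one 32-byte share per input; the aggregator slices
// per-input columns back out, preserving None for absent participants.
fn shares_for_input(all: &[Option<Vec<u8>>], input: usize) -> Vec<Option<Vec<u8>>> {
  all
    .iter()
    .map(|blob| blob.clone().map(|blob| blob[(input * 32) .. ((input * 32) + 32)].to_vec()))
    .collect()
}

fn main() {
  // Three participant slots (one absent), two inputs, dummy 32-byte shares
  let blob = |byte: u8| Some(vec![byte; 64]);
  let all = vec![blob(1), None, blob(2)];
  let first = shares_for_input(&all, 0);
  assert_eq!(first[0].as_ref().unwrap().len(), 32);
  assert!(first[1].is_none());
}
```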