Remove monero-rs types

Still missing an updated RPC file. Restructures the library into a layout that makes more sense.
This commit is contained in:
Luke Parker 2022-05-21 15:33:35 -04:00
parent 573f847a9b
commit 517db6448a
GPG key ID: F9F1386DB1E119B6
18 changed files with 1636 additions and 812 deletions


@ -1,52 +1,119 @@
#![allow(non_snake_case)]
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::{Encodable, deserialize}, util::ringct::Bulletproof};
use crate::{Commitment, wallet::TransactionError, serialize::*};
use crate::{Commitment, transaction::TransactionError};
#[link(name = "wrapper")]
extern "C" {
fn free(ptr: *const u8);
fn c_generate_bp(len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
fn c_verify_bp(
serialized_len: usize,
serialized: *const u8,
commitments_len: u8,
commitments: *const [u8; 32]
) -> bool;
pub struct Bulletproofs {
pub A: EdwardsPoint,
pub S: EdwardsPoint,
pub T1: EdwardsPoint,
pub T2: EdwardsPoint,
pub taux: Scalar,
pub mu: Scalar,
pub L: Vec<EdwardsPoint>,
pub R: Vec<EdwardsPoint>,
pub a: Scalar,
pub b: Scalar,
pub t: Scalar
}
pub fn generate(outputs: &[Commitment]) -> Result<Bulletproof, TransactionError> {
if outputs.len() > 16 {
return Err(TransactionError::TooManyOutputs)?;
impl Bulletproofs {
pub fn new(outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
if outputs.len() > 16 {
return Err(TransactionError::TooManyOutputs)?;
}
let masks: Vec<[u8; 32]> = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect();
let amounts: Vec<u64> = outputs.iter().map(|commitment| commitment.amount).collect();
let res: Bulletproofs;
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn free(ptr: *const u8);
fn c_generate_bp(len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
}
let ptr = c_generate_bp(outputs.len() as u8, amounts.as_ptr(), masks.as_ptr());
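// The returned buffer starts with a 2-byte big-endian length, followed by the
// serialized proof itself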
let len = ((ptr.read() as usize) << 8) + (ptr.add(1).read() as usize);
res = Bulletproofs::deserialize(
// Wrap in a cursor to provide a mutable Reader
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr.add(2), len))
).expect("Couldn't deserialize Bulletproofs from Monero");
free(ptr);
}
Ok(res.into())
}
let masks: Vec<[u8; 32]> = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect();
let amounts: Vec<u64> = outputs.iter().map(|commitment| commitment.amount).collect();
let res;
unsafe {
let ptr = c_generate_bp(outputs.len() as u8, amounts.as_ptr(), masks.as_ptr());
let len = ((ptr.read() as usize) << 8) + (ptr.add(1).read() as usize);
res = deserialize(
std::slice::from_raw_parts(ptr.add(2), len)
).expect("Couldn't deserialize Bulletproof from Monero");
free(ptr);
pub fn verify(&self, commitments: &[EdwardsPoint]) -> bool {
if commitments.len() > 16 {
return false;
}
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
self.serialize(&mut serialized).unwrap();
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|commitment| (commitment * Scalar::from(8 as u8).invert()).compress().to_bytes()
).collect();
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn c_verify_bp(
serialized_len: usize,
serialized: *const u8,
commitments_len: u8,
commitments: *const [u8; 32]
) -> bool;
}
c_verify_bp(serialized.len(), serialized.as_ptr(), commitments.len() as u8, commitments.as_ptr())
}
}
Ok(res)
}
pub fn verify(bp: &Bulletproof, commitments: &[EdwardsPoint]) -> bool {
if commitments.len() > 16 {
return false;
fn serialize_core<
W: std::io::Write,
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
write_point(&self.A, w)?;
write_point(&self.S, w)?;
write_point(&self.T1, w)?;
write_point(&self.T2, w)?;
write_scalar(&self.taux, w)?;
write_scalar(&self.mu, w)?;
specific_write_vec(&self.L, w)?;
specific_write_vec(&self.R, w)?;
write_scalar(&self.a, w)?;
write_scalar(&self.b, w)?;
write_scalar(&self.t, w)
}
let mut serialized = vec![];
bp.consensus_encode(&mut serialized).unwrap();
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|commitment| (commitment * Scalar::from(8 as u8).invert()).compress().to_bytes()
).collect();
unsafe {
c_verify_bp(serialized.len(), serialized.as_ptr(), commitments.len() as u8, commitments.as_ptr())
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.serialize_core(w, |points, w| write_vec(write_point, points, w))
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
let bp = Bulletproofs {
A: read_point(r)?,
S: read_point(r)?,
T1: read_point(r)?,
T2: read_point(r)?,
taux: read_scalar(r)?,
mu: read_scalar(r)?,
L: read_vec(r, read_point)?,
R: read_vec(r, read_point)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
t: read_scalar(r)?
};
if bp.L.len() != bp.R.len() {
Err(std::io::Error::new(std::io::ErrorKind::Other, "mismatched L/R len"))?;
}
Ok(bp)
}
}
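// Hedged usage sketch, not part of this diff: prove and verify a range proof for
// a single output. Assumes the C wrapper is linked; Commitment comes from the
// module-level import, random_scalar from the crate root, OsRng from rand.
#[test]
fn bulletproofs_usage_sketch() {
  use rand::rngs::OsRng;
  use crate::random_scalar;
  let mask = random_scalar(&mut OsRng);
  let bp = Bulletproofs::new(&[Commitment::new(mask, 5)]).unwrap();
  assert!(bp.verify(&[Commitment::new(mask, 5).calculate()]));
}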


@ -10,26 +10,25 @@ use curve25519_dalek::{
traits::VartimePrecomputedMultiscalarMul,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
};
#[cfg(feature = "experimental")]
use curve25519_dalek::edwards::CompressedEdwardsY;
use monero::{consensus::Encodable, util::ringct::{Key, Clsag}};
use crate::{
Commitment,
transaction::decoys::Decoys,
random_scalar,
hash_to_scalar,
hash_to_point
wallet::decoys::Decoys,
random_scalar, hash_to_scalar, hash_to_point,
serialize::*
};
#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{Details, Multisig};
pub use multisig::{ClsagDetails, ClsagMultisig};
lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8 as u8).invert();
}
#[derive(Error, Debug)]
pub enum Error {
pub enum ClsagError {
#[error("internal error ({0})")]
InternalError(String),
#[error("invalid ring member (member {0}, ring size {1})")]
@ -45,36 +44,32 @@ pub enum Error {
}
#[derive(Clone, Debug)]
pub struct Input {
pub struct ClsagInput {
// The actual commitment for the true spend
pub commitment: Commitment,
// True spend index, offsets, and ring
pub decoys: Decoys
}
lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8 as u8).invert();
}
impl Input {
impl ClsagInput {
pub fn new(
commitment: Commitment,
decoys: Decoys
) -> Result<Input, Error> {
) -> Result<ClsagInput, ClsagError> {
let n = decoys.len();
if n > u8::MAX.into() {
Err(Error::InternalError("max ring size in this library is u8 max".to_string()))?;
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
}
if decoys.i >= (n as u8) {
Err(Error::InvalidRingMember(decoys.i, n as u8))?;
Err(ClsagError::InvalidRingMember(decoys.i, n as u8))?;
}
// Validate the commitment matches
if decoys.ring[usize::from(decoys.i)][1] != commitment.calculate() {
Err(Error::InvalidCommitment)?;
Err(ClsagError::InvalidCommitment)?;
}
Ok(Input { commitment, decoys })
Ok(ClsagInput { commitment, decoys })
}
}
@ -84,6 +79,8 @@ enum Mode {
Verify(Scalar)
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
// Said differences are covered via the above Mode
fn core(
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
@ -91,55 +88,55 @@ fn core(
msg: &[u8; 32],
D: &EdwardsPoint,
s: &[Scalar],
// Use a Result as Either for sign/verify
A_c1: Mode
) -> (([u8; 32], Scalar, Scalar), Scalar) {
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
// Doesn't use a constant-time table as dalek takes longer to generate those than they save
let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
let D = D * *INV_EIGHT;
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
let mut to_hash = vec![];
to_hash.reserve_exact(((2 * n) + 5) * 32);
const PREFIX: &str = "CLSAG_";
const AGG_0: &str = "CLSAG_agg_0";
const ROUND: &str = "round";
to_hash.extend(AGG_0.bytes());
const PREFIX: &[u8] = "CLSAG_".as_bytes();
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
const ROUND: &[u8] = "round".as_bytes();
to_hash.extend(AGG_0);
to_hash.extend([0; 32 - AGG_0.len()]);
let mut P = vec![];
P.reserve_exact(n);
let mut C = vec![];
C.reserve_exact(n);
let mut P = Vec::with_capacity(n);
for member in ring {
P.push(member[0]);
C.push(member[1] - pseudo_out);
}
for member in ring {
to_hash.extend(member[0].compress().to_bytes());
}
let mut C = Vec::with_capacity(n);
for member in ring {
C.push(member[1] - pseudo_out);
to_hash.extend(member[1].compress().to_bytes());
}
to_hash.extend(I.compress().to_bytes());
let D_bytes = D.compress().to_bytes();
to_hash.extend(D_bytes);
to_hash.extend(D.compress().to_bytes());
to_hash.extend(pseudo_out.compress().to_bytes());
// mu_P with agg_0
let mu_P = hash_to_scalar(&to_hash);
// mu_C with agg_1
to_hash[AGG_0.len() - 1] = '1' as u8;
let mu_C = hash_to_scalar(&to_hash);
// Truncate it for the round transcript, altering the DST as needed
to_hash.truncate(((2 * n) + 1) * 32);
for i in 0 .. ROUND.len() {
to_hash[PREFIX.len() + i] = ROUND.as_bytes()[i] as u8;
to_hash[PREFIX.len() + i] = ROUND[i] as u8;
}
// Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
// truncated just to add it back
to_hash.extend(pseudo_out.compress().to_bytes());
to_hash.extend(msg);
// Configure the loop based on if we're signing or verifying
let start;
let end;
let mut c;
@ -160,6 +157,7 @@ fn core(
}
}
// Perform the core loop
let mut c1 = None;
for i in (start .. end).map(|i| i % n) {
if i == 0 {
@ -180,158 +178,166 @@ fn core(
c = hash_to_scalar(&to_hash);
}
((D_bytes, c * mu_P, c * mu_C), c1.unwrap_or(c))
// The first tuple is needed to continue signing; the latter is the c to be tested/worked with
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
rng: &mut R,
I: &EdwardsPoint,
input: &Input,
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
let z = input.commitment.mask - mask;
let H = hash_to_point(&input.decoys.ring[r][0]);
let D = H * z;
let mut s = Vec::with_capacity(input.decoys.ring.len());
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D_bytes, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(
Clsag {
D: Key { key: D_bytes },
s: s.iter().map(|s| Key { key: s.to_bytes() }).collect(),
c1: Key { key: c1.to_bytes() }
},
pseudo_out,
p,
c * z
)
#[derive(Clone, Debug)]
pub struct Clsag {
pub D: EdwardsPoint,
pub s: Vec<Scalar>,
pub c1: Scalar
}
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
inputs: &[(Scalar, EdwardsPoint, Input)],
sum_outputs: Scalar,
msg: [u8; 32]
) -> Option<Vec<(Clsag, EdwardsPoint)>> {
if inputs.len() == 0 {
return None;
}
impl Clsag {
// Sign core is the extension of core as needed for signing, yet is shared between single signer
// and multisig, hence why it's still core
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
rng: &mut R,
I: &EdwardsPoint,
input: &ClsagInput,
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
let z = input.commitment.mask - mask;
let mut res = Vec::with_capacity(inputs.len());
let mut sum_pseudo_outs = Scalar::zero();
for i in 0 .. inputs.len() {
let mut mask = random_scalar(rng);
if i == (inputs.len() - 1) {
mask = sum_outputs - sum_pseudo_outs;
} else {
sum_pseudo_outs += mask;
let H = hash_to_point(&input.decoys.ring[r][0]);
let D = H * z;
let mut s = Vec::with_capacity(input.decoys.ring.len());
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(
Clsag { D, s, c1 },
pseudo_out,
p,
c * z
)
}
// Single signer CLSAG
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
sum_outputs: Scalar,
msg: [u8; 32]
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let (mut clsag, pseudo_out, p, c) = sign_core(
rng,
&inputs[i].1,
&inputs[i].2,
mask,
&msg,
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(&inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
let mut res = Vec::with_capacity(inputs.len());
let mut sum_pseudo_outs = Scalar::zero();
for i in 0 .. inputs.len() {
let mut mask = random_scalar(rng);
if i == (inputs.len() - 1) {
mask = sum_outputs - sum_pseudo_outs;
} else {
sum_pseudo_outs += mask;
}
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
rng,
&inputs[i].1,
&inputs[i].2,
mask,
&msg,
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(&inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
);
clsag.s[inputs[i].2.decoys.i as usize] = nonce - ((p * inputs[i].0) + c);
res.push((clsag, pseudo_out));
}
res
}
// Not extensively tested nor guaranteed to have expected parity with Monero
#[cfg(feature = "experimental")]
pub fn rust_verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), ClsagError> {
let (_, c1) = core(
ring,
I,
pseudo_out,
msg,
&self.D.mul_by_cofactor(),
&self.s,
Mode::Verify(self.c1)
);
clsag.s[inputs[i].2.decoys.i as usize] = Key {
key: (nonce - ((p * inputs[i].0) + c)).to_bytes()
};
res.push((clsag, pseudo_out));
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
Ok(())
}
Some(res)
}
// Not extensively tested nor guaranteed to have expected parity with Monero
#[cfg(feature = "experimental")]
pub fn rust_verify(
clsag: &Clsag,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), Error> {
let c1 = Scalar::from_canonical_bytes(clsag.c1.key).ok_or(Error::InvalidC1)?;
let (_, c1_calculated) = core(
ring,
I,
pseudo_out,
msg,
&CompressedEdwardsY(clsag.D.key).decompress().ok_or(Error::InvalidD)?.mul_by_cofactor(),
&clsag.s.iter().map(|s| Scalar::from_canonical_bytes(s.key).ok_or(Error::InvalidS)).collect::<Result<Vec<_>, _>>()?,
Mode::Verify(c1)
);
if c1_calculated != c1 {
Err(Error::InvalidC1)?;
}
Ok(())
}
// Uses Monero's C verification function to ensure compatibility with Monero
#[link(name = "wrapper")]
extern "C" {
pub(crate) fn c_verify_clsag(
serialized_len: usize,
serialized: *const u8,
ring_size: u8,
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
) -> bool;
}
pub fn verify(
clsag: &Clsag,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), Error> {
// Workaround for the fact monero-rs doesn't include the length of clsag.s in clsag encoding
// despite it being part of clsag encoding. Reason for the patch version pin
let mut serialized = vec![clsag.s.len() as u8];
clsag.consensus_encode(&mut serialized).unwrap();
let I_bytes = I.compress().to_bytes();
let mut ring_bytes = vec![];
for member in ring {
ring_bytes.extend(&member[0].compress().to_bytes());
ring_bytes.extend(&member[1].compress().to_bytes());
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_raw_vec(write_scalar, &self.s, w)?;
w.write_all(&self.c1.to_bytes())?;
write_point(&self.D, w)
}
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
pub fn verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), ClsagError> {
// Serialize it to pass the struct to Monero without extensive FFI
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
write_varint(&self.s.len().try_into().unwrap(), &mut serialized).unwrap();
self.serialize(&mut serialized).unwrap();
unsafe {
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
ring.len() as u8, ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
) {
Ok(())
} else {
Err(Error::InvalidC1)
let I_bytes = I.compress().to_bytes();
let mut ring_bytes = vec![];
for member in ring {
ring_bytes.extend(&member[0].compress().to_bytes());
ring_bytes.extend(&member[1].compress().to_bytes());
}
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
unsafe {
// Uses Monero's C verification function to ensure compatibility with Monero
#[link(name = "wrapper")]
extern "C" {
pub(crate) fn c_verify_clsag(
serialized_len: usize,
serialized: *const u8,
ring_size: u8,
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
) -> bool;
}
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
ring.len() as u8, ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
) {
Ok(())
} else {
Err(ClsagError::InvalidC1)
}
}
}
}


@ -11,8 +11,6 @@ use curve25519_dalek::{
edwards::EdwardsPoint
};
use monero::util::ringct::{Key, Clsag};
use group::Group;
use transcript::Transcript as TranscriptTrait;
@ -22,10 +20,10 @@ use dalek_ff_group as dfg;
use crate::{
hash_to_point,
frost::{Transcript, MultisigError, Ed25519, DLEqProof, read_dleq},
clsag::{Input, sign_core, verify}
clsag::{ClsagInput, Clsag}
};
impl Input {
impl ClsagInput {
fn transcript<T: TranscriptTrait>(&self, transcript: &mut T) {
// Doesn't domain separate as this is considered part of the larger CLSAG proof
@ -53,14 +51,14 @@ impl Input {
// While we could move the CLSAG test inside this crate, that'd require duplicating the FROST test
// helper, and isn't worth doing right now when this is harmless enough (semver? TODO)
#[derive(Clone, Debug)]
pub struct Details {
input: Input,
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar
}
impl Details {
pub fn new(input: Input, mask: Scalar) -> Details {
Details { input, mask }
impl ClsagDetails {
pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
ClsagDetails { input, mask }
}
}
@ -76,7 +74,7 @@ struct Interim {
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct Multisig {
pub struct ClsagMultisig {
transcript: Transcript,
H: EdwardsPoint,
@ -84,19 +82,19 @@ pub struct Multisig {
image: EdwardsPoint,
AH: (dfg::EdwardsPoint, dfg::EdwardsPoint),
details: Rc<RefCell<Option<Details>>>,
details: Rc<RefCell<Option<ClsagDetails>>>,
msg: Option<[u8; 32]>,
interim: Option<Interim>
}
impl Multisig {
impl ClsagMultisig {
pub fn new(
transcript: Transcript,
details: Rc<RefCell<Option<Details>>>
) -> Result<Multisig, MultisigError> {
details: Rc<RefCell<Option<ClsagDetails>>>
) -> Result<ClsagMultisig, MultisigError> {
Ok(
Multisig {
ClsagMultisig {
transcript,
H: EdwardsPoint::identity(),
@ -115,7 +113,7 @@ impl Multisig {
3 * (32 + 64)
}
fn input(&self) -> Input {
fn input(&self) -> ClsagInput {
self.details.borrow().as_ref().unwrap().input.clone()
}
@ -124,7 +122,7 @@ impl Multisig {
}
}
impl Algorithm<Ed25519> for Multisig {
impl Algorithm<Ed25519> for ClsagMultisig {
type Transcript = Transcript;
type Signature = (Clsag, EdwardsPoint);
@ -136,7 +134,7 @@ impl Algorithm<Ed25519> for Multisig {
) -> Vec<u8> {
self.H = hash_to_point(&view.group_key().0);
let mut serialized = Vec::with_capacity(Multisig::serialized_len());
let mut serialized = Vec::with_capacity(ClsagMultisig::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
serialized.extend(DLEqProof::prove(rng, &self.H, &view.secret_share().0).serialize());
@ -154,7 +152,7 @@ impl Algorithm<Ed25519> for Multisig {
commitments: &[dfg::EdwardsPoint; 2],
serialized: &[u8]
) -> Result<(), FrostError> {
if serialized.len() != Multisig::serialized_len() {
if serialized.len() != ClsagMultisig::serialized_len() {
// Not an optimal error but...
Err(FrostError::InvalidCommitmentQuantity(l, 9, serialized.len() / 32))?;
}
@ -217,7 +215,7 @@ impl Algorithm<Ed25519> for Multisig {
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
#[allow(non_snake_case)]
let (clsag, pseudo_out, p, c) = sign_core(
let (clsag, pseudo_out, p, c) = Clsag::sign_core(
&mut rng,
&self.image,
&self.input(),
@ -241,9 +239,8 @@ impl Algorithm<Ed25519> for Multisig {
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let mut clsag = interim.clsag.clone();
clsag.s[usize::from(self.input().decoys.i)] = Key { key: (sum.0 - interim.c).to_bytes() };
if verify(
&clsag,
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
if clsag.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,


@ -1,5 +1,4 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use tiny_keccak::{Hasher, Keccak};
@ -10,24 +9,27 @@ use curve25519_dalek::{
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
};
use monero::util::key::H;
#[cfg(feature = "multisig")]
pub mod frost;
mod serialize;
pub mod bulletproofs;
pub mod clsag;
pub mod rpc;
pub mod transaction;
pub mod wallet;
#[link(name = "wrapper")]
extern "C" {
fn c_hash_to_point(point: *const u8);
}
#[cfg(test)]
mod tests;
lazy_static! {
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&H.point.decompress().unwrap());
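// Monero's alternate generator H, previously pulled in via monero::util::key::H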
static ref H: EdwardsPoint = CompressedEdwardsY(
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
).decompress().unwrap();
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}
#[allow(non_snake_case)]
@ -72,7 +74,14 @@ pub fn hash_to_scalar(data: &[u8]) -> Scalar {
pub fn hash_to_point(point: &EdwardsPoint) -> EdwardsPoint {
let mut bytes = point.compress().to_bytes();
unsafe { c_hash_to_point(bytes.as_mut_ptr()); }
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn c_hash_to_point(point: *const u8);
}
c_hash_to_point(bytes.as_mut_ptr());
}
CompressedEdwardsY::from_slice(&bytes).decompress().unwrap()
}


@ -1,25 +1,16 @@
use std::{fmt::Debug, str::FromStr};
use std::fmt::Debug;
use thiserror::Error;
use hex::ToHex;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use monero::{
Hash,
blockdata::{
transaction::{TxIn, Transaction},
block::Block
},
consensus::encode::{serialize, deserialize}
};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::json;
use reqwest;
use crate::transaction::Transaction;
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
@ -106,7 +97,7 @@ impl Rpc {
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
pub async fn get_transactions(&self, hashes: Vec<Hash>) -> Result<Vec<Transaction>, RpcError> {
pub async fn get_transactions(&self, hashes: Vec<[u8; 32]>) -> Result<Vec<Transaction>, RpcError> {
#[derive(Deserialize, Debug)]
struct TransactionResponse {
as_hex: String,
@ -118,35 +109,37 @@ impl Rpc {
}
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
"txs_hashes": hashes.iter().map(|hash| hash.encode_hex()).collect::<Vec<String>>()
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<String>>()
}))).await?;
if txs.txs.len() != hashes.len() {
Err(RpcError::TransactionsNotFound(txs.txs.len(), hashes.len()))?;
}
let mut res: Vec<Transaction> = Vec::with_capacity(txs.txs.len());
for tx in txs.txs {
res.push(
deserialize(
&rpc_hex(if tx.as_hex.len() != 0 { &tx.as_hex } else { &tx.pruned_as_hex })?
).map_err(|_| RpcError::InvalidTransaction)?
);
if tx.as_hex.len() == 0 {
match res[res.len() - 1].prefix.inputs[0] {
TxIn::Gen { .. } => 0,
_ => Err(RpcError::TransactionsNotFound(hashes.len() - 1, hashes.len()))?
};
}
}
Ok(res)
/*
Ok(
txs.txs.iter().filter_map(
|tx| rpc_hex(if tx.as_hex.len() != 0 { &tx.as_hex } else { &tx.pruned_as_hex }).ok()
.and_then(|mut bytes| Transaction::deserialize(&mut bytes).ok())
// https://github.com/monero-project/monero/issues/8311
.filter(
if tx.as_hex.len() == 0 {
match res[res.len() - 1].prefix.inputs[0] {
Input::Gen { .. } => true,
_ => false
}
}
)
)
)
*/
Ok(vec![])
}
/*
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String
json: String
}
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
@ -162,17 +155,21 @@ impl Rpc {
).expect("Monero returned a block we couldn't deserialize")
)
}
*/
pub async fn get_block_transactions(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
/*
let block = self.get_block(height).await?;
let mut res = vec![block.miner_tx];
if block.tx_hashes.len() != 0 {
res.extend(self.get_transactions(block.tx_hashes).await?);
res.extend(self.get_transactions(block.tx_hashes.iter().map(|hash| hash.0).collect()).await?);
}
Ok(res)
*/
Ok(vec![])
}
pub async fn get_o_indexes(&self, hash: Hash) -> Result<Vec<u64>, RpcError> {
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32]
@ -190,8 +187,8 @@ impl Rpc {
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
&Request {
txid: hash.0
}).expect("Couldn't serialize a request")
txid: hash
}).unwrap()
).await?;
Ok(indexes.o_indexes)
@ -223,13 +220,16 @@ impl Rpc {
}))).await?;
let txs = self.get_transactions(
outs.outs.iter().map(|out| Hash::from_str(&out.txid).expect("Monero returned an invalid hash")).collect()
outs.outs.iter().map(|out|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
.try_into().expect("Monero returned an invalid sized hash")
).collect()
).await?;
// TODO: Support time-based lock times. These shouldn't be needed, and it may be painful to
// get the median time for the given height, yet we do need to in order to be complete
outs.outs.iter().enumerate().map(
|(i, out)| Ok(
if txs[i].prefix.unlock_time.0 <= u64::try_from(height).unwrap() {
if txs[i].prefix.unlock_time <= u64::try_from(height).unwrap() {
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
} else { None }
)
@ -279,8 +279,10 @@ impl Rpc {
reason: String
}
let mut buf = Vec::with_capacity(2048);
tx.serialize(&mut buf).unwrap();
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
"tx_as_hex": hex::encode(&serialize(tx))
"tx_as_hex": hex::encode(&buf)
}))).await?;
if res.status != "OK" {


@ -0,0 +1,97 @@
use std::io;
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = (varint & u64::from(!VARINT_CONTINUATION_MASK)) as u8;
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}
pub fn write_scalar<W: io::Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
w.write_all(&scalar.to_bytes())
}
pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
w.write_all(&point.compress().to_bytes())
}
pub fn write_raw_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
pub fn write_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_raw_vec(f, &values, w)
}
pub fn read_byte<R: io::Read>(r: &mut R) -> io::Result<u8> {
let mut res = [0; 1];
r.read_exact(&mut res)?;
Ok(res[0])
}
pub fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
// TODO: Error if bits exceed u64
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
Ok(res)
}
pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
let mut res = [0; 32];
r.read_exact(&mut res)?;
Ok(res)
}
// TODO: Potentially update to Monero's parsing rules on scalars/points, which accept any arbitrary 32 bytes
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(
read_32(r)?
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
CompressedEdwardsY(
read_32(r)?
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
}
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(r: &mut R, f: F) -> io::Result<Vec<T>> {
let len = read_varint(r)?;
let mut res = Vec::with_capacity(
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
);
for _ in 0 .. len {
res.push(f(r)?);
}
Ok(res)
}
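// Hedged round-trip sketch, illustrative rather than part of this diff, for the
// varint and point helpers above.
#[test]
fn serialize_round_trip_sketch() {
  use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;
  // 300 encodes as [0xAC, 0x02]: the low seven bits with the continuation bit set,
  // then the remaining bits
  let mut buf = vec![];
  write_varint(&300, &mut buf).unwrap();
  assert_eq!(buf, [0xAC, 0x02]);
  assert_eq!(read_varint(&mut std::io::Cursor::new(buf)).unwrap(), 300);
  // Points round-trip through their compressed 32-byte form
  let mut buf = vec![];
  write_point(&ED25519_BASEPOINT_POINT, &mut buf).unwrap();
  assert_eq!(read_point(&mut std::io::Cursor::new(buf)).unwrap(), ED25519_BASEPOINT_POINT);
}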


@ -5,11 +5,14 @@ use rand::{RngCore, rngs::OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
use monero::VarInt;
use monero_serai::{Commitment, random_scalar, generate_key_image, transaction::decoys::Decoys, clsag};
use monero_serai::{
Commitment,
random_scalar, generate_key_image,
wallet::decoys::Decoys,
clsag::{ClsagInput, Clsag}
};
#[cfg(feature = "multisig")]
use monero_serai::frost::{MultisigError, Transcript};
use monero_serai::{frost::{MultisigError, Transcript}, clsag::{ClsagDetails, ClsagMultisig}};
#[cfg(feature = "multisig")]
mod frost;
@ -43,16 +46,16 @@ fn clsag() {
}
let image = generate_key_image(&secrets[0]);
let (clsag, pseudo_out) = clsag::sign(
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
&vec![(
secrets[0],
image,
clsag::Input::new(
ClsagInput::new(
Commitment::new(secrets[1], AMOUNT),
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).into_iter().map(|o| VarInt(o)).collect(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap()
@ -60,9 +63,9 @@ fn clsag() {
random_scalar(&mut OsRng),
msg
).unwrap().swap_remove(0);
clsag::verify(&clsag, &ring, &image, &pseudo_out, &msg).unwrap();
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
#[cfg(feature = "experimental")]
clsag::rust_verify(&clsag, &ring, &image, &pseudo_out, &msg).unwrap();
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
}
}
@ -98,12 +101,12 @@ fn clsag_multisig() -> Result<(), MultisigError> {
clsag::Multisig::new(
Transcript::new(b"Monero Serai CLSAG Test".to_vec()),
Rc::new(RefCell::new(Some(
clsag::Details::new(
clsag::Input::new(
ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().map(|o| VarInt(o)).collect(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap(),


@ -0,0 +1,108 @@
#![cfg(feature = "multisig")]
use rand::rngs::OsRng;
use ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_TABLE, Scalar};
pub use frost::{
FrostError, MultisigParams, MultisigKeys,
key_gen, algorithm::Algorithm, sign::{self, lagrange}
};
use crate::frost::Ed25519;
pub const THRESHOLD: usize = 3;
pub const PARTICIPANTS: usize = 5;
pub fn generate_keys() -> (Vec<MultisigKeys<Ed25519>>, Scalar) {
let mut params = vec![];
let mut machines = vec![];
let mut commitments = vec![vec![]];
for i in 1 ..= PARTICIPANTS {
params.push(
MultisigParams::new(THRESHOLD, PARTICIPANTS, i).unwrap()
);
machines.push(
key_gen::StateMachine::<Ed25519>::new(
params[i - 1],
"monero-sign-rs test suite".to_string()
)
);
commitments.push(machines[i - 1].generate_coefficients(&mut OsRng).unwrap());
}
let mut secret_shares = vec![];
for i in 1 ..= PARTICIPANTS {
secret_shares.push(
machines[i - 1].generate_secret_shares(
&mut OsRng,
commitments
.iter()
.enumerate()
.map(|(idx, commitments)| if idx == i { vec![] } else { commitments.to_vec() })
.collect()
).unwrap()
);
}
let mut keys = vec![];
for i in 1 ..= PARTICIPANTS {
let mut our_secret_shares = vec![vec![]];
our_secret_shares.extend(
secret_shares.iter().map(|shares| shares[i].clone()).collect::<Vec<Vec<u8>>>()
);
keys.push(machines[i - 1].complete(our_secret_shares).unwrap().clone());
}
let mut group_private = Scalar::zero();
for i in 1 ..= THRESHOLD {
group_private += keys[i - 1].secret_share() * lagrange::<Scalar>(
i,
&(1 ..= THRESHOLD).collect::<Vec<usize>>()
);
}
assert_eq!(&ED25519_BASEPOINT_TABLE * group_private, keys[0].group_key());
(keys, group_private)
}
#[allow(dead_code)] // Currently triggers a false positive
pub fn sign<S, M: sign::StateMachine<Signature = S>>(machines: &mut Vec<M>, msg: &[u8]) -> Vec<S> {
assert!(machines.len() >= THRESHOLD);
let mut commitments = Vec::with_capacity(PARTICIPANTS + 1);
commitments.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
commitments[i] = Some(machines[i - 1].preprocess(&mut OsRng).unwrap());
}
let mut shares = Vec::with_capacity(PARTICIPANTS + 1);
shares.resize(PARTICIPANTS + 1, None);
for i in 1 ..= THRESHOLD {
shares[i] = Some(
machines[i - 1].sign(
&commitments
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>(),
msg
).unwrap()
);
}
let mut res = Vec::with_capacity(THRESHOLD);
for i in 1 ..= THRESHOLD {
res.push(
machines[i - 1].complete(
&shares
.iter()
.enumerate()
.map(|(idx, value)| if idx == i { None } else { value.to_owned() })
.collect::<Vec<Option<Vec<u8>>>>()
).unwrap()
);
}
res
}


@ -0,0 +1,4 @@
#[cfg(feature = "multisig")]
mod frost;
mod clsag;


@ -0,0 +1,194 @@
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{
hash,
serialize::*,
bulletproofs::Bulletproofs, clsag::Clsag,
};
pub enum Input {
Gen(u64),
ToKey {
amount: u64,
key_offsets: Vec<u64>,
key_image: EdwardsPoint
}
}
impl Input {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
Input::Gen(height) => {
w.write_all(&[0])?;
write_varint(height, w)
},
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
write_varint(amount, w)?;
write_vec(write_varint, key_offsets, w)?;
write_point(key_image, w)
}
}
}
}
// Doesn't bother moving to an enum for the unused Script classes
pub struct Output {
pub amount: u64,
pub key: EdwardsPoint,
pub tag: Option<u8>
}
impl Output {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.amount, w)?;
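// Output type byte: 2 is txout_to_key, 3 is txout_to_tagged_key (used when a view tag is present)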
w.write_all(&[2 + (if self.tag.is_some() { 1 } else { 0 })])?;
write_point(&self.key, w)?;
if let Some(tag) = self.tag {
w.write_all(&[tag])?;
}
Ok(())
}
}
pub struct TransactionPrefix {
pub version: u64,
pub unlock_time: u64,
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
pub extra: Vec<u8>
}
impl TransactionPrefix {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.version, w)?;
write_varint(&self.unlock_time, w)?;
write_vec(Input::serialize, &self.inputs, w)?;
write_vec(Output::serialize, &self.outputs, w)?;
write_varint(&self.extra.len().try_into().unwrap(), w)?;
w.write_all(&self.extra)
}
}
pub struct RctBase {
pub fee: u64,
pub ecdh_info: Vec<[u8; 8]>,
pub commitments: Vec<EdwardsPoint>
}
impl RctBase {
pub fn serialize<W: std::io::Write>(&self, w: &mut W, rct_type: u8) -> std::io::Result<()> {
w.write_all(&[rct_type])?;
write_varint(&self.fee, w)?;
for ecdh in &self.ecdh_info {
w.write_all(ecdh)?;
}
write_raw_vec(write_point, &self.commitments, w)
}
}
pub enum RctPrunable {
Null,
Clsag {
bulletproofs: Vec<Bulletproofs>,
clsags: Vec<Clsag>,
pseudo_outs: Vec<EdwardsPoint>
}
}
impl RctPrunable {
pub fn rct_type(&self) -> u8 {
match self {
RctPrunable::Null => 0, // RCTTypeNull
RctPrunable::Clsag { .. } => 5 // RCTTypeCLSAG
}
}
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
}
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => Ok(()),
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
write_vec(Bulletproofs::serialize, &bulletproofs, w)?;
write_raw_vec(Clsag::serialize, &clsags, w)?;
write_raw_vec(write_point, &pseudo_outs, w)
}
}
}
}
pub struct RctSignatures {
pub base: RctBase,
pub prunable: RctPrunable
}
impl RctSignatures {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.base.serialize(w, self.prunable.rct_type())?;
self.prunable.serialize(w)
}
}
pub struct Transaction {
pub prefix: TransactionPrefix,
pub rct_signatures: RctSignatures
}
impl Transaction {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.prefix.serialize(w)?;
self.rct_signatures.serialize(w)
}
pub fn hash(&self) -> [u8; 32] {
let mut serialized = Vec::with_capacity(2048);
if self.prefix.version == 1 {
self.serialize(&mut serialized).unwrap();
hash(&serialized)
} else {
let mut sig_hash = Vec::with_capacity(96);
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(
&mut serialized,
self.rct_signatures.prunable.rct_type()
).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.prunable.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
hash(&sig_hash)
}
}
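// A compact reading of the branches above (an illustrative note, not from the patch):
//   v1: tx_hash = keccak256(entire serialized transaction)
//   v2: tx_hash = keccak256(keccak256(prefix) || keccak256(RCT base) || keccak256(RCT prunable))
// signature_hash below mirrors the v2 structure, yet hashes the prunable data's
// signature serialization instead of its full serialization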
pub fn signature_hash(&self) -> [u8; 32] {
let mut serialized = Vec::with_capacity(2048);
let mut sig_hash = Vec::with_capacity(96);
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.prunable.signature_serialize(&mut serialized).unwrap();
sig_hash.extend(&hash(&serialized));
hash(&sig_hash)
}
}


@ -1,469 +0,0 @@
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use monero::{
cryptonote::hash::{Hashable, Hash8, Hash},
consensus::encode::{Encodable, VarInt},
blockdata::transaction::{
KeyImage,
TxIn, TxOutTarget, TxOut,
SubField, ExtraField,
TransactionPrefix, Transaction
},
util::{
key::PublicKey,
ringct::{Key, CtKey, EcdhInfo, Bulletproof, RctType, RctSigBase, RctSigPrunable, RctSig},
address::Address
}
};
#[cfg(feature = "multisig")]
use frost::FrostError;
use crate::{
Commitment,
random_scalar,
hash, hash_to_scalar,
generate_key_image, bulletproofs, clsag,
rpc::{Rpc, RpcError}
};
#[cfg(feature = "multisig")]
use crate::frost::MultisigError;
pub mod decoys;
#[cfg(feature = "multisig")]
mod multisig;
// https://github.com/monero-project/research-lab/issues/103
fn uniqueness(inputs: &[TxIn]) -> [u8; 32] {
let mut u = b"domain_separator".to_vec();
for input in inputs {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
TxIn::Gen { height } => { height.consensus_encode(&mut u).unwrap(); },
TxIn::ToKey { k_image, .. } => u.extend(&k_image.image.0)
}
}
hash(&u)
}
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
shared.extend((s * P).mul_by_cofactor().compress().to_bytes().to_vec());
// || o
VarInt(o.try_into().unwrap()).consensus_encode(&mut shared).unwrap();
// Hs()
hash_to_scalar(&shared)
}
fn commitment_mask(shared_key: Scalar) -> Scalar {
let mut mask = b"commitment_mask".to_vec();
mask.extend(shared_key.to_bytes());
hash_to_scalar(&mask)
}
fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
u64::from_le_bytes(amount) ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())
}
fn amount_encryption(amount: u64, key: Scalar) -> Hash8 {
Hash8(amount_decryption(amount.to_le_bytes(), key).to_le_bytes())
}
#[derive(Clone, Debug)]
pub struct SpendableOutput {
pub tx: Hash,
pub o: usize,
pub key: EdwardsPoint,
pub key_offset: Scalar,
pub commitment: Commitment
}
// TODO: Enable disabling one of the shared key derivations and solely using one
// Change outputs currently always use unique derivations, so that must also be corrected
pub fn scan(
tx: &Transaction,
view: Scalar,
spend: EdwardsPoint
) -> Vec<SpendableOutput> {
let mut pubkeys = vec![];
if let Some(key) = tx.tx_pubkey() {
pubkeys.push(key);
}
if let Some(keys) = tx.tx_additional_pubkeys() {
pubkeys.extend(&keys);
}
let pubkeys: Vec<EdwardsPoint> = pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
let rct_sig = tx.rct_signatures.sig.as_ref();
if rct_sig.is_none() {
return vec![];
}
let rct_sig = rct_sig.unwrap();
let mut res = vec![];
for (o, output, output_key) in tx.prefix.outputs.iter().enumerate().filter_map(
|(o, output)| if let TxOutTarget::ToKey { key } = output.target {
key.point.decompress().map(|output_key| (o, output, output_key))
} else { None }
) {
// TODO: This may be replaceable by pubkeys[o]
for pubkey in &pubkeys {
let mut commitment = Commitment::zero();
// P - shared == spend
let matches = |shared_key| (output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)) == spend;
let test = |shared_key| Some(shared_key).filter(|shared_key| matches(*shared_key));
// Get the traditional shared key and unique shared key, testing if either matches for this output
let traditional = test(shared_key(None, view, pubkey, o));
let unique = test(shared_key(Some(uniqueness(&tx.prefix.inputs)), view, pubkey, o));
// If either matches, grab it and decode the amount
if let Some(key_offset) = traditional.or(unique) {
// Miner transaction
if output.amount.0 != 0 {
commitment.amount = output.amount.0;
// Regular transaction
} else {
let amount = match rct_sig.ecdh_info.get(o) {
// TODO: Support the legacy Monero amount encryption
Some(EcdhInfo::Standard { .. }) => continue,
Some(EcdhInfo::Bulletproof { amount }) => amount_decryption(amount.0, key_offset),
// This should never happen, yet it may be possible to get a miner transaction with a
// pointless 0 output, therefore not having EcdhInfo while this will expect it
// Using get just decreases the possibility of a panic and lets us move on in that case
None => continue
};
// Rebuild the commitment to verify it
commitment = Commitment::new(commitment_mask(key_offset), amount);
// If this is a malicious commitment, move to the next output
// Any other R value will calculate to a different spend key and are therefore ignorable
if commitment.calculate().compress().to_bytes() != rct_sig.out_pk[o].mask.key {
break;
}
}
if commitment.amount != 0 {
res.push(SpendableOutput { tx: tx.hash(), o, key: output_key, key_offset, commitment });
}
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
break;
}
}
}
res
}
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
struct Output {
R: EdwardsPoint,
dest: EdwardsPoint,
mask: Scalar,
amount: Hash8
}
impl Output {
pub fn new<R: RngCore + CryptoRng>(
rng: &mut R,
unique: Option<[u8; 32]>,
output: (Address, u64),
o: usize
) -> Result<Output, TransactionError> {
let r = random_scalar(rng);
let shared_key = shared_key(
unique,
r,
&output.0.public_view.point.decompress().ok_or(TransactionError::InvalidAddress)?,
o
);
Ok(
Output {
R: &r * &ED25519_BASEPOINT_TABLE,
dest: (
(&shared_key * &ED25519_BASEPOINT_TABLE) +
output.0.public_spend.point.decompress().ok_or(TransactionError::InvalidAddress)?
),
mask: commitment_mask(shared_key),
amount: amount_encryption(output.1, shared_key)
}
)
}
}
#[derive(Error, Debug)]
pub enum TransactionError {
#[error("no inputs")]
NoInputs,
#[error("no outputs")]
NoOutputs,
#[error("too many outputs")]
TooManyOutputs,
#[error("not enough funds (in {0}, out {1})")]
NotEnoughFunds(u64, u64),
#[error("invalid address")]
InvalidAddress,
#[error("rpc error ({0})")]
RpcError(RpcError),
#[error("clsag error ({0})")]
ClsagError(clsag::Error),
#[error("invalid transaction ({0})")]
InvalidTransaction(RpcError),
#[cfg(feature = "multisig")]
#[error("frost error {0}")]
FrostError(FrostError),
#[cfg(feature = "multisig")]
#[error("multisig error {0}")]
MultisigError(MultisigError)
}
async fn prepare_inputs<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc,
inputs: &[SpendableOutput],
spend: &Scalar,
tx: &mut Transaction
) -> Result<Vec<(Scalar, EdwardsPoint, clsag::Input)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
// Select decoys
let decoys = decoys::select(
rng,
rpc,
rpc.get_height().await.map_err(|e| TransactionError::RpcError(e))? - 10,
inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in inputs.iter().enumerate() {
signable.push((
spend + input.key_offset,
generate_key_image(&(spend + input.key_offset)),
clsag::Input::new(
input.commitment,
decoys[i].clone()
).map_err(|e| TransactionError::ClsagError(e))?
));
tx.prefix.inputs.push(TxIn::ToKey {
amount: VarInt(0),
key_offsets: decoys[i].offsets.clone(),
k_image: KeyImage { image: Hash(signable[i].1.compress().to_bytes()) }
});
}
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
tx.prefix.inputs.sort_by(|x, y| if let (
TxIn::ToKey { k_image: x, ..},
TxIn::ToKey { k_image: y, ..}
) = (x, y) {
x.image.cmp(&y.image).reverse()
} else {
panic!("TxIn wasn't ToKey")
});
Ok(signable)
}
#[derive(Clone, Debug)]
pub struct SignableTransaction {
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
change: Address,
fee_per_byte: u64,
fee: u64,
outputs: Vec<Output>
}
impl SignableTransaction {
pub fn new(
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
change: Address,
fee_per_byte: u64
) -> Result<SignableTransaction, TransactionError> {
if inputs.len() == 0 {
Err(TransactionError::NoInputs)?;
}
if payments.len() == 0 {
Err(TransactionError::NoOutputs)?;
}
Ok(
SignableTransaction {
inputs,
payments,
change,
fee_per_byte,
fee: 0,
outputs: vec![]
}
)
}
fn prepare_outputs<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: Option<[u8; 32]>
) -> Result<(Vec<Commitment>, Scalar), TransactionError> {
self.fee = self.fee_per_byte * 2000; // TODO
// TODO TX MAX SIZE
// Make sure we have enough funds
let in_amount = self.inputs.iter().map(|input| input.commitment.amount).sum();
let out_amount = self.fee + self.payments.iter().map(|payment| payment.1).sum::<u64>();
if in_amount < out_amount {
Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
}
let mut temp_outputs = Vec::with_capacity(self.payments.len() + 1);
// Add the payments to the outputs
for payment in &self.payments {
temp_outputs.push((None, (payment.0, payment.1)));
}
// Ideally, the change output would always have uniqueness, as we control this wallet software
// Unfortunately, if this is used with multisig, doing so would add an extra round due to the
// fact Bulletproofs use a leader protocol reliant on this shared key before the first round of
// communication. Making the change output unique would require Bulletproofs not be a leader
// protocol, using a seeded random
// There is a vector where the multisig participants leak the output key they're about to send
// to, and someone could use that key, forcing some funds to be burnt accordingly if they win
// the race. Any multisig wallet, with this current setup, must only keep change keys in context
// accordingly, preferably as soon as they are proposed, even before they appear as confirmed
// Using another source of uniqueness would also be possible, yet it'd make scanning a tri-key
// system (currently dual for the simpler API, yet would be dual even with a more complex API
// under this decision)
// TODO after https://github.com/serai-dex/serai/issues/2
temp_outputs.push((uniqueness, (self.change, in_amount - out_amount)));
// Shuffle the outputs
temp_outputs.shuffle(rng);
// Actually create the outputs
self.outputs = Vec::with_capacity(temp_outputs.len());
let mut commitments = Vec::with_capacity(temp_outputs.len());
let mut mask_sum = Scalar::zero();
for (o, output) in temp_outputs.iter().enumerate() {
self.outputs.push(Output::new(rng, output.0, output.1, o)?);
commitments.push(Commitment::new(self.outputs[o].mask, output.1.1));
mask_sum += self.outputs[o].mask;
}
Ok((commitments, mask_sum))
}
fn prepare_transaction(
&self,
commitments: &[Commitment],
bp: Bulletproof
) -> Transaction {
// Create the TX extra
let mut extra = ExtraField(vec![
SubField::TxPublicKey(PublicKey { point: self.outputs[0].R.compress() })
]);
extra.0.push(SubField::AdditionalPublickKey(
self.outputs[1 .. self.outputs.len()].iter().map(|output| PublicKey { point: output.R.compress() }).collect()
));
// Format it for monero-rs
let mut mrs_outputs = Vec::with_capacity(self.outputs.len());
let mut out_pk = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
mrs_outputs.push(TxOut {
amount: VarInt(0),
target: TxOutTarget::ToKey { key: PublicKey { point: self.outputs[o].dest.compress() } }
});
out_pk.push(CtKey {
mask: Key { key: commitments[o].calculate().compress().to_bytes() }
});
ecdh_info.push(EcdhInfo::Bulletproof { amount: self.outputs[o].amount });
}
Transaction {
prefix: TransactionPrefix {
version: VarInt(2),
unlock_time: VarInt(0),
inputs: vec![],
outputs: mrs_outputs,
extra
},
signatures: vec![],
rct_signatures: RctSig {
sig: Some(RctSigBase {
rct_type: RctType::Clsag,
txn_fee: VarInt(self.fee),
pseudo_outs: vec![],
ecdh_info,
out_pk
}),
p: Some(RctSigPrunable {
range_sigs: vec![],
bulletproofs: vec![bp],
MGs: vec![],
Clsags: vec![],
pseudo_outs: vec![]
})
}
}
}
pub async fn sign<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Scalar
) -> Result<Transaction, TransactionError> {
let (commitments, mask_sum) = self.prepare_outputs(
rng,
Some(
uniqueness(
&self.inputs.iter().map(|input| TxIn::ToKey {
amount: VarInt(0),
key_offsets: vec![],
k_image: KeyImage {
image: Hash(generate_key_image(&(spend + input.key_offset)).compress().to_bytes())
}
}).collect::<Vec<_>>()
)
)
)?;
let mut tx = self.prepare_transaction(&commitments, bulletproofs::generate(&commitments)?);
let signable = prepare_inputs(rng, rpc, &self.inputs, spend, &mut tx).await?;
let clsags = clsag::sign(
rng,
&signable,
mask_sum,
tx.signature_hash().expect("Couldn't get the signature hash").0
).unwrap(); // None if no inputs which new checks for
let mut prunable = tx.rct_signatures.p.unwrap();
prunable.Clsags = clsags.iter().map(|clsag| clsag.0.clone()).collect();
prunable.pseudo_outs = clsags.iter().map(|clsag| Key { key: clsag.1.compress().to_bytes() }).collect();
tx.rct_signatures.p = Some(prunable);
Ok(tx)
}
}


@ -7,9 +7,7 @@ use rand_distr::{Distribution, Gamma};
use curve25519_dalek::edwards::EdwardsPoint;
use monero::VarInt;
use crate::{transaction::SpendableOutput, rpc::{RpcError, Rpc}};
use crate::{wallet::SpendableOutput, rpc::{RpcError, Rpc}};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
@ -81,12 +79,11 @@ async fn select_n<R: RngCore + CryptoRng>(
Ok(confirmed)
}
// Uses VarInt as this is solely used for key_offsets which is serialized by monero-rs
fn offset(decoys: &[u64]) -> Vec<VarInt> {
let mut res = vec![VarInt(decoys[0])];
res.resize(decoys.len(), VarInt(0));
fn offset(decoys: &[u64]) -> Vec<u64> {
let mut res = vec![decoys[0]];
res.resize(decoys.len(), 0);
for m in (1 .. decoys.len()).rev() {
res[m] = VarInt(decoys[m] - decoys[m - 1]);
res[m] = decoys[m] - decoys[m - 1];
}
res
}
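// Hedged example with illustrative values, not from this diff: absolute ring member
// indexes become a first absolute index followed by the gaps between neighbors.
#[test]
fn offset_sketch() {
  assert_eq!(offset(&[10, 14, 20]), vec![10u64, 4, 6]);
}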
@ -94,7 +91,7 @@ fn offset(decoys: &[u64]) -> Vec<VarInt> {
#[derive(Clone, Debug)]
pub struct Decoys {
pub i: u8,
pub offsets: Vec<VarInt>,
pub offsets: Vec<u64>,
pub ring: Vec<[EdwardsPoint; 2]>
}


@ -0,0 +1,59 @@
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{
hash, hash_to_scalar,
serialize::write_varint,
transaction::Input
};
mod scan;
pub use scan::SpendableOutput;
pub(crate) mod decoys;
pub(crate) use decoys::Decoys;
mod send;
pub use send::{TransactionError, SignableTransaction};
// https://github.com/monero-project/research-lab/issues/103
pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
let mut u = b"domain_separator".to_vec();
for input in inputs {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This still works if there were somehow multiple inputs; it'd merely be a false negative
Input::Gen(height) => { write_varint(&(*height).try_into().unwrap(), &mut u).unwrap(); },
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes())
}
}
hash(&u)
}
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
shared.extend((s * P).mul_by_cofactor().compress().to_bytes().to_vec());
// || o
write_varint(&o.try_into().unwrap(), &mut shared).unwrap();
// Hs()
hash_to_scalar(&shared)
}
pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())).to_le_bytes()
}
fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
u64::from_le_bytes(amount_encryption(u64::from_le_bytes(amount), key))
}
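// Since the amount layer is a XOR against Hs("amount" || key), encryption is its own
// inverse. Hedged sketch with an arbitrary scalar standing in for the shared key.
#[test]
fn amount_encryption_sketch() {
  let key = Scalar::from(42u64);
  assert_eq!(amount_decryption(amount_encryption(1_000_000, key), key), 1_000_000);
}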
pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
let mut mask = b"commitment_mask".to_vec();
mask.extend(shared_key.to_bytes());
hash_to_scalar(&mask)
}


@ -7,7 +7,6 @@ use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint,
use monero::{
Hash, VarInt,
consensus::{Encodable, deserialize},
util::ringct::Key,
blockdata::transaction::{KeyImage, TxIn, Transaction}
};
@ -17,9 +16,9 @@ use frost::{FrostError, MultisigKeys, MultisigParams, sign::{State, StateMachine
use crate::{
frost::{Transcript, Ed25519},
random_scalar, bulletproofs, clsag,
random_scalar, bulletproofs::Bulletproofs, clsag,
rpc::Rpc,
transaction::{TransactionError, SignableTransaction, decoys::{self, Decoys}}
wallet::{TransactionError, SignableTransaction, decoys::{self, Decoys}}
};
pub struct TransactionMachine {
@ -31,8 +30,8 @@ pub struct TransactionMachine {
images: Vec<EdwardsPoint>,
output_masks: Option<Scalar>,
inputs: Vec<Rc<RefCell<Option<clsag::Details>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, clsag::Multisig>>,
inputs: Vec<Rc<RefCell<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
tx: Option<Transaction>
}
@ -95,7 +94,7 @@ impl SignableTransaction {
for (i, input) in self.inputs.iter().enumerate() {
clsags.push(
AlgorithmMachine::new(
clsag::Multisig::new(
ClsagMultisig::new(
transcript.clone(),
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
@ -155,8 +154,8 @@ impl StateMachine for TransactionMachine {
let (commitments, output_masks) = self.signable.prepare_outputs(&mut rng, None).unwrap();
self.output_masks = Some(output_masks);
let bp = bulletproofs::generate(&commitments).unwrap();
bp.consensus_encode(&mut serialized).unwrap();
let bp = Bulletproofs::new(&commitments).unwrap();
serialized.extend(&bp.serialize());
let tx = self.signable.prepare_transaction(&commitments, bp);
self.tx = Some(tx);
@ -175,7 +174,7 @@ impl StateMachine for TransactionMachine {
}
// FROST commitments, image, commitments, and their proofs
let clsag_len = 64 + clsag::Multisig::serialized_len();
let clsag_len = 64 + ClsagMultisig::serialized_len();
let clsag_lens = clsag_len * self.clsags.len();
// Split out the prep and update the TX
@ -200,8 +199,10 @@ impl StateMachine for TransactionMachine {
self.output_masks.replace(output_masks);
// Verify the provided bulletproofs if not leader
let bp = deserialize(&prep[(clsag_lens + 32) .. prep.len()]).map_err(|_| FrostError::InvalidShare(l))?;
if !bulletproofs::verify(&bp, &commitments.iter().map(|c| c.calculate()).collect::<Vec<EdwardsPoint>>()) {
let bp = Bulletproofs::deserialize(
&mut std::io::Cursor::new(&prep[(clsag_lens + 32) .. prep.len()])
).map_err(|_| FrostError::InvalidShare(l))?;
if !bp.verify(&commitments.iter().map(|c| c.calculate()).collect::<Vec<EdwardsPoint>>()) {
Err(FrostError::InvalidShare(l))?;
}
@@ -252,16 +253,16 @@
}
tx.prefix.inputs.push(
TxIn::ToKey {
Input::ToKey {
amount: VarInt(0),
key_offsets: value.1.offsets.clone(),
key_offsets: value.1.offsets.clone().iter().map(|x| VarInt(*x)).collect(),
k_image: KeyImage { image: Hash(value.2.compress().to_bytes()) }
}
);
value.3.replace(
Some(
clsag::Details::new(
ClsagDetails::new(
clsag::Input::new(
value.0.commitment,
value.1
@@ -299,7 +300,7 @@
|share| share.clone().map(|share| share[(c * 32) .. ((c * 32) + 32)].to_vec())
).collect::<Vec<_>>())?;
prunable.Clsags.push(clsag);
prunable.pseudo_outs.push(Key { key: pseudo_out.compress().to_bytes() });
prunable.pseudo_outs.push(pseudo_out.compress().to_bytes());
}
tx.rct_signatures.p = Some(prunable);
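// The hunks above replace monero-rs consensus encoding with this crate's own Bulletproofs
// serialization; a small round-trip sketch of that API, assuming Bulletproofs and Commitment
// are in scope:
fn _bulletproofs_round_trip(commitments: &[Commitment]) {
  let bp = Bulletproofs::new(commitments).unwrap();
  let mut serialized = vec![];
  bp.serialize(&mut serialized).unwrap();
  // Deserialization expects a mutable reader, hence the Cursor
  let parsed = Bulletproofs::deserialize(&mut std::io::Cursor::new(&serialized)).unwrap();
  // Verification takes the actual output commitments
  assert!(parsed.verify(&commitments.iter().map(|c| c.calculate()).collect::<Vec<_>>()));
}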

View file

@@ -0,0 +1,103 @@
use std::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
use crate::{
Commitment,
serialize::write_varint,
transaction::Transaction,
wallet::{uniqueness, shared_key, amount_decryption, commitment_mask}
};
#[derive(Clone, Debug)]
pub struct SpendableOutput {
pub tx: [u8; 32],
pub o: usize,
pub key: EdwardsPoint,
pub key_offset: Scalar,
pub commitment: Commitment
}
// TODO: Enable disabling one of the shared key derivations and solely using one
// Change outputs currently always use unique derivations, so that must also be corrected
impl Transaction {
pub fn scan(
&self,
view: Scalar,
spend: EdwardsPoint
) -> Vec<SpendableOutput> {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
let extra = deserialize::<ExtraField>(&extra);
let pubkeys: Vec<EdwardsPoint>;
if let Ok(extra) = extra {
let mut m_pubkeys = vec![];
if let Some(key) = extra.tx_pubkey() {
m_pubkeys.push(key);
}
if let Some(keys) = extra.tx_additional_pubkeys() {
m_pubkeys.extend(&keys);
}
pubkeys = m_pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
} else {
return vec![];
};
let mut res = vec![];
for (o, output) in self.prefix.outputs.iter().enumerate() {
// TODO: This may be replaceable by pubkeys[o]
for pubkey in &pubkeys {
let mut commitment = Commitment::zero();
// P - shared == spend
let matches = |shared_key| (output.key - (&shared_key * &ED25519_BASEPOINT_TABLE)) == spend;
let test = |shared_key| Some(shared_key).filter(|shared_key| matches(*shared_key));
// Get the traditional shared key and unique shared key, testing if either matches for this output
let traditional = test(shared_key(None, view, pubkey, o));
let unique = test(shared_key(Some(uniqueness(&self.prefix.inputs)), view, pubkey, o));
// If either matches, grab it and decode the amount
if let Some(key_offset) = traditional.or(unique) {
// Miner transaction
if output.amount != 0 {
commitment.amount = output.amount;
// Regular transaction
} else {
let amount = match self.rct_signatures.base.ecdh_info.get(o) {
Some(amount) => amount_decryption(*amount, key_offset),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => continue
};
// Rebuild the commitment to verify it
commitment = Commitment::new(commitment_mask(key_offset), amount);
// If this is a malicious commitment, move to the next output
// Any other R value will calculate to a different spend key and is therefore ignorable
if Some(&commitment.calculate()) != self.rct_signatures.base.commitments.get(o) {
break;
}
}
if commitment.amount != 0 {
res.push(SpendableOutput { tx: self.hash(), o, key: output.key, key_offset, commitment });
}
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
break;
}
}
}
res
}
}
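// A rough sketch of how scanning composes with the RPC layer; the height, keys, and error
// handling here are illustrative:
async fn _scan_block(
  rpc: &crate::rpc::Rpc,
  height: usize,
  view: Scalar,
  spend: EdwardsPoint
) -> Vec<SpendableOutput> {
  let mut spendable = vec![];
  for tx in rpc.get_block_transactions(height).await.unwrap() {
    // scan() tries both the traditional and the uniqueness-bound shared key per output,
    // only keeping outputs whose rebuilt commitment matches the one on chain
    spendable.extend(tx.scan(view, spend));
  }
  spendable
}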

View file

@@ -0,0 +1,319 @@
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use monero::{
consensus::Encodable,
util::{key::PublicKey, address::Address},
blockdata::transaction::SubField
};
#[cfg(feature = "multisig")]
use frost::FrostError;
use crate::{
Commitment,
random_scalar,
generate_key_image, bulletproofs::Bulletproofs, clsag::{ClsagError, ClsagInput, Clsag},
rpc::{Rpc, RpcError},
transaction::*,
wallet::{uniqueness, shared_key, commitment_mask, amount_encryption, SpendableOutput, Decoys}
};
#[cfg(feature = "multisig")]
use crate::frost::MultisigError;
#[cfg(feature = "multisig")]
mod multisig;
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
struct SendOutput {
R: EdwardsPoint,
dest: EdwardsPoint,
mask: Scalar,
amount: [u8; 8]
}
impl SendOutput {
fn new<R: RngCore + CryptoRng>(
rng: &mut R,
unique: Option<[u8; 32]>,
output: (Address, u64),
o: usize
) -> Result<SendOutput, TransactionError> {
let r = random_scalar(rng);
let shared_key = shared_key(
unique,
r,
&output.0.public_view.point.decompress().ok_or(TransactionError::InvalidAddress)?,
o
);
Ok(
SendOutput {
R: &r * &ED25519_BASEPOINT_TABLE,
dest: (
(&shared_key * &ED25519_BASEPOINT_TABLE) +
output.0.public_spend.point.decompress().ok_or(TransactionError::InvalidAddress)?
),
mask: commitment_mask(shared_key),
amount: amount_encryption(output.1, shared_key)
}
)
}
}
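// How this pairs with Transaction::scan: the sender sets dest = Hs(8 * r * A || o) * G + B, with B
// the recipient's spend key, while the recipient (knowing a and seeing R = r * G) recomputes
// shared = Hs(8 * a * R || o) and checks output.key - shared * G == B. The uniqueness variant
// simply prepends the input-derived uniqueness bytes inside that hash on both sides.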
#[derive(Error, Debug)]
pub enum TransactionError {
#[error("no inputs")]
NoInputs,
#[error("no outputs")]
NoOutputs,
#[error("too many outputs")]
TooManyOutputs,
#[error("not enough funds (in {0}, out {1})")]
NotEnoughFunds(u64, u64),
#[error("invalid address")]
InvalidAddress,
#[error("rpc error ({0})")]
RpcError(RpcError),
#[error("clsag error ({0})")]
ClsagError(ClsagError),
#[error("invalid transaction ({0})")]
InvalidTransaction(RpcError),
#[cfg(feature = "multisig")]
#[error("frost error {0}")]
FrostError(FrostError),
#[cfg(feature = "multisig")]
#[error("multisig error {0}")]
MultisigError(MultisigError)
}
async fn prepare_inputs<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc,
inputs: &[SpendableOutput],
spend: &Scalar,
tx: &mut Transaction
) -> Result<Vec<(Scalar, EdwardsPoint, ClsagInput)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
// Select decoys
let decoys = Decoys::select(
rng,
rpc,
rpc.get_height().await.map_err(|e| TransactionError::RpcError(e))? - 10,
inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in inputs.iter().enumerate() {
signable.push((
spend + input.key_offset,
generate_key_image(&(spend + input.key_offset)),
ClsagInput::new(
input.commitment,
decoys[i].clone()
).map_err(|e| TransactionError::ClsagError(e))?
));
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: decoys[i].offsets.clone(),
key_image: signable[i].1
});
}
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
tx.prefix.inputs.sort_by(|x, y| if let (
Input::ToKey { key_image: x, ..},
Input::ToKey { key_image: y, ..}
) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
});
Ok(signable)
}
#[derive(Clone, Debug)]
pub struct SignableTransaction {
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
change: Address,
fee_per_byte: u64,
fee: u64,
outputs: Vec<SendOutput>
}
impl SignableTransaction {
pub fn new(
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
change: Address,
fee_per_byte: u64
) -> Result<SignableTransaction, TransactionError> {
if inputs.len() == 0 {
Err(TransactionError::NoInputs)?;
}
if payments.len() == 0 {
Err(TransactionError::NoOutputs)?;
}
Ok(
SignableTransaction {
inputs,
payments,
change,
fee_per_byte,
fee: 0,
outputs: vec![]
}
)
}
fn prepare_outputs<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: Option<[u8; 32]>
) -> Result<(Vec<Commitment>, Scalar), TransactionError> {
self.fee = self.fee_per_byte * 2000; // TODO
// TODO TX MAX SIZE
// Make sure we have enough funds
let in_amount = self.inputs.iter().map(|input| input.commitment.amount).sum();
let out_amount = self.fee + self.payments.iter().map(|payment| payment.1).sum::<u64>();
if in_amount < out_amount {
Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
}
let mut temp_outputs = Vec::with_capacity(self.payments.len() + 1);
// Add the payments to the outputs
for payment in &self.payments {
temp_outputs.push((None, (payment.0, payment.1)));
}
// Ideally, the change output would always have uniqueness, as we control this wallet software
// Unfortunately, if this is used with multisig, doing so would add an extra round due to the
// fact Bulletproofs use a leader protocol reliant on this shared key before the first round of
// communication. Making the change output unique would require Bulletproofs not be a leader
// protocol, instead using a seeded RNG
// There is a vector where the multisig participants leak the output key they're about to send
// to, and someone could use that key, forcing some funds to be burnt if they win the race.
// Any multisig wallet, with this current setup, must accordingly keep change keys in context,
// preferably as soon as they are proposed, even before they appear as confirmed
// Using another source of uniqueness would also be possible, yet it'd make scanning a tri-key
// system (currently dual for the simpler API, yet would be dual even with a more complex API
// under this decision)
// TODO after https://github.com/serai-dex/serai/issues/2
temp_outputs.push((uniqueness, (self.change, in_amount - out_amount)));
// Shuffle the outputs
temp_outputs.shuffle(rng);
// Actually create the outputs
self.outputs = Vec::with_capacity(temp_outputs.len());
let mut commitments = Vec::with_capacity(temp_outputs.len());
let mut mask_sum = Scalar::zero();
for (o, output) in temp_outputs.iter().enumerate() {
self.outputs.push(SendOutput::new(rng, output.0, output.1, o)?);
commitments.push(Commitment::new(self.outputs[o].mask, output.1.1));
mask_sum += self.outputs[o].mask;
}
Ok((commitments, mask_sum))
}
fn prepare_transaction(
&self,
commitments: &[Commitment],
bp: Bulletproofs
) -> Transaction {
// Create the TX extra
let mut extra = vec![];
SubField::TxPublicKey(
PublicKey { point: self.outputs[0].R.compress() }
).consensus_encode(&mut extra).unwrap();
SubField::AdditionalPublickKey(
self.outputs[1 .. self.outputs.len()].iter().map(|output| PublicKey { point: output.R.compress() }).collect()
).consensus_encode(&mut extra).unwrap();
// Format it for monero-rs
let mut tx_outputs = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
tx_outputs.push(Output {
amount: 0,
key: self.outputs[o].dest,
tag: None
});
ecdh_info.push(self.outputs[o].amount);
}
Transaction {
prefix: TransactionPrefix {
version: 2,
unlock_time: 0,
inputs: vec![],
outputs: tx_outputs,
extra
},
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect()
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![]
}
}
}
}
pub async fn sign<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Scalar
) -> Result<Transaction, TransactionError> {
let (commitments, mask_sum) = self.prepare_outputs(
rng,
Some(
uniqueness(
&self.inputs.iter().map(|input| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: generate_key_image(&(spend + input.key_offset))
}).collect::<Vec<_>>()
)
)
)?;
let mut tx = self.prepare_transaction(&commitments, Bulletproofs::new(&commitments)?);
let signable = prepare_inputs(rng, rpc, &self.inputs, spend, &mut tx).await?;
let clsag_pairs = Clsag::sign(rng, &signable, mask_sum, tx.signature_hash());
match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
}
}
Ok(tx)
}
}
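// A rough usage sketch of the single-signer path above; the destination, amount, and fee rate
// are placeholders:
async fn _send_example<R: RngCore + CryptoRng>(
  rng: &mut R,
  rpc: &Rpc,
  spend: &Scalar,
  inputs: Vec<SpendableOutput>,
  dest: Address,
  change: Address
) -> Result<Transaction, TransactionError> {
  let mut signable = SignableTransaction::new(
    inputs,                  // outputs previously found via Transaction::scan
    vec![(dest, 1_000_000)], // (Address, amount in atomic units)
    change,
    2000                     // fee per byte, placeholder
  )?;
  // sign() selects decoys, builds the outputs (binding the change to the inputs' uniqueness),
  // proves the Bulletproofs, and produces the CLSAGs
  signable.sign(rng, rpc, spend).await
}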

View file

@@ -0,0 +1,314 @@
use std::{rc::Rc, cell::RefCell};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use transcript::Transcript as TranscriptTrait;
use frost::{FrostError, MultisigKeys, MultisigParams, sign::{State, StateMachine, AlgorithmMachine}};
use crate::{
frost::{Transcript, Ed25519},
random_scalar, bulletproofs::Bulletproofs, clsag::{ClsagInput, ClsagDetails, ClsagMultisig},
rpc::Rpc,
transaction::{Input, RctPrunable, Transaction},
wallet::{TransactionError, SignableTransaction, Decoys}
};
pub struct TransactionMachine {
leader: bool,
signable: SignableTransaction,
transcript: Transcript,
decoys: Vec<Decoys>,
images: Vec<EdwardsPoint>,
output_masks: Option<Scalar>,
inputs: Vec<Rc<RefCell<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
tx: Option<Transaction>
}
impl SignableTransaction {
pub async fn multisig<R: RngCore + CryptoRng>(
mut self,
label: Vec<u8>,
rng: &mut R,
rpc: &Rpc,
height: usize,
keys: MultisigKeys<Ed25519>,
included: &[usize]
) -> Result<TransactionMachine, TransactionError> {
let mut images = vec![];
images.resize(self.inputs.len(), EdwardsPoint::identity());
let mut inputs = vec![];
for _ in 0 .. self.inputs.len() {
// Doesn't resize as that will use a single Rc for the entire Vec
inputs.push(Rc::new(RefCell::new(None)));
}
let mut clsags = vec![];
// Create an RNG out of the input shared keys, which either requires the view key or being every
// sender, and the payments (address and amount), which a passive adversary may be able to know
// depending on how these transactions are coordinated
let mut transcript = Transcript::new(label);
// Also include the spend_key, as below only the key offset is included; this confirms the sum product
// This is useful as confirming the sum product confirms the key image, further guaranteeing the
// one-time properties noted below
transcript.append_message(b"spend_key", &keys.group_key().0.compress().to_bytes());
for input in &self.inputs {
// These outputs can only be spent once. Therefore, it forces all RNGs derived from this
// transcript (such as the one used to create one time keys) to be unique
transcript.append_message(b"input_hash", &input.tx);
transcript.append_message(b"input_output_index", &u16::try_from(input.o).unwrap().to_le_bytes());
// Not including this, with a doxxed list of payments, would allow brute forcing the inputs
// to determine RNG seeds and therefore the true spends
transcript.append_message(b"input_shared_key", &input.key_offset.to_bytes());
}
for payment in &self.payments {
transcript.append_message(b"payment_address", &payment.0.as_bytes());
transcript.append_message(b"payment_amount", &payment.1.to_le_bytes());
}
transcript.append_message(b"change", &self.change.as_bytes());
// Select decoys
// Ideally, this would be done post entropy, instead of now, yet doing so would require sign
// to be async which isn't feasible. This should be suitably competent though
// While this inability means we can immediately create the input, moving it out of the
// Rc RefCell, keeping it within an Rc RefCell keeps our options flexible
let decoys = Decoys::select(
&mut ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys", None)),
rpc,
height,
&self.inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in self.inputs.iter().enumerate() {
clsags.push(
AlgorithmMachine::new(
ClsagMultisig::new(
transcript.clone(),
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
Rc::new(keys.offset(dalek_ff_group::Scalar(input.key_offset))),
included
).map_err(|e| TransactionError::FrostError(e))?
);
}
// Verify these outputs by a dummy prep
self.prepare_outputs(rng, None)?;
Ok(TransactionMachine {
leader: keys.params().i() == included[0],
signable: self,
transcript,
decoys,
images,
output_masks: None,
inputs,
clsags,
tx: None
})
}
}
impl StateMachine for TransactionMachine {
type Signature = Transaction;
fn preprocess<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R
) -> Result<Vec<u8>, FrostError> {
if self.state() != State::Fresh {
Err(FrostError::InvalidSignTransition(State::Fresh, self.state()))?;
}
// Iterate over each CLSAG calling preprocess
let mut serialized = vec![];
for (i, clsag) in self.clsags.iter_mut().enumerate() {
let preprocess = clsag.preprocess(rng)?;
// First 64 bytes are FROST's commitments
self.images[i] += CompressedEdwardsY(preprocess[64 .. 96].try_into().unwrap()).decompress().unwrap();
serialized.extend(&preprocess);
}
if self.leader {
let mut entropy = [0; 32];
rng.fill_bytes(&mut entropy);
serialized.extend(&entropy);
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys", Some(entropy)));
// Safe to unwrap thanks to the dummy prepare
let (commitments, output_masks) = self.signable.prepare_outputs(&mut rng, None).unwrap();
self.output_masks = Some(output_masks);
let bp = Bulletproofs::new(&commitments).unwrap();
bp.serialize(&mut serialized).unwrap();
let tx = self.signable.prepare_transaction(&commitments, bp);
self.tx = Some(tx);
}
Ok(serialized)
}
fn sign(
&mut self,
commitments: &[Option<Vec<u8>>],
_: &[u8]
) -> Result<Vec<u8>, FrostError> {
if self.state() != State::Preprocessed {
Err(FrostError::InvalidSignTransition(State::Preprocessed, self.state()))?;
}
// FROST commitments, image, commitments, and their proofs
let clsag_len = 64 + ClsagMultisig::serialized_len();
let clsag_lens = clsag_len * self.clsags.len();
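// Per input, each participant's serialized preprocess is laid out as:
//   [0 .. 64]          FROST nonce commitments
//   [64 .. 96]         key image share
//   [96 .. clsag_len]  the remainder of the ClsagMultisig data
// The leader then appends 32 bytes of entropy followed by the serialized Bulletproofs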
// Split out the prep and update the TX
let mut tx;
if self.leader {
tx = self.tx.take().unwrap();
} else {
let (l, prep) = commitments.iter().enumerate().filter(|(_, prep)| prep.is_some()).next()
.ok_or(FrostError::InternalError("no participants".to_string()))?;
let prep = prep.as_ref().unwrap();
// Not invalid outputs due to doing a dummy prep as leader
let (commitments, output_masks) = self.signable.prepare_outputs(
&mut ChaCha12Rng::from_seed(
self.transcript.rng_seed(
b"tx_keys",
Some(prep[clsag_lens .. (clsag_lens + 32)].try_into().map_err(|_| FrostError::InvalidShare(l))?)
)
),
None
).map_err(|_| FrostError::InvalidShare(l))?;
self.output_masks.replace(output_masks);
// Verify the provided bulletproofs if not leader
let bp = Bulletproofs::deserialize(
&mut std::io::Cursor::new(&prep[(clsag_lens + 32) .. prep.len()])
).map_err(|_| FrostError::InvalidShare(l))?;
if !bp.verify(&commitments.iter().map(|c| c.calculate()).collect::<Vec<EdwardsPoint>>()) {
Err(FrostError::InvalidShare(l))?;
}
tx = self.signable.prepare_transaction(&commitments, bp);
}
for c in 0 .. self.clsags.len() {
// Calculate the key images in order to update the TX
// Multisig will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall
for (l, serialized) in commitments.iter().enumerate().filter(|(_, s)| s.is_some()) {
self.images[c] += CompressedEdwardsY(
serialized.as_ref().unwrap()[((c * clsag_len) + 64) .. ((c * clsag_len) + 96)]
.try_into().map_err(|_| FrostError::InvalidCommitment(l))?
).decompress().ok_or(FrostError::InvalidCommitment(l))?;
}
}
let mut commitments = (0 .. self.inputs.len()).map(|c| commitments.iter().map(
|commitments| commitments.clone().map(
|commitments| commitments[(c * clsag_len) .. ((c * clsag_len) + clsag_len)].to_vec()
)
).collect::<Vec<_>>()).collect::<Vec<_>>();
let mut sorted = Vec::with_capacity(self.decoys.len());
while self.decoys.len() != 0 {
sorted.push((
self.signable.inputs.swap_remove(0),
self.decoys.swap_remove(0),
self.images.swap_remove(0),
self.inputs.swap_remove(0),
self.clsags.swap_remove(0),
commitments.swap_remove(0)
));
}
sorted.sort_by(|x, y| x.2.compress().to_bytes().cmp(&y.2.compress().to_bytes()).reverse());
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks", None));
let mut sum_pseudo_outs = Scalar::zero();
while sorted.len() != 0 {
let value = sorted.remove(0);
let mut mask = random_scalar(&mut rng);
if sorted.len() == 0 {
mask = self.output_masks.unwrap() - sum_pseudo_outs;
} else {
sum_pseudo_outs += mask;
}
tx.prefix.inputs.push(
Input::ToKey {
amount: 0,
key_offsets: value.1.offsets.clone(),
key_image: value.2
}
);
value.3.replace(
Some(
ClsagDetails::new(
ClsagInput::new(
value.0.commitment,
value.1
).map_err(|_| panic!("Signing an input which isn't present in the ring we created for it"))?,
mask
)
)
);
self.clsags.push(value.4);
commitments.push(value.5);
}
let msg = tx.signature_hash();
self.tx = Some(tx);
// Iterate over each CLSAG calling sign
let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
for (c, clsag) in self.clsags.iter_mut().enumerate() {
serialized.extend(&clsag.sign(&commitments[c], &msg)?);
}
Ok(serialized)
}
fn complete(&mut self, shares: &[Option<Vec<u8>>]) -> Result<Transaction, FrostError> {
if self.state() != State::Signed {
Err(FrostError::InvalidSignTransition(State::Signed, self.state()))?;
}
let mut tx = self.tx.take().unwrap();
match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for (c, clsag) in self.clsags.iter_mut().enumerate() {
let (clsag, pseudo_out) = clsag.complete(&shares.iter().map(
|share| share.clone().map(|share| share[(c * 32) .. ((c * 32) + 32)].to_vec())
).collect::<Vec<_>>())?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);
}
}
}
Ok(tx)
}
fn multisig_params(&self) -> MultisigParams {
self.clsags[0].multisig_params()
}
fn state(&self) -> State {
self.clsags[0].state()
}
}
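// A rough sketch of one participant driving the machine; gathering `commitments` and `shares`
// from the other participants over the network is out of scope here (names are illustrative):
async fn _multisig_example<R: RngCore + CryptoRng>(
  rng: &mut R,
  rpc: &Rpc,
  height: usize,
  keys: MultisigKeys<Ed25519>,
  included: &[usize],
  signable: SignableTransaction,
  // Everyone's serialized round messages, None for parties not included
  commitments: &[Option<Vec<u8>>],
  shares: &[Option<Vec<u8>>]
) -> Result<Transaction, FrostError> {
  let mut machine = signable.multisig(
    b"Example".to_vec(), rng, rpc, height, keys, included
  ).await.unwrap();
  // Round 1: broadcast this preprocess; the leader's also carries entropy and the Bulletproofs
  let _preprocess = machine.preprocess(rng)?;
  // Round 2: with everyone's preprocess collected, produce and broadcast a signature share
  let _share = machine.sign(commitments, &[])?;
  // Round 3: with everyone's share collected, the full Transaction is output
  machine.complete(shares)
}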

View file

@@ -15,10 +15,8 @@ use monero::{
network::Network,
util::{key::PublicKey, address::Address}
};
#[cfg(feature = "multisig")]
use monero::cryptonote::hash::Hashable;
use monero_serai::{random_scalar, transaction::{self, SignableTransaction}};
use monero_serai::{random_scalar, wallet::SignableTransaction};
mod rpc;
use crate::rpc::{rpc, mine_block};
@@ -32,8 +30,24 @@ lazy_static! {
static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}
pub async fn send_core(test: usize, multisig: bool) {
let _guard = SEQUENTIAL.lock().unwrap();
macro_rules! async_sequential {
($(async fn $name: ident() $body: block)*) => {
$(
#[tokio::test]
async fn $name() {
let guard = SEQUENTIAL.lock().unwrap();
let local = tokio::task::LocalSet::new();
local.run_until(async move {
if let Err(_) = tokio::task::spawn_local(async move { $body }).await {
drop(guard);
}
}).await;
}
)*
};
}
async fn send_core(test: usize, multisig: bool) {
let rpc = rpc().await;
// Generate an address
@@ -86,7 +100,7 @@ pub async fn send_core(test: usize, multisig: bool) {
// Grab the largest output available
let output = {
let mut outputs = transaction::scan(tx.as_ref().unwrap(), view, spend_pub);
let mut outputs = tx.as_ref().unwrap().scan(view, spend_pub);
outputs.sort_by(|x, y| x.commitment.amount.cmp(&y.commitment.amount).reverse());
outputs.swap_remove(0)
};
@@ -102,7 +116,7 @@ pub async fn send_core(test: usize, multisig: bool) {
for i in (start + 1) .. (start + 9) {
let tx = rpc.get_block_transactions(i).await.unwrap().swap_remove(0);
let output = transaction::scan(&tx, view, spend_pub).swap_remove(0);
let output = tx.scan(view, spend_pub).swap_remove(0);
amount += output.commitment.amount;
outputs.push(output);
}
@@ -144,24 +158,23 @@ pub async fn send_core(test: usize, multisig: bool) {
}
}
#[tokio::test]
pub async fn send_single_input() {
send_core(0, false).await;
}
async_sequential! {
async fn send_single_input() {
send_core(0, false).await;
}
#[tokio::test]
pub async fn send_multiple_inputs() {
send_core(1, false).await;
async fn send_multiple_inputs() {
send_core(1, false).await;
}
}
#[cfg(feature = "multisig")]
#[tokio::test]
pub async fn multisig_send_single_input() {
send_core(0, true).await;
}
async_sequential! {
async fn multisig_send_single_input() {
send_core(0, true).await;
}
#[cfg(feature = "multisig")]
#[tokio::test]
pub async fn multisig_send_multiple_inputs() {
send_core(1, true).await;
async fn multisig_send_multiple_inputs() {
send_core(1, true).await;
}
}