Apply an initial set of rustfmt rules

Luke Parker 2022-07-15 01:26:07 -04:00
parent 0b879a53fa
commit e67033a207
67 changed files with 1983 additions and 1796 deletions

.rustfmt.toml (new file)

@ -0,0 +1,16 @@
tab_spaces = 2
max_width = 100
# Let the developer decide based on the 100 char line limit
use_small_heuristics = "Max"
error_on_line_overflow = true
error_on_unformatted = true
imports_granularity = "Crate"
reorder_imports = false
reorder_modules = false
unstable_features = true
spaces_around_ranges = true
binop_separator = "Back"
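
For illustration only, a minimal hypothetical sketch (not part of this commit) of the style these rules produce: two-space indents, a 100-column limit where `use_small_heuristics = "Max"` leaves wrapping decisions to the developer, `imports_granularity = "Crate"` merging `use` statements per crate (visible in the import hunks throughout), spaces around `..` ranges, and `binop_separator = "Back"` keeping binary operators at the end of wrapped lines:

// Hypothetical example following the .rustfmt.toml above; names are illustrative.
fn fee_estimate(inputs: usize, outputs: usize, extra: usize) -> usize {
  // binop_separator = "Back": when a sum exceeds max_width, it wraps with each
  // `+` left at the end of its line (shown wrapped here for illustration; cf.
  // the fee_weight hunk later in this diff)
  (inputs * 100) +
    (outputs * 50) +
    extra
}

fn main() {
  // spaces_around_ranges = true: ranges format as `0 .. 3`, not `0..3`
  for i in 0 .. 3 {
    println!("{}: {}", i, fee_estimate(i, i, 2));
  }
}

The `#[rustfmt::skip]` attributes appearing in later hunks exempt individual items from these rules, and `imports_granularity`/`unstable_features` require a nightly rustfmt (e.g. `cargo +nightly fmt`).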


@ -1,13 +1,23 @@
use std::{env, path::Path, process::Command};
fn main() {
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
if !Command::new("git")
.args(&["submodule", "update", "--init", "--recursive"])
.status()
.unwrap()
.success()
{
panic!("git failed to init submodules");
}
if !Command::new("mkdir").args(&["-p", ".build"])
.current_dir(&Path::new("c")).status().unwrap().success() {
panic!("failed to create a directory to track build progress");
if !Command::new("mkdir")
.args(&["-p", ".build"])
.current_dir(&Path::new("c"))
.status()
.unwrap()
.success()
{
panic!("failed to create a directory to track build progress");
}
let out_dir = &env::var("OUT_DIR").unwrap();
@ -16,18 +26,29 @@ fn main() {
// If the signaling file was deleted, run this script again to rebuild Monero though
println!("cargo:rerun-if-changed=c/.build/monero");
if !Path::new("c/.build/monero").exists() {
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
panic!("make failed to build Monero. Please check your dependencies");
if !Command::new("make")
.arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero"))
.status()
.unwrap()
.success()
{
panic!("make failed to build Monero. Please check your dependencies");
}
if !Command::new("touch").arg("monero")
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
panic!("failed to create a file to label Monero as built");
if !Command::new("touch")
.arg("monero")
.current_dir(&Path::new("c/.build"))
.status()
.unwrap()
.success()
{
panic!("failed to create a file to label Monero as built");
}
}
println!("cargo:rerun-if-changed=c/wrapper.cpp");
#[rustfmt::skip]
cc::Build::new()
.static_flag(true)
.warnings(false)


@ -1,7 +1,4 @@
use crate::{
serialize::*,
transaction::Transaction
};
use crate::{serialize::*, transaction::Transaction};
#[derive(Clone, PartialEq, Debug)]
pub struct BlockHeader {
@ -9,7 +6,7 @@ pub struct BlockHeader {
pub minor_version: u64,
pub timestamp: u64,
pub previous: [u8; 32],
pub nonce: u32
pub nonce: u32,
}
impl BlockHeader {
@ -22,15 +19,21 @@ impl BlockHeader {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
Ok(
BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
}
)
Ok(BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: {
let mut previous = [0; 32];
r.read_exact(&mut previous)?;
previous
},
nonce: {
let mut nonce = [0; 4];
r.read_exact(&mut nonce)?;
u32::from_le_bytes(nonce)
},
})
}
}
@ -38,7 +41,7 @@ impl BlockHeader {
pub struct Block {
pub header: BlockHeader,
pub miner_tx: Transaction,
pub txs: Vec<[u8; 32]>
pub txs: Vec<[u8; 32]>,
}
impl Block {
@ -53,14 +56,15 @@ impl Block {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
Ok(
Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?).map(
|_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
).collect::<Result<_, _>>()?
}
)
Ok(Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?)
.map(|_| {
let mut tx = [0; 32];
r.read_exact(&mut tx).map(|_| tx)
})
.collect::<Result<_, _>>()?,
})
}
}


@ -18,7 +18,7 @@ pub enum MultisigError {
#[error("invalid discrete log equality proof")]
InvalidDLEqProof(u16),
#[error("invalid key image {0}")]
InvalidKeyImage(u16)
InvalidKeyImage(u16),
}
fn transcript() -> RecommendedTranscript {
@ -29,7 +29,7 @@ fn transcript() -> RecommendedTranscript {
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
x: Scalar
x: Scalar,
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
@ -41,8 +41,10 @@ pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
// merge later in some form, when it should instead just merge xH (as it does)
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
dfg::Scalar(x)
).serialize(&mut res).unwrap();
dfg::Scalar(x),
)
.serialize(&mut res)
.unwrap();
res
}
@ -51,26 +53,22 @@ pub(crate) fn read_dleq<Re: Read>(
serialized: &mut Re,
H: EdwardsPoint,
l: u16,
xG: dfg::EdwardsPoint
xG: dfg::EdwardsPoint,
) -> Result<dfg::EdwardsPoint, MultisigError> {
let mut bytes = [0; 32];
serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
)?;
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or(MultisigError::InvalidDLEqProof(l))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(MultisigError::InvalidDLEqProof(l))?;
}
DLEqProof::<dfg::EdwardsPoint>::deserialize(
serialized
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
&[xG, xH]
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
DLEqProof::<dfg::EdwardsPoint>::deserialize(serialized)
.map_err(|_| MultisigError::InvalidDLEqProof(l))?
.verify(&mut transcript(), &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)], &[xG, xH])
.map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH)
}


@ -10,7 +10,7 @@ use tiny_keccak::{Hasher, Keccak};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY},
};
#[cfg(feature = "multisig")]
@ -31,8 +31,13 @@ mod tests;
lazy_static! {
static ref H: EdwardsPoint = CompressedEdwardsY(
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
).decompress().unwrap();
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94")
.unwrap()
.try_into()
.unwrap()
)
.decompress()
.unwrap();
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}
@ -40,9 +45,7 @@ lazy_static! {
// need to link against libsodium
#[no_mangle]
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
isize::from(
slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
) - 1
isize::from(slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()) - 1
}
// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
@ -51,9 +54,8 @@ unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
// sampling however, hence the need for this function
#[no_mangle]
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
let res = Scalar::from_bytes_mod_order_wide(
std::slice::from_raw_parts(value, 64).try_into().unwrap()
);
let res =
Scalar::from_bytes_mod_order_wide(std::slice::from_raw_parts(value, 64).try_into().unwrap());
for (i, b) in res.to_bytes().iter().enumerate() {
value.add(i).write(*b);
}
@ -63,12 +65,12 @@ unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Commitment {
pub mask: Scalar,
pub amount: u64
pub amount: u64,
}
impl Commitment {
pub fn zero() -> Commitment {
Commitment { mask: Scalar::one(), amount: 0}
Commitment { mask: Scalar::one(), amount: 0 }
}
pub fn new(mask: Scalar, amount: u64) -> Commitment {


@ -20,7 +20,7 @@ pub struct Bulletproofs {
pub R: Vec<EdwardsPoint>,
pub a: Scalar,
pub b: Scalar,
pub t: Scalar
pub t: Scalar,
}
impl Bulletproofs {
@ -38,7 +38,10 @@ impl Bulletproofs {
len + clawback
}
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
pub fn new<R: RngCore + CryptoRng>(
rng: &mut R,
outputs: &[Commitment],
) -> Result<Bulletproofs, TransactionError> {
if outputs.len() > MAX_OUTPUTS {
return Err(TransactionError::TooManyOutputs)?;
}
@ -54,22 +57,28 @@ impl Bulletproofs {
#[link(name = "wrapper")]
extern "C" {
fn free(ptr: *const u8);
fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
fn c_generate_bp(
seed: *const u8,
len: u8,
amounts: *const u64,
masks: *const [u8; 32],
) -> *const u8;
}
let ptr = c_generate_bp(
seed.as_ptr(),
u8::try_from(outputs.len()).unwrap(),
amounts.as_ptr(),
masks.as_ptr()
masks.as_ptr(),
);
let mut len = 6 * 32;
len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
res = Bulletproofs::deserialize(
// Wrap in a cursor to provide a mutable Reader
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
).expect("Couldn't deserialize Bulletproofs from Monero");
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len)),
)
.expect("Couldn't deserialize Bulletproofs from Monero");
free(ptr);
};
@ -87,9 +96,10 @@ impl Bulletproofs {
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
self.serialize(&mut serialized).unwrap();
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
).collect();
let commitments: Vec<[u8; 32]> = commitments
.iter()
.map(|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes())
.collect();
unsafe {
#[link(name = "wrapper")]
@ -99,7 +109,7 @@ impl Bulletproofs {
serialized_len: usize,
serialized: *const u8,
commitments_len: u8,
commitments: *const [u8; 32]
commitments: *const [u8; 32],
) -> bool;
}
@ -108,15 +118,16 @@ impl Bulletproofs {
serialized.len(),
serialized.as_ptr(),
u8::try_from(commitments.len()).unwrap(),
commitments.as_ptr()
commitments.as_ptr(),
)
}
}
fn serialize_core<
W: std::io::Write,
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
fn serialize_core<W: std::io::Write, F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>>(
&self,
w: &mut W,
specific_write_vec: F,
) -> std::io::Result<()> {
write_point(&self.A, w)?;
write_point(&self.S, w)?;
write_point(&self.T1, w)?;
@ -150,7 +161,7 @@ impl Bulletproofs {
R: read_vec(read_point, r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
t: read_scalar(r)?
t: read_scalar(r)?,
};
if bp.L.len() != bp.R.len() {


@ -8,15 +8,12 @@ use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
traits::VartimePrecomputedMultiscalarMul,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};
use crate::{
Commitment, random_scalar, hash_to_scalar,
transaction::RING_LEN,
wallet::decoys::Decoys,
ringct::hash_to_point,
serialize::*
Commitment, random_scalar, hash_to_scalar, transaction::RING_LEN, wallet::decoys::Decoys,
ringct::hash_to_point, serialize::*,
};
#[cfg(feature = "multisig")]
@ -41,7 +38,7 @@ pub enum ClsagError {
#[error("invalid s")]
InvalidS,
#[error("invalid c1")]
InvalidC1
InvalidC1,
}
#[derive(Clone, PartialEq, Debug)]
@ -49,14 +46,11 @@ pub struct ClsagInput {
// The actual commitment for the true spend
pub commitment: Commitment,
// True spend index, offsets, and ring
pub decoys: Decoys
pub decoys: Decoys,
}
impl ClsagInput {
pub fn new(
commitment: Commitment,
decoys: Decoys
) -> Result<ClsagInput, ClsagError> {
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
let n = decoys.len();
if n > u8::MAX.into() {
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
@ -78,7 +72,7 @@ impl ClsagInput {
enum Mode {
Sign(usize, EdwardsPoint, EdwardsPoint),
#[cfg(feature = "experimental")]
Verify(Scalar)
Verify(Scalar),
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
@ -90,7 +84,7 @@ fn core(
msg: &[u8; 32],
D: &EdwardsPoint,
s: &[Scalar],
A_c1: Mode
A_c1: Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
@ -99,13 +93,17 @@ fn core(
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
let mut to_hash = vec![];
to_hash.reserve_exact(((2 * n) + 5) * 32);
const PREFIX: &[u8] = "CLSAG_".as_bytes();
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
const ROUND: &[u8] = "round".as_bytes();
const PREFIX: &[u8] = b"CLSAG_";
#[rustfmt::skip]
const AGG_0: &[u8] = b"agg_0";
#[rustfmt::skip]
const ROUND: &[u8] = b"round";
const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();
let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
to_hash.extend(PREFIX);
to_hash.extend(AGG_0);
to_hash.extend([0; 32 - AGG_0.len()]);
to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);
let mut P = Vec::with_capacity(n);
for member in ring {
@ -125,7 +123,7 @@ fn core(
// mu_P with agg_0
let mu_P = hash_to_scalar(&to_hash);
// mu_C with agg_1
to_hash[AGG_0.len() - 1] = b'1';
to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
let mu_C = hash_to_scalar(&to_hash);
// Truncate it for the round transcript, altering the DST as needed
@ -149,7 +147,7 @@ fn core(
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = hash_to_scalar(&to_hash);
},
}
#[cfg(feature = "experimental")]
Mode::Verify(c1) => {
@ -188,7 +186,7 @@ fn core(
pub struct Clsag {
pub D: EdwardsPoint,
pub s: Vec<Scalar>,
pub c1: Scalar
pub c1: Scalar,
}
impl Clsag {
@ -201,7 +199,7 @@ impl Clsag {
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint
AH: EdwardsPoint,
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
@ -214,14 +212,10 @@ impl Clsag {
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
let ((D, p, c), c1) =
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(
Clsag { D, s, c1 },
pseudo_out,
p,
c * z
)
(Clsag { D, s, c1 }, pseudo_out, p, c * z)
}
// Single signer CLSAG
@ -229,7 +223,7 @@ impl Clsag {
rng: &mut R,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
sum_outputs: Scalar,
msg: [u8; 32]
msg: [u8; 32],
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
@ -254,7 +248,7 @@ impl Clsag {
mask,
&msg,
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
);
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
@ -271,17 +265,10 @@ impl Clsag {
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
msg: &[u8; 32],
) -> Result<(), ClsagError> {
let (_, c1) = core(
ring,
I,
pseudo_out,
msg,
&self.D.mul_by_cofactor(),
&self.s,
Mode::Verify(self.c1)
);
let (_, c1) =
core(ring, I, pseudo_out, msg, &self.D.mul_by_cofactor(), &self.s, Mode::Verify(self.c1));
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
@ -299,13 +286,7 @@ impl Clsag {
}
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
Ok(
Clsag {
s: read_raw_vec(read_scalar, decoys, r)?,
c1: read_scalar(r)?,
D: read_point(r)?
}
)
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
}
pub fn verify(
@ -313,7 +294,7 @@ impl Clsag {
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
msg: &[u8; 32],
) -> Result<(), ClsagError> {
// Serialize it to pass the struct to Monero without extensive FFI
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
@ -341,15 +322,19 @@ impl Clsag {
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
msg: *const u8,
) -> bool;
}
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
u8::try_from(ring.len()).map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
serialized.len(),
serialized.as_ptr(),
u8::try_from(ring.len())
.map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
I_bytes.as_ptr(),
pseudo_out_bytes.as_ptr(),
msg.as_ptr(),
) {
Ok(())
} else {


@ -1,5 +1,8 @@
use core::fmt::Debug;
use std::{io::Read, sync::{Arc, RwLock}};
use std::{
io::Read,
sync::{Arc, RwLock},
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
@ -8,7 +11,7 @@ use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
traits::{Identity, IsIdentity},
scalar::Scalar,
edwards::EdwardsPoint
edwards::EdwardsPoint,
};
use group::Group;
@ -19,7 +22,10 @@ use dalek_ff_group as dfg;
use crate::{
frost::{MultisigError, write_dleq, read_dleq},
ringct::{hash_to_point, clsag::{ClsagInput, Clsag}}
ringct::{
hash_to_point,
clsag::{ClsagInput, Clsag},
},
};
impl ClsagInput {
@ -49,7 +55,7 @@ impl ClsagInput {
#[derive(Clone, Debug)]
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar
mask: Scalar,
}
impl ClsagDetails {
@ -65,7 +71,7 @@ struct Interim {
c: Scalar,
clsag: Clsag,
pseudo_out: EdwardsPoint
pseudo_out: EdwardsPoint,
}
#[allow(non_snake_case)]
@ -74,34 +80,33 @@ pub struct ClsagMultisig {
transcript: RecommendedTranscript,
H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
// an extra round
image: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
msg: Option<[u8; 32]>,
interim: Option<Interim>
interim: Option<Interim>,
}
impl ClsagMultisig {
pub fn new(
transcript: RecommendedTranscript,
output_key: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>
details: Arc<RwLock<Option<ClsagDetails>>>,
) -> Result<ClsagMultisig, MultisigError> {
Ok(
ClsagMultisig {
transcript,
Ok(ClsagMultisig {
transcript,
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
details,
details,
msg: None,
interim: None
}
)
msg: None,
interim: None,
})
}
pub const fn serialized_len() -> usize {
@ -128,7 +133,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
view: &FrostView<Ed25519>
view: &FrostView<Ed25519>,
) -> Vec<u8> {
let mut serialized = Vec::with_capacity(Self::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
@ -140,7 +145,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&mut self,
view: &FrostView<Ed25519>,
l: u16,
serialized: &mut Re
serialized: &mut Re,
) -> Result<(), FrostError> {
if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG");
@ -149,12 +154,9 @@ impl Algorithm<Ed25519> for ClsagMultisig {
}
self.transcript.append_message(b"participant", &l.to_be_bytes());
let image = read_dleq(
serialized,
self.H,
l,
view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
let image = read_dleq(serialized, self.H, l, view.verification_share(l))
.map_err(|_| FrostError::InvalidCommitment(l))?
.0;
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
self.image += image;
@ -170,7 +172,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
view: &FrostView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: &[dfg::Scalar],
msg: &[u8]
msg: &[u8],
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
// The transcript contains private data, preventing passive adversaries from recreating this
@ -189,7 +191,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
self.mask(),
&self.msg.as_ref().unwrap(),
nonce_sums[0][0].0,
nonce_sums[0][1].0
nonce_sums[0][1].0,
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
@ -203,17 +205,20 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self,
_: dfg::EdwardsPoint,
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar
sum: dfg::Scalar,
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let mut clsag = interim.clsag.clone();
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
if clsag.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap()
).is_ok() {
if clsag
.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap(),
)
.is_ok()
{
return Some((clsag, interim.pseudo_out));
}
return None;
@ -227,8 +232,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
share: dfg::Scalar,
) -> bool {
let interim = self.interim.as_ref().unwrap();
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
nonces[0][0].0 - (interim.p * verification_share.0)
);
return (&share.0 * &ED25519_BASEPOINT_TABLE) ==
(nonces[0][0].0 - (interim.p * verification_share.0));
}
}


@ -8,7 +8,7 @@ pub mod bulletproofs;
use crate::{
serialize::*,
ringct::{clsag::Clsag, bulletproofs::Bulletproofs}
ringct::{clsag::Clsag, bulletproofs::Bulletproofs},
};
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
@ -19,7 +19,7 @@ pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
pub struct RctBase {
pub fee: u64,
pub ecdh_info: Vec<[u8; 8]>,
pub commitments: Vec<EdwardsPoint>
pub commitments: Vec<EdwardsPoint>,
}
impl RctBase {
@ -37,12 +37,15 @@ impl RctBase {
w.write_all(ecdh)?;
}
write_raw_vec(write_point, &self.commitments, w)
},
_ => panic!("Serializing unknown RctType's Base")
}
_ => panic!("Serializing unknown RctType's Base"),
}
}
pub fn deserialize<R: std::io::Read>(outputs: usize, r: &mut R) -> std::io::Result<(RctBase, u8)> {
pub fn deserialize<R: std::io::Read>(
outputs: usize,
r: &mut R,
) -> std::io::Result<(RctBase, u8)> {
let mut rct_type = [0];
r.read_exact(&mut rct_type)?;
Ok((
@ -51,13 +54,16 @@ impl RctBase {
} else {
RctBase {
fee: read_varint(r)?,
ecdh_info: (0 .. outputs).map(
|_| { let mut ecdh = [0; 8]; r.read_exact(&mut ecdh).map(|_| ecdh) }
).collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?
ecdh_info: (0 .. outputs)
.map(|_| {
let mut ecdh = [0; 8];
r.read_exact(&mut ecdh).map(|_| ecdh)
})
.collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?,
}
},
rct_type[0]
rct_type[0],
))
}
}
@ -65,18 +71,14 @@ impl RctBase {
#[derive(Clone, PartialEq, Debug)]
pub enum RctPrunable {
Null,
Clsag {
bulletproofs: Vec<Bulletproofs>,
clsags: Vec<Clsag>,
pseudo_outs: Vec<EdwardsPoint>
}
Clsag { bulletproofs: Vec<Bulletproofs>, clsags: Vec<Clsag>, pseudo_outs: Vec<EdwardsPoint> },
}
impl RctPrunable {
pub fn rct_type(&self) -> u8 {
match self {
RctPrunable::Null => 0,
RctPrunable::Clsag { .. } => 5
RctPrunable::Clsag { .. } => 5,
}
}
@ -98,26 +100,30 @@ impl RctPrunable {
pub fn deserialize<R: std::io::Read>(
rct_type: u8,
decoys: &[usize],
r: &mut R
r: &mut R,
) -> std::io::Result<RctPrunable> {
Ok(
match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
// TODO: Can the amount of outputs be calculated from the BPs for any validly formed TX?
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::deserialize(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?
}
)
Ok(match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len())
.map(|o| Clsag::deserialize(decoys[o], r))
.collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
_ => Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown RCT type",
))?,
})
}
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
RctPrunable::Clsag { bulletproofs, .. } => {
bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect()
}
}
}
}
@ -125,7 +131,7 @@ impl RctPrunable {
#[derive(Clone, PartialEq, Debug)]
pub struct RctSignatures {
pub base: RctBase,
pub prunable: RctPrunable
pub prunable: RctPrunable,
}
impl RctSignatures {
@ -138,7 +144,11 @@ impl RctSignatures {
self.prunable.serialize(w)
}
pub fn deserialize<R: std::io::Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> std::io::Result<RctSignatures> {
pub fn deserialize<R: std::io::Read>(
decoys: Vec<usize>,
outputs: usize,
r: &mut R,
) -> std::io::Result<RctSignatures> {
let base = RctBase::deserialize(outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
}


@ -9,13 +9,17 @@ use serde_json::json;
use reqwest;
use crate::{transaction::{Input, Timelock, Transaction}, block::Block, wallet::Fee};
use crate::{
transaction::{Input, Timelock, Transaction},
block::Block,
wallet::Fee,
};
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
result: T
result: T,
}
#[derive(Clone, Error, Debug)]
@ -31,7 +35,7 @@ pub enum RpcError {
#[error("pruned transaction")]
PrunedTransaction,
#[error("invalid transaction ({0:?})")]
InvalidTransaction([u8; 32])
InvalidTransaction([u8; 32]),
}
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
@ -40,8 +44,10 @@ fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?
).decompress().ok_or(RpcError::InvalidPoint(point.to_string()))
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
)
.decompress()
.ok_or(RpcError::InvalidPoint(point.to_string()))
}
#[derive(Clone, Debug)]
@ -52,10 +58,11 @@ impl Rpc {
Rpc(daemon)
}
pub async fn rpc_call<
Params: Serialize + Debug,
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Option<Params>) -> Result<Response, RpcError> {
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Option<Params>,
) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let mut builder = client.post(&(self.0.clone() + "/" + method));
if let Some(params) = params.as_ref() {
@ -65,44 +72,43 @@ impl Rpc {
self.call_tail(method, builder).await
}
pub async fn bin_call<
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Vec<u8>) -> Result<Response, RpcError> {
pub async fn bin_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Vec<u8>,
) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let builder = client.post(&(self.0.clone() + "/" + method)).body(params);
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
}
async fn call_tail<
Response: DeserializeOwned + Debug
>(&self, method: &str, builder: reqwest::RequestBuilder) -> Result<Response, RpcError> {
let res = builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?;
async fn call_tail<Response: DeserializeOwned + Debug>(
&self,
method: &str,
builder: reqwest::RequestBuilder,
) -> Result<Response, RpcError> {
let res = builder.send().await.map_err(|_| RpcError::ConnectionError)?;
Ok(
if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
}
)
Ok(if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
})
}
pub async fn get_height(&self) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct HeightResponse {
height: usize
height: usize,
}
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
async fn get_transactions_core(
&self,
hashes: &[[u8; 32]]
hashes: &[[u8; 32]],
) -> Result<(Vec<Result<Transaction, RpcError>>, Vec<[u8; 32]>), RpcError> {
if hashes.len() == 0 {
return Ok((vec![], vec![]));
@ -112,39 +118,48 @@ impl Rpc {
struct TransactionResponse {
tx_hash: String,
as_hex: String,
pruned_as_hex: String
pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>
txs: Vec<TransactionResponse>,
}
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
}))).await?;
let txs: TransactionsResponse = self
.rpc_call(
"get_transactions",
Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
})),
)
.await?;
Ok((
txs.txs.iter().map(|res| {
let tx = Transaction::deserialize(
&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap()
)
).map_err(|_| RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap()))?;
txs
.txs
.iter()
.map(|res| {
let tx = Transaction::deserialize(&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap(),
))
.map_err(|_| {
RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap())
})?;
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?,
}
}
}
Ok(tx)
}).collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect()
Ok(tx)
})
.collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect(),
))
}
@ -158,7 +173,10 @@ impl Rpc {
txs.iter().cloned().collect::<Result<_, _>>()
}
pub async fn get_transactions_possible(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
pub async fn get_transactions_possible(
&self,
hashes: &[[u8; 32]],
) -> Result<Vec<Transaction>, RpcError> {
let (txs, _) = self.get_transactions_core(hashes).await?;
Ok(txs.iter().cloned().filter_map(|tx| tx.ok()).collect())
}
@ -166,37 +184,39 @@ impl Rpc {
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String
blob: String,
}
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_block",
"params": {
"height": height
}
}))).await?;
let block: JsonRpcResponse<BlockResponse> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_block",
"params": {
"height": height
}
})),
)
.await?;
Ok(
Block::deserialize(
&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?)
).expect("Monero returned a block we couldn't deserialize")
Block::deserialize(&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?))
.expect("Monero returned a block we couldn't deserialize"),
)
}
async fn get_block_transactions_core(
&self,
height: usize,
possible: bool
possible: bool,
) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(height).await?;
let mut res = vec![block.miner_tx];
res.extend(
if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
}
);
res.extend(if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
});
Ok(res)
}
@ -204,14 +224,17 @@ impl Rpc {
self.get_block_transactions_core(height, false).await
}
pub async fn get_block_transactions_possible(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
pub async fn get_block_transactions_possible(
&self,
height: usize,
) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, true).await
}
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32]
txid: [u8; 32],
}
#[allow(dead_code)]
@ -221,42 +244,52 @@ impl Rpc {
status: String,
untrusted: bool,
credits: usize,
top_hash: String
top_hash: String,
}
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
&Request {
txid: hash
}).unwrap()
).await?;
let indexes: OIndexes = self
.bin_call(
"get_o_indexes.bin",
monero_epee_bin_serde::to_bytes(&Request { txid: hash }).unwrap(),
)
.await?;
Ok(indexes.o_indexes)
}
// from and to are inclusive
pub async fn get_output_distribution(&self, from: usize, to: usize) -> Result<Vec<u64>, RpcError> {
pub async fn get_output_distribution(
&self,
from: usize,
to: usize,
) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
pub struct Distribution {
distribution: Vec<u64>
distribution: Vec<u64>,
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>
distributions: Vec<Distribution>,
}
let mut distributions: JsonRpcResponse<Distributions> = self.rpc_call("json_rpc", Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
}))).await?;
let mut distributions: JsonRpcResponse<Distributions> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
})),
)
.await?;
Ok(distributions.result.distributions.swap_remove(0).distribution)
}
@ -264,46 +297,62 @@ impl Rpc {
pub async fn get_outputs(
&self,
indexes: &[u64],
height: usize
height: usize,
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
pub struct Out {
key: String,
mask: String,
txid: String
txid: String,
}
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>
outs: Vec<Out>,
}
let outs: Outs = self.rpc_call("get_outs", Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
}))).await?;
let outs: Outs = self
.rpc_call(
"get_outs",
Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
})),
)
.await?;
let txs = self.get_transactions(
&outs.outs.iter().map(|out|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
.try_into().expect("Monero returned an invalid sized hash")
).collect::<Vec<_>>()
).await?;
let txs = self
.get_transactions(
&outs
.outs
.iter()
.map(|out| {
rpc_hex(&out.txid)
.expect("Monero returned an invalidly encoded hash")
.try_into()
.expect("Monero returned an invalid sized hash")
})
.collect::<Vec<_>>(),
)
.await?;
// TODO: Support time based lock times. These shouldn't be needed, and it may be painful to
// get the median time for the given height, yet we do need to in order to be complete
outs.outs.iter().enumerate().map(
|(i, out)| Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
outs
.outs
.iter()
.enumerate()
.map(|(i, out)| {
Ok(Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
match txs[i].prefix.timelock {
Timelock::Block(t_height) => (t_height <= height),
_ => false
_ => false,
}
})
)
).collect()
}))
})
.collect()
}
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
@ -311,12 +360,17 @@ impl Rpc {
#[derive(Deserialize, Debug)]
struct FeeResponse {
fee: u64,
quantization_mask: u64
quantization_mask: u64,
}
let res: JsonRpcResponse<FeeResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_fee_estimate"
}))).await?;
let res: JsonRpcResponse<FeeResponse> = self
.rpc_call(
"json_rpc",
Some(json!({
"method": "get_fee_estimate"
})),
)
.await?;
Ok(Fee { per_weight: res.result.fee, mask: res.result.quantization_mask })
}
@ -335,14 +389,14 @@ impl Rpc {
overspend: bool,
too_big: bool,
too_few_outputs: bool,
reason: String
reason: String,
}
let mut buf = Vec::with_capacity(2048);
tx.serialize(&mut buf).unwrap();
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
"tx_as_hex": hex::encode(&buf)
}))).await?;
let res: SendRawResponse = self
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(&buf) })))
.await?;
if res.status != "OK" {
Err(RpcError::InvalidTransaction(tx.hash()))?;


@ -1,6 +1,9 @@
use std::io;
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
@ -30,22 +33,22 @@ pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<
w.write_all(&point.compress().to_bytes())
}
pub fn write_raw_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
pub fn write_raw_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
pub fn write_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
pub fn write_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_raw_vec(f, &values, w)
}
@ -75,23 +78,26 @@ pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
Ok(res)
}
// TODO: Potentially update to Monero's parsing rules on scalars/points, which should be any arbitrary 32-bytes
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
// TODO: https://github.com/serai-dex/serai/issues/25
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(
read_32(r)?
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
Scalar::from_canonical_bytes(read_32(r)?)
.ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
CompressedEdwardsY(
read_32(r)?
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
CompressedEdwardsY(read_32(r)?)
.decompress()
.filter(|point| point.is_torsion_free())
.ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
}
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: usize, r: &mut R) -> io::Result<Vec<T>> {
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = Vec::with_capacity(
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?,
);
for _ in 0 .. len {
res.push(f(r)?);
@ -99,6 +105,9 @@ pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: u
Ok(res)
}
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
r: &mut R,
) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}


@ -5,14 +5,19 @@ use crate::wallet::address::{Network, AddressType, Address};
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
const STANDARD: &'static str = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const STANDARD: &'static str =
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &'static str = "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6MnpXSn88oBX35";
const INTEGRATED: &'static str =
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
pXSn88oBX35";
const SUB_SPEND: [u8; 32] = hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_SPEND: [u8; 32] =
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &'static str = "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
const SUBADDRESS: &'static str =
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
#[test]
fn standard_address() {


@ -11,13 +11,18 @@ use transcript::{Transcript, RecommendedTranscript};
use frost::curve::Ed25519;
use crate::{
Commitment,
random_scalar,
Commitment, random_scalar,
wallet::Decoys,
ringct::{generate_key_image, clsag::{ClsagInput, Clsag}}
ringct::{
generate_key_image,
clsag::{ClsagInput, Clsag},
},
};
#[cfg(feature = "multisig")]
use crate::{frost::MultisigError, ringct::clsag::{ClsagDetails, ClsagMultisig}};
use crate::{
frost::MultisigError,
ringct::clsag::{ClsagDetails, ClsagMultisig},
};
#[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign};
@ -59,13 +64,15 @@ fn clsag() {
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap()
ring: ring.clone(),
},
)
.unwrap(),
)],
random_scalar(&mut OsRng),
msg
).swap_remove(0);
msg,
)
.swap_remove(0);
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
#[cfg(feature = "experimental")]
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
@ -103,23 +110,23 @@ fn clsag_multisig() -> Result<(), MultisigError> {
ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&1].group_key().0,
Arc::new(RwLock::new(Some(
ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap(),
mask_sum
Arc::new(RwLock::new(Some(ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone(),
},
)
)))
).unwrap(),
&keys
.unwrap(),
mask_sum,
)))),
)
.unwrap(),
&keys,
),
&[1; 32]
&[1; 32],
);
Ok(())


@ -2,7 +2,10 @@ use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use crate::{random_scalar, ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point}};
use crate::{
random_scalar,
ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point},
};
#[test]
fn hash_to_point() {


@ -2,7 +2,11 @@ use core::cmp::Ordering;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{hash, serialize::*, ringct::{RctPrunable, RctSignatures}};
use crate::{
hash,
serialize::*,
ringct::{RctPrunable, RctSignatures},
};
pub const RING_LEN: usize = 11;
@ -10,11 +14,7 @@ pub const RING_LEN: usize = 11;
pub enum Input {
Gen(u64),
ToKey {
amount: u64,
key_offsets: Vec<u64>,
key_image: EdwardsPoint
}
ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}
impl Input {
@ -30,7 +30,7 @@ impl Input {
Input::Gen(height) => {
w.write_all(&[255])?;
write_varint(height, w)
},
}
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
@ -44,17 +44,18 @@ impl Input {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
let mut variant = [0];
r.read_exact(&mut variant)?;
Ok(
match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_point(r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
)
Ok(match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_point(r)?,
},
_ => Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown/unused input type",
))?,
})
}
}
@ -63,7 +64,7 @@ impl Input {
pub struct Output {
pub amount: u64,
pub key: EdwardsPoint,
pub tag: Option<u8>
pub tag: Option<u8>,
}
impl Output {
@ -86,16 +87,22 @@ impl Output {
let mut tag = [0];
r.read_exact(&mut tag)?;
if (tag[0] != 2) && (tag[0] != 3) {
Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused output type"))?;
Err(std::io::Error::new(
std::io::ErrorKind::Other,
"Tried to deserialize unknown/unused output type",
))?;
}
Ok(
Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 { r.read_exact(&mut tag)?; Some(tag[0]) } else { None }
}
)
Ok(Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 {
r.read_exact(&mut tag)?;
Some(tag[0])
} else {
None
},
})
}
}
@ -103,7 +110,7 @@ impl Output {
pub enum Timelock {
None,
Block(usize),
Time(u64)
Time(u64),
}
impl Timelock {
@ -126,9 +133,9 @@ impl Timelock {
&match self {
Timelock::None => 0,
Timelock::Block(block) => (*block).try_into().unwrap(),
Timelock::Time(time) => *time
Timelock::Time(time) => *time,
},
w
w,
)
}
}
@ -139,7 +146,7 @@ impl PartialOrd for Timelock {
(Timelock::None, _) => Some(Ordering::Less),
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
_ => None
_ => None,
}
}
}
@ -150,17 +157,19 @@ pub struct TransactionPrefix {
pub timelock: Timelock,
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
pub extra: Vec<u8>
pub extra: Vec<u8>,
}
impl TransactionPrefix {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
// Assumes Timelock::None since this library won't let you create a TX with a timelock
1 + 1 +
varint_len(inputs) + (inputs * Input::fee_weight()) +
// Only 16 outputs are possible under transactions by this lib
1 + (outputs * Output::fee_weight()) +
varint_len(extra) + extra
varint_len(inputs) +
(inputs * Input::fee_weight()) +
1 +
(outputs * Output::fee_weight()) +
varint_len(extra) +
extra
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
@ -178,7 +187,7 @@ impl TransactionPrefix {
timelock: Timelock::from_raw(read_varint(r)?),
inputs: read_vec(Input::deserialize, r)?,
outputs: read_vec(Output::deserialize, r)?,
extra: vec![]
extra: vec![],
};
let len = read_varint(r)?;
@ -192,12 +201,13 @@ impl TransactionPrefix {
#[derive(Clone, PartialEq, Debug)]
pub struct Transaction {
pub prefix: TransactionPrefix,
pub rct_signatures: RctSignatures
pub rct_signatures: RctSignatures,
}
impl Transaction {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
TransactionPrefix::fee_weight(inputs, outputs, extra) + RctSignatures::fee_weight(inputs, outputs)
TransactionPrefix::fee_weight(inputs, outputs, extra) +
RctSignatures::fee_weight(inputs, outputs)
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
@ -207,19 +217,21 @@ impl Transaction {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
let prefix = TransactionPrefix::deserialize(r)?;
Ok(
Transaction {
rct_signatures: RctSignatures::deserialize(
prefix.inputs.iter().map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len()
}).collect(),
prefix.outputs.len(),
r
)?,
Ok(Transaction {
rct_signatures: RctSignatures::deserialize(
prefix
}
)
.inputs
.iter()
.map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len(),
})
.collect(),
prefix.outputs.len(),
r,
)?,
prefix,
})
}
pub fn hash(&self) -> [u8; 32] {
@ -234,10 +246,11 @@ impl Transaction {
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(
&mut serialized,
self.rct_signatures.prunable.rct_type()
).unwrap();
self
.rct_signatures
.base
.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
.unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
@ -262,7 +275,11 @@ impl Transaction {
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
self
.rct_signatures
.base
.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
.unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();


@ -2,7 +2,10 @@ use std::string::ToString;
use thiserror::Error;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use base58_monero::base58::{encode_check, decode_check};
@ -12,14 +15,14 @@ use crate::wallet::ViewPair;
pub enum Network {
Mainnet,
Testnet,
Stagenet
Stagenet,
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AddressType {
Standard,
Integrated([u8; 8]),
Subaddress
Subaddress,
}
impl AddressType {
@ -27,7 +30,7 @@ impl AddressType {
match network {
Network::Mainnet => (18, 19, 42),
Network::Testnet => (53, 54, 63),
Network::Stagenet => (24, 25, 36)
Network::Stagenet => (24, 25, 36),
}
}
}
@ -36,7 +39,7 @@ impl AddressType {
pub struct AddressMeta {
pub network: Network,
pub kind: AddressType,
pub guaranteed: bool
pub guaranteed: bool,
}
#[derive(Clone, Error, Debug)]
@ -50,7 +53,7 @@ pub enum AddressError {
#[error("different network than expected")]
DifferentNetwork,
#[error("invalid key")]
InvalidKey
InvalidKey,
}
impl AddressMeta {
@ -59,7 +62,7 @@ impl AddressMeta {
let byte = match self.kind {
AddressType::Standard => bytes.0,
AddressType::Integrated(_) => bytes.1,
AddressType::Subaddress => bytes.2
AddressType::Subaddress => bytes.2,
};
byte | (if self.guaranteed { 1 << 7 } else { 0 })
}
@ -76,7 +79,7 @@ impl AddressMeta {
_ if actual == standard => Some(AddressType::Standard),
_ if actual == integrated => Some(AddressType::Integrated([0; 8])),
_ if actual == subaddress => Some(AddressType::Subaddress),
_ => None
_ => None,
} {
meta = Some(AddressMeta { network, kind, guaranteed });
break;
@ -91,19 +94,15 @@ impl AddressMeta {
pub struct Address {
pub meta: AddressMeta,
pub spend: EdwardsPoint,
pub view: EdwardsPoint
pub view: EdwardsPoint,
}
impl ViewPair {
pub fn address(&self, network: Network, kind: AddressType, guaranteed: bool) -> Address {
Address {
meta: AddressMeta {
network,
kind,
guaranteed
},
meta: AddressMeta { network, kind, guaranteed },
spend: self.spend,
view: &self.view * &ED25519_BASEPOINT_TABLE
view: &self.view * &ED25519_BASEPOINT_TABLE,
}
}
}
@ -134,14 +133,18 @@ impl Address {
let len = match meta.kind {
AddressType::Standard | AddressType::Subaddress => 65,
AddressType::Integrated(_) => 73
AddressType::Integrated(_) => 73,
};
if raw.len() != len {
Err(AddressError::InvalidLength)?;
}
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
if let AddressType::Integrated(ref mut payment_id) = meta.kind {
payment_id.copy_from_slice(&raw[65 .. 73]);


@ -7,7 +7,11 @@ use rand_distr::{Distribution, Gamma};
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{transaction::RING_LEN, wallet::SpendableOutput, rpc::{RpcError, Rpc}};
use crate::{
transaction::RING_LEN,
wallet::SpendableOutput,
rpc::{RpcError, Rpc},
};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
@ -30,7 +34,7 @@ async fn select_n<R: RngCore + CryptoRng>(
high: u64,
per_second: f64,
used: &mut HashSet<u64>,
count: usize
count: usize,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
let mut iters = 0;
let mut confirmed = Vec::with_capacity(count);
@ -94,7 +98,7 @@ fn offset(ring: &[u64]) -> Vec<u64> {
pub struct Decoys {
pub i: u8,
pub offsets: Vec<u64>,
pub ring: Vec<[EdwardsPoint; 2]>
pub ring: Vec<[EdwardsPoint; 2]>,
}
impl Decoys {
@ -106,14 +110,14 @@ impl Decoys {
rng: &mut R,
rpc: &Rpc,
height: usize,
inputs: &[SpendableOutput]
inputs: &[SpendableOutput],
) -> Result<Vec<Decoys>, RpcError> {
// Convert the inputs in question to the raw output data
let mut outputs = Vec::with_capacity(inputs.len());
for input in inputs {
outputs.push((
rpc.get_o_indexes(input.tx).await?[usize::from(input.o)],
[input.key, input.commitment.calculate()]
[input.key, input.commitment.calculate()],
));
}
@ -153,17 +157,10 @@ impl Decoys {
}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't bother
// with an overage
let mut decoys = select_n(
rng,
rpc,
height,
high,
per_second,
&mut used,
inputs.len() * DECOYS
).await?;
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
let mut decoys =
select_n(rng, rpc, height, high, per_second, &mut used, inputs.len() * DECOYS).await?;
let mut res = Vec::with_capacity(inputs.len());
for o in outputs {
@ -178,8 +175,8 @@ impl Decoys {
// 500 outputs since while itself not being a sufficiently mature blockchain
// Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
// fine for us to not have perfectly matching rules, especially since this code will infinite
// loop if it can't determine sanity, which is possible with sufficient inputs on sufficiently
// small chains
// loop if it can't determine sanity, which is possible with sufficient inputs on
// sufficiently small chains
if high > 500 {
// Make sure the TX passes the sanity check that the median output is within the last 40%
let target_median = high * 3 / 5;
@ -190,28 +187,30 @@ impl Decoys {
if removed.0 == o.0 {
ring.push(o);
} else {
// We could not remove this, saving CPU time and removing low values as possibilities, yet
// it'd increase the amount of decoys required to create this transaction and some removed
// outputs may be the best option (as we drop the first half, not just the bottom n)
// We could not remove this, saving CPU time and removing low values as
// possibilities, yet it'd increase the amount of decoys required to create this
// transaction and some removed outputs may be the best option (as we drop the first
// half, not just the bottom n)
used.remove(&removed.0);
}
}
// Select new outputs until we have a full sized ring again
ring.extend(
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?,
);
ring.sort_by(|a, b| a.0.cmp(&b.0));
}
// The other sanity check rule is about duplicates, yet we already enforce unique ring members
// The other sanity check rule is about duplicates, yet we already enforce unique ring
// members
}
res.push(Decoys {
// Binary searches for the real spend since we don't know where it sorted to
i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
ring: ring.iter().map(|output| output.1).collect()
ring: ring.iter().map(|output| output.1).collect(),
});
}
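  // A hedged sketch, not in the diff: Monero serializes ring members as deltas, the first
  // offset absolute and each later one relative to its predecessor, which is what the
  // offset helper referenced above produces. A minimal standalone version:
  fn to_offsets(sorted_indexes: &[u64]) -> Vec<u64> {
    let mut res = Vec::with_capacity(sorted_indexes.len());
    let mut last = 0;
    for index in sorted_indexes {
      // to_offsets(&[5, 8, 10]) == vec![5, 3, 2]
      res.push(index - last);
      last = *index;
    }
    res
  }
  // partition_point over the sorted ring then recovers i, the real spend's position, as
  // the count of ring members with a strictly lower global index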

View file

@ -1,10 +1,6 @@
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{
hash, hash_to_scalar,
serialize::write_varint,
transaction::Input
};
use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};
pub mod address;
@ -30,8 +26,10 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works, and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => { write_varint(&(*height).try_into().unwrap(), &mut u).unwrap(); },
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes())
Input::Gen(height) => {
write_varint(&(*height).try_into().unwrap(), &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
}
hash(&u)
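  // A hedged note beyond the diff: this digest is what the wallet code below passes as
  // `uniqueness` when deriving shared keys for guaranteed outputs, binding each derivation
  // to this exact input set (the height for Gen inputs, the single-use key images otherwise)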
@ -39,7 +37,12 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
s: Scalar,
P: &EdwardsPoint,
o: usize,
) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
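  // Hedged summary of the derivation the fragments above implement:
  //   shared_key = Hs( [uniqueness ||] 8 * s * P || varint(o) )
  // The sender uses s = r (the transaction key) with P = A (the recipient's view key);
  // the recipient uses s = a with P = R. Both agree since 8rA == 8aR, with the cofactor
  // 8 clearing any small-order component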
@ -69,5 +72,5 @@ pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
#[derive(Clone, Copy)]
pub struct ViewPair {
pub spend: EdwardsPoint,
pub view: Scalar
pub view: Scalar,
}

View file

@ -1,10 +1,6 @@
use std::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
@ -12,7 +8,7 @@ use crate::{
Commitment,
serialize::{write_varint, read_32, read_scalar, read_point},
transaction::{Timelock, Transaction},
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask}
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask},
};
#[derive(Clone, PartialEq, Debug)]
@ -21,7 +17,7 @@ pub struct SpendableOutput {
pub o: u8,
pub key: EdwardsPoint,
pub key_offset: Scalar,
pub commitment: Commitment
pub commitment: Commitment,
}
pub struct Timelocked(Timelock, Vec<SpendableOutput>);
@ -61,27 +57,26 @@ impl SpendableOutput {
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<SpendableOutput> {
Ok(
SpendableOutput {
tx: read_32(r)?,
o: { let mut o = [0; 1]; r.read_exact(&mut o)?; o[0] },
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(
read_scalar(r)?,
{ let mut amount = [0; 8]; r.read_exact(&mut amount)?; u64::from_le_bytes(amount) }
)
}
)
Ok(SpendableOutput {
tx: read_32(r)?,
o: {
let mut o = [0; 1];
r.read_exact(&mut o)?;
o[0]
},
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(read_scalar(r)?, {
let mut amount = [0; 8];
r.read_exact(&mut amount)?;
u64::from_le_bytes(amount)
}),
})
}
}
impl Transaction {
pub fn scan(
&self,
view: ViewPair,
guaranteed: bool
) -> Timelocked {
pub fn scan(&self, view: ViewPair, guaranteed: bool) -> Timelocked {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
@ -110,7 +105,7 @@ impl Transaction {
Some(uniqueness(&self.prefix.inputs)).filter(|_| guaranteed),
view.view,
pubkey,
o
o,
);
// P - shared == spend
if (output.key - (&key_offset * &ED25519_BASEPOINT_TABLE)) != view.spend {
@ -129,7 +124,7 @@ impl Transaction {
Some(amount) => amount_decryption(*amount, key_offset),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break
None => break,
};
// Rebuild the commitment to verify it
@ -147,7 +142,7 @@ impl Transaction {
o: o.try_into().unwrap(),
key: output.key,
key_offset,
commitment
commitment,
});
}
// Break to prevent public keys from being included multiple times, triggering multiple

View file

@ -3,11 +3,7 @@ use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
@ -15,20 +11,20 @@ use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
use frost::FrostError;
use crate::{
Commitment,
random_scalar,
Commitment, random_scalar,
ringct::{
generate_key_image,
clsag::{ClsagError, ClsagInput, Clsag},
bulletproofs::{MAX_OUTPUTS, Bulletproofs},
RctBase, RctPrunable, RctSignatures
RctBase, RctPrunable, RctSignatures,
},
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
rpc::{Rpc, RpcError},
wallet::{
address::{AddressType, Address}, SpendableOutput, Decoys,
key_image_sort, uniqueness, shared_key, commitment_mask, amount_encryption
}
address::{AddressType, Address},
SpendableOutput, Decoys, key_image_sort, uniqueness, shared_key, commitment_mask,
amount_encryption,
},
};
#[cfg(feature = "multisig")]
use crate::frost::MultisigError;
@ -44,7 +40,7 @@ struct SendOutput {
R: EdwardsPoint,
dest: EdwardsPoint,
commitment: Commitment,
amount: [u8; 8]
amount: [u8; 8],
}
impl SendOutput {
@ -52,26 +48,24 @@ impl SendOutput {
rng: &mut R,
unique: [u8; 32],
output: (Address, u64),
o: usize
o: usize,
) -> SendOutput {
let r = random_scalar(rng);
let shared_key = shared_key(
Some(unique).filter(|_| output.0.meta.guaranteed),
r,
&output.0.view,
o
);
let shared_key =
shared_key(Some(unique).filter(|_| output.0.meta.guaranteed), r, &output.0.view, o);
let spend = output.0.spend;
SendOutput {
R: match output.0.meta.kind {
AddressType::Standard => &r * &ED25519_BASEPOINT_TABLE,
AddressType::Integrated(_) => unimplemented!("SendOutput::new doesn't support Integrated addresses"),
AddressType::Subaddress => &r * spend
AddressType::Integrated(_) => {
unimplemented!("SendOutput::new doesn't support Integrated addresses")
}
AddressType::Subaddress => &r * spend,
},
dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + spend),
commitment: Commitment::new(commitment_mask(shared_key), output.1),
amount: amount_encryption(output.1, shared_key)
amount: amount_encryption(output.1, shared_key),
}
}
}
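// A hedged note, not part of this commit: R = r * spend for subaddresses is what keeps
// scanning symmetric. For a subaddress pair (C, D) with C == a * D, the sender derives
// from r * C while the recipient computes a * R == a * r * D == r * C, so both sides
// reach the same shared key without a per-subaddress transaction key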
@ -103,7 +97,7 @@ pub enum TransactionError {
FrostError(FrostError),
#[cfg(feature = "multisig")]
#[error("multisig error {0}")]
MultisigError(MultisigError)
MultisigError(MultisigError),
}
async fn prepare_inputs<R: RngCore + CryptoRng>(
@ -111,7 +105,7 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
rpc: &Rpc,
inputs: &[SpendableOutput],
spend: &Scalar,
tx: &mut Transaction
tx: &mut Transaction,
) -> Result<Vec<(Scalar, EdwardsPoint, ClsagInput)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
@ -120,34 +114,33 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
rng,
rpc,
rpc.get_height().await.map_err(|e| TransactionError::RpcError(e))? - 10,
inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
inputs,
)
.await
.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in inputs.iter().enumerate() {
signable.push((
spend + input.key_offset,
generate_key_image(spend + input.key_offset),
ClsagInput::new(
input.commitment,
decoys[i].clone()
).map_err(|e| TransactionError::ClsagError(e))?
ClsagInput::new(input.commitment, decoys[i].clone())
.map_err(|e| TransactionError::ClsagError(e))?,
));
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: decoys[i].offsets.clone(),
key_image: signable[i].1
key_image: signable[i].1,
});
}
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
tx.prefix.inputs.sort_by(|x, y| if let (
Input::ToKey { key_image: x, ..},
Input::ToKey { key_image: y, ..}
) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
tx.prefix.inputs.sort_by(|x, y| {
if let (Input::ToKey { key_image: x, .. }, Input::ToKey { key_image: y, .. }) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
}
});
Ok(signable)
@ -156,7 +149,7 @@ async fn prepare_inputs<R: RngCore + CryptoRng>(
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Fee {
pub per_weight: u64,
pub mask: u64
pub mask: u64,
}
impl Fee {
@ -170,7 +163,7 @@ pub struct SignableTransaction {
inputs: Vec<SpendableOutput>,
payments: Vec<(Address, u64)>,
outputs: Vec<SendOutput>,
fee: u64
fee: u64,
}
impl SignableTransaction {
@ -178,15 +171,13 @@ impl SignableTransaction {
inputs: Vec<SpendableOutput>,
mut payments: Vec<(Address, u64)>,
change_address: Option<Address>,
fee_rate: Fee
fee_rate: Fee,
) -> Result<SignableTransaction, TransactionError> {
// Make sure all addresses are valid
let test = |addr: Address| {
match addr.meta.kind {
AddressType::Standard => Ok(()),
AddressType::Integrated(..) => Err(TransactionError::InvalidAddress),
AddressType::Subaddress => Ok(())
}
let test = |addr: Address| match addr.meta.kind {
AddressType::Standard => Ok(()),
AddressType::Integrated(..) => Err(TransactionError::InvalidAddress),
AddressType::Subaddress => Ok(()),
};
for payment in &payments {
@ -229,7 +220,8 @@ impl SignableTransaction {
// If we have yet to add a change output, do so if it's economically viable
if (!change) && change_address.is_some() && (in_amount != out_amount) {
// Check even with the new fee, there's remaining funds
let change_fee = fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs + 1, extra)) - fee;
let change_fee =
fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs + 1, extra)) - fee;
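      // Hedged numeric sketch, not from the commit: if the fee so far is 40 yet a transaction
      // weighing in at (inputs, outputs + 1, extra) would cost 55, change_fee is 15, and a
      // change output is only added when out_amount + 15 still leaves part of in_amount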
if (out_amount + change_fee) < in_amount {
change = true;
outputs += 1;
@ -246,20 +238,13 @@ impl SignableTransaction {
payments.push((change_address.unwrap(), in_amount - out_amount));
}
Ok(
SignableTransaction {
inputs,
payments,
outputs: vec![],
fee
}
)
Ok(SignableTransaction { inputs, payments, outputs: vec![], fee })
}
fn prepare_outputs<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: [u8; 32]
uniqueness: [u8; 32],
) -> (Vec<Commitment>, Scalar) {
// Shuffle the payments
self.payments.shuffle(rng);
@ -275,29 +260,23 @@ impl SignableTransaction {
(commitments, sum)
}
fn prepare_transaction(
&self,
commitments: &[Commitment],
bp: Bulletproofs
) -> Transaction {
fn prepare_transaction(&self, commitments: &[Commitment], bp: Bulletproofs) -> Transaction {
// Create the TX extra
// TODO: Review this for canonicity with Monero
let mut extra = vec![];
SubField::TxPublicKey(
PublicKey { point: self.outputs[0].R.compress() }
).consensus_encode(&mut extra).unwrap();
SubField::TxPublicKey(PublicKey { point: self.outputs[0].R.compress() })
.consensus_encode(&mut extra)
.unwrap();
SubField::AdditionalPublickKey(
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect()
).consensus_encode(&mut extra).unwrap();
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect(),
)
.consensus_encode(&mut extra)
.unwrap();
let mut tx_outputs = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
tx_outputs.push(Output {
amount: 0,
key: self.outputs[o].dest,
tag: None
});
tx_outputs.push(Output { amount: 0, key: self.outputs[o].dest, tag: None });
ecdh_info.push(self.outputs[o].amount);
}
@ -307,20 +286,20 @@ impl SignableTransaction {
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra
extra,
},
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect()
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![]
}
}
pseudo_outs: vec![],
},
},
}
}
@ -328,7 +307,7 @@ impl SignableTransaction {
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Scalar
spend: &Scalar,
) -> Result<Transaction, TransactionError> {
let mut images = Vec::with_capacity(self.inputs.len());
for input in &self.inputs {
@ -344,12 +323,11 @@ impl SignableTransaction {
let (commitments, mask_sum) = self.prepare_outputs(
rng,
uniqueness(
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
let mut tx = self.prepare_transaction(&commitments, Bulletproofs::new(rng, &commitments)?);
@ -361,7 +339,8 @@ impl SignableTransaction {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
pseudo_outs
.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
}
}
Ok(tx)

View file

@ -1,25 +1,38 @@
use std::{io::{Read, Cursor}, sync::{Arc, RwLock}, collections::HashMap};
use std::{
io::{Read, Cursor},
sync::{Arc, RwLock},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use curve25519_dalek::{
traits::Identity,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use transcript::{Transcript, RecommendedTranscript};
use frost::{
curve::Ed25519,
FrostError, FrostKeys,
sign::{
PreprocessMachine, SignMachine, SignatureMachine,
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine
}
PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine, AlgorithmSignMachine,
AlgorithmSignatureMachine,
},
};
use crate::{
random_scalar, ringct::{clsag::{ClsagInput, ClsagDetails, ClsagMultisig}, bulletproofs::Bulletproofs, RctPrunable},
random_scalar,
ringct::{
clsag::{ClsagInput, ClsagDetails, ClsagMultisig},
bulletproofs::Bulletproofs,
RctPrunable,
},
transaction::{Input, Transaction},
rpc::Rpc,
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness}
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness},
};
pub struct TransactionMachine {
@ -31,7 +44,7 @@ pub struct TransactionMachine {
decoys: Vec<Decoys>,
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
}
pub struct TransactionSignMachine {
@ -45,12 +58,12 @@ pub struct TransactionSignMachine {
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,
our_preprocess: Vec<u8>
our_preprocess: Vec<u8>,
}
pub struct TransactionSignatureMachine {
tx: Transaction,
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
}
impl SignableTransaction {
@ -60,7 +73,7 @@ impl SignableTransaction {
keys: FrostKeys<Ed25519>,
mut transcript: RecommendedTranscript,
height: usize,
mut included: Vec<u16>
mut included: Vec<u16>,
) -> Result<TransactionMachine, TransactionError> {
let mut inputs = vec![];
for _ in 0 .. self.inputs.len() {
@ -80,9 +93,10 @@ impl SignableTransaction {
// The data itself will be included, making this unnecessary, yet a lot of this is technically
// unnecessary. Anything which further increases security at almost no cost should be followed
transcript.append_message(b"height", &u64::try_from(height).unwrap().to_le_bytes());
// Also include the spend_key as below only the key offset is included, so this confirms the sum product
// Useful as confirming the sum product confirms the key image, further guaranteeing the one time
// properties noted below
// Also include the spend_key as below only the key offset is included, so this transcripts the
// sum product
// Useful as transcripting the sum product effectively transcripts the key image, further
// guaranteeing the one time properties noted below
transcript.append_message(b"spend_key", &keys.group_key().0.compress().to_bytes());
for input in &self.inputs {
// These outputs can only be spent once. Therefore, it forces all RNGs derived from this
@ -110,14 +124,12 @@ impl SignableTransaction {
clsags.push(
AlgorithmMachine::new(
ClsagMultisig::new(
transcript.clone(),
input.key,
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
ClsagMultisig::new(transcript.clone(), input.key, inputs[i].clone())
.map_err(|e| TransactionError::MultisigError(e))?,
Arc::new(offset),
&included
).map_err(|e| TransactionError::FrostError(e))?
&included,
)
.map_err(|e| TransactionError::FrostError(e))?,
);
}
@ -132,22 +144,22 @@ impl SignableTransaction {
&mut ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys")),
rpc,
height,
&self.inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
Ok(
TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
inputs,
clsags
}
&self.inputs,
)
.await
.map_err(|e| TransactionError::RpcError(e))?;
Ok(TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
inputs,
clsags,
})
}
}
@ -157,18 +169,22 @@ impl PreprocessMachine for TransactionMachine {
fn preprocess<R: RngCore + CryptoRng>(
mut self,
rng: &mut R
rng: &mut R,
) -> (TransactionSignMachine, Vec<u8>) {
// Iterate over each CLSAG calling preprocess
let mut serialized = Vec::with_capacity(
// D_{G, H}, E_{G, H}, DLEqs, key image addendum
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len())
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len()),
);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess);
clsag
}).collect();
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess);
clsag
})
.collect();
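    // Hedged size arithmetic, not in the commit: per CLSAG this reserves 2 * (32 + 32) bytes
    // for the dual-generator nonce commitments D and E, another 2 * (32 + 32) for their
    // DLEqs, plus ClsagMultisig::serialized_len() for the key image addendum, making each
    // preprocess a fixed-size blob so the Vec never reallocates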
let our_preprocess = serialized.clone();
// We could add further entropy here, and previous versions of this library did so
@ -194,7 +210,7 @@ impl PreprocessMachine for TransactionMachine {
our_preprocess,
},
serialized
serialized,
)
}
}
@ -205,14 +221,12 @@ impl SignMachine<Transaction> for TransactionSignMachine {
fn sign<Re: Read>(
mut self,
mut commitments: HashMap<u16, Re>,
msg: &[u8]
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> {
if msg.len() != 0 {
Err(
FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own"
)
)?;
Err(FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own",
))?;
}
// FROST commitments and their DLEqs, and the image and its DLEq
@ -220,34 +234,46 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Convert the unified commitments to a Vec of the individual commitments
let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
let mut commitments = (0 .. self.clsags.len()).map(|c| {
let mut buf = [0; CLSAG_LEN];
(&self.included).iter().map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf).map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
let mut commitments = (0 .. self.clsags.len())
.map(|c| {
let mut buf = [0; CLSAG_LEN];
(&self.included)
.iter()
.map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own
// transcripts cloned from this TX's initial premise's transcript. For our TX
// transcript to have the CLSAG data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments
.get_mut(l)
.ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf)
.map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our outputs may need
// these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY(
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well
// provides the easiest API overall, as this is where the TX is (which needs the key
// images in its message), along with where the outputs are determined (where our
// outputs may need these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY(
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)]
.try_into()
.map_err(|_| FrostError::InvalidCommitment(*l))?,
)
.decompress()
.ok_or(FrostError::InvalidCommitment(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()
}).collect::<Result<Vec<_>, _>>()?;
Ok((*l, Cursor::new(buf)))
})
.collect::<Result<HashMap<_, _>, _>>()
})
.collect::<Result<Vec<_>, _>>()?;
// Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
// above
@ -265,20 +291,20 @@ impl SignMachine<Transaction> for TransactionSignMachine {
(commitments, output_masks) = self.signable.prepare_outputs(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys")),
uniqueness(
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
);
self.signable.prepare_transaction(
&commitments,
Bulletproofs::new(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"bulletproofs")),
&commitments
).unwrap()
&commitments,
)
.unwrap(),
)
};
@ -291,7 +317,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
self.decoys.swap_remove(0),
self.inputs.swap_remove(0),
self.clsags.swap_remove(0),
commitments.swap_remove(0)
commitments.swap_remove(0),
));
}
sorted.sort_by(|x, y| key_image_sort(&x.0, &y.0));
@ -308,23 +334,18 @@ impl SignMachine<Transaction> for TransactionSignMachine {
sum_pseudo_outs += mask;
}
tx.prefix.inputs.push(
Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0
}
);
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0,
});
*value.3.write().unwrap() = Some(
ClsagDetails::new(
ClsagInput::new(
value.1.commitment,
value.2
).map_err(|_| panic!("Signing an input which isn't present in the ring we created for it"))?,
mask
)
);
*value.3.write().unwrap() = Some(ClsagDetails::new(
ClsagInput::new(value.1.commitment, value.2).map_err(|_| {
panic!("Signing an input which isn't present in the ring we created for it")
})?,
mask,
));
self.clsags.push(value.4);
commitments.push(value.5);
@ -334,11 +355,15 @@ impl SignMachine<Transaction> for TransactionSignMachine {
// Iterate over each CLSAG calling sign
let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share);
Ok(clsag)
}).collect::<Result<_, _>>()?;
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share);
Ok(clsag)
})
.collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx, clsags }, serialized))
}
@ -352,11 +377,14 @@ impl SignatureMachine<Transaction> for TransactionSignatureMachine {
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for clsag in self.clsags {
let (clsag, pseudo_out) = clsag.complete(
shares.iter_mut().map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()?
shares
.iter_mut()
.map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
})
.collect::<Result<HashMap<_, _>, _>>()?,
)?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);

View file

@ -6,10 +6,13 @@ use serde_json::json;
use monero::{
network::Network,
util::{key::PublicKey, address::Address}
util::{key::PublicKey, address::Address},
};
use monero_serai::{random_scalar, rpc::{EmptyResponse, RpcError, Rpc}};
use monero_serai::{
random_scalar,
rpc::{EmptyResponse, RpcError, Rpc},
};
pub async fn rpc() -> Rpc {
let rpc = Rpc::new("http://127.0.0.1:18081".to_string());
@ -22,8 +25,9 @@ pub async fn rpc() -> Rpc {
let addr = Address::standard(
Network::Mainnet,
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() },
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() }
).to_string();
PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() },
)
.to_string();
// Mine 10 blocks so we have 10 decoys, ensuring decoy selection doesn't fail
mine_block(&rpc, &addr).await.unwrap();
@ -32,11 +36,16 @@ pub async fn rpc() -> Rpc {
}
pub async fn mine_block(rpc: &Rpc, address: &str) -> Result<EmptyResponse, RpcError> {
rpc.rpc_call("json_rpc", Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": 10
},
}))).await
rpc
.rpc_call(
"json_rpc",
Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": 10
},
})),
)
.await
}

View file

@ -16,9 +16,19 @@ use dalek_ff_group::Scalar;
#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::{curve::Ed25519, tests::{THRESHOLD, key_gen, sign}};
use frost::{
curve::Ed25519,
tests::{THRESHOLD, key_gen, sign},
};
use monero_serai::{random_scalar, wallet::{ViewPair, address::{Network, AddressType}, SignableTransaction}};
use monero_serai::{
random_scalar,
wallet::{
ViewPair,
address::{Network, AddressType},
SignableTransaction,
},
};
mod rpc;
use crate::rpc::{rpc, mine_block};
@ -122,9 +132,9 @@ async fn send_core(test: usize, multisig: bool) {
}
}
let mut signable = SignableTransaction::new(
outputs, vec![(addr, amount - 10000000000)], Some(addr), fee
).unwrap();
let mut signable =
SignableTransaction::new(outputs, vec![(addr, amount - 10000000000)], Some(addr), fee)
.unwrap();
if !multisig {
tx = Some(signable.sign(&mut OsRng, &rpc, &spend).await.unwrap());
@ -135,13 +145,17 @@ async fn send_core(test: usize, multisig: bool) {
for i in 1 ..= THRESHOLD {
machines.insert(
i,
signable.clone().multisig(
&rpc,
(*keys[&i]).clone(),
RecommendedTranscript::new(b"Monero Serai Test Transaction"),
rpc.get_height().await.unwrap() - 10,
(1 ..= THRESHOLD).collect::<Vec<_>>()
).await.unwrap()
signable
.clone()
.multisig(
&rpc,
(*keys[&i]).clone(),
RecommendedTranscript::new(b"Monero Serai Test Transaction"),
rpc.get_height().await.unwrap() - 10,
(1 ..= THRESHOLD).collect::<Vec<_>>(),
)
.await
.unwrap(),
);
}

View file

@ -9,16 +9,15 @@ use ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
use crate::{choice, constant_time, math_op, math, from_wrapper, from_uint};
const FIELD_MODULUS: U256 = U256::from_be_hex(
"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed"
);
const FIELD_MODULUS: U256 =
U256::from_be_hex("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed");
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct FieldElement(U256);
pub const SQRT_M1: FieldElement = FieldElement(
U256::from_be_hex("2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0")
);
pub const SQRT_M1: FieldElement = FieldElement(U256::from_be_hex(
"2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0",
));
constant_time!(FieldElement, U256);
math!(
@ -33,7 +32,7 @@ math!(
let wide = U256::mul_wide(&x, &y);
U256::from_le_slice(
&U512::from((wide.1, wide.0)).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32]
&U512::from((wide.1, wide.0)).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32],
)
}
);
@ -41,7 +40,9 @@ from_uint!(FieldElement, U256);
impl Neg for FieldElement {
type Output = Self;
fn neg(self) -> Self::Output { Self(self.0.neg_mod(&FIELD_MODULUS)) }
fn neg(self) -> Self::Output {
Self(self.0.neg_mod(&FIELD_MODULUS))
}
}
impl Field for FieldElement {
@ -53,17 +54,23 @@ impl Field for FieldElement {
let WIDE_MODULUS: U512 = U512::from((U256::ZERO, FIELD_MODULUS));
debug_assert_eq!(FIELD_MODULUS.to_le_bytes()[..], WIDE_MODULUS.to_le_bytes()[.. 32]);
FieldElement(
U256::from_le_slice(
&U512::from_be_bytes(bytes).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32]
)
)
FieldElement(U256::from_le_slice(
&U512::from_be_bytes(bytes).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32],
))
}
fn zero() -> Self { Self(U256::ZERO) }
fn one() -> Self { Self(U256::ONE) }
fn square(&self) -> Self { *self * self }
fn double(&self) -> Self { *self + self }
fn zero() -> Self {
Self(U256::ZERO)
}
fn one() -> Self {
Self(U256::ONE)
}
fn square(&self) -> Self {
*self * self
}
fn double(&self) -> Self {
*self + self
}
fn invert(&self) -> CtOption<Self> {
CtOption::new(self.pow(-FieldElement(U256::from(2u64))), !self.is_zero())
@ -80,9 +87,15 @@ impl Field for FieldElement {
CtOption::new(Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self)), 1.into())
}
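  // Hedged context for the candidates above: p = 2^255 - 19 satisfies p == 5 (mod 8), so
  // for a square x, a root is either c = x^((p + 3) / 8) or c * sqrt(-1) (SQRT_M1 above),
  // as c^2 == x or c^2 == -x; tv1 and tv2 are presumably those two candidates, and the
  // constant-time select keeps whichever squares back to self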
fn is_zero(&self) -> Choice { self.0.ct_eq(&U256::ZERO) }
fn cube(&self) -> Self { *self * self * self }
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self { unimplemented!() }
fn is_zero(&self) -> Choice {
self.0.ct_eq(&U256::ZERO)
}
fn cube(&self) -> Self {
*self * self * self
}
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self {
unimplemented!()
}
}
impl PrimeField for FieldElement {
@ -93,15 +106,21 @@ impl PrimeField for FieldElement {
let res = Self(U256::from_le_bytes(bytes));
CtOption::new(res, res.0.add_mod(&U256::ZERO, &FIELD_MODULUS).ct_eq(&res.0))
}
fn to_repr(&self) -> [u8; 32] { self.0.to_le_bytes() }
fn to_repr(&self) -> [u8; 32] {
self.0.to_le_bytes()
}
const S: u32 = 2;
fn is_odd(&self) -> Choice { unimplemented!() }
fn multiplicative_generator() -> Self { 2u64.into() }
fn is_odd(&self) -> Choice {
unimplemented!()
}
fn multiplicative_generator() -> Self {
2u64.into()
}
fn root_of_unity() -> Self {
FieldElement(
U256::from_be_hex("2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0")
)
FieldElement(U256::from_be_hex(
"2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0",
))
}
}

View file

@ -3,7 +3,7 @@
use core::{
ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
borrow::Borrow,
iter::{Iterator, Sum}
iter::{Iterator, Sum},
};
use subtle::{ConstantTimeEq, ConditionallySelectable};
@ -20,15 +20,13 @@ use dalek::{
traits::Identity,
scalar::Scalar as DScalar,
edwards::{
EdwardsPoint as DEdwardsPoint,
EdwardsBasepointTable as DEdwardsBasepointTable,
CompressedEdwardsY as DCompressedEdwards
EdwardsPoint as DEdwardsPoint, EdwardsBasepointTable as DEdwardsBasepointTable,
CompressedEdwardsY as DCompressedEdwards,
},
ristretto::{
RistrettoPoint as DRistrettoPoint,
RistrettoBasepointTable as DRistrettoBasepointTable,
CompressedRistretto as DCompressedRistretto
}
RistrettoPoint as DRistrettoPoint, RistrettoBasepointTable as DRistrettoBasepointTable,
CompressedRistretto as DCompressedRistretto,
},
};
use ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
@ -64,7 +62,7 @@ macro_rules! deref_borrow {
&self.0
}
}
}
};
}
#[doc(hidden)]
@ -72,7 +70,9 @@ macro_rules! deref_borrow {
macro_rules! constant_time {
($Value: ident, $Inner: ident) => {
impl ConstantTimeEq for $Value {
fn ct_eq(&self, other: &Self) -> Choice { self.0.ct_eq(&other.0) }
fn ct_eq(&self, other: &Self) -> Choice {
self.0.ct_eq(&other.0)
}
}
impl ConditionallySelectable for $Value {
@ -80,7 +80,7 @@ macro_rules! constant_time {
$Value($Inner::conditional_select(&a.0, &b.0, choice))
}
}
}
};
}
#[doc(hidden)]
@ -117,7 +117,7 @@ macro_rules! math_op {
self.0 = $function(self.0, other.0);
}
}
}
};
}
#[doc(hidden)]
@ -127,7 +127,7 @@ macro_rules! math {
math_op!($Value, $Value, Add, add, AddAssign, add_assign, $add);
math_op!($Value, $Value, Sub, sub, SubAssign, sub_assign, $sub);
math_op!($Value, $Factor, Mul, mul, MulAssign, mul_assign, $mul);
}
};
}
macro_rules! math_neg {
@ -136,9 +136,11 @@ macro_rules! math_neg {
impl Neg for $Value {
type Output = Self;
fn neg(self) -> Self::Output { Self(-self.0) }
fn neg(self) -> Self::Output {
Self(-self.0)
}
}
}
};
}
#[doc(hidden)]
@ -146,9 +148,11 @@ macro_rules! math_neg {
macro_rules! from_wrapper {
($wrapper: ident, $inner: ident, $uint: ident) => {
impl From<$uint> for $wrapper {
fn from(a: $uint) -> $wrapper { Self($inner::from(a)) }
fn from(a: $uint) -> $wrapper {
Self($inner::from(a))
}
}
}
};
}
#[doc(hidden)]
@ -159,7 +163,7 @@ macro_rules! from_uint {
from_wrapper!($wrapper, $inner, u16);
from_wrapper!($wrapper, $inner, u32);
from_wrapper!($wrapper, $inner, u64);
}
};
}
/// Wrapper around the dalek Scalar type
@ -191,17 +195,33 @@ impl Field for Scalar {
Self(DScalar::from_bytes_mod_order_wide(&r))
}
fn zero() -> Self { Self(DScalar::zero()) }
fn one() -> Self { Self(DScalar::one()) }
fn square(&self) -> Self { *self * self }
fn double(&self) -> Self { *self + self }
fn zero() -> Self {
Self(DScalar::zero())
}
fn one() -> Self {
Self(DScalar::one())
}
fn square(&self) -> Self {
*self * self
}
fn double(&self) -> Self {
*self + self
}
fn invert(&self) -> CtOption<Self> {
CtOption::new(Self(self.0.invert()), !self.is_zero())
}
fn sqrt(&self) -> CtOption<Self> { unimplemented!() }
fn is_zero(&self) -> Choice { self.0.ct_eq(&DScalar::zero()) }
fn cube(&self) -> Self { *self * self * self }
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self { unimplemented!() }
fn sqrt(&self) -> CtOption<Self> {
unimplemented!()
}
fn is_zero(&self) -> Choice {
self.0.ct_eq(&DScalar::zero())
}
fn cube(&self) -> Self {
*self * self * self
}
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self {
unimplemented!()
}
}
impl PrimeField for Scalar {
@ -213,12 +233,20 @@ impl PrimeField for Scalar {
// TODO: This unwrap_or isn't constant time, yet do we have an alternative?
CtOption::new(Scalar(scalar.unwrap_or(DScalar::zero())), choice(scalar.is_some()))
}
fn to_repr(&self) -> [u8; 32] { self.0.to_bytes() }
fn to_repr(&self) -> [u8; 32] {
self.0.to_bytes()
}
const S: u32 = 2;
fn is_odd(&self) -> Choice { unimplemented!() }
fn multiplicative_generator() -> Self { 2u64.into() }
fn root_of_unity() -> Self { unimplemented!() }
fn is_odd(&self) -> Choice {
unimplemented!()
}
fn multiplicative_generator() -> Self {
2u64.into()
}
fn root_of_unity() -> Self {
unimplemented!()
}
}
impl PrimeFieldBits for Scalar {
@ -260,21 +288,35 @@ macro_rules! dalek_group {
pub const $BASEPOINT_POINT: $Point = $Point(constants::$BASEPOINT_POINT);
impl Sum<$Point> for $Point {
fn sum<I: Iterator<Item = $Point>>(iter: I) -> $Point { Self($DPoint::sum(iter)) }
fn sum<I: Iterator<Item = $Point>>(iter: I) -> $Point {
Self($DPoint::sum(iter))
}
}
impl<'a> Sum<&'a $Point> for $Point {
fn sum<I: Iterator<Item = &'a $Point>>(iter: I) -> $Point { Self($DPoint::sum(iter)) }
fn sum<I: Iterator<Item = &'a $Point>>(iter: I) -> $Point {
Self($DPoint::sum(iter))
}
}
impl Group for $Point {
type Scalar = Scalar;
// Ideally, this would be cryptographically secure, yet that's not a bound on the trait
// k256 also does this
fn random(rng: impl RngCore) -> Self { &$BASEPOINT_TABLE * Scalar::random(rng) }
fn identity() -> Self { Self($DPoint::identity()) }
fn generator() -> Self { $BASEPOINT_POINT }
fn is_identity(&self) -> Choice { self.0.ct_eq(&$DPoint::identity()) }
fn double(&self) -> Self { *self + self }
fn random(rng: impl RngCore) -> Self {
&$BASEPOINT_TABLE * Scalar::random(rng)
}
fn identity() -> Self {
Self($DPoint::identity())
}
fn generator() -> Self {
$BASEPOINT_POINT
}
fn is_identity(&self) -> Choice {
self.0.ct_eq(&$DPoint::identity())
}
fn double(&self) -> Self {
*self + self
}
}
impl GroupEncoding for $Point {
@ -306,7 +348,9 @@ macro_rules! dalek_group {
impl Mul<Scalar> for &$Table {
type Output = $Point;
fn mul(self, b: Scalar) -> $Point { $Point(&b.0 * &self.0) }
fn mul(self, b: Scalar) -> $Point {
$Point(&b.0 * &self.0)
}
}
};
}
@ -315,12 +359,9 @@ dalek_group!(
EdwardsPoint,
DEdwardsPoint,
|point: DEdwardsPoint| point.is_torsion_free(),
EdwardsBasepointTable,
DEdwardsBasepointTable,
DCompressedEdwards,
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_TABLE
);
@ -329,12 +370,9 @@ dalek_group!(
RistrettoPoint,
DRistrettoPoint,
|_| true,
RistrettoBasepointTable,
DRistrettoBasepointTable,
DCompressedRistretto,
RISTRETTO_BASEPOINT_POINT,
RISTRETTO_BASEPOINT_TABLE
);

View file

@ -2,12 +2,16 @@ use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use group::{ff::{Field, PrimeFieldBits}, prime::PrimeGroup};
use group::{
ff::{Field, PrimeFieldBits},
prime::PrimeGroup,
};
use multiexp::BatchVerifier;
use crate::cross_group::{
Generators, DLEqError, scalar::{scalar_convert, mutual_scalar_from_bytes}
Generators, DLEqError,
scalar::{scalar_convert, mutual_scalar_from_bytes},
};
#[cfg(feature = "serialize")]
@ -26,7 +30,7 @@ pub(crate) enum Re<G0: PrimeGroup, G1: PrimeGroup> {
// present here, which is then hashed for each of the two challenges, remaining unbiased/unique
// while maintaining the bandwidth savings, yet also while adding 252 hashes for
// Secp256k1/Ed25519
e(G0::Scalar)
e(G0::Scalar),
}
impl<G0: PrimeGroup, G1: PrimeGroup> Re<G0, G1> {
@ -44,14 +48,14 @@ impl<G0: PrimeGroup, G1: PrimeGroup> Re<G0, G1> {
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) struct Aos<G0: PrimeGroup, G1: PrimeGroup, const RING_LEN: usize> {
Re_0: Re<G0, G1>,
s: [(G0::Scalar, G1::Scalar); RING_LEN]
s: [(G0::Scalar, G1::Scalar); RING_LEN],
}
impl<
G0: PrimeGroup,
G1: PrimeGroup,
const RING_LEN: usize
> Aos<G0, G1, RING_LEN> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
impl<G0: PrimeGroup, G1: PrimeGroup, const RING_LEN: usize> Aos<G0, G1, RING_LEN>
where
G0::Scalar: PrimeFieldBits,
G1::Scalar: PrimeFieldBits,
{
#[allow(non_snake_case)]
fn nonces<T: Transcript>(mut transcript: T, nonces: (G0, G1)) -> (G0::Scalar, G1::Scalar) {
transcript.domain_separate(b"aos_membership_proof");
@ -66,7 +70,7 @@ impl<
generators: (Generators<G0>, Generators<G1>),
s: (G0::Scalar, G1::Scalar),
A: (G0, G1),
e: (G0::Scalar, G1::Scalar)
e: (G0::Scalar, G1::Scalar),
) -> (G0, G1) {
(((generators.0.alt * s.0) - (A.0 * e.0)), ((generators.1.alt * s.1) - (A.1 * e.1)))
}
@ -76,7 +80,7 @@ impl<
generators: (Generators<G0>, Generators<G1>),
s: (G0::Scalar, G1::Scalar),
A: (G0, G1),
e: (G0::Scalar, G1::Scalar)
e: (G0::Scalar, G1::Scalar),
) -> (Vec<(G0::Scalar, G0)>, Vec<(G1::Scalar, G1)>) {
(vec![(-s.0, generators.0.alt), (e.0, A.0)], vec![(-s.1, generators.1.alt), (e.1, A.1)])
}
@ -87,7 +91,7 @@ impl<
generators: (Generators<G0>, Generators<G1>),
s: (G0::Scalar, G1::Scalar),
A: (G0, G1),
e: (G0::Scalar, G1::Scalar)
e: (G0::Scalar, G1::Scalar),
) -> (G0::Scalar, G1::Scalar) {
Self::nonces(transcript, Self::R(generators, s, A, e))
}
@ -100,7 +104,7 @@ impl<
ring: &[(G0, G1)],
actual: usize,
blinding_key: (G0::Scalar, G1::Scalar),
mut Re_0: Re<G0, G1>
mut Re_0: Re<G0, G1>,
) -> Self {
// While it is possible to use larger values, it's not efficient to do so
// 2 + 2 == 2^2, yet 2 + 2 + 2 < 2^3
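    // Hedged expansion of the arithmetic above, not in the commit: proving b bits per ring
    // takes a ring of 2^b members, a per-bit cost of 2^b / b. That's 2 for b = 1 and still
    // 2 for b = 2 (4 / 2), while b = 3 costs 8 / 3 > 2, so two-bit groups halve the ring
    // count at no per-bit cost and anything larger is a loss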
@ -119,8 +123,11 @@ impl<
let e = Self::nonces(transcript.clone(), R);
if i == 0 {
match Re_0 {
Re::R(ref mut R0_0, ref mut R1_0) => { *R0_0 = R.0; *R1_0 = R.1 },
Re::e(ref mut e_0) => *e_0 = e.0
Re::R(ref mut R0_0, ref mut R1_0) => {
*R0_0 = R.0;
*R1_0 = R.1
}
Re::e(ref mut e_0) => *e_0 = e.0,
}
}
@ -147,7 +154,7 @@ impl<
transcript: T,
generators: (Generators<G0>, Generators<G1>),
batch: &mut (BatchVerifier<(), G0>, BatchVerifier<(), G1>),
ring: &[(G0, G1)]
ring: &[(G0, G1)],
) -> Result<(), DLEqError> {
debug_assert!((RING_LEN == 2) || (RING_LEN == 4));
debug_assert_eq!(RING_LEN, ring.len());
@ -160,25 +167,25 @@ impl<
e = Self::R_nonces(transcript.clone(), generators, self.s[i], ring[i], e);
}
let mut statements = Self::R_batch(
generators,
*self.s.last().unwrap(),
*ring.last().unwrap(),
e
);
let mut statements =
Self::R_batch(generators, *self.s.last().unwrap(), *ring.last().unwrap(), e);
statements.0.push((G0::Scalar::one(), R0_0));
statements.1.push((G1::Scalar::one(), R1_0));
batch.0.queue(&mut *rng, (), statements.0);
batch.1.queue(&mut *rng, (), statements.1);
},
}
Re::e(e_0) => {
let e_0 = (e_0, scalar_convert(e_0).ok_or(DLEqError::InvalidChallenge)?);
let mut e = None;
for i in 0 .. RING_LEN {
e = Some(
Self::R_nonces(transcript.clone(), generators, self.s[i], ring[i], e.unwrap_or(e_0))
);
e = Some(Self::R_nonces(
transcript.clone(),
generators,
self.s[i],
ring[i],
e.unwrap_or(e_0),
));
}
// Will panic if the above loop is never run somehow
@ -199,8 +206,8 @@ impl<
Re::R(R0, R1) => {
w.write_all(R0.to_bytes().as_ref())?;
w.write_all(R1.to_bytes().as_ref())?;
},
Re::e(e) => w.write_all(e.to_repr().as_ref())?
}
Re::e(e) => w.write_all(e.to_repr().as_ref())?,
}
for i in 0 .. RING_LEN {
@ -215,8 +222,11 @@ impl<
#[cfg(feature = "serialize")]
pub(crate) fn deserialize<R: Read>(r: &mut R, mut Re_0: Re<G0, G1>) -> std::io::Result<Self> {
match Re_0 {
Re::R(ref mut R0, ref mut R1) => { *R0 = read_point(r)?; *R1 = read_point(r)? },
Re::e(ref mut e) => *e = read_scalar(r)?
Re::R(ref mut R0, ref mut R1) => {
*R0 = read_point(r)?;
*R1 = read_point(r)?
}
Re::e(ref mut e) => *e = read_scalar(r)?,
}
let mut s = [(G0::Scalar::zero(), G1::Scalar::zero()); RING_LEN];

View file

@ -5,7 +5,10 @@ use transcript::Transcript;
use group::{ff::PrimeFieldBits, prime::PrimeGroup};
use multiexp::BatchVerifier;
use crate::cross_group::{Generators, DLEqError, aos::{Re, Aos}};
use crate::cross_group::{
Generators, DLEqError,
aos::{Re, Aos},
};
#[cfg(feature = "serialize")]
use std::io::{Read, Write};
@ -16,7 +19,7 @@ pub(crate) enum BitSignature {
ClassicLinear,
ConciseLinear,
EfficientLinear,
CompromiseLinear
CompromiseLinear,
}
impl BitSignature {
@ -25,7 +28,7 @@ impl BitSignature {
BitSignature::ClassicLinear => 0,
BitSignature::ConciseLinear => 1,
BitSignature::EfficientLinear => 2,
BitSignature::CompromiseLinear => 3
BitSignature::CompromiseLinear => 3,
}
}
@ -35,7 +38,7 @@ impl BitSignature {
1 => BitSignature::ConciseLinear,
2 => BitSignature::EfficientLinear,
3 => BitSignature::CompromiseLinear,
_ => panic!("Unknown algorithm")
_ => panic!("Unknown algorithm"),
}
}
@ -44,7 +47,7 @@ impl BitSignature {
BitSignature::ClassicLinear => 1,
BitSignature::ConciseLinear => 2,
BitSignature::EfficientLinear => 1,
BitSignature::CompromiseLinear => 2
BitSignature::CompromiseLinear => 2,
}
}
@ -57,28 +60,23 @@ impl BitSignature {
BitSignature::ClassicLinear => Re::e_default(),
BitSignature::ConciseLinear => Re::e_default(),
BitSignature::EfficientLinear => Re::R_default(),
BitSignature::CompromiseLinear => Re::R_default()
BitSignature::CompromiseLinear => Re::R_default(),
}
}
}
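// A consolidated editorial summary of the variants above, not new logic:
//   ClassicLinear    => 1 bit per ring, challenge form Re::e
//   ConciseLinear    => 2 bits per ring, challenge form Re::e
//   EfficientLinear  => 1 bit per ring, commitment form Re::R
//   CompromiseLinear => 2 bits per ring, commitment form Re::R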
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) struct Bits<
G0: PrimeGroup,
G1: PrimeGroup,
const SIGNATURE: u8,
const RING_LEN: usize
> {
pub(crate) struct Bits<G0: PrimeGroup, G1: PrimeGroup, const SIGNATURE: u8, const RING_LEN: usize> {
pub(crate) commitments: (G0, G1),
signature: Aos<G0, G1, RING_LEN>
signature: Aos<G0, G1, RING_LEN>,
}
impl<
G0: PrimeGroup,
G1: PrimeGroup,
const SIGNATURE: u8,
const RING_LEN: usize
> Bits<G0, G1, SIGNATURE, RING_LEN> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
impl<G0: PrimeGroup, G1: PrimeGroup, const SIGNATURE: u8, const RING_LEN: usize>
Bits<G0, G1, SIGNATURE, RING_LEN>
where
G0::Scalar: PrimeFieldBits,
G1::Scalar: PrimeFieldBits,
{
fn transcript<T: Transcript>(transcript: &mut T, i: usize, commitments: (G0, G1)) {
transcript.domain_separate(b"bits");
transcript.append_message(b"group", &u16::try_from(i).unwrap().to_le_bytes());
@ -88,7 +86,7 @@ impl<
fn ring(pow_2: (G0, G1), commitments: (G0, G1)) -> Vec<(G0, G1)> {
let mut res = vec![commitments; RING_LEN];
for i in 1 .. RING_LEN {
for i in 1 .. RING_LEN {
res[i] = (res[i - 1].0 - pow_2.0, res[i - 1].1 - pow_2.1);
}
res
@ -108,12 +106,10 @@ impl<
i: usize,
pow_2: &mut (G0, G1),
bits: u8,
blinding_key: (G0::Scalar, G1::Scalar)
blinding_key: (G0::Scalar, G1::Scalar),
) -> Self {
let mut commitments = (
(generators.0.alt * blinding_key.0),
(generators.1.alt * blinding_key.1)
);
let mut commitments =
((generators.0.alt * blinding_key.0), (generators.1.alt * blinding_key.1));
commitments.0 += pow_2.0 * G0::Scalar::from(bits.into());
commitments.1 += pow_2.1 * G1::Scalar::from(bits.into());
@ -126,7 +122,7 @@ impl<
&Self::ring(*pow_2, commitments),
usize::from(bits),
blinding_key,
BitSignature::from(SIGNATURE).aos_form()
BitSignature::from(SIGNATURE).aos_form(),
);
Self::shift(pow_2);
@ -140,7 +136,7 @@ impl<
generators: (Generators<G0>, Generators<G1>),
batch: &mut (BatchVerifier<(), G0>, BatchVerifier<(), G1>),
i: usize,
pow_2: &mut (G0, G1)
pow_2: &mut (G0, G1),
) -> Result<(), DLEqError> {
Self::transcript(transcript, i, self.commitments);
@ -149,7 +145,7 @@ impl<
transcript.clone(),
generators,
batch,
&Self::ring(*pow_2, self.commitments)
&Self::ring(*pow_2, self.commitments),
)?;
Self::shift(pow_2);
@ -165,11 +161,9 @@ impl<
#[cfg(feature = "serialize")]
pub(crate) fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Self> {
Ok(
Bits {
commitments: (read_point(r)?, read_point(r)?),
signature: Aos::deserialize(r, BitSignature::from(SIGNATURE).aos_form())?
}
)
Ok(Bits {
commitments: (read_point(r)?, read_point(r)?),
signature: Aos::deserialize(r, BitSignature::from(SIGNATURE).aos_form())?,
})
}
}

View file

@ -5,7 +5,10 @@ use digest::Digest;
use transcript::Transcript;
use group::{ff::{Field, PrimeField, PrimeFieldBits}, prime::PrimeGroup};
use group::{
ff::{Field, PrimeField, PrimeFieldBits},
prime::PrimeGroup,
};
use multiexp::BatchVerifier;
pub mod scalar;
@ -36,7 +39,7 @@ pub(crate) fn read_point<R: Read, G: PrimeGroup>(r: &mut R) -> std::io::Result<G
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Generators<G: PrimeGroup> {
pub primary: G,
pub alt: G
pub alt: G,
}
impl<G: PrimeGroup> Generators<G> {
@ -60,7 +63,7 @@ pub enum DLEqError {
#[error("invalid challenge")]
InvalidChallenge,
#[error("invalid proof")]
InvalidProof
InvalidProof,
}
// This should never be directly instantiated and uses a u8 to represent internal values
@ -74,11 +77,14 @@ pub struct __DLEqProof<
G1: PrimeGroup,
const SIGNATURE: u8,
const RING_LEN: usize,
const REMAINDER_RING_LEN: usize
> where G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
const REMAINDER_RING_LEN: usize,
> where
G0::Scalar: PrimeFieldBits,
G1::Scalar: PrimeFieldBits,
{
bits: Vec<Bits<G0, G1, SIGNATURE, RING_LEN>>,
remainder: Option<Bits<G0, G1, SIGNATURE, REMAINDER_RING_LEN>>,
poks: (SchnorrPoK<G0>, SchnorrPoK<G1>)
poks: (SchnorrPoK<G0>, SchnorrPoK<G1>),
}
macro_rules! dleq {
@ -90,9 +96,15 @@ macro_rules! dleq {
{ $signature.ring_len() },
// There may not be a remainder, yet if there is one, it'll be just one bit
// A ring for one bit has a RING_LEN of 2
{ if $remainder { 2 } else { 0 } }
{
if $remainder {
2
} else {
0
}
},
>;
}
};
}
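// Hedged expansion example, assuming ring_len() returns 2^bits(): the concise variant
// would expand to roughly
//   pub type ConciseLinearDLEq<G0, G1> = __DLEqProof<G0, G1, 1, 4, 2>;
// signature id 1, four-member rings for its two-bit groups, and a two-member ring
// reserved for a potential one-bit remainder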
// Proves one bit at a time with the signature form (e, s), as originally described in MRL-0010.
@ -119,18 +131,20 @@ dleq!(EfficientLinearDLEq, BitSignature::EfficientLinear, false);
dleq!(CompromiseLinearDLEq, BitSignature::CompromiseLinear, true);
impl<
G0: PrimeGroup,
G1: PrimeGroup,
const SIGNATURE: u8,
const RING_LEN: usize,
const REMAINDER_RING_LEN: usize
> __DLEqProof<G0, G1, SIGNATURE, RING_LEN, REMAINDER_RING_LEN> where
G0::Scalar: PrimeFieldBits, G1::Scalar: PrimeFieldBits {
G0: PrimeGroup,
G1: PrimeGroup,
const SIGNATURE: u8,
const RING_LEN: usize,
const REMAINDER_RING_LEN: usize,
> __DLEqProof<G0, G1, SIGNATURE, RING_LEN, REMAINDER_RING_LEN>
where
G0::Scalar: PrimeFieldBits,
G1::Scalar: PrimeFieldBits,
{
pub(crate) fn transcript<T: Transcript>(
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
keys: (G0, G1)
keys: (G0, G1),
) {
transcript.domain_separate(b"cross_group_dleq");
generators.0.transcript(transcript);
@ -143,13 +157,9 @@ impl<
pub(crate) fn blinding_key<R: RngCore + CryptoRng, F: PrimeField>(
rng: &mut R,
total: &mut F,
last: bool
last: bool,
) -> F {
let blinding_key = if last {
-*total
} else {
F::random(&mut *rng)
};
let blinding_key = if last { -*total } else { F::random(&mut *rng) };
*total += blinding_key;
blinding_key
}
@ -157,7 +167,7 @@ impl<
fn reconstruct_keys(&self) -> (G0, G1) {
let mut res = (
self.bits.iter().map(|bit| bit.commitments.0).sum::<G0>(),
self.bits.iter().map(|bit| bit.commitments.1).sum::<G1>()
self.bits.iter().map(|bit| bit.commitments.1).sum::<G1>(),
);
if let Some(bit) = &self.remainder {
@ -172,24 +182,24 @@ impl<
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
f: (G0::Scalar, G1::Scalar)
f: (G0::Scalar, G1::Scalar),
) -> (Self, (G0::Scalar, G1::Scalar)) {
Self::transcript(
transcript,
generators,
((generators.0.primary * f.0), (generators.1.primary * f.1))
((generators.0.primary * f.0), (generators.1.primary * f.1)),
);
let poks = (
SchnorrPoK::<G0>::prove(rng, transcript, generators.0.primary, f.0),
SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1)
SchnorrPoK::<G1>::prove(rng, transcript, generators.1.primary, f.1),
);
let mut blinding_key_total = (G0::Scalar::zero(), G1::Scalar::zero());
let mut blinding_key = |rng: &mut R, last| {
let blinding_key = (
Self::blinding_key(&mut *rng, &mut blinding_key_total.0, last),
Self::blinding_key(&mut *rng, &mut blinding_key_total.1, last)
Self::blinding_key(&mut *rng, &mut blinding_key_total.1, last),
);
if last {
debug_assert_eq!(blinding_key_total.0, G0::Scalar::zero());
@ -219,17 +229,15 @@ impl<
if (i % bits_per_group) == (bits_per_group - 1) {
let last = i == (capacity - 1);
let blinding_key = blinding_key(&mut *rng, last);
bits.push(
Bits::prove(
&mut *rng,
transcript,
generators,
i / bits_per_group,
&mut pow_2,
these_bits,
blinding_key
)
);
bits.push(Bits::prove(
&mut *rng,
transcript,
generators,
i / bits_per_group,
&mut pow_2,
these_bits,
blinding_key,
));
these_bits = 0;
}
}
@ -238,17 +246,15 @@ impl<
let mut remainder = None;
if capacity != ((capacity / bits_per_group) * bits_per_group) {
let blinding_key = blinding_key(&mut *rng, true);
remainder = Some(
Bits::prove(
&mut *rng,
transcript,
generators,
capacity / bits_per_group,
&mut pow_2,
these_bits,
blinding_key
)
);
remainder = Some(Bits::prove(
&mut *rng,
transcript,
generators,
capacity / bits_per_group,
&mut pow_2,
these_bits,
blinding_key,
));
}
let proof = __DLEqProof { bits, remainder, poks };
@ -270,13 +276,13 @@ impl<
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
digest: D
digest: D,
) -> (Self, (G0::Scalar, G1::Scalar)) {
Self::prove_internal(
rng,
transcript,
generators,
mutual_scalar_from_bytes(digest.finalize().as_ref())
mutual_scalar_from_bytes(digest.finalize().as_ref()),
)
}
@ -287,7 +293,7 @@ impl<
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>),
f0: G0::Scalar
f0: G0::Scalar,
) -> Option<(Self, (G0::Scalar, G1::Scalar))> {
scalar_convert(f0).map(|f1| Self::prove_internal(rng, transcript, generators, (f0, f1)))
}
@ -297,19 +303,18 @@ impl<
&self,
rng: &mut R,
transcript: &mut T,
generators: (Generators<G0>, Generators<G1>)
generators: (Generators<G0>, Generators<G1>),
) -> Result<(G0, G1), DLEqError> {
let capacity = usize::try_from(
G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)
).unwrap();
let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
let bits_per_group = BitSignature::from(SIGNATURE).bits();
let has_remainder = (capacity % bits_per_group) != 0;
// These shouldn't be possible, as locally created and deserialized proofs should be properly
// formed in these regards, yet it doesn't hurt to check and would be problematic if true
if (self.bits.len() != (capacity / bits_per_group)) || (
(self.remainder.is_none() && has_remainder) || (self.remainder.is_some() && !has_remainder)
) {
if (self.bits.len() != (capacity / bits_per_group)) ||
((self.remainder.is_none() && has_remainder) ||
(self.remainder.is_some() && !has_remainder))
{
return Err(DLEqError::InvalidProofLength);
}
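    // Hedged worked example, not from the commit: were capacity 255 with two-bit groups,
    // bits.len() must be 127 (covering 254 bits) and a remainder ring must carry the final
    // bit; with one-bit groups, 255 % 1 == 0, so any Some remainder is rejected instead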
@ -320,7 +325,7 @@ impl<
BitSignature::ClassicLinear => 3,
BitSignature::ConciseLinear => 3,
BitSignature::EfficientLinear => (self.bits.len() + 1) * 3,
BitSignature::CompromiseLinear => (self.bits.len() + 1) * 3
BitSignature::CompromiseLinear => (self.bits.len() + 1) * 3,
};
let mut batch = (BatchVerifier::new(batch_capacity), BatchVerifier::new(batch_capacity));
@ -356,9 +361,7 @@ impl<
#[cfg(feature = "serialize")]
pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Self> {
let capacity = usize::try_from(
G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)
).unwrap();
let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
let bits_per_group = BitSignature::from(SIGNATURE).bits();
let mut bits = Vec::with_capacity(capacity / bits_per_group);
@ -371,12 +374,10 @@ impl<
remainder = Some(Bits::deserialize(r)?);
}
Ok(
__DLEqProof {
bits,
remainder,
poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?)
}
)
Ok(__DLEqProof {
bits,
remainder,
poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?),
})
}
}

View file

@ -2,7 +2,10 @@ use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use group::{ff::{Field, PrimeFieldBits}, prime::PrimeGroup};
use group::{
ff::{Field, PrimeFieldBits},
prime::PrimeGroup,
};
use multiexp::BatchVerifier;
use crate::challenge;
@ -18,10 +21,13 @@ use crate::{read_scalar, cross_group::read_point};
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) struct SchnorrPoK<G: PrimeGroup> {
R: G,
s: G::Scalar
s: G::Scalar,
}
impl<G: PrimeGroup> SchnorrPoK<G> where G::Scalar: PrimeFieldBits {
impl<G: PrimeGroup> SchnorrPoK<G>
where
G::Scalar: PrimeFieldBits,
{
// Not hram due to the lack of m
#[allow(non_snake_case)]
fn hra<T: Transcript>(transcript: &mut T, generator: G, R: G, A: G) -> G::Scalar {
@ -36,14 +42,14 @@ impl<G: PrimeGroup> SchnorrPoK<G> where G::Scalar: PrimeFieldBits {
rng: &mut R,
transcript: &mut T,
generator: G,
private_key: G::Scalar
private_key: G::Scalar,
) -> SchnorrPoK<G> {
let nonce = G::Scalar::random(rng);
#[allow(non_snake_case)]
let R = generator * nonce;
SchnorrPoK {
R,
s: nonce + (private_key * SchnorrPoK::hra(transcript, generator, R, generator * private_key))
s: nonce + (private_key * SchnorrPoK::hra(transcript, generator, R, generator * private_key)),
}
}
@ -53,7 +59,7 @@ impl<G: PrimeGroup> SchnorrPoK<G> where G::Scalar: PrimeFieldBits {
transcript: &mut T,
generator: G,
public_key: G,
batch: &mut BatchVerifier<(), G>
batch: &mut BatchVerifier<(), G>,
) {
batch.queue(
rng,
@ -61,8 +67,8 @@ impl<G: PrimeGroup> SchnorrPoK<G> where G::Scalar: PrimeFieldBits {
[
(-self.s, generator),
(G::Scalar::one(), self.R),
(Self::hra(transcript, generator, self.R, public_key), public_key)
]
(Self::hra(transcript, generator, self.R, public_key), public_key),
],
);
}
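  // Hedged restatement of the statement queued above: a Schnorr PoK verifies as
  //   s * G == R + e * A
  // which rearranges to (-s) * G + 1 * R + e * A == identity, exactly the triple handed
  // to the batch verifier so many proofs share one random-linear-combination check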

View file

@ -55,13 +55,13 @@ fn read_scalar<R: Read, F: PrimeField>(r: &mut R) -> io::Result<F> {
#[derive(Debug)]
pub enum DLEqError {
InvalidProof
InvalidProof,
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DLEqProof<G: PrimeGroup> {
c: G::Scalar,
s: G::Scalar
s: G::Scalar,
}
#[allow(non_snake_case)]
@ -76,7 +76,7 @@ impl<G: PrimeGroup> DLEqProof<G> {
rng: &mut R,
transcript: &mut T,
generators: &[G],
scalar: G::Scalar
scalar: G::Scalar,
) -> DLEqProof<G> {
let r = G::Scalar::random(rng);
@ -95,7 +95,7 @@ impl<G: PrimeGroup> DLEqProof<G> {
&self,
transcript: &mut T,
generators: &[G],
points: &[G]
points: &[G],
) -> Result<(), DLEqError> {
if generators.len() != points.len() {
Err(DLEqError::InvalidProof)?;
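
As a usage sketch (assuming the k256 ProjectivePoint and RecommendedTranscript types this commit's tests already exercise), proving and verifying that a single scalar underlies multiple points looks like:

use rand_core::OsRng;
use transcript::{Transcript, RecommendedTranscript};
use group::{ff::Field, Group};
use k256::ProjectivePoint;
use dleq::DLEqProof;

fn dleq_example() {
  // One scalar, two generators; the proof ties scalar · generators[i] to points[i]
  let generators = [ProjectivePoint::GENERATOR, ProjectivePoint::GENERATOR.double()];
  let scalar = <ProjectivePoint as Group>::Scalar::random(&mut OsRng);
  let points = [generators[0] * scalar, generators[1] * scalar];

  let transcript = RecommendedTranscript::new(b"DLEq Example");
  let proof = DLEqProof::prove(&mut OsRng, &mut transcript.clone(), &generators, scalar);
  proof.verify(&mut transcript.clone(), &generators, &points).unwrap();
}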


@ -6,7 +6,7 @@ use multiexp::BatchVerifier;
use crate::{
cross_group::aos::{Re, Aos},
tests::cross_group::{G0, G1, transcript, generators}
tests::cross_group::{G0, G1, transcript, generators},
};
#[allow(non_snake_case)]
@ -26,10 +26,8 @@ fn test_aos<const RING_LEN: usize>(default: Re<G0, G1>) {
#[allow(deprecated)]
let mut ring = [(G0::identity(), G1::identity()); RING_LEN];
for i in 0 .. RING_LEN {
ring_keys[i] = (
<G0 as Group>::Scalar::random(&mut OsRng),
<G1 as Group>::Scalar::random(&mut OsRng)
);
ring_keys[i] =
(<G0 as Group>::Scalar::random(&mut OsRng), <G1 as Group>::Scalar::random(&mut OsRng));
ring[i] = (generators.0.alt * ring_keys[i].0, generators.1.alt * ring_keys[i].1);
}
@ -41,7 +39,7 @@ fn test_aos<const RING_LEN: usize>(default: Re<G0, G1>) {
&ring,
actual,
ring_keys[actual],
default.clone()
default.clone(),
);
let mut batch = (BatchVerifier::new(0), BatchVerifier::new(0));


@ -13,9 +13,9 @@ use transcript::{Transcript, RecommendedTranscript};
use crate::{
cross_group::{
scalar::mutual_scalar_from_bytes,
Generators, ClassicLinearDLEq, EfficientLinearDLEq, ConciseLinearDLEq, CompromiseLinearDLEq
}
scalar::mutual_scalar_from_bytes, Generators, ClassicLinearDLEq, EfficientLinearDLEq,
ConciseLinearDLEq, CompromiseLinearDLEq,
},
};
mod scalar;
@ -34,16 +34,17 @@ pub(crate) fn generators() -> (Generators<G0>, Generators<G1>) {
Generators::new(
ProjectivePoint::GENERATOR,
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
).unwrap()
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()),
)
.unwrap(),
),
Generators::new(
EdwardsPoint::generator(),
EdwardsPoint::from_bytes(
&hex!("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94")
).unwrap()
)
EdwardsPoint::from_bytes(&hex!(
"8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94"
))
.unwrap(),
),
)
}
@ -60,7 +61,7 @@ macro_rules! verify_and_deserialize {
let deserialized = <$type>::deserialize(&mut std::io::Cursor::new(&buf)).unwrap();
assert_eq!($proof, deserialized);
}
}
};
}
macro_rules! test_dleq {
@ -110,7 +111,7 @@ macro_rules! test_dleq {
&mut OsRng,
&mut transcript(),
generators,
Blake2b512::new().chain_update(seed)
Blake2b512::new().chain_update(seed),
)
} else {
let mut key;
@ -121,14 +122,14 @@ macro_rules! test_dleq {
res.is_none()
} {}
let res = res.unwrap();
assert_eq!(key, res.1.0);
assert_eq!(key, res.1 .0);
res
};
verify_and_deserialize!($type::<G0, G1>, proof, generators, keys);
}
}
}
};
}
test_dleq!("ClassicLinear", benchmark_classic_linear, test_classic_linear, ClassicLinearDLEq);
@ -155,12 +156,8 @@ fn test_rejection_sampling() {
assert!(
// Either would work
EfficientLinearDLEq::prove_without_bias(
&mut OsRng,
&mut transcript(),
generators(),
pow_2
).is_none()
EfficientLinearDLEq::prove_without_bias(&mut OsRng, &mut transcript(), generators(), pow_2)
.is_none()
);
}
@ -174,12 +171,9 @@ fn test_remainder() {
assert_eq!(keys.0 + Scalar::one(), Scalar::from(2u64).pow_vartime(&[255]));
assert_eq!(keys.0, keys.1);
let (proof, res) = ConciseLinearDLEq::prove_without_bias(
&mut OsRng,
&mut transcript(),
generators,
keys.0
).unwrap();
let (proof, res) =
ConciseLinearDLEq::prove_without_bias(&mut OsRng, &mut transcript(), generators, keys.0)
.unwrap();
assert_eq!(keys, res);
verify_and_deserialize!(


@ -1,28 +1,29 @@
use rand_core::OsRng;
use group::{ff::{Field, PrimeFieldBits}, prime::PrimeGroup};
use group::{
ff::{Field, PrimeFieldBits},
prime::PrimeGroup,
};
use multiexp::BatchVerifier;
use transcript::{Transcript, RecommendedTranscript};
use crate::cross_group::schnorr::SchnorrPoK;
fn test_schnorr<G: PrimeGroup>() where G::Scalar: PrimeFieldBits {
fn test_schnorr<G: PrimeGroup>()
where
G::Scalar: PrimeFieldBits,
{
let private = G::Scalar::random(&mut OsRng);
let transcript = RecommendedTranscript::new(b"Schnorr Test");
let mut batch = BatchVerifier::new(3);
SchnorrPoK::prove(
&mut OsRng,
&mut transcript.clone(),
G::generator(),
private
).verify(
SchnorrPoK::prove(&mut OsRng, &mut transcript.clone(), G::generator(), private).verify(
&mut OsRng,
&mut transcript.clone(),
G::generator(),
G::generator() * private,
&mut batch
&mut batch,
);
assert!(batch.verify_vartime());
}


@ -20,18 +20,22 @@ fn test_dleq() {
let generators = [
ProjectivePoint::GENERATOR,
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into())
).unwrap(),
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0").into()),
)
.unwrap(),
// Just an increment of the last byte from the previous, where the previous two are valid
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac4").into())
).unwrap(),
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac4").into()),
)
.unwrap(),
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803aca").into())
).unwrap(),
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803aca").into()),
)
.unwrap(),
ProjectivePoint::from_bytes(
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into())
).unwrap()
&(hex!("0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803acb").into()),
)
.unwrap(),
];
for i in 0 .. 5 {
@ -48,9 +52,8 @@ fn test_dleq() {
{
let mut buf = vec![];
proof.serialize(&mut buf).unwrap();
let deserialized = DLEqProof::<ProjectivePoint>::deserialize(
&mut std::io::Cursor::new(&buf)
).unwrap();
let deserialized =
DLEqProof::<ProjectivePoint>::deserialize(&mut std::io::Cursor::new(&buf)).unwrap();
assert_eq!(proof, deserialized);
}
}


@ -55,12 +55,7 @@ pub trait Algorithm<C: Curve>: Clone {
/// Verify a specific share given as a response. Used to determine blame if signature
/// verification fails
#[must_use]
fn verify_share(
&self,
verification_share: C::G,
nonces: &[Vec<C::G>],
share: C::F,
) -> bool;
fn verify_share(&self, verification_share: C::G, nonces: &[Vec<C::G>], share: C::F) -> bool;
}
// Transcript which will create an IETF compliant serialization for the binding factor
@ -88,7 +83,6 @@ impl Transcript for IetfTranscript {
}
}
pub trait Hram<C: Curve>: Clone {
/// HRAM function to generate a challenge
/// H2 from the IETF draft despite having a different argument set (not pre-formatted)
@ -105,11 +99,7 @@ pub struct Schnorr<C: Curve, H: Hram<C>> {
impl<C: Curve, H: Hram<C>> Schnorr<C, H> {
pub fn new() -> Schnorr<C, H> {
Schnorr {
transcript: IetfTranscript(vec![]),
c: None,
_hram: PhantomData
}
Schnorr { transcript: IetfTranscript(vec![]), c: None, _hram: PhantomData }
}
}
@ -166,16 +156,11 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
}
#[must_use]
fn verify_share(
&self,
verification_share: C::G,
nonces: &[Vec<C::G>],
share: C::F,
) -> bool {
fn verify_share(&self, verification_share: C::G, nonces: &[Vec<C::G>], share: C::F) -> bool {
schnorr::verify::<C>(
verification_share,
self.c.unwrap(),
&SchnorrSignature { R: nonces[0][0], s: share}
&SchnorrSignature { R: nonces[0][0], s: share },
)
}
}


@ -63,7 +63,7 @@ macro_rules! dalek_curve {
$Curve::hash_to_F($chal, &[&R.compress().to_bytes(), &A.compress().to_bytes(), m].concat())
}
}
}
};
}
#[cfg(any(test, feature = "ristretto"))]


@ -6,7 +6,10 @@ use sha2::{digest::Update, Digest, Sha256};
use group::{ff::Field, GroupEncoding};
use elliptic_curve::{bigint::{Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}};
use elliptic_curve::{
bigint::{Encoding, U384},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};
use crate::{curve::Curve, algorithm::Hram};
@ -36,12 +39,7 @@ macro_rules! kp_curve {
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
(&Sha256::new()
.chain($CONTEXT)
.chain(b"digest")
.chain(msg)
.finalize()
).to_vec()
(&Sha256::new().chain($CONTEXT).chain(b"digest").chain(msg).finalize()).to_vec()
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
@ -60,19 +58,17 @@ macro_rules! kp_curve {
let mut modulus = vec![0; 16];
modulus.extend((Self::F::zero() - Self::F::one()).to_bytes());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
Self::read_F(
&mut Cursor::new(
&U384::from_be_slice(&{
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(
&[msg],
dst,
48
).unwrap().fill_bytes(&mut bytes);
bytes
}).reduce(&modulus).unwrap().to_be_bytes()[16 ..]
)
).unwrap()
Self::read_F(&mut Cursor::new(
&U384::from_be_slice(&{
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
})
.reduce(&modulus)
.unwrap()
.to_be_bytes()[16 ..],
))
.unwrap()
}
}
@ -83,27 +79,15 @@ macro_rules! kp_curve {
fn hram(R: &$lib::ProjectivePoint, A: &$lib::ProjectivePoint, m: &[u8]) -> $lib::Scalar {
$Curve::hash_to_F(
&[$CONTEXT as &[u8], b"chal"].concat(),
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat()
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
)
}
}
}
};
}
#[cfg(feature = "p256")]
kp_curve!(
p256,
P256,
IetfP256Hram,
b"P-256",
b"FROST-P256-SHA256-v5"
);
kp_curve!(p256, P256, IetfP256Hram, b"P-256", b"FROST-P256-SHA256-v5");
#[cfg(feature = "secp256k1")]
kp_curve!(
k256,
Secp256k1,
NonIetfSecp256k1Hram,
b"secp256k1",
b"FROST-secp256k1-SHA256-v7"
);
kp_curve!(k256, Secp256k1, NonIetfSecp256k1Hram, b"secp256k1", b"FROST-secp256k1-SHA256-v7");
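
The hash_to_F above expands the message to 48 bytes before reducing modulo the group order: the ~128 bits of slack over the 256-bit field leave the reduced scalar statistically close to uniform. A standalone sketch of that wide-reduction step (an illustration, not part of this commit), using the same elliptic_curve bigint types:

use elliptic_curve::bigint::{Encoding, U384};

// Interpret 48 uniform bytes as a 384-bit integer and reduce it modulo a
// 256-bit modulus; the surplus 128 bits make the modular bias negligible
fn wide_reduce(bytes: &[u8; 48], modulus: &U384) -> [u8; 32] {
  let mut res = [0; 32];
  res.copy_from_slice(&U384::from_be_slice(bytes).reduce(modulus).unwrap().to_be_bytes()[16 ..]);
  res
}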


@ -102,9 +102,8 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug {
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
r.read_exact(encoding.as_mut()).map_err(|_| CurveError::InvalidPoint)?;
let point = Option::<Self::G>::from(
Self::G::from_bytes(&encoding)
).ok_or(CurveError::InvalidPoint)?;
let point =
Option::<Self::G>::from(Self::G::from_bytes(&encoding)).ok_or(CurveError::InvalidPoint)?;
// Ban the identity, per the FROST spec, and non-canonical points
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
Err(CurveError::InvalidPoint)?;


@ -1,8 +1,15 @@
use std::{marker::PhantomData, io::{Read, Cursor}, collections::HashMap};
use std::{
marker::PhantomData,
io::{Read, Cursor},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use group::{ff::{Field, PrimeField}, GroupEncoding};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use multiexp::{multiexp_vartime, BatchVerifier};
@ -10,7 +17,7 @@ use crate::{
curve::Curve,
FrostError, FrostParams, FrostKeys,
schnorr::{self, SchnorrSignature},
validate_map
validate_map,
};
#[allow(non_snake_case)]
@ -56,13 +63,9 @@ fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
// There's no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism though
r,
challenge::<C>(
context,
params.i(),
(C::GENERATOR * r).to_bytes().as_ref(),
&serialized
)
).serialize()
challenge::<C>(context, params.i(), (C::GENERATOR * r).to_bytes().as_ref(), &serialized),
)
.serialize(),
);
// Step 4: Broadcast
@ -114,7 +117,7 @@ fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
l,
these_commitments[0],
challenge::<C>(context, l, R.to_bytes().as_ref(), &Am),
SchnorrSignature::<C> { R, s }
SchnorrSignature::<C> { R, s },
));
}
@ -126,10 +129,7 @@ fn verify_r1<Re: Read, R: RngCore + CryptoRng, C: Curve>(
Ok(commitments)
}
fn polynomial<F: PrimeField>(
coefficients: &[F],
l: u16
) -> F {
fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
let l = F::from(u64::from(l));
let mut share = F::zero();
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
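    // The rest of the body is cut from this hunk; a plausible completion (a
    // sketch, not necessarily the committed code) is Horner's rule over these
    // highest-degree-first coefficients, evaluating the polynomial at l
    share += coefficient;
    if idx != (coefficients.len() - 1) {
      share *= l;
    }
  }

  share
}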
@ -207,13 +207,10 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
let exponential = |i: u16, values: &[_]| {
let i = C::F::from(i.into());
let mut res = Vec::with_capacity(params.t().into());
(0 .. usize::from(params.t())).into_iter().fold(
C::F::one(),
|exp, l| {
res.push((exp, values[l]));
exp * i
}
);
(0 .. usize::from(params.t())).into_iter().fold(C::F::one(), |exp, l| {
res.push((exp, values[l]));
exp * i
});
res
};
@ -254,15 +251,7 @@ fn complete_r2<Re: Read, R: RngCore + CryptoRng, C: Curve>(
// TODO: Clear serialized and shares
Ok(
FrostKeys {
params,
secret_share,
group_key: stripes[0],
verification_shares,
offset: None
}
)
Ok(FrostKeys { params, secret_share, group_key: stripes[0], verification_shares, offset: None })
}
pub struct KeyGenMachine<C: Curve> {
@ -298,11 +287,8 @@ impl<C: Curve> KeyGenMachine<C> {
self,
rng: &mut R,
) -> (SecretShareMachine<C>, Vec<u8>) {
let (
coefficients,
our_commitments,
serialized
) = generate_key_r1::<_, C>(rng, &self.params, &self.context);
let (coefficients, our_commitments, serialized) =
generate_key_r1::<_, C>(rng, &self.params, &self.context);
(
SecretShareMachine {


@ -3,7 +3,10 @@ use std::{io::Read, collections::HashMap};
use thiserror::Error;
use group::{ff::{Field, PrimeField}, GroupEncoding};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
mod schnorr;
@ -28,11 +31,7 @@ pub struct FrostParams {
}
impl FrostParams {
pub fn new(
t: u16,
n: u16,
i: u16
) -> Result<FrostParams, FrostError> {
pub fn new(t: u16, n: u16, i: u16) -> Result<FrostParams, FrostError> {
if (t == 0) || (n == 0) {
Err(FrostError::ZeroParameter(t, n))?;
}
@ -46,12 +45,18 @@ impl FrostParams {
Err(FrostError::InvalidParticipantIndex(n, i))?;
}
Ok(FrostParams{ t, n, i })
Ok(FrostParams { t, n, i })
}
pub fn t(&self) -> u16 { self.t }
pub fn n(&self) -> u16 { self.n }
pub fn i(&self) -> u16 { self.i }
pub fn t(&self) -> u16 {
self.t
}
pub fn n(&self) -> u16 {
self.n
}
pub fn i(&self) -> u16 {
self.i
}
}
#[derive(Copy, Clone, Error, Debug)]
@ -112,10 +117,7 @@ impl<C: Curve> FrostView<C> {
}
/// Calculate the lagrange coefficient for a signing set
pub fn lagrange<F: PrimeField>(
i: u16,
included: &[u16],
) -> F {
pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
let mut num = F::one();
let mut denom = F::one();
for l in included {
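    // The loop body is cut from this hunk; a plausible completion (a sketch,
    // not necessarily the committed code) accumulates one numerator and one
    // denominator so a single inversion yields L_i(0) = ∏_{l != i} l / (l - i)
    if i == *l {
      continue;
    }

    let share = F::from(u64::from(*l));
    num *= share;
    denom *= share - F::from(u64::from(i));
  }

  num * denom.invert().unwrap()
}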
@ -192,12 +194,13 @@ impl<C: Curve> FrostKeys<C> {
Ok(FrostView {
group_key: self.group_key,
secret_share: secret_share + offset_share,
verification_shares: self.verification_shares.iter().map(
|(l, share)| (
*l,
(*share * lagrange::<C::F>(*l, &included)) + (C::GENERATOR * offset_share)
)
).collect(),
verification_shares: self
.verification_shares
.iter()
.map(|(l, share)| {
(*l, (*share * lagrange::<C::F>(*l, &included)) + (C::GENERATOR * offset_share))
})
.collect(),
included: included.to_vec(),
})
}
@ -242,36 +245,35 @@ impl<C: Curve> FrostKeys<C> {
let (t, n, i) = {
let mut read_u16 = || {
let mut value = [0; 2];
cursor.read_exact(&mut value).map_err(
|_| FrostError::InternalError("missing participant quantities")
)?;
cursor
.read_exact(&mut value)
.map_err(|_| FrostError::InternalError("missing participant quantities"))?;
Ok(u16::from_be_bytes(value))
};
(read_u16()?, read_u16()?, read_u16()?)
};
let secret_share = C::read_F(cursor)
.map_err(|_| FrostError::InternalError("invalid secret share"))?;
let group_key = C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid group key"))?;
let secret_share =
C::read_F(cursor).map_err(|_| FrostError::InternalError("invalid secret share"))?;
let group_key =
C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid group key"))?;
let mut verification_shares = HashMap::new();
for l in 1 ..= n {
verification_shares.insert(
l,
C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?
C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?,
);
}
Ok(
FrostKeys {
params: FrostParams::new(t, n, i)
.map_err(|_| FrostError::InternalError("invalid parameters"))?,
secret_share,
group_key,
verification_shares,
offset: None
}
)
Ok(FrostKeys {
params: FrostParams::new(t, n, i)
.map_err(|_| FrostError::InternalError("invalid parameters"))?,
secret_share,
group_key,
verification_shares,
offset: None,
})
}
}
@ -279,7 +281,7 @@ impl<C: Curve> FrostKeys<C> {
pub(crate) fn validate_map<T>(
map: &mut HashMap<u16, T>,
included: &[u16],
ours: u16
ours: u16,
) -> Result<(), FrostError> {
if (map.len() + 1) != included.len() {
Err(FrostError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;


@ -1,6 +1,9 @@
use rand_core::{RngCore, CryptoRng};
use group::{ff::{Field, PrimeField}, GroupEncoding};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
};
use multiexp::BatchVerifier;
@ -25,26 +28,23 @@ impl<C: Curve> SchnorrSignature<C> {
pub(crate) fn sign<C: Curve>(
private_key: C::F,
nonce: C::F,
challenge: C::F
challenge: C::F,
) -> SchnorrSignature<C> {
SchnorrSignature {
R: C::GENERATOR * nonce,
s: nonce + (private_key * challenge)
}
SchnorrSignature { R: C::GENERATOR * nonce, s: nonce + (private_key * challenge) }
}
#[must_use]
pub(crate) fn verify<C: Curve>(
public_key: C::G,
challenge: C::F,
signature: &SchnorrSignature<C>
signature: &SchnorrSignature<C>,
) -> bool {
(C::GENERATOR * signature.s) == (signature.R + (public_key * challenge))
}
pub(crate) fn batch_verify<C: Curve, R: RngCore + CryptoRng>(
rng: &mut R,
triplets: &[(u16, C::G, C::F, SchnorrSignature<C>)]
triplets: &[(u16, C::G, C::F, SchnorrSignature<C>)],
) -> Result<(), u16> {
let mut values = [(C::F::one(), C::GENERATOR); 3];
let mut batch = BatchVerifier::new(triplets.len());


@ -1,20 +1,24 @@
use core::fmt;
use std::{io::{Read, Cursor}, sync::Arc, collections::HashMap};
use std::{
io::{Read, Cursor},
sync::Arc,
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use transcript::Transcript;
use group::{ff::{Field, PrimeField}, Group, GroupEncoding};
use group::{
ff::{Field, PrimeField},
Group, GroupEncoding,
};
use multiexp::multiexp_vartime;
use dleq::DLEqProof;
use crate::{
curve::Curve,
FrostError, FrostParams, FrostKeys, FrostView,
algorithm::Algorithm,
validate_map
curve::Curve, FrostError, FrostParams, FrostKeys, FrostView, algorithm::Algorithm, validate_map,
};
/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set
@ -88,11 +92,14 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
params: &mut Params<C, A>,
) -> (PreprocessPackage<C>, Vec<u8>) {
let mut serialized = Vec::with_capacity(2 * C::G_len());
let (nonces, commitments) = params.algorithm.nonces().iter().map(
|generators| {
let (nonces, commitments) = params
.algorithm
.nonces()
.iter()
.map(|generators| {
let nonces = [
C::random_nonce(params.view().secret_share(), &mut *rng),
C::random_nonce(params.view().secret_share(), &mut *rng)
C::random_nonce(params.view().secret_share(), &mut *rng),
];
let commit = |generator: C::G, buf: &mut Vec<u8>| {
@ -116,18 +123,15 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
// This could be further optimized with a multi-nonce proof.
// See https://github.com/serai-dex/serai/issues/38
for nonce in nonces {
DLEqProof::prove(
&mut *rng,
&mut transcript,
&generators,
nonce
).serialize(&mut serialized).unwrap();
DLEqProof::prove(&mut *rng, &mut transcript, &generators, nonce)
.serialize(&mut serialized)
.unwrap();
}
}
(nonces, commitments)
}
).unzip();
})
.unzip();
let addendum = params.algorithm.preprocess_addendum(rng, &params.view);
serialized.extend(&addendum);
@ -139,7 +143,7 @@ fn preprocess<R: RngCore + CryptoRng, C: Curve, A: Algorithm<C>>(
fn read_D_E<Re: Read, C: Curve>(cursor: &mut Re, l: u16) -> Result<[C::G; 2], FrostError> {
Ok([
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?
C::read_G(cursor).map_err(|_| FrostError::InvalidCommitment(l))?,
])
}
@ -197,7 +201,7 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
params.algorithm.process_addendum(
&params.view,
*l,
&mut Cursor::new(our_preprocess.addendum.clone())
&mut Cursor::new(our_preprocess.addendum.clone()),
)?;
} else {
let mut cursor = commitments.remove(l).unwrap();
@ -213,13 +217,14 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
if nonce_generators.len() >= 2 {
let mut transcript = nonce_transcript::<A::Transcript>();
for de in 0 .. 2 {
DLEqProof::deserialize(
&mut cursor
).map_err(|_| FrostError::InvalidCommitment(*l))?.verify(
&mut transcript,
&nonce_generators,
&commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
).map_err(|_| FrostError::InvalidCommitment(*l))?;
DLEqProof::deserialize(&mut cursor)
.map_err(|_| FrostError::InvalidCommitment(*l))?
.verify(
&mut transcript,
&nonce_generators,
&commitments[n].iter().map(|commitments| commitments[de]).collect::<Vec<_>>(),
)
.map_err(|_| FrostError::InvalidCommitment(*l))?;
}
}
}
@ -236,7 +241,7 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
// protocol
rho_transcript.append_message(
b"commitments",
&C::hash_msg(params.algorithm.transcript().challenge(b"commitments").as_ref())
&C::hash_msg(params.algorithm.transcript().challenge(b"commitments").as_ref()),
);
// Include the offset, if one exists
// While this isn't part of the FROST-expected rho transcript, the offset being here coincides
@ -254,10 +259,10 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
// Merge the rho transcript back into the global one to ensure it's advanced while committing to
// everything
params.algorithm.transcript().append_message(
b"rho_transcript",
rho_transcript.challenge(b"merge").as_ref()
);
params
.algorithm
.transcript()
.append_message(b"rho_transcript", rho_transcript.challenge(b"merge").as_ref());
}
#[allow(non_snake_case)]
@ -280,10 +285,12 @@ fn sign_with_share<Re: Read, C: Curve, A: Algorithm<C>>(
let share = params.algorithm.sign_share(
&params.view,
&Rs,
&our_preprocess.nonces.iter().map(
|nonces| nonces[0] + (nonces[1] * B[&params.keys.params.i()].1)
).collect::<Vec<_>>(),
msg
&our_preprocess
.nonces
.iter()
.map(|nonces| nonces[0] + (nonces[1] * B[&params.keys.params.i()].1))
.collect::<Vec<_>>(),
msg,
);
Ok((Package { B, Rs, share }, share.to_repr().as_ref().to_vec()))
}
@ -321,21 +328,21 @@ fn complete<Re: Read, C: Curve, A: Algorithm<C>>(
for l in &sign_params.view.included {
if !sign_params.algorithm.verify_share(
sign_params.view.verification_share(*l),
&sign.B[l].0.iter().map(
|nonces| nonces.iter().map(
|commitments| commitments[0] + (commitments[1] * sign.B[l].1)
).collect()
).collect::<Vec<_>>(),
responses[l]
&sign.B[l]
.0
.iter()
.map(|nonces| {
nonces.iter().map(|commitments| commitments[0] + (commitments[1] * sign.B[l].1)).collect()
})
.collect::<Vec<_>>(),
responses[l],
) {
Err(FrostError::InvalidShare(*l))?;
}
}
// If everyone has a valid share and there were enough participants, this should've worked
Err(
FrostError::InternalError("everyone had a valid share yet the signature was still invalid")
)
Err(FrostError::InternalError("everyone had a valid share yet the signature was still invalid"))
}
pub trait PreprocessMachine {
@ -345,10 +352,7 @@ pub trait PreprocessMachine {
/// Perform the preprocessing round required in order to sign
/// Returns a byte vector which must be transmitted to all parties selected for this signing
/// process, over an authenticated channel
fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R
) -> (Self::SignMachine, Vec<u8>);
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>);
}
pub trait SignMachine<S> {
@ -376,7 +380,7 @@ pub trait SignatureMachine<S> {
/// State machine which manages signing for an arbitrary signature algorithm
pub struct AlgorithmMachine<C: Curve, A: Algorithm<C>> {
params: Params<C, A>
params: Params<C, A>,
}
pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
@ -401,7 +405,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
pub(crate) fn unsafe_override_preprocess(
self,
preprocess: PreprocessPackage<C>
preprocess: PreprocessPackage<C>,
) -> AlgorithmSignMachine<C, A> {
AlgorithmSignMachine { params: self.params, preprocess }
}
@ -411,10 +415,7 @@ impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
type Signature = A::Signature;
type SignMachine = AlgorithmSignMachine<C, A>;
fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R
) -> (Self::SignMachine, Vec<u8>) {
fn preprocess<R: RngCore + CryptoRng>(self, rng: &mut R) -> (Self::SignMachine, Vec<u8>) {
let mut params = self.params;
let (preprocess, serialized) = preprocess::<R, C, A>(rng, &mut params);
(AlgorithmSignMachine { params, preprocess }, serialized)
@ -427,7 +428,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
fn sign<Re: Read>(
self,
commitments: HashMap<u16, Re>,
msg: &[u8]
msg: &[u8],
) -> Result<(Self::SignatureMachine, Vec<u8>), FrostError> {
let mut params = self.params;
let (sign, serialized) = sign_with_share(&mut params, self.preprocess, commitments, msg)?;
@ -435,10 +436,7 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
}
}
impl<
C: Curve,
A: Algorithm<C>
> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> {
impl<C: Curve, A: Algorithm<C>> SignatureMachine<A::Signature> for AlgorithmSignatureMachine<C, A> {
fn complete<Re: Read>(self, shares: HashMap<u16, Re>) -> Result<A::Signature, FrostError> {
complete(&self.params, self.sign, shares)
}


@ -1,6 +1,9 @@
use rand::rngs::OsRng;
use crate::{curve, tests::vectors::{Vectors, test_with_vectors}};
use crate::{
curve,
tests::vectors::{Vectors, test_with_vectors},
};
#[cfg(any(test, feature = "ristretto"))]
#[test]
@ -12,7 +15,7 @@ fn ristretto_vectors() {
shares: &[
"5c3430d391552f6e60ecdc093ff9f6f4488756aa6cebdbad75a768010b8f830e",
"b06fc5eac20b4f6e1b271d9df2343d843e1e1fb03c4cbb673f2872d459ce6f01",
"f17e505f0e2581c6acfe54d3846a622834b5e7b50cad9a2109a97ba7a80d5c04"
"f17e505f0e2581c6acfe54d3846a622834b5e7b50cad9a2109a97ba7a80d5c04",
],
group_secret: "1b25a55e463cfd15cf14a5d3acc3d15053f08da49c8afcf3ab265f2ebc4f970b",
group_key: "e2a62f39eede11269e3bd5a7d97554f5ca384f9f6d3dd9c3c0d05083c7254f57",
@ -22,20 +25,20 @@ fn ristretto_vectors() {
nonces: &[
[
"eb0dc12ae7b746d36e3f2de46ce3833a05b9d4af5434eeb8cafaefda76906d00",
"491e91aa9df514ef598d5e0c7c5cdd088fbde4965b96069d546c0f04f1822b03"
"491e91aa9df514ef598d5e0c7c5cdd088fbde4965b96069d546c0f04f1822b03",
],
[
"abd12b8e6f255ee1e540eab029003a6e956567617720f61115f0941615892209",
"218e22625f93f262f025bd2d13c46ba722aa29fe585ceed66ff442d98fe4e509"
]
"218e22625f93f262f025bd2d13c46ba722aa29fe585ceed66ff442d98fe4e509",
],
],
sig_shares: &[
"efae3a83437fa8cd96194aacc56a7eb841630c280da99e7764a81d1340323306",
"96ddc4582e45eabce46f07b9e9375f8b49d35d1510fd34ac02b1e79d6100a602"
"96ddc4582e45eabce46f07b9e9375f8b49d35d1510fd34ac02b1e79d6100a602",
],
sig: "7ec584cef9a383afb43883b73bcaa6313afe878bd5fe75a608311b866a76ec67".to_owned() +
"858cffdb71c4928a7b895165afa2dd438b366a3d1da6d323675905b1a132d908"
}
"858cffdb71c4928a7b895165afa2dd438b366a3d1da6d323675905b1a132d908",
},
);
}
@ -49,7 +52,7 @@ fn ed25519_vectors() {
shares: &[
"929dcc590407aae7d388761cddb0c0db6f5627aea8e217f4a033f2ec83d93509",
"a91e66e012e4364ac9aaa405fcafd370402d9859f7b6685c07eed76bf409e80d",
"d3cb090a075eb154e82fdb4b3cb507f110040905468bb9c46da8bdea643a9a02"
"d3cb090a075eb154e82fdb4b3cb507f110040905468bb9c46da8bdea643a9a02",
],
group_secret: "7b1c33d3f5291d85de664833beb1ad469f7fb6025a0ec78b3a790c6e13a98304",
group_key: "15d21ccd7ee42959562fc8aa63224c8851fb3ec85a3faf66040d380fb9738673",
@ -59,19 +62,19 @@ fn ed25519_vectors() {
nonces: &[
[
"d9aad97e1a1127bb87702ce8d81d8c07c7cbca89e784868d8e3876ff6b459700",
"5063be2774520d08a5ccd7f1213fb1179a5fa292bf13bc91cb28e7bd4d4a690c"
"5063be2774520d08a5ccd7f1213fb1179a5fa292bf13bc91cb28e7bd4d4a690c",
],
[
"86961f3a429ac0c5696f49e6d796817ff653f83c07f34e9e1f4d4c8c515b7900",
"72225ec11c1315d9f1ea0e78b1160ed95800fadd0191d23fd2f2c90ac96cb307"
]
"72225ec11c1315d9f1ea0e78b1160ed95800fadd0191d23fd2f2c90ac96cb307",
],
],
sig_shares: &[
"caae171b83bff0c2c6f56a1276892918ba228146f6344b85d2ec6efeb6f16d0d",
"ea6fdbf61683cf5f1f742e1b91583f0f667f0369efd2e33399b96d5a3ff0300d"
"ea6fdbf61683cf5f1f742e1b91583f0f667f0369efd2e33399b96d5a3ff0300d",
],
sig: "5da10008c13c04dd72328ba8e0f72b63cad43c3bf4b7eaada1c78225afbd977e".to_owned() +
"c74afdb47fdfadca0fcda18a28e8891220a284afe5072fb96ba6dc58f6e19e0a"
}
"c74afdb47fdfadca0fcda18a28e8891220a284afe5072fb96ba6dc58f6e19e0a",
},
);
}


@ -19,7 +19,7 @@ fn secp256k1_non_ietf() {
shares: &[
"08f89ffe80ac94dcb920c26f3f46140bfc7f95b493f8310f5fc1ea2b01f4254c",
"04f0feac2edcedc6ce1253b7fab8c86b856a797f44d83d82a385554e6e401984",
"00e95d59dd0d46b0e303e500b62b7ccb0e555d49f5b849f5e748c071da8c0dbc"
"00e95d59dd0d46b0e303e500b62b7ccb0e555d49f5b849f5e748c071da8c0dbc",
],
group_secret: "0d004150d27c3bf2a42f312683d35fac7394b1e9e318249c1bfe7f0795a83114",
group_key: "02f37c34b66ced1fb51c34a90bdae006901f10625cc06c4f64663b0eae87d87b4f",
@ -29,20 +29,20 @@ fn secp256k1_non_ietf() {
nonces: &[
[
"31c3c1b76b76664569859b9251fbabed9d4d432c6f5aaa03ed41f9c231935798",
"206f4ffaeb602ccb57cbe50e146ac690e6d7317d4b93377061d9d1b4caf78a26"
"206f4ffaeb602ccb57cbe50e146ac690e6d7317d4b93377061d9d1b4caf78a26",
],
[
"0d3945bc1553676a5dd910cb4f14437d99ed421516b2617357b984820fdca520",
"635e0fd90caaf40b5e986d0ee0f58778e4d88731bc6ac70350ef702ffe20a21b"
]
"635e0fd90caaf40b5e986d0ee0f58778e4d88731bc6ac70350ef702ffe20a21b",
],
],
sig_shares: &[
"18b71e284c5d008896ed8847b234ec829eda376d6208838ee7faf2ce21b154c1",
"a452a49c8116124d0a283f3589a96b704894b43246e47e59d376353bcc638311"
"a452a49c8116124d0a283f3589a96b704894b43246e47e59d376353bcc638311",
],
sig: "03dafb28ee7ad033fd15ed470d07156617260d74a9d76a15d371d7b613d2b111e".to_owned() +
"7bd09c2c4cd7312d5a115c77d3bde57f2e76eeb9fa8ed01e8bb712809ee14d7d2"
}
"7bd09c2c4cd7312d5a115c77d3bde57f2e76eeb9fa8ed01e8bb712809ee14d7d2",
},
);
}
@ -56,7 +56,7 @@ fn p256_vectors() {
shares: &[
"0c9c1a0fe806c184add50bbdcac913dda73e482daf95dcb9f35dbb0d8a9f7731",
"8d8e787bef0ff6c2f494ca45f4dad198c6bee01212d6c84067159c52e1863ad5",
"0e80d6e8f6192c003b5488ce1eec8f5429587d48cf001541e713b2d53c09d928"
"0e80d6e8f6192c003b5488ce1eec8f5429587d48cf001541e713b2d53c09d928",
],
group_secret: "8ba9bba2e0fd8c4767154d35a0b7562244a4aaf6f36c8fb8735fa48b301bd8de",
group_key: "023a309ad94e9fe8a7ba45dfc58f38bf091959d3c99cfbd02b4dc00585ec45ab70",
@ -66,19 +66,19 @@ fn p256_vectors() {
nonces: &[
[
"33a519cf070a166f9ef41a798d03423743f3e7d0b0efd5d0d963773c4c53205e",
"307d208d0c5728f323ae374f1ebd7f14a1a49b77d9d4bc1eab222218a17765ff"
"307d208d0c5728f323ae374f1ebd7f14a1a49b77d9d4bc1eab222218a17765ff",
],
[
"a614eadb972dc37b88aeceb6e899903f3104742d13f379a0e014541decbea4a4",
"e509791018504c5bb87edaf0f44761cc840888507c4cd80237971d78e65f70f2"
]
"e509791018504c5bb87edaf0f44761cc840888507c4cd80237971d78e65f70f2",
],
],
sig_shares: &[
"61e8b9c474df2e66ad19fd80a6e6cec1c6fe43c0a1cffd2d1c28299e93e1bbdb",
"9651d355ca1dea2557ba1f73e38a9f4ff1f1afc565323ef27f88a9d14df8370e"
"9651d355ca1dea2557ba1f73e38a9f4ff1f1afc565323ef27f88a9d14df8370e",
],
sig: "02dfba781e17b830229ae4ed22ebe402873683d9dfd945d01762217fb3172c2a7".to_owned() +
"1f83a8d1a3efd188c04d41cf48a716e11b8eff38607023c1f9bb0d36fe1d9f2e9"
}
"1f83a8d1a3efd188c04d41cf48a716e11b8eff38607023c1f9bb0d36fe1d9f2e9",
},
);
}


@ -5,12 +5,10 @@ use rand_core::{RngCore, CryptoRng};
use group::ff::Field;
use crate::{
Curve,
FrostParams, FrostKeys,
lagrange,
Curve, FrostParams, FrostKeys, lagrange,
key_gen::KeyGenMachine,
algorithm::Algorithm,
sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine}
sign::{PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
};
// Test suites for public usage
@ -27,22 +25,20 @@ pub const THRESHOLD: u16 = ((PARTICIPANTS / 3) * 2) + 1;
pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R
) -> HashMap<u16, Arc<FrostKeys<C>>> {
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, Arc<FrostKeys<C>>> {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
for i in 1 ..= PARTICIPANTS {
let machine = KeyGenMachine::<C>::new(
FrostParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(),
"FROST Test key_gen".to_string()
"FROST Test key_gen".to_string(),
);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
@ -50,41 +46,45 @@ pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(
}
let mut secret_shares = HashMap::new();
let mut machines = machines.drain().map(|(l, machine)| {
let (machine, shares) = machine.generate_secret_shares(
rng,
clone_without(&commitments, &l)
).unwrap();
secret_shares.insert(l, shares);
(l, machine)
}).collect::<HashMap<_, _>>();
let mut machines = machines
.drain()
.map(|(l, machine)| {
let (machine, shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
let mut verification_shares = None;
let mut group_key = None;
machines.drain().map(|(i, machine)| {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == *l {
continue;
machines
.drain()
.map(|(i, machine)| {
let mut our_secret_shares = HashMap::new();
for (l, shares) in &secret_shares {
if i == *l {
continue;
}
our_secret_shares.insert(*l, Cursor::new(shares[&i].clone()));
}
our_secret_shares.insert(*l, Cursor::new(shares[&i].clone()));
}
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, Arc::new(these_keys))
}).collect::<HashMap<_, _>>()
(i, Arc::new(these_keys))
})
.collect::<HashMap<_, _>>()
}
pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
@ -92,10 +92,9 @@ pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
let included = keys.keys().cloned().collect::<Vec<_>>();
let group_private = keys.iter().fold(
C::F::zero(),
|accum, (i, keys)| accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
);
let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| {
accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
});
assert_eq!(C::GENERATOR * group_private, first.group_key(), "failed to recover keys");
group_private
}
@ -114,40 +113,45 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
included.push(n);
}
keys.iter().filter_map(
|(i, keys)| if included.contains(&i) {
Some((
*i,
AlgorithmMachine::new(
algorithm.clone(),
keys.clone(),
&included.clone()
).unwrap()
))
} else {
None
}
).collect()
keys
.iter()
.filter_map(|(i, keys)| {
if included.contains(&i) {
Some((
*i,
AlgorithmMachine::new(algorithm.clone(), keys.clone(), &included.clone()).unwrap(),
))
} else {
None
}
})
.collect()
}
pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
rng: &mut R,
mut machines: HashMap<u16, M>,
msg: &[u8]
msg: &[u8],
) -> M::Signature {
let mut commitments = HashMap::new();
let mut machines = machines.drain().map(|(i, machine)| {
let (machine, preprocess) = machine.preprocess(rng);
commitments.insert(i, Cursor::new(preprocess));
(i, machine)
}).collect::<HashMap<_, _>>();
let mut machines = machines
.drain()
.map(|(i, machine)| {
let (machine, preprocess) = machine.preprocess(rng);
commitments.insert(i, Cursor::new(preprocess));
(i, machine)
})
.collect::<HashMap<_, _>>();
let mut shares = HashMap::new();
let mut machines = machines.drain().map(|(i, machine)| {
let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();
shares.insert(i, Cursor::new(share));
(i, machine)
}).collect::<HashMap<_, _>>();
let mut machines = machines
.drain()
.map(|(i, machine)| {
let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();
shares.insert(i, Cursor::new(share));
(i, machine)
})
.collect::<HashMap<_, _>>();
let mut signature = None;
for (i, machine) in machines.drain() {


@ -5,34 +5,32 @@ use rand_core::{RngCore, CryptoRng};
use group::{ff::Field, GroupEncoding};
use crate::{
Curve, FrostKeys, schnorr::{self, SchnorrSignature}, algorithm::{Hram, Schnorr},
tests::{key_gen, algorithm_machines, sign as sign_test}
Curve, FrostKeys,
schnorr::{self, SchnorrSignature},
algorithm::{Hram, Schnorr},
tests::{key_gen, algorithm_machines, sign as sign_test},
};
pub(crate) fn core_sign<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
let private_key = C::F::random(&mut *rng);
let nonce = C::F::random(&mut *rng);
let challenge = C::F::random(rng); // Doesn't bother to craft an HRAM
assert!(
schnorr::verify::<C>(
C::GENERATOR * private_key,
challenge,
&schnorr::sign(private_key, nonce, challenge)
)
);
assert!(schnorr::verify::<C>(
C::GENERATOR * private_key,
challenge,
&schnorr::sign(private_key, nonce, challenge)
));
}
// The above sign function verifies signing works
// This verifies invalid signatures don't pass, using zero signatures, which should effectively be
// random
pub(crate) fn core_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
assert!(
!schnorr::verify::<C>(
C::GENERATOR * C::F::random(&mut *rng),
C::F::random(rng),
&SchnorrSignature { R: C::GENERATOR * C::F::zero(), s: C::F::zero() }
)
);
assert!(!schnorr::verify::<C>(
C::GENERATOR * C::F::random(&mut *rng),
C::F::random(rng),
&SchnorrSignature { R: C::GENERATOR * C::F::zero(), s: C::F::zero() }
));
}
pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
@ -47,9 +45,9 @@ pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
}
// Batch verify
let triplets = (0 .. 5).map(
|i| (u16::try_from(i + 1).unwrap(), C::GENERATOR * keys[i], challenges[i], sigs[i])
).collect::<Vec<_>>();
let triplets = (0 .. 5)
.map(|i| (u16::try_from(i + 1).unwrap(), C::GENERATOR * keys[i], challenges[i], sigs[i]))
.collect::<Vec<_>>();
schnorr::batch_verify(rng, &triplets).unwrap();
// Shift 1 from s from one to another and verify it fails
@ -80,7 +78,7 @@ pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
fn sign_core<R: RngCore + CryptoRng, C: Curve>(
rng: &mut R,
group_key: C::G,
keys: &HashMap<u16, Arc<FrostKeys<C>>>
keys: &HashMap<u16, Arc<FrostKeys<C>>>,
) {
const MESSAGE: &'static [u8] = b"Hello, World!";
@ -91,7 +89,7 @@ fn sign_core<R: RngCore + CryptoRng, C: Curve>(
#[derive(Clone)]
pub struct TestHram<C: Curve> {
_curve: PhantomData<C>
_curve: PhantomData<C>,
}
impl<C: Curve> Hram<C> for TestHram<C> {
#[allow(non_snake_case)]


@ -5,10 +5,11 @@ use rand_core::{RngCore, CryptoRng};
use group::{ff::PrimeField, GroupEncoding};
use crate::{
curve::Curve, FrostKeys,
curve::Curve,
FrostKeys,
algorithm::{Schnorr, Hram},
sign::{PreprocessPackage, SignMachine, SignatureMachine, AlgorithmMachine},
tests::{clone_without, curve::test_curve, schnorr::test_schnorr, recover}
tests::{clone_without, curve::test_curve, schnorr::test_schnorr, recover},
};
pub struct Vectors {
@ -21,17 +22,17 @@ pub struct Vectors {
pub included: &'static [u16],
pub nonces: &'static [[&'static str; 2]],
pub sig_shares: &'static [&'static str],
pub sig: String
pub sig: String,
}
// Load these vectors into FrostKeys using a custom serialization which it'll then deserialize
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKeys<C>> {
let shares = vectors.shares.iter().map(
|secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap()
).collect::<Vec<_>>();
let verification_shares = shares.iter().map(
|secret| C::GENERATOR * secret
).collect::<Vec<_>>();
let shares = vectors
.shares
.iter()
.map(|secret| C::read_F(&mut Cursor::new(hex::decode(secret).unwrap())).unwrap())
.collect::<Vec<_>>();
let verification_shares = shares.iter().map(|secret| C::GENERATOR * secret).collect::<Vec<_>>();
let mut keys = HashMap::new();
for i in 1 ..= u16::try_from(shares.len()).unwrap() {
@ -59,11 +60,10 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
keys
}
pub fn test_with_vectors<
R: RngCore + CryptoRng,
C: Curve,
H: Hram<C>
>(rng: &mut R, vectors: Vectors) {
pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
rng: &mut R,
vectors: Vectors,
) {
// Do basic tests before trying the vectors
test_curve::<_, C>(&mut *rng);
test_schnorr::<_, C>(rng);
@ -87,54 +87,59 @@ pub fn test_with_vectors<
AlgorithmMachine::new(
Schnorr::<C, H>::new(),
Arc::new(keys[i].clone()),
vectors.included.clone()
).unwrap()
vectors.included.clone(),
)
.unwrap(),
));
}
let mut commitments = HashMap::new();
let mut c = 0;
let mut machines = machines.drain(..).map(|(i, machine)| {
let nonces = [
C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][0]).unwrap())).unwrap(),
C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][1]).unwrap())).unwrap()
];
c += 1;
let these_commitments = vec![[C::GENERATOR * nonces[0], C::GENERATOR * nonces[1]]];
let machine = machine.unsafe_override_preprocess(
PreprocessPackage {
let mut machines = machines
.drain(..)
.map(|(i, machine)| {
let nonces = [
C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][0]).unwrap())).unwrap(),
C::read_F(&mut Cursor::new(hex::decode(vectors.nonces[c][1]).unwrap())).unwrap(),
];
c += 1;
let these_commitments = vec![[C::GENERATOR * nonces[0], C::GENERATOR * nonces[1]]];
let machine = machine.unsafe_override_preprocess(PreprocessPackage {
nonces: vec![nonces],
commitments: vec![these_commitments.clone()],
addendum: vec![]
}
);
addendum: vec![],
});
commitments.insert(
i,
Cursor::new(
[
these_commitments[0][0].to_bytes().as_ref(),
these_commitments[0][1].to_bytes().as_ref()
].concat().to_vec()
)
);
(i, machine)
}).collect::<Vec<_>>();
commitments.insert(
i,
Cursor::new(
[
these_commitments[0][0].to_bytes().as_ref(),
these_commitments[0][1].to_bytes().as_ref(),
]
.concat()
.to_vec(),
),
);
(i, machine)
})
.collect::<Vec<_>>();
let mut shares = HashMap::new();
c = 0;
let mut machines = machines.drain(..).map(|(i, machine)| {
let (machine, share) = machine.sign(
clone_without(&commitments, &i),
&hex::decode(vectors.msg).unwrap()
).unwrap();
let mut machines = machines
.drain(..)
.map(|(i, machine)| {
let (machine, share) =
machine.sign(clone_without(&commitments, &i), &hex::decode(vectors.msg).unwrap()).unwrap();
assert_eq!(share, hex::decode(vectors.sig_shares[c]).unwrap());
c += 1;
assert_eq!(share, hex::decode(vectors.sig_shares[c]).unwrap());
c += 1;
shares.insert(i, Cursor::new(share));
(i, machine)
}).collect::<HashMap<_, _>>();
shares.insert(i, Cursor::new(share));
(i, machine)
})
.collect::<HashMap<_, _>>();
for (i, machine) in machines.drain() {
let sig = machine.complete(clone_without(&shares, &i)).unwrap();


@ -9,15 +9,20 @@ use crate::{multiexp, multiexp_vartime};
pub struct BatchVerifier<Id: Copy, G: Group>(Vec<(Id, Vec<(G::Scalar, G)>)>);
#[cfg(feature = "batch")]
impl<Id: Copy, G: Group> BatchVerifier<Id, G> where <G as Group>::Scalar: PrimeFieldBits {
impl<Id: Copy, G: Group> BatchVerifier<Id, G>
where
<G as Group>::Scalar: PrimeFieldBits,
{
pub fn new(capacity: usize) -> BatchVerifier<Id, G> {
BatchVerifier(Vec::with_capacity(capacity))
}
pub fn queue<
R: RngCore + CryptoRng,
I: IntoIterator<Item = (G::Scalar, G)>
>(&mut self, rng: &mut R, id: Id, pairs: I) {
pub fn queue<R: RngCore + CryptoRng, I: IntoIterator<Item = (G::Scalar, G)>>(
&mut self,
rng: &mut R,
id: Id,
pairs: I,
) {
// Define a unique scalar factor for this set of variables so individual items can't overlap
let u = if self.0.len() == 0 {
G::Scalar::one()
@ -35,16 +40,16 @@ impl<Id: Copy, G: Group> BatchVerifier<Id, G> where <G as Group>::Scalar: PrimeF
#[must_use]
pub fn verify(&self) -> bool {
multiexp(
&self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>()
).is_identity().into()
multiexp(&self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>())
.is_identity()
.into()
}
#[must_use]
pub fn verify_vartime(&self) -> bool {
multiexp_vartime(
&self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>()
).is_identity().into()
multiexp_vartime(&self.0.iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>())
.is_identity()
.into()
}
// A constant time variant may be beneficial for robust protocols
@ -53,17 +58,21 @@ impl<Id: Copy, G: Group> BatchVerifier<Id, G> where <G as Group>::Scalar: PrimeF
while slice.len() > 1 {
let split = slice.len() / 2;
if multiexp_vartime(
&slice[.. split].iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>()
).is_identity().into() {
&slice[.. split].iter().flat_map(|pairs| pairs.1.iter()).cloned().collect::<Vec<_>>(),
)
.is_identity()
.into()
{
slice = &slice[split ..];
} else {
slice = &slice[.. split];
}
}
slice.get(0).filter(
|(_, value)| !bool::from(multiexp_vartime(value).is_identity())
).map(|(id, _)| *id)
slice
.get(0)
.filter(|(_, value)| !bool::from(multiexp_vartime(value).is_identity()))
.map(|(id, _)| *id)
}
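
A usage sketch (an illustration, not part of this commit): each queued id maps to (scalar, point) pairs whose multiexp should equal the identity, verify checks every statement in a single multiexp, and the binary search above narrows a failure down to one id in logarithmically many multiexps.

use rand_core::OsRng;
use group::{
  ff::{Field, PrimeFieldBits},
  Group,
};
use multiexp::BatchVerifier;

fn batch_example<G: Group>()
where
  G::Scalar: PrimeFieldBits,
{
  let mut batch = BatchVerifier::<u16, G>::new(1);
  let s = G::Scalar::random(&mut OsRng);
  // s·G + (-s)·G sums to the identity, so this statement passes
  batch.queue(&mut OsRng, 0, [(s, G::generator()), (-s, G::generator())]);
  assert!(batch.verify_vartime());
}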
pub fn verify_with_vartime_blame(&self) -> Result<(), Id> {


@ -15,10 +15,10 @@ pub use batch::BatchVerifier;
#[cfg(test)]
mod tests;
pub(crate) fn prep_bits<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> Vec<Vec<u8>> where G::Scalar: PrimeFieldBits {
pub(crate) fn prep_bits<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<u8>>
where
G::Scalar: PrimeFieldBits,
{
let w_usize = usize::from(window);
let mut groupings = vec![];
@ -37,10 +37,7 @@ pub(crate) fn prep_bits<G: Group>(
groupings
}
pub(crate) fn prep_tables<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> Vec<Vec<G>> {
pub(crate) fn prep_tables<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> Vec<Vec<G>> {
let mut tables = Vec::with_capacity(pairs.len());
for pair in pairs {
let p = tables.len();
@ -59,7 +56,7 @@ enum Algorithm {
Null,
Single,
Straus(u8),
Pippenger(u8)
Pippenger(u8),
}
/*
@ -157,20 +154,26 @@ fn algorithm(len: usize) -> Algorithm {
}
// Performs a multiexp, automatically selecting the optimal algorithm based on amount of pairs
pub fn multiexp<G: Group>(pairs: &[(G::Scalar, G)]) -> G where G::Scalar: PrimeFieldBits {
pub fn multiexp<G: Group>(pairs: &[(G::Scalar, G)]) -> G
where
G::Scalar: PrimeFieldBits,
{
match algorithm(pairs.len()) {
Algorithm::Null => Group::identity(),
Algorithm::Single => pairs[0].1 * pairs[0].0,
Algorithm::Straus(window) => straus(pairs, window),
Algorithm::Pippenger(window) => pippenger(pairs, window)
Algorithm::Pippenger(window) => pippenger(pairs, window),
}
}
pub fn multiexp_vartime<G: Group>(pairs: &[(G::Scalar, G)]) -> G where G::Scalar: PrimeFieldBits {
pub fn multiexp_vartime<G: Group>(pairs: &[(G::Scalar, G)]) -> G
where
G::Scalar: PrimeFieldBits,
{
match algorithm(pairs.len()) {
Algorithm::Null => Group::identity(),
Algorithm::Single => pairs[0].1 * pairs[0].0,
Algorithm::Straus(window) => straus_vartime(pairs, window),
Algorithm::Pippenger(window) => pippenger_vartime(pairs, window)
Algorithm::Pippenger(window) => pippenger_vartime(pairs, window),
}
}
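
A usage sketch (an illustration, not part of this commit): callers just hand over the pairs and multiexp computes Σ scalar_i · point_i, with algorithm() choosing Null, Single, Straus, or Pippenger from how many pairs there are.

use rand_core::OsRng;
use group::{
  ff::{Field, PrimeFieldBits},
  Group,
};
use multiexp::multiexp;

fn multiexp_example<G: Group>() -> G
where
  G::Scalar: PrimeFieldBits,
{
  // s1·G + s2·(2·G)
  let pairs = [
    (G::Scalar::random(&mut OsRng), G::generator()),
    (G::Scalar::random(&mut OsRng), G::generator().double()),
  ];
  multiexp(&pairs)
}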


@ -3,10 +3,10 @@ use group::Group;
use crate::prep_bits;
pub(crate) fn pippenger<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> G where G::Scalar: PrimeFieldBits {
pub(crate) fn pippenger<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
where
G::Scalar: PrimeFieldBits,
{
let bits = prep_bits(pairs, window);
let mut res = G::identity();
@ -30,10 +30,10 @@ pub(crate) fn pippenger<G: Group>(
res
}
pub(crate) fn pippenger_vartime<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> G where G::Scalar: PrimeFieldBits {
pub(crate) fn pippenger_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
where
G::Scalar: PrimeFieldBits,
{
let bits = prep_bits(pairs, window);
let mut res = G::identity();


@ -3,10 +3,10 @@ use group::Group;
use crate::{prep_bits, prep_tables};
pub(crate) fn straus<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> G where G::Scalar: PrimeFieldBits {
pub(crate) fn straus<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
where
G::Scalar: PrimeFieldBits,
{
let groupings = prep_bits(pairs, window);
let tables = prep_tables(pairs, window);
@ -23,10 +23,10 @@ pub(crate) fn straus<G: Group>(
res
}
pub(crate) fn straus_vartime<G: Group>(
pairs: &[(G::Scalar, G)],
window: u8
) -> G where G::Scalar: PrimeFieldBits {
pub(crate) fn straus_vartime<G: Group>(pairs: &[(G::Scalar, G)], window: u8) -> G
where
G::Scalar: PrimeFieldBits,
{
let groupings = prep_bits(pairs, window);
let tables = prep_tables(pairs, window);


@ -11,7 +11,10 @@ use dalek_ff_group::EdwardsPoint;
use crate::{straus, pippenger, multiexp, multiexp_vartime};
#[allow(dead_code)]
fn benchmark_internal<G: Group>(straus_bool: bool) where G::Scalar: PrimeFieldBits {
fn benchmark_internal<G: Group>(straus_bool: bool)
where
G::Scalar: PrimeFieldBits,
{
let runs: usize = 20;
let mut start = 0;
@ -64,7 +67,10 @@ fn benchmark_internal<G: Group>(straus_bool: bool) where G::Scalar: PrimeFieldBi
current += 1;
println!(
"{} {} is more efficient at {} with {}µs per",
if straus_bool { "Straus" } else { "Pippenger" }, current, pairs.len(), next_per
if straus_bool { "Straus" } else { "Pippenger" },
current,
pairs.len(),
next_per
);
if current >= 8 {
return;
@ -73,7 +79,10 @@ fn benchmark_internal<G: Group>(straus_bool: bool) where G::Scalar: PrimeFieldBi
}
}
fn test_multiexp<G: Group>() where G::Scalar: PrimeFieldBits {
fn test_multiexp<G: Group>()
where
G::Scalar: PrimeFieldBits,
{
let mut pairs = Vec::with_capacity(1000);
let mut sum = G::identity();
for _ in 0 .. 10 {


@ -34,7 +34,7 @@ enum DigestTranscriptMember {
Domain,
Label,
Value,
Challenge
Challenge,
}
impl DigestTranscriptMember {
@ -44,7 +44,7 @@ impl DigestTranscriptMember {
DigestTranscriptMember::Domain => 1,
DigestTranscriptMember::Label => 2,
DigestTranscriptMember::Value => 3,
DigestTranscriptMember::Challenge => 4
DigestTranscriptMember::Challenge => 4,
}
}
}
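
Each transcript input is prefixed with one of these kind bytes, so a domain separator, label, value, or challenge can't be mistaken for one another in the running hash. A sketch of the framing this enables (an assumption about the surrounding DigestTranscript code, which this hunk doesn't show):

use digest::Update;

// Tag-then-data framing: the kind byte keeps differently-typed messages
// from colliding even when their raw bytes agree
fn append_tagged<D: Update>(digest: &mut D, kind: u8, data: &[u8]) {
  digest.update(&[kind]);
  digest.update(data);
}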


@ -6,7 +6,9 @@ use crate::Transcript;
pub struct MerlinTranscript(pub merlin::Transcript);
// Merlin doesn't implement Debug so provide a stub which won't panic
impl Debug for MerlinTranscript {
fn fmt(&self, _: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { Ok(()) }
fn fmt(&self, _: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
Ok(())
}
}
impl Transcript for MerlinTranscript {


@ -12,7 +12,7 @@ pub use self::monero::Monero;
#[derive(Clone, Error, Debug)]
pub enum CoinError {
#[error("failed to connect to coin daemon")]
ConnectionError
ConnectionError,
}
pub trait Output: Sized + Clone {
@ -52,7 +52,7 @@ pub trait Coin {
async fn get_outputs(
&self,
block: &Self::Block,
key: <Self::Curve as Curve>::G
key: <Self::Curve as Curve>::G,
) -> Vec<Self::Output>;
async fn prepare_send(
@ -62,18 +62,18 @@ pub trait Coin {
height: usize,
inputs: Vec<Self::Output>,
payments: &[(Self::Address, u64)],
fee: Self::Fee
fee: Self::Fee,
) -> Result<Self::SignableTransaction, CoinError>;
async fn attempt_send(
&self,
transaction: Self::SignableTransaction,
included: &[u16]
included: &[u16],
) -> Result<Self::TransactionMachine, CoinError>;
async fn publish_transaction(
&self,
tx: &Self::Transaction
tx: &Self::Transaction,
) -> Result<(Vec<u8>, Vec<<Self::Output as Output>::Id>), CoinError>;
#[cfg(test)]


@ -12,12 +12,16 @@ use monero_serai::{
transaction::Transaction,
rpc::Rpc,
wallet::{
ViewPair, address::{Network, AddressType, Address},
Fee, SpendableOutput, SignableTransaction as MSignableTransaction, TransactionMachine
}
ViewPair,
address::{Network, AddressType, Address},
Fee, SpendableOutput, SignableTransaction as MSignableTransaction, TransactionMachine,
},
};
use crate::{coin::{CoinError, Output as OutputTrait, Coin}, view_key};
use crate::{
coin::{CoinError, Output as OutputTrait, Coin},
view_key,
};
#[derive(Clone, Debug)]
pub struct Output(SpendableOutput);
@ -55,13 +59,13 @@ pub struct SignableTransaction(
Arc<FrostKeys<Ed25519>>,
RecommendedTranscript,
usize,
MSignableTransaction
MSignableTransaction,
);
#[derive(Clone, Debug)]
pub struct Monero {
pub(crate) rpc: Rpc,
view: Scalar
view: Scalar,
}
impl Monero {
@ -138,47 +142,51 @@ impl Coin for Monero {
height: usize,
mut inputs: Vec<Output>,
payments: &[(Address, u64)],
fee: Fee
fee: Fee,
) -> Result<SignableTransaction, CoinError> {
let spend = keys.group_key();
Ok(
SignableTransaction(
keys,
transcript,
height,
MSignableTransaction::new(
inputs.drain(..).map(|input| input.0).collect(),
payments.to_vec(),
Some(self.address(spend)),
fee
).map_err(|_| CoinError::ConnectionError)?
Ok(SignableTransaction(
keys,
transcript,
height,
MSignableTransaction::new(
inputs.drain(..).map(|input| input.0).collect(),
payments.to_vec(),
Some(self.address(spend)),
fee,
)
)
.map_err(|_| CoinError::ConnectionError)?,
))
}
async fn attempt_send(
&self,
transaction: SignableTransaction,
included: &[u16]
included: &[u16],
) -> Result<Self::TransactionMachine, CoinError> {
transaction.3.clone().multisig(
&self.rpc,
(*transaction.0).clone(),
transaction.1.clone(),
transaction.2,
included.to_vec()
).await.map_err(|_| CoinError::ConnectionError)
transaction
.3
.clone()
.multisig(
&self.rpc,
(*transaction.0).clone(),
transaction.1.clone(),
transaction.2,
included.to_vec(),
)
.await
.map_err(|_| CoinError::ConnectionError)
}
async fn publish_transaction(
&self,
tx: &Self::Transaction
tx: &Self::Transaction,
) -> Result<(Vec<u8>, Vec<<Self::Output as OutputTrait>::Id>), CoinError> {
self.rpc.publish_transaction(&tx).await.map_err(|_| CoinError::ConnectionError)?;
Ok((
tx.hash().to_vec(),
tx.prefix.outputs.iter().map(|output| output.key.compress().to_bytes()).collect()
tx.prefix.outputs.iter().map(|output| output.key.compress().to_bytes()).collect(),
))
}
@ -186,13 +194,20 @@ impl Coin for Monero {
async fn mine_block(&self) {
#[derive(serde::Deserialize, Debug)]
struct EmptyResponse {}
let _: EmptyResponse = self.rpc.rpc_call("json_rpc", Some(serde_json::json!({
"method": "generateblocks",
"params": {
"wallet_address": self.empty_address().to_string(),
"amount_of_blocks": 10
},
}))).await.unwrap();
let _: EmptyResponse = self
.rpc
.rpc_call(
"json_rpc",
Some(serde_json::json!({
"method": "generateblocks",
"params": {
"wallet_address": self.empty_address().to_string(),
"amount_of_blocks": 10
},
})),
)
.await
.unwrap();
}
#[cfg(test)]
@ -206,9 +221,14 @@ impl Coin for Monero {
self.mine_block().await;
}
let outputs = self.rpc
.get_block_transactions_possible(height).await.unwrap()
.swap_remove(0).scan(self.empty_view_pair(), false).ignore_timelock();
let outputs = self
.rpc
.get_block_transactions_possible(height)
.await
.unwrap()
.swap_remove(0)
.scan(self.empty_view_pair(), false)
.ignore_timelock();
let amount = outputs[0].commitment.amount;
let fee = 1000000000; // TODO
@ -216,8 +236,12 @@ impl Coin for Monero {
outputs,
vec![(address, amount - fee)],
Some(self.empty_address()),
self.rpc.get_fee().await.unwrap()
).unwrap().sign(&mut OsRng, &self.rpc, &Scalar::one()).await.unwrap();
self.rpc.get_fee().await.unwrap(),
)
.unwrap()
.sign(&mut OsRng, &self.rpc, &Scalar::one())
.await
.unwrap();
self.rpc.publish_transaction(&tx).await.unwrap();
self.mine_block().await;
}


@ -27,7 +27,7 @@ pub enum SignError {
#[error("coin had an error {0}")]
CoinError(CoinError),
#[error("network had an error {0}")]
NetworkError(NetworkError)
NetworkError(NetworkError),
}
// Generate a static view key for a given chain in a globally consistent manner
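
A sketch of one plausible derivation matching that comment (an assumption; the function body isn't shown in this hunk): hash a fixed domain separator together with the coin's ID into a scalar, so every node derives the same key.

use frost::curve::Curve;

// The domain string here is illustrative, not necessarily the actual one
fn view_key_sketch<C: Curve>(coin: u64) -> C::F {
  C::hash_to_F(b"Serai View Key Example", &coin.to_le_bytes())
}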


@ -1,17 +1,25 @@
use std::{io::Cursor, sync::{Arc, RwLock}, collections::HashMap};
use std::{
io::Cursor,
sync::{Arc, RwLock},
collections::HashMap,
};
use async_trait::async_trait;
use rand::rngs::OsRng;
use crate::{NetworkError, Network, coin::{Coin, Monero}, wallet::{WalletKeys, MemCoinDb, Wallet}};
use crate::{
NetworkError, Network,
coin::{Coin, Monero},
wallet::{WalletKeys, MemCoinDb, Wallet},
};
#[derive(Clone)]
struct LocalNetwork {
i: u16,
size: u16,
round: usize,
rounds: Arc<RwLock<Vec<HashMap<u16, Cursor<Vec<u8>>>>>>
rounds: Arc<RwLock<Vec<HashMap<u16, Cursor<Vec<u8>>>>>>,
}
impl LocalNetwork {
@ -63,9 +71,7 @@ async fn test_send<C: Coin + Clone>(coin: C, fee: C::Fee) {
for i in 1 ..= threshold {
let mut wallet = Wallet::new(MemCoinDb::new(), coin.clone());
wallet.acknowledge_height(0, height);
wallet.add_keys(
&WalletKeys::new(Arc::try_unwrap(keys.remove(&i).take().unwrap()).unwrap(), 0)
);
wallet.add_keys(&WalletKeys::new(Arc::try_unwrap(keys.remove(&i).take().unwrap()).unwrap(), 0));
wallets.push(wallet);
}
@@ -87,20 +93,20 @@ async fn test_send<C: Coin + Clone>(coin: C, fee: C::Fee) {
let height = coin.get_height().await.unwrap();
wallet.acknowledge_height(1, height - 10);
let signable = wallet.prepare_sends(
1,
vec![(wallet.address(), 10000000000)],
fee
).await.unwrap().1.swap_remove(0);
futures.push(
wallet.attempt_send(network, signable, (1 ..= threshold).into_iter().collect::<Vec<_>>())
);
let signable = wallet
.prepare_sends(1, vec![(wallet.address(), 10000000000)], fee)
.await
.unwrap()
.1
.swap_remove(0);
futures.push(wallet.attempt_send(
network,
signable,
(1 ..= threshold).into_iter().collect::<Vec<_>>(),
));
}
println!(
"{:?}",
hex::encode(futures::future::join_all(futures).await.swap_remove(0).unwrap().0)
);
println!("{:?}", hex::encode(futures::future::join_all(futures).await.swap_remove(0).unwrap().0));
}
#[tokio::test]

View file

@@ -5,13 +5,20 @@ use rand_core::OsRng;
use group::GroupEncoding;
use transcript::{Transcript, RecommendedTranscript};
use frost::{curve::Curve, FrostKeys, sign::{PreprocessMachine, SignMachine, SignatureMachine}};
use frost::{
curve::Curve,
FrostKeys,
sign::{PreprocessMachine, SignMachine, SignatureMachine},
};
use crate::{coin::{CoinError, Output, Coin}, SignError, Network};
use crate::{
coin::{CoinError, Output, Coin},
SignError, Network,
};
pub struct WalletKeys<C: Curve> {
keys: FrostKeys<C>,
creation_height: usize
creation_height: usize,
}
impl<C: Curve> WalletKeys<C> {
@@ -57,17 +64,12 @@ pub struct MemCoinDb {
scanned_height: usize,
// Acknowledged height for a given canonical height
acknowledged_heights: HashMap<usize, usize>,
outputs: HashMap<Vec<u8>, Vec<u8>>
outputs: HashMap<Vec<u8>, Vec<u8>>,
}
impl MemCoinDb {
pub fn new() -> MemCoinDb {
MemCoinDb {
scanned_height: 0,
acknowledged_heights: HashMap::new(),
outputs: HashMap::new()
}
MemCoinDb { scanned_height: 0, acknowledged_heights: HashMap::new(), outputs: HashMap::new() }
}
}
@@ -118,7 +120,7 @@ fn select_inputs<C: Coin>(inputs: &mut Vec<C::Output>) -> (Vec<C::Output>, u64)
fn select_outputs<C: Coin>(
payments: &mut Vec<(C::Address, u64)>,
value: &mut u64
value: &mut u64,
) -> Vec<(C::Address, u64)> {
// Prioritize large payments which will most efficiently use large inputs
payments.sort_by(|a, b| a.1.cmp(&b.1));
@@ -144,7 +146,7 @@ fn select_outputs<C: Coin>(
fn refine_inputs<C: Coin>(
selected: &mut Vec<C::Output>,
inputs: &mut Vec<C::Output>,
mut remaining: u64
mut remaining: u64,
) {
// Drop unused inputs
let mut s = 0;
@@ -180,7 +182,7 @@ fn refine_inputs<C: Coin>(
fn select_inputs_outputs<C: Coin>(
inputs: &mut Vec<C::Output>,
outputs: &mut Vec<(C::Address, u64)>
outputs: &mut Vec<(C::Address, u64)>,
) -> (Vec<C::Output>, Vec<(C::Address, u64)>) {
if inputs.len() == 0 {
return (vec![], vec![]);
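
Most of these selection bodies are elided by the diff. As a sketch of the greedy strategy the visible comments outline — spend the largest inputs first, settle the largest payments first, then drop anything unused — reduced to bare u64 amounts instead of C::Output:

// Sketch: greedily cover `target` using the largest amounts first.
fn greedy_select(mut inputs: Vec<u64>, target: u64) -> Option<Vec<u64>> {
  inputs.sort(); // ascending, so pop() always takes the largest remaining
  let (mut selected, mut sum) = (vec![], 0u64);
  while sum < target {
    let input = inputs.pop()?; // None if the inputs can't cover the target
    sum += input;
    selected.push(input);
  }
  Some(selected)
}
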
@@ -202,21 +204,17 @@ pub struct Wallet<D: CoinDb, C: Coin> {
db: D,
coin: C,
keys: Vec<(Arc<FrostKeys<C::Curve>>, Vec<C::Output>)>,
pending: Vec<(usize, FrostKeys<C::Curve>)>
pending: Vec<(usize, FrostKeys<C::Curve>)>,
}
impl<D: CoinDb, C: Coin> Wallet<D, C> {
pub fn new(db: D, coin: C) -> Wallet<D, C> {
Wallet {
db,
coin,
keys: vec![],
pending: vec![]
}
Wallet { db, coin, keys: vec![], pending: vec![] }
}
pub fn scanned_height(&self) -> usize { self.db.scanned_height() }
pub fn scanned_height(&self) -> usize {
self.db.scanned_height()
}
pub fn acknowledge_height(&mut self, canonical: usize, height: usize) {
self.db.acknowledge_height(canonical, height);
if height > self.db.scanned_height() {
@@ -261,9 +259,13 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
let block = self.coin.get_block(b).await?;
for (keys, outputs) in self.keys.iter_mut() {
outputs.extend(
self.coin.get_outputs(&block, keys.group_key()).await.iter().cloned().filter(
|output| self.db.add_output(output)
)
self
.coin
.get_outputs(&block, keys.group_key())
.await
.iter()
.cloned()
.filter(|output| self.db.add_output(output)),
);
}
@@ -283,7 +285,7 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
&mut self,
canonical: usize,
payments: Vec<(C::Address, u64)>,
fee: C::Fee
fee: C::Fee,
) -> Result<(Vec<(C::Address, u64)>, Vec<C::SignableTransaction>), CoinError> {
if payments.len() == 0 {
return Ok((vec![], vec![]));
@@ -310,27 +312,18 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
// Create the transcript for this transaction
let mut transcript = RecommendedTranscript::new(b"Serai Processor Wallet Send");
transcript.append_message(
b"canonical_height",
&u64::try_from(canonical).unwrap().to_le_bytes()
);
transcript
.append_message(b"canonical_height", &u64::try_from(canonical).unwrap().to_le_bytes());
transcript.append_message(
b"acknowledged_height",
&u64::try_from(acknowledged_height).unwrap().to_le_bytes()
);
transcript.append_message(
b"index",
&u64::try_from(txs.len()).unwrap().to_le_bytes()
&u64::try_from(acknowledged_height).unwrap().to_le_bytes(),
);
transcript.append_message(b"index", &u64::try_from(txs.len()).unwrap().to_le_bytes());
let tx = self.coin.prepare_send(
keys.clone(),
transcript,
acknowledged_height,
inputs,
&outputs,
fee
).await?;
let tx = self
.coin
.prepare_send(keys.clone(), transcript, acknowledged_height, inputs, &outputs, fee)
.await?;
// self.db.save_tx(tx) // TODO
txs.push(tx);
}
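
The transcript above is what makes each signable transaction unambiguous: it binds the domain label, both heights, and the transaction's index within the batch. The same pattern in isolation, using only the calls already visible in this hunk:

use transcript::{Transcript, RecommendedTranscript};

// Sketch: domain separate by label, then append every input as fixed-width
// little-endian bytes under its own message label.
fn send_transcript(canonical: u64, acknowledged: u64, index: u64) -> RecommendedTranscript {
  let mut transcript = RecommendedTranscript::new(b"Example Wallet Send");
  transcript.append_message(b"canonical_height", &canonical.to_le_bytes());
  transcript.append_message(b"acknowledged_height", &acknowledged.to_le_bytes());
  transcript.append_message(b"index", &index.to_le_bytes());
  transcript
}
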
@@ -343,12 +336,10 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
&mut self,
network: &mut N,
prepared: C::SignableTransaction,
included: Vec<u16>
included: Vec<u16>,
) -> Result<(Vec<u8>, Vec<<C::Output as Output>::Id>), SignError> {
let attempt = self.coin.attempt_send(
prepared,
&included
).await.map_err(|e| SignError::CoinError(e))?;
let attempt =
self.coin.attempt_send(prepared, &included).await.map_err(|e| SignError::CoinError(e))?;
let (attempt, commitments) = attempt.preprocess(&mut OsRng);
let commitments = network.round(commitments).await.map_err(|e| SignError::NetworkError(e))?;

View file

@@ -1,4 +1,4 @@
use std::{sync::Arc, time::Duration};
use std::{marker::Sync, sync::Arc, time::Duration};
use substrate_prometheus_endpoint::Registry;
@@ -27,11 +27,8 @@ impl sc_executor::NativeExecutionDispatch for ExecutorDispatch {
}
}
pub type FullClient = sc_service::TFullClient<
Block,
RuntimeApi,
NativeElseWasmExecutor<ExecutorDispatch>
>;
pub type FullClient =
sc_service::TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<ExecutorDispatch>>;
type Db = sp_trie::PrefixedMemoryDB<sp_runtime::traits::BlakeTwo256>;
@@ -39,45 +36,45 @@ pub fn import_queue<S: sp_consensus::SelectChain<Block> + 'static>(
task_manager: &TaskManager,
client: Arc<FullClient>,
select_chain: S,
registry: Option<&Registry>
registry: Option<&Registry>,
) -> Result<sc_pow::PowImportQueue<Block, Db>, sp_consensus::Error> {
let pow_block_import = Box::new(
sc_pow::PowBlockImport::new(
client.clone(),
client.clone(),
algorithm::AcceptAny,
0,
select_chain.clone(),
|_, _| { async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) } },
sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone())
)
);
let pow_block_import = Box::new(sc_pow::PowBlockImport::new(
client.clone(),
client.clone(),
algorithm::AcceptAny,
0,
select_chain.clone(),
|_, _| async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) },
sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone()),
));
sc_pow::import_queue(
pow_block_import,
None,
algorithm::AcceptAny,
&task_manager.spawn_essential_handle(),
registry
registry,
)
}
// Produce a block every 5 seconds
async fn produce<
Block: sp_api::BlockT<Hash = sp_core::H256>,
Algorithm: sc_pow::PowAlgorithm<Block, Difficulty = sp_core::U256> +
'static + Send + std::marker::Sync,
Algorithm: sc_pow::PowAlgorithm<Block, Difficulty = sp_core::U256> + Send + Sync + 'static,
C: sp_api::ProvideRuntimeApi<Block> + 'static,
Link: sc_consensus::JustificationSyncLink<Block> + 'static,
P: Send + 'static
>(worker: sc_pow::MiningHandle<Block, Algorithm, C, Link, P>)
where sp_api::TransactionFor<C, Block>: Send + 'static {
P: Send + 'static,
>(
worker: sc_pow::MiningHandle<Block, Algorithm, C, Link, P>,
) where
sp_api::TransactionFor<C, Block>: Send + 'static,
{
loop {
let worker_clone = worker.clone();
std::thread::spawn(move || {
tokio::runtime::Runtime::new().unwrap().handle().block_on(
async { worker_clone.submit(vec![]).await; }
);
tokio::runtime::Runtime::new().unwrap().handle().block_on(async {
worker_clone.submit(vec![]).await;
});
});
tokio::time::sleep(Duration::from_secs(5)).await;
}
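
Each iteration offloads the async submit onto a fresh OS thread with its own single-use Tokio runtime, presumably so a slow or blocking submission can never delay the loop's 5-second cadence. The pattern in isolation:

// Sketch: drive one future to completion on a dedicated thread, detached
// from the caller's runtime.
fn run_detached<F: std::future::Future + Send + 'static>(fut: F) {
  std::thread::spawn(move || {
    // A throwaway runtime; fine for infrequent work, wasteful in a hot loop.
    tokio::runtime::Runtime::new().unwrap().block_on(fut);
  });
}
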
@@ -90,14 +87,14 @@ pub fn authority<S: sp_consensus::SelectChain<Block> + 'static>(
network: Arc<sc_network::NetworkService<Block, <Block as sp_runtime::traits::Block>::Hash>>,
pool: Arc<sc_transaction_pool::FullPool<Block, FullClient>>,
select_chain: S,
registry: Option<&Registry>
registry: Option<&Registry>,
) {
let proposer = sc_basic_authorship::ProposerFactory::new(
task_manager.spawn_handle(),
client.clone(),
pool,
registry,
None
None,
);
let can_author_with = sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone());
@@ -108,8 +105,8 @@ pub fn authority<S: sp_consensus::SelectChain<Block> + 'static>(
algorithm::AcceptAny,
0, // Block to start checking inherents at
select_chain.clone(),
move |_, _| { async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) } },
sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone())
move |_, _| async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) },
sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone()),
));
let (worker, worker_task) = sc_pow::start_mining_worker(
@@ -121,17 +118,13 @@ pub fn authority<S: sp_consensus::SelectChain<Block> + 'static>(
network.clone(),
network.clone(),
None,
move |_, _| { async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) } },
move |_, _| async { Ok(sp_timestamp::InherentDataProvider::from_system_time()) },
Duration::from_secs(1),
Duration::from_secs(2),
can_author_with
can_author_with,
);
task_manager
.spawn_essential_handle()
.spawn_blocking("pow", None, worker_task);
task_manager.spawn_essential_handle().spawn_blocking("pow", None, worker_task);
task_manager
.spawn_essential_handle()
.spawn("producer", None, produce(worker));
task_manager.spawn_essential_handle().spawn("producer", None, produce(worker));
}

View file

@@ -5,9 +5,7 @@ use sp_core::{sr25519, Pair, Public};
use sp_runtime::traits::IdentifyAccount;
use serai_runtime::{
WASM_BINARY, AccountId, Signature, GenesisConfig, SystemConfig, BalancesConfig
};
use serai_runtime::{WASM_BINARY, AccountId, Signature, GenesisConfig, SystemConfig, BalancesConfig};
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;
type AccountPublic = <Signature as Verify>::Signer;
@@ -17,58 +15,53 @@ fn get_from_seed<TPublic: Public>(seed: &'static str) -> <TPublic::Pair as Pair>
}
fn get_account_id_from_seed<TPublic: Public>(seed: &'static str) -> AccountId
where AccountPublic: From<<TPublic::Pair as Pair>::Public> {
where
AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()
}
fn testnet_genesis(
wasm_binary: &[u8],
endowed_accounts: Vec<AccountId>
) -> GenesisConfig {
fn testnet_genesis(wasm_binary: &[u8], endowed_accounts: Vec<AccountId>) -> GenesisConfig {
GenesisConfig {
system: SystemConfig {
code: wasm_binary.to_vec(),
},
system: SystemConfig { code: wasm_binary.to_vec() },
balances: BalancesConfig {
balances: endowed_accounts.iter().cloned().map(|k| (k, 1 << 60)).collect(),
},
transaction_payment: Default::default()
transaction_payment: Default::default(),
}
}
pub fn development_config() -> Result<ChainSpec, &'static str> {
let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm not available")?;
Ok(
ChainSpec::from_genesis(
// Name
"Development Network",
// ID
"dev",
ChainType::Development,
|| {
testnet_genesis(
wasm_binary,
vec![
get_account_id_from_seed::<sr25519::Public>("Alice"),
get_account_id_from_seed::<sr25519::Public>("Bob"),
get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
]
)
},
// Bootnodes
vec![],
// Telemetry
None,
// Protocol ID
Some("serai"),
// Fork ID
None,
// Properties
None,
// Extensions
None
)
)
Ok(ChainSpec::from_genesis(
// Name
"Development Network",
// ID
"dev",
ChainType::Development,
|| {
testnet_genesis(
wasm_binary,
vec![
get_account_id_from_seed::<sr25519::Public>("Alice"),
get_account_id_from_seed::<sr25519::Public>("Bob"),
get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
],
)
},
// Bootnodes
vec![],
// Telemetry
None,
// Protocol ID
Some("serai"),
// Fork ID
None,
// Properties
None,
// Extensions
None,
))
}

View file

@@ -10,7 +10,7 @@ use crate::{
chain_spec,
cli::{Cli, Subcommand},
command_helper::{BenchmarkExtrinsicBuilder, inherent_benchmark_data},
service
service,
};
impl SubstrateCli for Cli {
@@ -41,7 +41,7 @@ impl SubstrateCli for Cli {
fn load_spec(&self, id: &str) -> Result<Box<dyn sc_service::ChainSpec>, String> {
match id {
"dev" => Ok(Box::new(chain_spec::development_config()?)),
_ => panic!("Unknown network ID")
_ => panic!("Unknown network ID"),
}
}
@@ -58,119 +58,83 @@ pub fn run() -> sc_cli::Result<()> {
Some(Subcommand::BuildSpec(cmd)) => {
cli.create_runner(cmd)?.sync_run(|config| cmd.run(config.chain_spec, config.network))
},
}
Some(Subcommand::CheckBlock(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents {
client,
task_manager,
import_queue,
..
} = service::new_partial(&config)?;
Ok((cmd.run(client, import_queue), task_manager))
})
},
Some(Subcommand::CheckBlock(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, import_queue, .. } =
service::new_partial(&config)?;
Ok((cmd.run(client, import_queue), task_manager))
}),
Some(Subcommand::ExportBlocks(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;
Ok((cmd.run(client, config.database), task_manager))
})
},
Some(Subcommand::ExportBlocks(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;
Ok((cmd.run(client, config.database), task_manager))
}),
Some(Subcommand::ExportState(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;
Ok((cmd.run(client, config.chain_spec), task_manager))
})
},
Some(Subcommand::ExportState(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;
Ok((cmd.run(client, config.chain_spec), task_manager))
}),
Some(Subcommand::ImportBlocks(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents {
client,
task_manager,
import_queue,
..
} = service::new_partial(&config)?;
Ok((cmd.run(client, import_queue), task_manager))
})
},
Some(Subcommand::ImportBlocks(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, import_queue, .. } =
service::new_partial(&config)?;
Ok((cmd.run(client, import_queue), task_manager))
}),
Some(Subcommand::PurgeChain(cmd)) => {
cli.create_runner(cmd)?.sync_run(|config| cmd.run(config.database))
},
}
Some(Subcommand::Revert(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents {
client,
task_manager,
backend,
..
} = service::new_partial(&config)?;
Ok((cmd.run(client, backend, None), task_manager))
})
},
Some(Subcommand::Revert(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
let PartialComponents { client, task_manager, backend, .. } = service::new_partial(&config)?;
Ok((cmd.run(client, backend, None), task_manager))
}),
Some(Subcommand::Benchmark(cmd)) => {
cli.create_runner(cmd)?.sync_run(|config| {
match cmd {
BenchmarkCmd::Pallet(cmd) => {
cmd.run::<Block, service::ExecutorDispatch>(config)
},
Some(Subcommand::Benchmark(cmd)) => cli.create_runner(cmd)?.sync_run(|config| match cmd {
BenchmarkCmd::Pallet(cmd) => cmd.run::<Block, service::ExecutorDispatch>(config),
BenchmarkCmd::Block(cmd) => {
cmd.run(service::new_partial(&config)?.client)
},
BenchmarkCmd::Block(cmd) => cmd.run(service::new_partial(&config)?.client),
BenchmarkCmd::Storage(cmd) => {
let PartialComponents { client, backend, .. } = service::new_partial(&config)?;
cmd.run(config, client, backend.expose_db(), backend.expose_storage())
},
BenchmarkCmd::Storage(cmd) => {
let PartialComponents { client, backend, .. } = service::new_partial(&config)?;
cmd.run(config, client, backend.expose_db(), backend.expose_storage())
}
BenchmarkCmd::Overhead(cmd) => {
let client = service::new_partial(&config)?.client;
cmd.run(
config,
client.clone(),
inherent_benchmark_data()?,
Arc::new(BenchmarkExtrinsicBuilder::new(client))
)
},
BenchmarkCmd::Overhead(cmd) => {
let client = service::new_partial(&config)?.client;
cmd.run(
config,
client.clone(),
inherent_benchmark_data()?,
Arc::new(BenchmarkExtrinsicBuilder::new(client)),
)
}
BenchmarkCmd::Machine(cmd) => cmd.run(&config, SUBSTRATE_REFERENCE_HARDWARE.clone())
}
})
},
BenchmarkCmd::Machine(cmd) => cmd.run(&config, SUBSTRATE_REFERENCE_HARDWARE.clone()),
}),
#[cfg(feature = "try-runtime")]
Some(Subcommand::TryRuntime(cmd)) => {
cli.create_runner(cmd)?.async_run(|config| {
Ok(
(
cmd.run::<Block, service::ExecutorDispatch>(config),
sc_service::TaskManager::new(
config.tokio_handle.clone(),
config.prometheus_config.as_ref().map(|cfg| &cfg.registry)
).map_err(|e| sc_cli::Error::Service(sc_service::Error::Prometheus(e)))?
)
Some(Subcommand::TryRuntime(cmd)) => cli.create_runner(cmd)?.async_run(|config| {
Ok((
cmd.run::<Block, service::ExecutorDispatch>(config),
sc_service::TaskManager::new(
config.tokio_handle.clone(),
config.prometheus_config.as_ref().map(|cfg| &cfg.registry),
)
})
},
.map_err(|e| sc_cli::Error::Service(sc_service::Error::Prometheus(e)))?,
))
}),
#[cfg(not(feature = "try-runtime"))]
Some(Subcommand::TryRuntime) => Err("TryRuntime wasn't enabled when building the node".into()),
Some(Subcommand::ChainInfo(cmd)) => {
cli.create_runner(cmd)?.sync_run(|config| cmd.run::<Block>(&config))
},
None => {
cli.create_runner(&cli.run)?.run_node_until_exit(|config| async {
service::new_full(config).map_err(sc_cli::Error::Service)
})
}
None => cli.create_runner(&cli.run)?.run_node_until_exit(|config| async {
service::new_full(config).map_err(sc_cli::Error::Service)
}),
}
}

View file

@@ -15,7 +15,7 @@ use runtime::SystemCall;
use crate::service::FullClient;
pub struct BenchmarkExtrinsicBuilder {
client: Arc<FullClient>
client: Arc<FullClient>,
}
impl BenchmarkExtrinsicBuilder {
@@ -26,16 +26,12 @@ impl frame_benchmarking_cli::ExtrinsicBuilder for BenchmarkExtrinsicBuilder {
impl frame_benchmarking_cli::ExtrinsicBuilder for BenchmarkExtrinsicBuilder {
fn remark(&self, nonce: u32) -> std::result::Result<OpaqueExtrinsic, &'static str> {
Ok(
OpaqueExtrinsic::from(
create_benchmark_extrinsic(
self.client.as_ref(),
Sr25519Keyring::Bob.pair(),
SystemCall::remark { remark: vec![] }.into(),
nonce
)
)
)
Ok(OpaqueExtrinsic::from(create_benchmark_extrinsic(
self.client.as_ref(),
Sr25519Keyring::Bob.pair(),
SystemCall::remark { remark: vec![] }.into(),
nonce,
)))
}
}
@@ -50,17 +46,15 @@ pub fn create_benchmark_extrinsic(
frame_system::CheckSpecVersion::<runtime::Runtime>::new(),
frame_system::CheckTxVersion::<runtime::Runtime>::new(),
frame_system::CheckGenesis::<runtime::Runtime>::new(),
frame_system::CheckEra::<runtime::Runtime>::from(
sp_runtime::generic::Era::mortal(
u64::from(
runtime::BlockHashCount::get().checked_next_power_of_two().map(|c| c / 2).unwrap_or(2)
),
client.chain_info().best_number.into(),
)
),
frame_system::CheckEra::<runtime::Runtime>::from(sp_runtime::generic::Era::mortal(
u64::from(
runtime::BlockHashCount::get().checked_next_power_of_two().map(|c| c / 2).unwrap_or(2),
),
client.chain_info().best_number.into(),
)),
frame_system::CheckNonce::<runtime::Runtime>::from(nonce),
frame_system::CheckWeight::<runtime::Runtime>::new(),
pallet_transaction_payment::ChargeTransactionPayment::<runtime::Runtime>::from(0)
pallet_transaction_payment::ChargeTransactionPayment::<runtime::Runtime>::from(0),
);
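
Era periods are powers of two and must not exceed BlockHashCount (the birth block's hash has to still be on-chain to validate the extrinsic), hence taking half of the next power of two above. With Substrate's common default of BlockHashCount = 2400 — an assumption here, not a value read from this diff — the arithmetic works out as:

fn main() {
  // 2400's next power of two is 4096; half of that is a 2048-block lifetime.
  let period = 2400u64.checked_next_power_of_two().map(|c| c / 2).unwrap_or(2);
  assert_eq!(period, 2048);
}
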
runtime::UncheckedExtrinsic::new_signed(
@@ -79,8 +73,9 @@ pub fn create_benchmark_extrinsic(
(),
(),
(),
)
).using_encoded(|e| sender.sign(e))
),
)
.using_encoded(|e| sender.sign(e)),
),
extra,
)

View file

@@ -14,21 +14,25 @@ use serai_runtime::{opaque::Block, AccountId, Balance, Index};
pub struct FullDeps<C, P> {
pub client: Arc<C>,
pub pool: Arc<P>,
pub deny_unsafe: DenyUnsafe
pub deny_unsafe: DenyUnsafe,
}
pub fn create_full<
C: ProvideRuntimeApi<Block> +
HeaderBackend<Block> + HeaderMetadata<Block, Error = BlockchainError> +
Send + Sync + 'static,
P: TransactionPool + 'static
C: ProvideRuntimeApi<Block>
+ HeaderBackend<Block>
+ HeaderMetadata<Block, Error = BlockchainError>
+ Send
+ Sync
+ 'static,
P: TransactionPool + 'static,
>(
deps: FullDeps<C, P>
deps: FullDeps<C, P>,
) -> Result<RpcModule<()>, Box<dyn std::error::Error + Send + Sync>>
where C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index> +
pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance> +
BlockBuilder<Block> {
where
C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>
+ pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>
+ BlockBuilder<Block>,
{
use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApiServer};
use substrate_frame_rpc_system::{System, SystemApiServer};
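
The remainder of create_full is elided by this hunk. Consistent with the two imports shown, the usual Substrate pattern — sketched here rather than quoted from this file — is to merge each pallet's RPC server into a single module:

// Sketch of the function's likely tail: one RpcModule aggregating the
// system and transaction-payment RPC servers.
let mut module = RpcModule::new(());
let FullDeps { client, pool, deny_unsafe } = deps;
module.merge(System::new(client.clone(), pool, deny_unsafe).into_rpc())?;
module.merge(TransactionPayment::new(client).into_rpc())?;
Ok(module)
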

View file

@@ -21,7 +21,7 @@ type PartialComponents = sc_service::PartialComponents<
pub fn new_partial(config: &Configuration) -> Result<PartialComponents, ServiceError> {
if config.keystore_remote.is_some() {
return Err(ServiceError::Other("Remote Keystores are not supported".to_string()))
return Err(ServiceError::Other("Remote Keystores are not supported".to_string()));
}
let telemetry = config
@@ -39,19 +39,15 @@ pub fn new_partial(config: &Configuration) -> Result<PartialComponents, ServiceE
config.wasm_method,
config.default_heap_pages,
config.max_runtime_instances,
config.runtime_cache_size
config.runtime_cache_size,
);
let (
client,
backend,
keystore_container,
task_manager
) = sc_service::new_full_parts::<Block, RuntimeApi, _>(
config,
telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),
executor
)?;
let (client, backend, keystore_container, task_manager) =
sc_service::new_full_parts::<Block, RuntimeApi, _>(
config,
telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),
executor,
)?;
let client = Arc::new(client);
let telemetry = telemetry.map(|(worker, telemetry)| {
@@ -66,28 +62,26 @@ pub fn new_partial(config: &Configuration) -> Result<PartialComponents, ServiceE
config.role.is_authority().into(),
config.prometheus_registry(),
task_manager.spawn_essential_handle(),
client.clone()
client.clone(),
);
let import_queue = serai_consensus::import_queue(
&task_manager,
client.clone(),
select_chain.clone(),
config.prometheus_registry()
config.prometheus_registry(),
)?;
Ok(
sc_service::PartialComponents {
client,
backend,
task_manager,
import_queue,
keystore_container,
select_chain,
transaction_pool,
other: telemetry,
}
)
Ok(sc_service::PartialComponents {
client,
backend,
task_manager,
import_queue,
keystore_container,
select_chain,
transaction_pool,
other: telemetry,
})
}
pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
@@ -99,11 +93,11 @@ pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
keystore_container,
select_chain,
other: mut telemetry,
transaction_pool
transaction_pool,
} = new_partial(&config)?;
let (network, system_rpc_tx, network_starter) = sc_service::build_network(
sc_service::BuildNetworkParams {
let (network, system_rpc_tx, network_starter) =
sc_service::build_network(sc_service::BuildNetworkParams {
config: &config,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
@@ -111,8 +105,7 @@ pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
import_queue,
block_announce_validator_builder: None,
warp_sync: None,
}
)?;
})?;
if config.offchain_worker.enabled {
sc_service::build_offchain_workers(
@@ -130,29 +123,28 @@ pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
let client = client.clone();
let pool = transaction_pool.clone();
Box::new(
move |deny_unsafe, _| {
crate::rpc::create_full(
crate::rpc::FullDeps { client: client.clone(), pool: pool.clone(), deny_unsafe }
).map_err(Into::into)
}
)
Box::new(move |deny_unsafe, _| {
crate::rpc::create_full(crate::rpc::FullDeps {
client: client.clone(),
pool: pool.clone(),
deny_unsafe,
})
.map_err(Into::into)
})
};
sc_service::spawn_tasks(
sc_service::SpawnTasksParams {
network: network.clone(),
client: client.clone(),
keystore: keystore_container.sync_keystore(),
task_manager: &mut task_manager,
transaction_pool: transaction_pool.clone(),
rpc_builder: rpc_extensions_builder,
backend,
system_rpc_tx,
config,
telemetry: telemetry.as_mut(),
}
)?;
sc_service::spawn_tasks(sc_service::SpawnTasksParams {
network: network.clone(),
client: client.clone(),
keystore: keystore_container.sync_keystore(),
task_manager: &mut task_manager,
transaction_pool: transaction_pool.clone(),
rpc_builder: rpc_extensions_builder,
backend,
system_rpc_tx,
config,
telemetry: telemetry.as_mut(),
})?;
if role.is_authority() {
serai_consensus::authority(
@@ -161,7 +153,7 @@ pub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {
network,
transaction_pool,
select_chain,
prometheus_registry.as_ref()
prometheus_registry.as_ref(),
);
}

View file

@@ -1,9 +1,5 @@
use substrate_wasm_builder::WasmBuilder;
fn main() {
WasmBuilder::new()
.with_current_project()
.export_heap_base()
.import_memory()
.build()
WasmBuilder::new().with_current_project().export_heap_base().import_memory().build()
}

View file

@@ -10,8 +10,7 @@ use sp_runtime::{
create_runtime_str, generic, impl_opaque_keys,
traits::{AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, Verify},
transaction_validity::{TransactionSource, TransactionValidity},
ApplyExtrinsicResult, MultiSignature,
Perbill
ApplyExtrinsicResult, MultiSignature, Perbill,
};
use sp_std::prelude::*;
#[cfg(feature = "std")]
@@ -20,8 +19,11 @@ use sp_version::RuntimeVersion;
use frame_support::{
traits::{ConstU8, ConstU32, ConstU64},
weights::{constants::{RocksDbWeight, WEIGHT_PER_SECOND}, IdentityFee},
parameter_types, construct_runtime
weights::{
constants::{RocksDbWeight, WEIGHT_PER_SECOND},
IdentityFee,
},
parameter_types, construct_runtime,
};
pub use frame_system::Call as SystemCall;
pub use pallet_timestamp::Call as TimestampCall;
@@ -70,7 +72,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
impl_version: 1,
apis: RUNTIME_API_VERSIONS,
transaction_version: 1,
state_version: 1
state_version: 1,
};
pub const MILLISECS_PER_BLOCK: u64 = 6000;