Implement Lelantus Spark's Chaum-Pedersen proof with a FROST algorithm

Luke Parker 2022-05-31 02:09:09 -04:00
parent e504266c80
commit 6d9221d56c
GPG key ID: F9F1386DB1E119B6
11 changed files with 637 additions and 0 deletions
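
Summary of the statement, as read from the prove/verify code and the tests below: for each (S_i, T_i) pair in the statement, the prover shows knowledge of (x_i, y, z_i), with a single y shared across all pairs, such that

S_i = x_i * F + y * G + z_i * H and U = x_i * T_i + y * G

With Fiat-Shamir challenge c taken over the generators, the statement, and the commitments (A1, A2_i), ChaumProof::verify checks the two batched equations

A1 + sum_i c^(i+1) * S_i == sum_i t1_i * F + t2 * G + t3 * H
sum_i (A2_i + c^(i+1) * U) == sum_i t1_i * T_i + t2 * G

The FROST algorithm reproduces the same proof in a threshold setting, with y being the group key's secret and t2 assembled from the participants' additive shares.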


@@ -6,5 +6,6 @@ members = [
"crypto/frost",
"crypto/dalek-ff-group",
"coins/monero",
"coins/firo",
"processor",
]

30
coins/firo/Cargo.toml Normal file

@@ -0,0 +1,30 @@
[package]
name = "firo"
version = "0.1.0"
description = "A modern Firo wallet library"
license = "MIT"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[dependencies]
lazy_static = "1"
thiserror = "1"
rand_core = "0.6"
rand_chacha = { version = "0.3", optional = true }
sha2 = "0.10"
ff = "0.11"
group = "0.11"
k256 = { version = "0.10", features = ["arithmetic"] }
blake2 = { version = "0.10", optional = true }
transcript = { path = "../../crypto/transcript", optional = true }
frost = { path = "../../crypto/frost", optional = true }
[dev-dependencies]
rand = "0.8"
[features]
multisig = ["blake2", "transcript", "frost", "rand_chacha"]

4
coins/firo/src/lib.rs Normal file

@@ -0,0 +1,4 @@
pub mod spark;
#[cfg(test)]
mod tests;


@@ -0,0 +1,183 @@
#![allow(non_snake_case)]
use rand_core::{RngCore, CryptoRng};
use sha2::{Digest, Sha512};
use ff::Field;
use group::{Group, GroupEncoding};
use k256::{
elliptic_curve::{bigint::{ArrayEncoding, U512}, ops::Reduce},
Scalar, ProjectivePoint
};
use crate::spark::{F, G, H, U, GENERATORS_TRANSCRIPT};
#[cfg(feature = "frost")]
mod multisig;
#[cfg(feature = "frost")]
pub use multisig::ChaumMultisig;
#[derive(Clone, Debug)]
pub struct ChaumStatement {
context: Vec<u8>,
S_T: Vec<(ProjectivePoint, ProjectivePoint)>,
}
impl ChaumStatement {
pub fn new(context: Vec<u8>, S_T: Vec<(ProjectivePoint, ProjectivePoint)>) -> ChaumStatement {
ChaumStatement { context, S_T }
}
fn transcript(&self) -> Vec<u8> {
let mut transcript = self.context.clone();
for S_T in &self.S_T {
transcript.extend(S_T.0.to_bytes());
transcript.extend(S_T.1.to_bytes());
}
transcript
}
}
#[derive(Clone, Debug)]
pub struct ChaumWitness {
statement: ChaumStatement,
xz: Vec<(Scalar, Scalar)>
}
impl ChaumWitness {
pub fn new(statement: ChaumStatement, xz: Vec<(Scalar, Scalar)>) -> ChaumWitness {
assert!(statement.S_T.len() != 0);
assert_eq!(statement.S_T.len(), xz.len());
ChaumWitness { statement, xz }
}
}
#[derive(Clone, PartialEq, Debug)]
pub(crate) struct ChaumCommitments {
A1: ProjectivePoint,
A2: Vec<ProjectivePoint>
}
impl ChaumCommitments {
fn transcript(&self) -> Vec<u8> {
let mut transcript = Vec::with_capacity((self.A2.len() + 1) * 33);
transcript.extend(self.A1.to_bytes());
for A in &self.A2 {
transcript.extend(A.to_bytes());
}
transcript
}
}
#[derive(Clone, PartialEq, Debug)]
pub struct ChaumProof {
commitments: ChaumCommitments,
t1: Vec<Scalar>,
t2: Scalar,
t3: Scalar
}
impl ChaumProof {
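// Samples the per-pair nonces r_i and the H-term nonce t, and builds the initial commitments
// A1 = (sum of r_i) * F + t * H and A2_i = r_i * T_i. The G terms are added afterwards, either by
// the single-party prover (random s_i) or by the multisig from the aggregated FROST nonces.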
fn r_t_commitments<R: RngCore + CryptoRng>(
rng: &mut R,
witness: &ChaumWitness
) -> (Vec<Scalar>, Scalar, ChaumCommitments) {
let len = witness.xz.len();
let mut rs = Vec::with_capacity(len);
let mut r_sum = Scalar::zero();
let mut commitments = ChaumCommitments {
A1: ProjectivePoint::IDENTITY,
A2: Vec::with_capacity(len)
};
for (_, T) in &witness.statement.S_T {
let r = Scalar::random(&mut *rng);
r_sum += r;
commitments.A2.push(T * &r);
rs.push(r);
}
let t = Scalar::random(&mut *rng);
commitments.A1 = (*F * r_sum) + (*H * t);
(rs, t, commitments)
}
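// Derives the challenge c and computes the responses t1_i = r_i + c^(i+1) * x_i,
// t2 = sum_i (nonce_i + c^(i+1) * y), and t3 = t + sum_i c^(i+1) * z_i, where the nonces are the
// prover's G-term nonces (the s_i in prove, or the bound FROST nonces in the multisig).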
fn t_prove(
witness: &ChaumWitness,
rs: &[Scalar],
mut t3: Scalar,
commitments: ChaumCommitments,
nonces: &[Scalar],
y: &Scalar
) -> (Scalar, ChaumProof) {
let challenge = ChaumProof::challenge(&witness.statement, &commitments);
let mut t1 = Vec::with_capacity(rs.len());
let mut t2 = Scalar::zero();
let mut accum = challenge;
for (i, (x, z)) in witness.xz.iter().enumerate() {
t1.push(rs[i] + (accum * x));
t2 += nonces[i] + (accum * y);
t3 += accum * z;
accum *= challenge;
}
(challenge, ChaumProof { commitments, t1, t2, t3 })
}
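// Fiat-Shamir challenge: SHA-512 over a domain label, the generators, the statement, and the
// commitments, wide-reduced into a secp256k1 scalar.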
fn challenge(statement: &ChaumStatement, commitments: &ChaumCommitments) -> Scalar {
let mut transcript = b"Chaum".to_vec();
transcript.extend(&*GENERATORS_TRANSCRIPT);
transcript.extend(&statement.transcript());
transcript.extend(&commitments.transcript());
Scalar::from_uint_reduced(U512::from_be_byte_array(Sha512::digest(transcript)))
}
pub fn prove<R: RngCore + CryptoRng>(
rng: &mut R,
witness: &ChaumWitness,
y: &Scalar
) -> ChaumProof {
let len = witness.xz.len();
let (rs, t3, mut commitments) = Self::r_t_commitments(rng, witness);
let mut s_sum = Scalar::zero();
let mut ss = Vec::with_capacity(len);
for i in 0 .. len {
let s = Scalar::random(&mut *rng);
s_sum += s;
commitments.A2[i] += *G * s;
ss.push(s);
}
commitments.A1 += *G * s_sum;
let (_, proof) = Self::t_prove(&witness, &rs, t3, commitments, &ss, y);
proof
}
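// Recomputes the challenge and checks the two batched verification equations, returning true only
// if both accumulators reduce to the identity.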
pub fn verify(&self, statement: &ChaumStatement) -> bool {
let len = statement.S_T.len();
assert_eq!(len, self.commitments.A2.len());
assert_eq!(len, self.t1.len());
let challenge = Self::challenge(&statement, &self.commitments);
let mut one = self.commitments.A1 - ((*G * self.t2) + (*H * self.t3));
let mut two = -(*G * self.t2);
let mut accum = challenge;
for i in 0 .. len {
one += statement.S_T[i].0 * accum;
one -= *F * self.t1[i];
two += self.commitments.A2[i] + (*U * accum);
two -= statement.S_T[i].1 * self.t1[i];
accum *= challenge;
}
one.is_identity().into() && two.is_identity().into()
}
}


@@ -0,0 +1,202 @@
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;
use ff::Field;
use group::GroupEncoding;
use k256::{Scalar, ProjectivePoint};
use transcript::Transcript as _;
use frost::{CurveError, Curve, FrostError, MultisigView, algorithm::Algorithm};
use crate::spark::{
G, GENERATORS_TRANSCRIPT,
frost::{Transcript, Secp256k1},
chaum::{ChaumWitness, ChaumProof}
};
#[derive(Clone)]
pub struct ChaumMultisig {
transcript: Transcript,
len: usize,
witness: ChaumWitness,
// The following is ugly as hell as it's re-implementing the nonce code FROST is meant to handle
// Using FROST's provided SchnorrSignature algorithm multiple times would work, handling nonces
// for us, except you need the commitments for the challenge which means you need the binding
// factors, which means then you're re-calculating those, and...
// The best solution would be for FROST itself to support multi-nonce protocols, if there is
// sufficient reason for it to
additional_nonces: Vec<(Scalar, Scalar)>,
nonces: HashMap<u16, Vec<(ProjectivePoint, ProjectivePoint)>>,
sum: Vec<(ProjectivePoint, ProjectivePoint)>,
challenge: Scalar,
binding: Scalar,
proof: Option<ChaumProof>
}
impl ChaumMultisig {
pub fn new(mut transcript: Transcript, witness: ChaumWitness) -> ChaumMultisig {
transcript.domain_separate(b"Chaum");
transcript.append_message(b"generators", &*GENERATORS_TRANSCRIPT);
transcript.append_message(b"statement", &witness.statement.transcript());
for (x, z) in &witness.xz {
transcript.append_message(b"x", &x.to_bytes());
transcript.append_message(b"z", &z.to_bytes());
}
let len = witness.xz.len();
ChaumMultisig {
transcript,
len,
witness,
additional_nonces: Vec::with_capacity(len - 1),
nonces: HashMap::new(),
sum: vec![(ProjectivePoint::IDENTITY, ProjectivePoint::IDENTITY); len - 1],
binding: Scalar::zero(),
challenge: Scalar::zero(),
proof: None
}
}
}
impl Algorithm<Secp256k1> for ChaumMultisig {
type Transcript = Transcript;
type Signature = ChaumProof;
fn transcript(&mut self) -> &mut Self::Transcript {
&mut self.transcript
}
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
_: &MultisigView<Secp256k1>,
_: &[Scalar; 2],
) -> Vec<u8> {
// While FROST will provide D_0 and E_0, we need D_i and E_i
let mut res = Vec::with_capacity((self.len - 1) * 33);
for _ in 1 .. self.len {
let d = Scalar::random(&mut *rng);
let e = Scalar::random(&mut *rng);
res.extend(&(*G * d).to_bytes());
res.extend(&(*G * e).to_bytes());
self.additional_nonces.push((d, e));
}
res
}
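// Parses a participant's additional (D, E) nonce commitments from their addendum, binds them into
// the transcript, accumulates them into the per-pair sums, and records them per participant for
// later share verification.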
fn process_addendum(
&mut self,
_: &MultisigView<Secp256k1>,
l: u16,
_: &[ProjectivePoint; 2],
addendum: &[u8],
) -> Result<(), FrostError> {
let mut nonces = Vec::with_capacity(self.len - 1);
for i in 0 .. (self.len - 1) {
let p = i * 2;
let (D, E) = (|| Ok((
Secp256k1::G_from_slice(&addendum[(p * 33) .. ((p + 1) * 33)])?,
Secp256k1::G_from_slice(&addendum[((p + 1) * 33) .. ((p + 2) * 33)])?
)))().map_err(|_: CurveError| FrostError::InvalidCommitment(l))?;
self.transcript.append_message(b"participant", &l.to_be_bytes());
self.transcript.append_message(b"commitment_D_additional", &D.to_bytes());
self.transcript.append_message(b"commitment_E_additional", &E.to_bytes());
self.sum[i].0 += D;
self.sum[i].1 += E;
nonces.push((D, E));
}
self.nonces.insert(l, nonces);
Ok(())
}
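// Folds the aggregated nonces into the A1/A2 commitments (FROST's own D + E * binding pair covers
// the first S/T pair, the addendum pairs cover the rest), then runs t_prove with this signer's
// secret share as y, returning their additive share of t2.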
fn sign_share(
&mut self,
view: &MultisigView<Secp256k1>,
sum_0: ProjectivePoint,
binding: Scalar,
nonce_0: Scalar,
_: &[u8],
) -> Scalar {
self.binding = binding;
let (rs, t3, mut commitments) = ChaumProof::r_t_commitments(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"r_t")),
&self.witness
);
let mut sum = ProjectivePoint::IDENTITY;
for i in 0 .. self.len {
let nonce = if i == 0 {
sum_0
} else {
self.sum[i - 1].0 + (self.sum[i - 1].1 * binding)
};
commitments.A2[i] += nonce;
sum += nonce;
}
commitments.A1 += sum;
let mut nonces = Vec::with_capacity(self.len);
for i in 0 .. self.len {
nonces.push(
if i == 0 {
nonce_0
} else {
self.additional_nonces[i - 1].0 + (self.additional_nonces[i - 1].1 * binding)
}
);
}
let (challenge, proof) = ChaumProof::t_prove(
&self.witness,
&rs,
t3,
commitments,
&nonces,
&view.secret_share()
);
self.challenge = challenge;
let t2 = proof.t2;
self.proof = Some(proof);
t2
}
fn verify(
&self,
_: ProjectivePoint,
_: ProjectivePoint,
sum: Scalar
) -> Option<Self::Signature> {
let mut proof = self.proof.clone().unwrap();
proof.t2 = sum;
Some(proof).filter(|proof| proof.verify(&self.witness.statement))
}
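// A participant's t2 share is valid if share * G equals the sum, over every S/T pair, of their
// bound nonce plus challenge^(i+1) times their verification share.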
fn verify_share(
&self,
l: u16,
verification_share: ProjectivePoint,
nonce: ProjectivePoint,
share: Scalar,
) -> bool {
let mut t2 = ProjectivePoint::IDENTITY;
let mut accum = self.challenge;
for i in 0 .. self.len {
let nonce = if i == 0 {
nonce
} else {
self.nonces[&l][i - 1].0 + (self.nonces[&l][i - 1].1 * self.binding)
};
t2 += nonce + (verification_share * accum);
accum *= self.challenge;
}
(*G * share) == t2
}
}


@@ -0,0 +1,100 @@
use core::convert::TryInto;
use ff::PrimeField;
use group::GroupEncoding;
use sha2::{Digest, Sha256, Sha512};
use k256::{
elliptic_curve::{generic_array::GenericArray, bigint::{ArrayEncoding, U512}, ops::Reduce},
Scalar,
ProjectivePoint
};
use transcript::DigestTranscript;
use frost::{CurveError, Curve};
use crate::spark::G;
const CONTEXT: &[u8] = b"FROST-K256-SHA";
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub(crate) struct Secp256k1;
impl Curve for Secp256k1 {
type F = Scalar;
type G = ProjectivePoint;
type T = ProjectivePoint;
fn id() -> String {
"secp256k1".to_string()
}
fn id_len() -> u8 {
u8::try_from(Self::id().len()).unwrap()
}
fn generator() -> Self::G {
*G
}
fn generator_table() -> Self::T {
*G
}
fn little_endian() -> bool {
false
}
// The IETF draft doesn't specify a secp256k1 ciphersuite
// This test just uses the simplest ciphersuite which would still be viable to deploy
// The comparable P-256 curve uses hash_to_field from the Hash To Curve IETF draft with a context
// string and further DST for H1 ("rho") and H3 ("digest"). With lack of hash_to_field, wide
// reduction is used
fn hash_msg(msg: &[u8]) -> Vec<u8> {
(&Sha256::digest(&[CONTEXT, b"digest", msg].concat())).to_vec()
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
Self::hash_to_F(&[CONTEXT, b"rho", binding].concat())
}
fn hash_to_F(data: &[u8]) -> Self::F {
Scalar::from_uint_reduced(U512::from_be_byte_array(Sha512::digest(data)))
}
fn F_len() -> usize {
32
}
fn G_len() -> usize {
33
}
fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
let bytes: [u8; 32] = slice.try_into()
.map_err(|_| CurveError::InvalidLength(32, slice.len()))?;
let scalar = Scalar::from_repr(bytes.into());
if scalar.is_none().unwrap_u8() == 1 {
Err(CurveError::InvalidScalar)?;
}
Ok(scalar.unwrap())
}
fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
let point = ProjectivePoint::from_bytes(GenericArray::from_slice(slice));
if point.is_none().unwrap_u8() == 1 {
Err(CurveError::InvalidScalar)?;
}
Ok(point.unwrap())
}
fn F_to_bytes(f: &Self::F) -> Vec<u8> {
(&f.to_bytes()).to_vec()
}
fn G_to_bytes(g: &Self::G) -> Vec<u8> {
(&g.to_bytes()).to_vec()
}
}
pub type Transcript = DigestTranscript::<blake2::Blake2b512>;


@@ -0,0 +1,41 @@
use lazy_static::lazy_static;
use sha2::{Digest, Sha256};
use group::GroupEncoding;
use k256::{ProjectivePoint, CompressedPoint};
pub mod chaum;
#[cfg(feature = "frost")]
pub(crate) mod frost;
// Extremely basic hash to curve, which should not be used, yet which offers the needed generators
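// Each attempt appends the letter again ("Generator F", "Generator FF", ...), hashes the string
// with SHA-256, prefixes the digest with an 0x02 sign byte, and keeps the first result which
// decodes to a valid compressed point.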
fn generator(letter: u8) -> ProjectivePoint {
let mut point = [2; 33];
let mut g = b"Generator ".to_vec();
let mut res;
while {
g.push(letter);
point[1..].copy_from_slice(&Sha256::digest(&g));
res = ProjectivePoint::from_bytes(&CompressedPoint::from(point));
res.is_none().into()
} {}
res.unwrap()
}
lazy_static! {
pub static ref F: ProjectivePoint = generator(b'F');
pub static ref G: ProjectivePoint = generator(b'G');
pub static ref H: ProjectivePoint = generator(b'H');
pub static ref U: ProjectivePoint = generator(b'U');
pub static ref GENERATORS_TRANSCRIPT: Vec<u8> = {
let mut transcript = Vec::with_capacity(4 * 33);
transcript.extend(&F.to_bytes());
transcript.extend(&G.to_bytes());
transcript.extend(&H.to_bytes());
transcript.extend(&U.to_bytes());
transcript
};
}


@@ -0,0 +1,72 @@
use rand::rngs::OsRng;
use ff::Field;
use k256::Scalar;
#[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign};
use crate::spark::{F, G, H, U, chaum::*};
#[cfg(feature = "multisig")]
use crate::spark::frost::{Transcript, Secp256k1};
#[test]
fn chaum() {
#[allow(non_snake_case)]
let mut S_T = vec![];
let mut xz = vec![];
let y = Scalar::random(&mut OsRng);
for _ in 0 .. 2 {
let x = Scalar::random(&mut OsRng);
let z = Scalar::random(&mut OsRng);
S_T.push((
(*F * x) + (*G * y) + (*H * z),
// U = (x * T) + (y * G)
// T = (U - (y * G)) * x^-1
(*U - (*G * y)) * x.invert().unwrap()
));
xz.push((x, z));
}
let statement = ChaumStatement::new(b"Hello, World!".to_vec(), S_T);
let witness = ChaumWitness::new(statement.clone(), xz);
assert!(ChaumProof::prove(&mut OsRng, &witness, &y).verify(&statement));
}
#[cfg(feature = "multisig")]
#[test]
fn chaum_multisig() {
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
#[allow(non_snake_case)]
let mut S_T = vec![];
let mut xz = vec![];
for _ in 0 .. 2 {
let x = Scalar::random(&mut OsRng);
let z = Scalar::random(&mut OsRng);
S_T.push((
(*F * x) + keys[&1].group_key() + (*H * z),
(*U - keys[&1].group_key()) * x.invert().unwrap()
));
xz.push((x, z));
}
let statement = ChaumStatement::new(b"Hello, Multisig World!".to_vec(), S_T);
let witness = ChaumWitness::new(statement.clone(), xz);
assert!(
sign(
&mut OsRng,
algorithm_machines(
&mut OsRng,
ChaumMultisig::new(Transcript::new(b"Firo Serai Chaum Test".to_vec()), witness),
&keys
),
&[]
).verify(&statement)
);
}


@@ -229,6 +229,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn verify(
&self,
_: u16,
_: dfg::EdwardsPoint,
_: dfg::EdwardsPoint,
sum: dfg::Scalar


@@ -52,6 +52,7 @@ pub trait Algorithm<C: Curve>: Clone {
/// verification fails
fn verify_share(
&self,
l: u16,
verification_share: C::G,
nonce: C::G,
share: C::F,
@@ -154,6 +155,7 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
fn verify_share(
&self,
_: u16,
verification_share: C::G,
nonce: C::G,
share: C::F,


@@ -217,6 +217,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
// within n / 2 on average, and not gameable to n, though that should be minor
for l in &sign_params.view.included {
if !sign_params.algorithm.verify_share(
*l,
sign_params.view.verification_share(*l),
sign.B[l][0] + (sign.B[l][1] * sign.binding),
responses[l]