Mirror of https://github.com/serai-dex/serai.git, synced 2025-01-23 11:15:16 +00:00
Create a dedicated crate for the DKG (#141)
* Add dkg crate
* Remove F_len and G_len
  They're generally no longer used.
* Replace hash_to_vec with a provided method around associated type H: Digest
  Part of trying to minimize this trait so it can be moved elsewhere. Vec, which isn't std, may have been a blocker.
* Encrypt secret shares within the FROST library
  Reduces requirements on callers in order to be correct.
* Update usage of Zeroize within FROST
* Inline functions in key_gen
  There was no reason to have them separated as they were. sign probably has the same statement available, yet that isn't the focus right now.
* Add a ciphersuite package which provides hash_to_F
* Set the Ciphersuite version to something valid
* Have ed448 export Scalar/FieldElement/Point at the top level
* Move FROST over to Ciphersuite
* Correct usage of ff in ciphersuite
* Correct documentation handling
* Move Schnorr signatures to their own crate
* Remove unused feature from schnorr
* Fix Schnorr tests
* Split DKG into a separate crate
* Add serialize to Commitments and SecretShare
  Helper for buf = vec![]; .write(buf).unwrap(); buf
* Move FROST over to the new dkg crate
* Update Monero lib to latest FROST
* Correct ethereum's usage of features
* Add serialize to GeneratorProof
* Add serialize helper function to FROST
* Rename AddendumSerialize to WriteAddendum
* Update processor
* Slight fix to processor
parent cbceaff678
commit 2379855b31
50 changed files with 2076 additions and 1601 deletions
117 Cargo.lock (generated)
|
@@ -43,7 +43,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cipher",
|
||||
"cipher 0.3.0",
|
||||
"cpufeatures",
|
||||
"opaque-debug 0.3.0",
|
||||
]
|
||||
|
@@ -56,7 +56,7 @@ checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6"
|
|||
dependencies = [
|
||||
"aead",
|
||||
"aes",
|
||||
"cipher",
|
||||
"cipher 0.3.0",
|
||||
"ctr",
|
||||
"ghash",
|
||||
"subtle",
|
||||
|
@@ -818,11 +818,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cipher",
|
||||
"cipher 0.3.0",
|
||||
"cpufeatures",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chacha20"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7fc89c7c5b9e7a02dfe45cd2367bae382f9ed31c61ca8debe5f827c420a2f08"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cipher 0.4.3",
|
||||
"cpufeatures",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chacha20poly1305"
|
||||
version = "0.9.1"
|
||||
|
@@ -830,8 +841,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "a18446b09be63d457bbec447509e85f662f32952b035ce892290396bc0b0cff5"
|
||||
dependencies = [
|
||||
"aead",
|
||||
"chacha20",
|
||||
"cipher",
|
||||
"chacha20 0.8.2",
|
||||
"cipher 0.3.0",
|
||||
"poly1305",
|
||||
"zeroize",
|
||||
]
|
||||
|
@@ -873,6 +884,36 @@ dependencies = [
|
|||
"generic-array 0.14.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cipher"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e"
|
||||
dependencies = [
|
||||
"crypto-common",
|
||||
"inout",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ciphersuite"
|
||||
version = "0.1.1"
|
||||
dependencies = [
|
||||
"dalek-ff-group",
|
||||
"digest 0.10.5",
|
||||
"elliptic-curve",
|
||||
"ff",
|
||||
"group",
|
||||
"k256",
|
||||
"minimal-ed448",
|
||||
"p256",
|
||||
"rand_core 0.6.4",
|
||||
"sha2 0.10.6",
|
||||
"sha3",
|
||||
"subtle",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clang-sys"
|
||||
version = "1.4.0"
|
||||
|
@@ -1361,7 +1402,7 @@ version = "0.8.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea"
|
||||
dependencies = [
|
||||
"cipher",
|
||||
"cipher 0.3.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@@ -1612,6 +1653,26 @@ dependencies = [
|
|||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dkg"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"chacha20 0.9.0",
|
||||
"ciphersuite",
|
||||
"digest 0.10.5",
|
||||
"dleq",
|
||||
"flexible-transcript",
|
||||
"group",
|
||||
"hex",
|
||||
"hkdf",
|
||||
"multiexp",
|
||||
"rand_core 0.6.4",
|
||||
"schnorr-signatures",
|
||||
"subtle",
|
||||
"thiserror",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dleq"
|
||||
version = "0.1.2"
|
||||
|
@@ -2990,6 +3051,15 @@ version = "0.3.4"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0"
|
||||
|
||||
[[package]]
|
||||
name = "hkdf"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437"
|
||||
dependencies = [
|
||||
"hmac 0.12.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hmac"
|
||||
version = "0.8.1"
|
||||
|
@@ -3469,6 +3539,15 @@ dependencies = [
|
|||
"synstructure",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inout"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5"
|
||||
dependencies = [
|
||||
"generic-array 0.14.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "instant"
|
||||
version = "0.1.12"
|
||||
|
@@ -4509,21 +4588,21 @@ dependencies = [
|
|||
name = "modular-frost"
|
||||
version = "0.3.0"
|
||||
dependencies = [
|
||||
"chacha20 0.9.0",
|
||||
"ciphersuite",
|
||||
"dalek-ff-group",
|
||||
"digest 0.10.5",
|
||||
"dkg",
|
||||
"dleq",
|
||||
"elliptic-curve",
|
||||
"ff",
|
||||
"flexible-transcript",
|
||||
"group",
|
||||
"hex",
|
||||
"k256",
|
||||
"hkdf",
|
||||
"minimal-ed448",
|
||||
"multiexp",
|
||||
"p256",
|
||||
"rand_core 0.6.4",
|
||||
"schnorr-signatures",
|
||||
"serde_json",
|
||||
"sha2 0.10.6",
|
||||
"sha3",
|
||||
"subtle",
|
||||
"thiserror",
|
||||
"zeroize",
|
||||
|
@@ -6370,7 +6449,7 @@ version = "0.9.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0c0fbb5f676da676c260ba276a8f43a8dc67cf02d1438423aeb1c677a7212686"
|
||||
dependencies = [
|
||||
"cipher",
|
||||
"cipher 0.3.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@@ -7266,6 +7345,18 @@ dependencies = [
|
|||
"windows-sys 0.36.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schnorr-signatures"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ciphersuite",
|
||||
"dalek-ff-group",
|
||||
"group",
|
||||
"multiexp",
|
||||
"rand_core 0.6.4",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "schnorrkel"
|
||||
version = "0.9.1"
|
||||
|
|
|
@@ -4,10 +4,13 @@ members = [
|
|||
|
||||
"crypto/dalek-ff-group",
|
||||
"crypto/ed448",
|
||||
"crypto/ciphersuite",
|
||||
|
||||
"crypto/multiexp",
|
||||
|
||||
"crypto/schnorr",
|
||||
"crypto/dleq",
|
||||
"crypto/dkg",
|
||||
"crypto/frost",
|
||||
|
||||
"coins/ethereum",
|
||||
|
|
|
@@ -24,7 +24,7 @@ sha3 = "0.10"
|
|||
|
||||
group = "0.12"
|
||||
k256 = { version = "0.11", features = ["arithmetic", "keccak256", "ecdsa"] }
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1"] }
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
||||
|
||||
eyre = "0.6"
|
||||
|
||||
|
|
|
@@ -23,8 +23,8 @@ use dalek_ff_group as dfg;
|
|||
use dleq::DLEqProof;
|
||||
use frost::{
|
||||
curve::Ed25519,
|
||||
FrostError, FrostView,
|
||||
algorithm::{AddendumSerialize, Algorithm},
|
||||
FrostError, ThresholdView,
|
||||
algorithm::{WriteAddendum, Algorithm},
|
||||
};
|
||||
|
||||
use crate::ringct::{
|
||||
|
@@ -80,7 +80,7 @@ pub struct ClsagAddendum {
|
|||
dleq: DLEqProof<dfg::EdwardsPoint>,
|
||||
}
|
||||
|
||||
impl AddendumSerialize for ClsagAddendum {
|
||||
impl WriteAddendum for ClsagAddendum {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
|
||||
self.dleq.serialize(writer)
|
||||
|
@@ -154,7 +154,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
view: &FrostView<Ed25519>,
|
||||
view: &ThresholdView<Ed25519>,
|
||||
) -> ClsagAddendum {
|
||||
ClsagAddendum {
|
||||
key_image: dfg::EdwardsPoint(self.H * view.secret_share().0),
|
||||
|
@@ -188,7 +188,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
view: &FrostView<Ed25519>,
|
||||
view: &ThresholdView<Ed25519>,
|
||||
l: u16,
|
||||
addendum: ClsagAddendum,
|
||||
) -> Result<(), FrostError> {
|
||||
|
@@ -223,7 +223,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
view: &FrostView<Ed25519>,
|
||||
view: &ThresholdView<Ed25519>,
|
||||
nonce_sums: &[Vec<dfg::EdwardsPoint>],
|
||||
nonces: &[dfg::Scalar],
|
||||
msg: &[u8],
|
||||
|
|
|
@@ -12,7 +12,7 @@ use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
|
|||
use transcript::{Transcript, RecommendedTranscript};
|
||||
use frost::{
|
||||
curve::Ed25519,
|
||||
FrostError, FrostKeys,
|
||||
FrostError, ThresholdKeys,
|
||||
sign::{
|
||||
Writable, Preprocess, SignatureShare, PreprocessMachine, SignMachine, SignatureMachine,
|
||||
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
|
||||
|
@@ -68,7 +68,7 @@ impl SignableTransaction {
|
|||
pub async fn multisig(
|
||||
self,
|
||||
rpc: &Rpc,
|
||||
keys: FrostKeys<Ed25519>,
|
||||
keys: ThresholdKeys<Ed25519>,
|
||||
mut transcript: RecommendedTranscript,
|
||||
height: usize,
|
||||
mut included: Vec<u16>,
|
||||
|
|
49 crypto/ciphersuite/Cargo.toml (new file)
|
@@ -0,0 +1,49 @@
|
|||
[package]
|
||||
name = "ciphersuite"
|
||||
version = "0.1.1"
|
||||
description = "Ciphersuites built around ff/group"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||
keywords = ["ciphersuite", "ff", "group"]
|
||||
edition = "2021"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[dependencies]
|
||||
rand_core = "0.6"
|
||||
|
||||
zeroize = { version = "1.5", features = ["zeroize_derive"] }
|
||||
subtle = "2"
|
||||
|
||||
digest = "0.10"
|
||||
sha2 = { version = "0.10", optional = true }
|
||||
sha3 = { version = "0.10", optional = true }
|
||||
|
||||
ff = { version = "0.12", features = ["bits"] }
|
||||
group = "0.12"
|
||||
|
||||
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true }
|
||||
|
||||
elliptic-curve = { version = "0.12", features = ["hash2curve"], optional = true }
|
||||
p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
|
||||
k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
|
||||
|
||||
minimal-ed448 = { path = "../ed448", version = "0.1", optional = true }
|
||||
|
||||
[features]
|
||||
std = []
|
||||
|
||||
dalek = ["sha2", "dalek-ff-group"]
|
||||
ed25519 = ["dalek"]
|
||||
ristretto = ["dalek"]
|
||||
|
||||
kp256 = ["sha2", "elliptic-curve"]
|
||||
p256 = ["kp256", "dep:p256"]
|
||||
secp256k1 = ["kp256", "k256"]
|
||||
|
||||
ed448 = ["sha3", "minimal-ed448"]
|
||||
|
||||
default = ["std"]
|
21 crypto/ciphersuite/LICENSE (new file)
|
@@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2021-2022 Luke Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
3 crypto/ciphersuite/README.md (new file)
@@ -0,0 +1,3 @@
# Ciphersuite

Ciphersuites for elliptic curves premised on ff/group.
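To give a sense of what the new crate exposes before the source listings that follow, here is a minimal sketch (not part of this commit's diff) of using the `Ciphersuite` trait added in `crypto/ciphersuite/src/lib.rs`. It assumes the crate is built with the `ristretto` feature and that `rand_core` is available with its `getrandom` feature so `OsRng` exists.

```rust
use rand_core::OsRng;

use ciphersuite::{Ciphersuite, Ristretto};

// Derive a scalar from a domain-separation tag and a message, generic over the ciphersuite.
fn derive_scalar<C: Ciphersuite>(dst: &[u8], msg: &[u8]) -> C::F {
  // hash_to_F is the scalar-derivation method the ciphersuite package provides.
  C::hash_to_F(dst, msg)
}

fn main() {
  // A random non-zero scalar and the corresponding public point, via the trait's generator.
  let secret = Ristretto::random_nonzero_F(&mut OsRng);
  let public = Ristretto::generator() * secret;

  let challenge = derive_scalar::<Ristretto>(b"example-dst", b"example message");
  println!("public: {:?}, challenge: {:?}", public, challenge);
}
```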
44 crypto/ciphersuite/src/dalek.rs (new file)
|
@@ -0,0 +1,44 @@
|
|||
use zeroize::Zeroize;
|
||||
|
||||
use sha2::{Digest, Sha512};
|
||||
|
||||
use group::Group;
|
||||
use dalek_ff_group::Scalar;
|
||||
|
||||
use crate::Ciphersuite;
|
||||
|
||||
macro_rules! dalek_curve {
|
||||
(
|
||||
$feature: literal,
|
||||
|
||||
$Ciphersuite: ident,
|
||||
$Point: ident,
|
||||
$ID: literal
|
||||
) => {
|
||||
use dalek_ff_group::$Point;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct $Ciphersuite;
|
||||
impl Ciphersuite for $Ciphersuite {
|
||||
type F = Scalar;
|
||||
type G = $Point;
|
||||
type H = Sha512;
|
||||
|
||||
const ID: &'static [u8] = $ID;
|
||||
|
||||
fn generator() -> Self::G {
|
||||
$Point::generator()
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat()))
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "ristretto"))]
|
||||
dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto");
|
||||
|
||||
#[cfg(feature = "ed25519")]
|
||||
dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519");
|
67 crypto/ciphersuite/src/ed448.rs (new file)
|
@@ -0,0 +1,67 @@
|
|||
use zeroize::Zeroize;
|
||||
|
||||
use digest::{
|
||||
typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput,
|
||||
ExtendableOutput, XofReader, HashMarker, Digest,
|
||||
};
|
||||
use sha3::Shake256;
|
||||
|
||||
use group::Group;
|
||||
use minimal_ed448::{scalar::Scalar, point::Point};
|
||||
|
||||
use crate::Ciphersuite;
|
||||
|
||||
// Re-define Shake256 as a traditional Digest to meet API expectations
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Shake256_114(Shake256);
|
||||
impl BlockSizeUser for Shake256_114 {
|
||||
type BlockSize = <Shake256 as BlockSizeUser>::BlockSize;
|
||||
fn block_size() -> usize {
|
||||
Shake256::block_size()
|
||||
}
|
||||
}
|
||||
impl OutputSizeUser for Shake256_114 {
|
||||
type OutputSize = U114;
|
||||
fn output_size() -> usize {
|
||||
114
|
||||
}
|
||||
}
|
||||
impl Update for Shake256_114 {
|
||||
fn update(&mut self, data: &[u8]) {
|
||||
self.0.update(data);
|
||||
}
|
||||
fn chain(mut self, data: impl AsRef<[u8]>) -> Self {
|
||||
Update::update(&mut self, data.as_ref());
|
||||
self
|
||||
}
|
||||
}
|
||||
impl FixedOutput for Shake256_114 {
|
||||
fn finalize_fixed(self) -> Output<Self> {
|
||||
let mut res = Default::default();
|
||||
FixedOutput::finalize_into(self, &mut res);
|
||||
res
|
||||
}
|
||||
fn finalize_into(self, out: &mut Output<Self>) {
|
||||
let mut reader = self.0.finalize_xof();
|
||||
reader.read(out);
|
||||
}
|
||||
}
|
||||
impl HashMarker for Shake256_114 {}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Ed448;
|
||||
impl Ciphersuite for Ed448 {
|
||||
type F = Scalar;
|
||||
type G = Point;
|
||||
type H = Shake256_114;
|
||||
|
||||
const ID: &'static [u8] = b"ed448";
|
||||
|
||||
fn generator() -> Self::G {
|
||||
Point::generator()
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
Scalar::wide_reduce(Self::H::digest(&[dst, data].concat()).as_ref().try_into().unwrap())
|
||||
}
|
||||
}
|
72 crypto/ciphersuite/src/kp256.rs (new file)
|
@@ -0,0 +1,72 @@
|
|||
use zeroize::Zeroize;
|
||||
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
use group::ff::{Field, PrimeField};
|
||||
|
||||
use elliptic_curve::{
|
||||
generic_array::GenericArray,
|
||||
bigint::{Encoding, U384},
|
||||
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
|
||||
};
|
||||
|
||||
use crate::Ciphersuite;
|
||||
|
||||
macro_rules! kp_curve {
|
||||
(
|
||||
$feature: literal,
|
||||
$lib: ident,
|
||||
|
||||
$Ciphersuite: ident,
|
||||
$ID: literal
|
||||
) => {
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct $Ciphersuite;
|
||||
impl Ciphersuite for $Ciphersuite {
|
||||
type F = $lib::Scalar;
|
||||
type G = $lib::ProjectivePoint;
|
||||
type H = Sha256;
|
||||
|
||||
const ID: &'static [u8] = $ID;
|
||||
|
||||
fn generator() -> Self::G {
|
||||
$lib::ProjectivePoint::GENERATOR
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
|
||||
let mut dst = dst;
|
||||
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat());
|
||||
if dst.len() > 255 {
|
||||
dst = oversize.as_ref();
|
||||
}
|
||||
|
||||
// While one of these two libraries does support directly hashing to the Scalar field, the
|
||||
// other doesn't. While that's probably an oversight, this is a universally working method
|
||||
let mut modulus = [0; 48];
|
||||
modulus[16 ..].copy_from_slice(&(Self::F::zero() - Self::F::one()).to_bytes());
|
||||
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
|
||||
|
||||
let mut unreduced = U384::from_be_bytes({
|
||||
let mut bytes = [0; 48];
|
||||
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
|
||||
bytes
|
||||
})
|
||||
.reduce(&modulus)
|
||||
.unwrap()
|
||||
.to_be_bytes();
|
||||
|
||||
let mut array = *GenericArray::from_slice(&unreduced[16 ..]);
|
||||
let res = $lib::Scalar::from_repr(array).unwrap();
|
||||
unreduced.zeroize();
|
||||
array.zeroize();
|
||||
res
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "p256")]
|
||||
kp_curve!("p256", p256, P256, b"P-256");
|
||||
|
||||
#[cfg(feature = "secp256k1")]
|
||||
kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1");
|
106 crypto/ciphersuite/src/lib.rs (new file)
|
@@ -0,0 +1,106 @@
|
|||
#![cfg_attr(docsrs, feature(doc_cfg))]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
|
||||
use core::fmt::Debug;
|
||||
#[cfg(feature = "std")]
|
||||
use std::io::{self, Read};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
use subtle::ConstantTimeEq;
|
||||
|
||||
use digest::{core_api::BlockSizeUser, Digest};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField, PrimeFieldBits},
|
||||
Group, GroupOps,
|
||||
prime::PrimeGroup,
|
||||
};
|
||||
#[cfg(feature = "std")]
|
||||
use group::GroupEncoding;
|
||||
|
||||
#[cfg(feature = "dalek")]
|
||||
mod dalek;
|
||||
#[cfg(feature = "ristretto")]
|
||||
pub use dalek::Ristretto;
|
||||
#[cfg(feature = "ed25519")]
|
||||
pub use dalek::Ed25519;
|
||||
|
||||
#[cfg(feature = "kp256")]
|
||||
mod kp256;
|
||||
#[cfg(feature = "secp256k1")]
|
||||
pub use kp256::Secp256k1;
|
||||
#[cfg(feature = "p256")]
|
||||
pub use kp256::P256;
|
||||
|
||||
#[cfg(feature = "ed448")]
|
||||
mod ed448;
|
||||
#[cfg(feature = "ed448")]
|
||||
pub use ed448::*;
|
||||
|
||||
/// Unified trait defining a ciphersuite around an elliptic curve.
|
||||
pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
||||
/// Scalar field element type.
|
||||
// This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
|
||||
type F: PrimeField + PrimeFieldBits + Zeroize;
|
||||
/// Group element type.
|
||||
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq;
|
||||
/// Hash algorithm used with this curve.
|
||||
// Requires BlockSizeUser so it can be used within Hkdf which requires that.
|
||||
type H: Clone + BlockSizeUser + Digest;
|
||||
|
||||
/// ID for this curve.
|
||||
const ID: &'static [u8];
|
||||
|
||||
/// Generator for the group.
|
||||
// While group does provide this in its API, privacy coins may want to use a custom basepoint
|
||||
fn generator() -> Self::G;
|
||||
|
||||
/// Hash the provided dst and message to a scalar.
|
||||
#[allow(non_snake_case)]
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
|
||||
|
||||
/// Generate a random non-zero scalar.
|
||||
#[allow(non_snake_case)]
|
||||
fn random_nonzero_F<R: RngCore + CryptoRng>(rng: &mut R) -> Self::F {
|
||||
let mut res;
|
||||
while {
|
||||
res = Self::F::random(&mut *rng);
|
||||
res.ct_eq(&Self::F::zero()).into()
|
||||
} {}
|
||||
res
|
||||
}
|
||||
|
||||
/// Read a canonical scalar from something implementing std::io::Read.
|
||||
#[cfg(feature = "std")]
|
||||
#[allow(non_snake_case)]
|
||||
fn read_F<R: Read>(reader: &mut R) -> io::Result<Self::F> {
|
||||
let mut encoding = <Self::F as PrimeField>::Repr::default();
|
||||
reader.read_exact(encoding.as_mut())?;
|
||||
|
||||
// ff mandates this is canonical
|
||||
let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
|
||||
for b in encoding.as_mut() {
|
||||
b.zeroize();
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Read a canonical point from something implementing std::io::Read.
|
||||
#[cfg(feature = "std")]
|
||||
#[allow(non_snake_case)]
|
||||
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
|
||||
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
|
||||
reader.read_exact(encoding.as_mut())?;
|
||||
|
||||
let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
|
||||
if point.to_bytes().as_ref() != encoding.as_ref() {
|
||||
Err(io::Error::new(io::ErrorKind::Other, "non-canonical point"))?;
|
||||
}
|
||||
Ok(point)
|
||||
}
|
||||
}
|
42 crypto/dkg/Cargo.toml (new file)
|
@@ -0,0 +1,42 @@
|
|||
[package]
|
||||
name = "dkg"
|
||||
version = "0.1.0"
|
||||
description = "Distributed key generation over ff/group"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
|
||||
edition = "2021"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[dependencies]
|
||||
thiserror = "1"
|
||||
|
||||
rand_core = "0.6"
|
||||
|
||||
zeroize = { version = "1.5", features = ["zeroize_derive"] }
|
||||
subtle = "2"
|
||||
|
||||
hex = "0.4"
|
||||
|
||||
digest = "0.10"
|
||||
|
||||
hkdf = "0.12"
|
||||
chacha20 = { version = "0.9", features = ["zeroize"] }
|
||||
|
||||
group = "0.12"
|
||||
|
||||
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] }
|
||||
|
||||
transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"], version = "^0.1.3" }
|
||||
|
||||
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
|
||||
|
||||
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" }
|
||||
dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
|
||||
|
||||
[features]
|
||||
tests = []
|
21 crypto/dkg/LICENSE (new file)
|
@@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2021-2022 Luke Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
12 crypto/dkg/README.md (new file)
@@ -0,0 +1,12 @@
# Distributed Key Generation

A collection of implementations of various distributed key generation protocols.

All included protocols resolve into the provided `Threshold` types, intended to
enable their modularity.

Additional utilities around them, such as promotion from one generator to
another, are also provided.

Currently included is the two-round protocol from the
[FROST paper](https://eprint.iacr.org/2020/852).
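As a reading aid for the `frost.rs` listing that follows, here is a hedged sketch (not part of the diff) of the two-round flow using the API this crate adds: `ThresholdParams`, `KeyGenMachine`, `SecretShareMachine`, and `KeyMachine`. It simulates every participant in a single process; in practice each party runs its own machine, commitments are broadcast, and the (encrypted) secret shares travel over authenticated channels. It assumes `ciphersuite` is built with the `ristretto` feature and that `rand_core` provides `OsRng`.

```rust
use std::collections::HashMap;

use rand_core::OsRng;

use ciphersuite::Ristretto;
use dkg::{ThresholdParams, frost::KeyGenMachine};

fn main() {
  let (t, n) = (2u16, 3u16);

  // Round 1: every party generates coefficients and broadcasts its commitments.
  let mut machines = HashMap::new();
  let mut commitments = HashMap::new();
  for i in 1 ..= n {
    let params = ThresholdParams::new(t, n, i).unwrap();
    let machine = KeyGenMachine::<Ristretto>::new(params, "example multisig".to_string());
    let (machine, our_commitments) = machine.generate_coefficients(&mut OsRng);
    machines.insert(i, machine);
    commitments.insert(i, our_commitments);
  }

  // Round 2: every party verifies the others' commitments and produces one
  // encrypted secret share per other participant.
  let mut key_machines = HashMap::new();
  let mut shares = HashMap::new();
  for (i, machine) in machines.drain() {
    let others = commitments
      .iter()
      .filter(|(l, _)| **l != i)
      .map(|(l, c)| (*l, c.clone()))
      .collect::<HashMap<_, _>>();
    let (machine, our_shares) = machine.generate_secret_shares(&mut OsRng, others).unwrap();
    key_machines.insert(i, machine);
    shares.insert(i, our_shares);
  }

  // Completion: every party consumes the shares addressed to it and derives its keys.
  for (i, machine) in key_machines.drain() {
    let ours = shares
      .iter()
      .filter(|(l, _)| **l != i)
      .map(|(l, sent)| (*l, sent[&i].clone()))
      .collect::<HashMap<_, _>>();
    let keys = machine.complete(&mut OsRng, ours).unwrap();
    println!("participant {} derived group key {:?}", i, keys.group_key());
  }
}
```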
458 crypto/dkg/src/frost.rs (new file)
|
@@ -0,0 +1,458 @@
|
|||
use std::{
|
||||
marker::PhantomData,
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use digest::Digest;
|
||||
use hkdf::{Hkdf, hmac::SimpleHmac};
|
||||
use chacha20::{
|
||||
cipher::{crypto_common::KeyIvInit, StreamCipher},
|
||||
Key as Cc20Key, Nonce as Cc20Iv, ChaCha20,
|
||||
};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use multiexp::{multiexp_vartime, BatchVerifier};
|
||||
|
||||
use schnorr::SchnorrSignature;
|
||||
|
||||
use crate::{DkgError, ThresholdParams, ThresholdCore, validate_map};
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn challenge<C: Ciphersuite>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
|
||||
const DST: &[u8] = b"FROST Schnorr Proof of Knowledge";
|
||||
|
||||
// Hashes the context to get a fixed size value out of it
|
||||
let mut transcript = C::H::digest(context.as_bytes()).as_ref().to_vec();
|
||||
transcript.extend(l.to_be_bytes());
|
||||
transcript.extend(R);
|
||||
transcript.extend(Am);
|
||||
C::hash_to_F(DST, &transcript)
|
||||
}
|
||||
|
||||
/// Commitments message to be broadcast to all other parties.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Commitments<C: Ciphersuite> {
|
||||
commitments: Vec<C::G>,
|
||||
enc_key: C::G,
|
||||
cached_msg: Vec<u8>,
|
||||
sig: SchnorrSignature<C>,
|
||||
}
|
||||
impl<C: Ciphersuite> Drop for Commitments<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for Commitments<C> {}
|
||||
|
||||
impl<C: Ciphersuite> Commitments<C> {
|
||||
pub fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
|
||||
let mut commitments = Vec::with_capacity(params.t().into());
|
||||
let mut cached_msg = vec![];
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let mut read_G = || -> io::Result<C::G> {
|
||||
let mut buf = <C::G as GroupEncoding>::Repr::default();
|
||||
reader.read_exact(buf.as_mut())?;
|
||||
let point = C::read_G(&mut buf.as_ref())?;
|
||||
cached_msg.extend(buf.as_ref());
|
||||
Ok(point)
|
||||
};
|
||||
|
||||
for _ in 0 .. params.t() {
|
||||
commitments.push(read_G()?);
|
||||
}
|
||||
let enc_key = read_G()?;
|
||||
|
||||
Ok(Commitments { commitments, enc_key, cached_msg, sig: SchnorrSignature::read(reader)? })
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(&self.cached_msg)?;
|
||||
self.sig.write(writer)
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
/// State machine to begin the key generation protocol.
|
||||
pub struct KeyGenMachine<C: Ciphersuite> {
|
||||
params: ThresholdParams,
|
||||
context: String,
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> KeyGenMachine<C> {
|
||||
/// Creates a new machine to generate a key for the specified curve in the specified multisig.
|
||||
// The context string should be unique among multisigs.
|
||||
pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine<C> {
|
||||
KeyGenMachine { params, context, _curve: PhantomData }
|
||||
}
|
||||
|
||||
/// Start generating a key according to the FROST DKG spec.
|
||||
/// Returns a commitments message to be sent to all parties over an authenticated channel. If any
|
||||
/// party submits multiple sets of commitments, they MUST be treated as malicious.
|
||||
pub fn generate_coefficients<R: RngCore + CryptoRng>(
|
||||
self,
|
||||
rng: &mut R,
|
||||
) -> (SecretShareMachine<C>, Commitments<C>) {
|
||||
let t = usize::from(self.params.t);
|
||||
let mut coefficients = Vec::with_capacity(t);
|
||||
let mut commitments = Vec::with_capacity(t);
|
||||
let mut cached_msg = vec![];
|
||||
|
||||
for i in 0 .. t {
|
||||
// Step 1: Generate t random values to form a polynomial with
|
||||
coefficients.push(C::random_nonzero_F(&mut *rng));
|
||||
// Step 3: Generate public commitments
|
||||
commitments.push(C::generator() * coefficients[i]);
|
||||
cached_msg.extend(commitments[i].to_bytes().as_ref());
|
||||
}
|
||||
|
||||
// Generate an encryption key for transmitting the secret shares
|
||||
// It would probably be perfectly fine to use one of our polynomial elements, yet doing so
|
||||
// puts the integrity of FROST at risk. While there's almost no way it could, as it's used in
|
||||
// an ECDH with validated group elements, better to avoid any questions on it
|
||||
let enc_key = C::random_nonzero_F(&mut *rng);
|
||||
let pub_enc_key = C::generator() * enc_key;
|
||||
cached_msg.extend(pub_enc_key.to_bytes().as_ref());
|
||||
|
||||
// Step 2: Provide a proof of knowledge
|
||||
let mut r = C::random_nonzero_F(rng);
|
||||
let sig = SchnorrSignature::<C>::sign(
|
||||
coefficients[0],
|
||||
// This could be deterministic as the PoK is a singleton never opened up to cooperative
|
||||
// discussion
|
||||
// There's no reason to spend the time and effort to make this deterministic besides a
|
||||
// general obsession with canonicity and determinism though
|
||||
r,
|
||||
challenge::<C>(
|
||||
&self.context,
|
||||
self.params.i(),
|
||||
(C::generator() * r).to_bytes().as_ref(),
|
||||
&cached_msg,
|
||||
),
|
||||
);
|
||||
r.zeroize();
|
||||
|
||||
// Step 4: Broadcast
|
||||
(
|
||||
SecretShareMachine {
|
||||
params: self.params,
|
||||
context: self.context,
|
||||
coefficients,
|
||||
our_commitments: commitments.clone(),
|
||||
enc_key,
|
||||
},
|
||||
Commitments { commitments, enc_key: pub_enc_key, cached_msg, sig },
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
|
||||
let l = F::from(u64::from(l));
|
||||
let mut share = F::zero();
|
||||
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
|
||||
share += coefficient;
|
||||
if idx != (coefficients.len() - 1) {
|
||||
share *= l;
|
||||
}
|
||||
}
|
||||
share
|
||||
}
|
||||
|
||||
/// Secret share to be sent to the party it's intended for over an authenticated channel.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct SecretShare<F: PrimeField>(F::Repr);
|
||||
impl<F: PrimeField> Zeroize for SecretShare<F> {
|
||||
fn zeroize(&mut self) {
|
||||
self.0.as_mut().zeroize()
|
||||
}
|
||||
}
|
||||
impl<F: PrimeField> Drop for SecretShare<F> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl<F: PrimeField> ZeroizeOnDrop for SecretShare<F> {}
|
||||
|
||||
impl<F: PrimeField> SecretShare<F> {
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
let mut repr = F::Repr::default();
|
||||
reader.read_exact(repr.as_mut())?;
|
||||
Ok(SecretShare(repr))
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.0.as_ref())
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
fn create_ciphers<C: Ciphersuite>(
|
||||
mut sender: <C::G as GroupEncoding>::Repr,
|
||||
receiver: &mut <C::G as GroupEncoding>::Repr,
|
||||
ecdh: &mut <C::G as GroupEncoding>::Repr,
|
||||
) -> (ChaCha20, ChaCha20) {
|
||||
let directional = |sender: &mut <C::G as GroupEncoding>::Repr| {
|
||||
let mut key = Cc20Key::default();
|
||||
key.copy_from_slice(
|
||||
&Hkdf::<C::H, SimpleHmac<C::H>>::extract(
|
||||
Some(b"key"),
|
||||
&[sender.as_ref(), ecdh.as_ref()].concat(),
|
||||
)
|
||||
.0
|
||||
.as_ref()[.. 32],
|
||||
);
|
||||
let mut iv = Cc20Iv::default();
|
||||
iv.copy_from_slice(
|
||||
&Hkdf::<C::H, SimpleHmac<C::H>>::extract(
|
||||
Some(b"iv"),
|
||||
&[sender.as_ref(), ecdh.as_ref()].concat(),
|
||||
)
|
||||
.0
|
||||
.as_ref()[.. 12],
|
||||
);
|
||||
sender.as_mut().zeroize();
|
||||
|
||||
let res = ChaCha20::new(&key, &iv);
|
||||
<Cc20Key as AsMut<[u8]>>::as_mut(&mut key).zeroize();
|
||||
<Cc20Iv as AsMut<[u8]>>::as_mut(&mut iv).zeroize();
|
||||
res
|
||||
};
|
||||
|
||||
let res = (directional(&mut sender), directional(receiver));
|
||||
ecdh.as_mut().zeroize();
|
||||
res
|
||||
}
|
||||
|
||||
/// Advancement of the key generation state machine.
|
||||
#[derive(Zeroize)]
|
||||
pub struct SecretShareMachine<C: Ciphersuite> {
|
||||
params: ThresholdParams,
|
||||
context: String,
|
||||
coefficients: Vec<C::F>,
|
||||
our_commitments: Vec<C::G>,
|
||||
enc_key: C::F,
|
||||
}
|
||||
impl<C: Ciphersuite> Drop for SecretShareMachine<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for SecretShareMachine<C> {}
|
||||
|
||||
impl<C: Ciphersuite> SecretShareMachine<C> {
|
||||
/// Verify the data from the previous round (canonicity, PoKs, message authenticity)
|
||||
fn verify_r1<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
mut commitments: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(HashMap<u16, Vec<C::G>>, HashMap<u16, C::G>), DkgError> {
|
||||
validate_map(&commitments, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;
|
||||
|
||||
let mut enc_keys = HashMap::new();
|
||||
let mut batch = BatchVerifier::<u16, C::G>::new(commitments.len());
|
||||
let mut commitments = commitments
|
||||
.drain()
|
||||
.map(|(l, mut msg)| {
|
||||
enc_keys.insert(l, msg.enc_key);
|
||||
msg.enc_key.zeroize();
|
||||
|
||||
// Step 5: Validate each proof of knowledge
|
||||
// This is solely the prep step for the later batch verification
|
||||
msg.sig.batch_verify(
|
||||
rng,
|
||||
&mut batch,
|
||||
l,
|
||||
msg.commitments[0],
|
||||
challenge::<C>(&self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg),
|
||||
);
|
||||
|
||||
(l, msg.commitments.drain(..).collect::<Vec<_>>())
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
batch.verify_with_vartime_blame().map_err(DkgError::InvalidProofOfKnowledge)?;
|
||||
|
||||
commitments.insert(self.params.i, self.our_commitments.drain(..).collect());
|
||||
Ok((commitments, enc_keys))
|
||||
}
|
||||
|
||||
/// Continue generating a key.
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of secret shares to be sent over
|
||||
/// authenticated channels to their relevant counterparties.
|
||||
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
commitments: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, SecretShare<C::F>>), DkgError> {
|
||||
let (commitments, mut enc_keys) = self.verify_r1(&mut *rng, commitments)?;
|
||||
|
||||
// Step 1: Generate secret shares for all other parties
|
||||
let mut sender = (C::generator() * self.enc_key).to_bytes();
|
||||
let mut ciphers = HashMap::new();
|
||||
let mut res = HashMap::new();
|
||||
for l in 1 ..= self.params.n() {
|
||||
// Don't insert our own shares to the byte buffer which is meant to be sent around
|
||||
// An app developer could accidentally send it. Best to keep this black boxed
|
||||
if l == self.params.i() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let (mut cipher_send, cipher_recv) = {
|
||||
let receiver = enc_keys.get_mut(&l).unwrap();
|
||||
let mut ecdh = (*receiver * self.enc_key).to_bytes();
|
||||
|
||||
create_ciphers::<C>(sender, &mut receiver.to_bytes(), &mut ecdh)
|
||||
};
|
||||
|
||||
let mut share = polynomial(&self.coefficients, l);
|
||||
let mut share_bytes = share.to_repr();
|
||||
share.zeroize();
|
||||
|
||||
cipher_send.apply_keystream(share_bytes.as_mut());
|
||||
drop(cipher_send);
|
||||
|
||||
ciphers.insert(l, cipher_recv);
|
||||
res.insert(l, SecretShare::<C::F>(share_bytes));
|
||||
share_bytes.as_mut().zeroize();
|
||||
}
|
||||
self.enc_key.zeroize();
|
||||
sender.as_mut().zeroize();
|
||||
|
||||
// Calculate our own share
|
||||
let share = polynomial(&self.coefficients, self.params.i());
|
||||
|
||||
self.coefficients.zeroize();
|
||||
|
||||
Ok((KeyMachine { params: self.params, secret: share, commitments, ciphers }, res))
|
||||
}
|
||||
}
|
||||
|
||||
/// Final step of the key generation protocol.
|
||||
pub struct KeyMachine<C: Ciphersuite> {
|
||||
params: ThresholdParams,
|
||||
secret: C::F,
|
||||
ciphers: HashMap<u16, ChaCha20>,
|
||||
commitments: HashMap<u16, Vec<C::G>>,
|
||||
}
|
||||
impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.params.zeroize();
|
||||
self.secret.zeroize();
|
||||
|
||||
// cipher implements ZeroizeOnDrop and zeroizes on drop, yet doesn't implement Zeroize
|
||||
// The following is redundant, as Rust should automatically handle dropping it, yet it shows
|
||||
// awareness of this quirk and at least attempts to be comprehensive
|
||||
for (_, cipher) in self.ciphers.drain() {
|
||||
drop(cipher);
|
||||
}
|
||||
|
||||
for (_, commitments) in self.commitments.iter_mut() {
|
||||
commitments.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> Drop for KeyMachine<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for KeyMachine<C> {}
|
||||
|
||||
impl<C: Ciphersuite> KeyMachine<C> {
|
||||
/// Complete key generation.
|
||||
/// Takes in everyone else's shares submitted to us. Returns a ThresholdCore object representing
|
||||
/// the generated keys. Successful protocol completion MUST be confirmed by all parties before
|
||||
/// these keys may be safely used.
|
||||
pub fn complete<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
mut shares: HashMap<u16, SecretShare<C::F>>,
|
||||
) -> Result<ThresholdCore<C>, DkgError> {
|
||||
let mut secret_share = self.secret;
|
||||
self.secret.zeroize();
|
||||
|
||||
validate_map(&shares, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;
|
||||
|
||||
// Calculate the exponent for a given participant and apply it to a series of commitments
|
||||
// Initially used with the actual commitments to verify the secret share, later used with
|
||||
// stripes to generate the verification shares
|
||||
let exponential = |i: u16, values: &[_]| {
|
||||
let i = C::F::from(i.into());
|
||||
let mut res = Vec::with_capacity(self.params.t().into());
|
||||
(0 .. usize::from(self.params.t())).into_iter().fold(C::F::one(), |exp, l| {
|
||||
res.push((exp, values[l]));
|
||||
exp * i
|
||||
});
|
||||
res
|
||||
};
|
||||
|
||||
let mut batch = BatchVerifier::new(shares.len());
|
||||
for (l, mut share_bytes) in shares.drain() {
|
||||
let mut cipher = self.ciphers.remove(&l).unwrap();
|
||||
cipher.apply_keystream(share_bytes.0.as_mut());
|
||||
drop(cipher);
|
||||
|
||||
let mut share: C::F =
|
||||
Option::from(C::F::from_repr(share_bytes.0)).ok_or(DkgError::InvalidShare(l))?;
|
||||
share_bytes.zeroize();
|
||||
secret_share += share;
|
||||
|
||||
// This can be insecurely linearized from n * t to just n using the below sums for a given
|
||||
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
|
||||
// ensure that malleability isn't present is to use this n * t algorithm, which runs
|
||||
// per sender and not as an aggregate of all senders, which also enables blame
|
||||
let mut values = exponential(self.params.i, &self.commitments[&l]);
|
||||
values.push((-share, C::generator()));
|
||||
share.zeroize();
|
||||
|
||||
batch.queue(rng, l, values);
|
||||
}
|
||||
batch.verify_with_vartime_blame().map_err(DkgError::InvalidShare)?;
|
||||
|
||||
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on
|
||||
// these sums so preprocessing them is a massive speedup
|
||||
// If these weren't just sums, yet the tables used in multiexp, this would be further optimized
|
||||
// As of right now, each multiexp will regenerate them
|
||||
let mut stripes = Vec::with_capacity(usize::from(self.params.t()));
|
||||
for t in 0 .. usize::from(self.params.t()) {
|
||||
stripes.push(self.commitments.values().map(|commitments| commitments[t]).sum());
|
||||
}
|
||||
|
||||
// Calculate each user's verification share
|
||||
let mut verification_shares = HashMap::new();
|
||||
for i in 1 ..= self.params.n() {
|
||||
verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes)));
|
||||
}
|
||||
// Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t)
|
||||
debug_assert_eq!(C::generator() * secret_share, verification_shares[&self.params.i()]);
|
||||
|
||||
Ok(ThresholdCore {
|
||||
params: self.params,
|
||||
secret_share,
|
||||
group_key: stripes[0],
|
||||
verification_shares,
|
||||
})
|
||||
}
|
||||
}
|
399 crypto/dkg/src/lib.rs (new file)
|
@@ -0,0 +1,399 @@
|
|||
#![cfg_attr(docsrs, feature(doc_cfg))]
|
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
|
||||
//! A collection of implementations of various distributed key generation protocols.
|
||||
//! They all resolve into the provided Threshold types intended to enable their modularity.
|
||||
//! Additional utilities around them, such as promotion from one generator to another, are also
|
||||
//! provided.
|
||||
|
||||
use core::fmt::Debug;
|
||||
use std::{io::Read, sync::Arc, collections::HashMap};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
/// The distributed key generation protocol described in the
|
||||
/// [FROST paper](https://eprint.iacr.org/2020/852).
|
||||
pub mod frost;
|
||||
|
||||
/// Promote keys between ciphersuites.
|
||||
pub mod promote;
|
||||
|
||||
/// Tests for application-provided curves and algorithms.
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
pub mod tests;
|
||||
|
||||
// Validate a map of values to have the expected included participants
|
||||
pub(crate) fn validate_map<T>(
|
||||
map: &HashMap<u16, T>,
|
||||
included: &[u16],
|
||||
ours: u16,
|
||||
) -> Result<(), DkgError> {
|
||||
if (map.len() + 1) != included.len() {
|
||||
Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
|
||||
}
|
||||
|
||||
for included in included {
|
||||
if *included == ours {
|
||||
if map.contains_key(included) {
|
||||
Err(DkgError::DuplicatedIndex(*included))?;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if !map.contains_key(included) {
|
||||
Err(DkgError::MissingParticipant(*included))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Parameters for a multisig.
|
||||
// These fields should not be made public as they should be static
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct ThresholdParams {
|
||||
/// Participants needed to sign on behalf of the group.
|
||||
t: u16,
|
||||
/// Amount of participants.
|
||||
n: u16,
|
||||
/// Index of the participant being acted for.
|
||||
i: u16,
|
||||
}
|
||||
|
||||
impl ThresholdParams {
|
||||
pub fn new(t: u16, n: u16, i: u16) -> Result<ThresholdParams, DkgError> {
|
||||
if (t == 0) || (n == 0) {
|
||||
Err(DkgError::ZeroParameter(t, n))?;
|
||||
}
|
||||
|
||||
// When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
|
||||
// but it's not invalid to do so
|
||||
if t > n {
|
||||
Err(DkgError::InvalidRequiredQuantity(t, n))?;
|
||||
}
|
||||
if (i == 0) || (i > n) {
|
||||
Err(DkgError::InvalidParticipantIndex(n, i))?;
|
||||
}
|
||||
|
||||
Ok(ThresholdParams { t, n, i })
|
||||
}
|
||||
|
||||
pub fn t(&self) -> u16 {
|
||||
self.t
|
||||
}
|
||||
pub fn n(&self) -> u16 {
|
||||
self.n
|
||||
}
|
||||
pub fn i(&self) -> u16 {
|
||||
self.i
|
||||
}
|
||||
}
|
||||
|
||||
/// Various errors possible during key generation/signing.
|
||||
#[derive(Copy, Clone, Error, Debug)]
|
||||
pub enum DkgError {
|
||||
#[error("a parameter was 0 (required {0}, participants {1})")]
|
||||
ZeroParameter(u16, u16),
|
||||
#[error("invalid amount of required participants (max {1}, got {0})")]
|
||||
InvalidRequiredQuantity(u16, u16),
|
||||
#[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
|
||||
InvalidParticipantIndex(u16, u16),
|
||||
|
||||
#[error("invalid signing set")]
|
||||
InvalidSigningSet,
|
||||
#[error("invalid participant quantity (expected {0}, got {1})")]
|
||||
InvalidParticipantQuantity(usize, usize),
|
||||
#[error("duplicated participant index ({0})")]
|
||||
DuplicatedIndex(u16),
|
||||
#[error("missing participant {0}")]
|
||||
MissingParticipant(u16),
|
||||
|
||||
#[error("invalid proof of knowledge (participant {0})")]
|
||||
InvalidProofOfKnowledge(u16),
|
||||
#[error("invalid share (participant {0})")]
|
||||
InvalidShare(u16),
|
||||
|
||||
#[error("internal error ({0})")]
|
||||
InternalError(&'static str),
|
||||
}
|
||||
|
||||
/// Calculate the lagrange coefficient for a signing set.
|
||||
pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
|
||||
let mut num = F::one();
|
||||
let mut denom = F::one();
|
||||
for l in included {
|
||||
if i == *l {
|
||||
continue;
|
||||
}
|
||||
|
||||
let share = F::from(u64::try_from(*l).unwrap());
|
||||
num *= share;
|
||||
denom *= share - F::from(u64::try_from(i).unwrap());
|
||||
}
|
||||
|
||||
// Safe as this will only be 0 if we're part of the above loop
|
||||
// (which we have an if case to avoid)
|
||||
num * denom.invert().unwrap()
|
||||
}
|
||||
|
||||
/// Keys and verification shares generated by a DKG.
|
||||
/// Called core as they're expected to be wrapped into an Arc before usage in various operations.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct ThresholdCore<C: Ciphersuite> {
|
||||
/// Threshold Parameters.
|
||||
params: ThresholdParams,
|
||||
|
||||
/// Secret share key.
|
||||
secret_share: C::F,
|
||||
/// Group key.
|
||||
group_key: C::G,
|
||||
/// Verification shares.
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Zeroize for ThresholdCore<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.params.zeroize();
|
||||
self.secret_share.zeroize();
|
||||
self.group_key.zeroize();
|
||||
for (_, share) in self.verification_shares.iter_mut() {
|
||||
share.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> Drop for ThresholdCore<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdCore<C> {}
|
||||
|
||||
impl<C: Ciphersuite> ThresholdCore<C> {
|
||||
pub(crate) fn new(
|
||||
params: ThresholdParams,
|
||||
secret_share: C::F,
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
) -> ThresholdCore<C> {
|
||||
#[cfg(debug_assertions)]
|
||||
validate_map(&verification_shares, &(0 ..= params.n).collect::<Vec<_>>(), 0).unwrap();
|
||||
|
||||
let t = (1 ..= params.t).collect::<Vec<_>>();
|
||||
ThresholdCore {
|
||||
params,
|
||||
secret_share,
|
||||
group_key: t.iter().map(|i| verification_shares[i] * lagrange::<C::F>(*i, &t)).sum(),
|
||||
verification_shares,
|
||||
}
|
||||
}
|
||||
pub fn params(&self) -> ThresholdParams {
|
||||
self.params
|
||||
}
|
||||
|
||||
pub fn secret_share(&self) -> C::F {
|
||||
self.secret_share
|
||||
}
|
||||
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.group_key
|
||||
}
|
||||
|
||||
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
|
||||
self.verification_shares.clone()
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut serialized = vec![];
|
||||
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
|
||||
serialized.extend(C::ID);
|
||||
serialized.extend(self.params.t.to_be_bytes());
|
||||
serialized.extend(self.params.n.to_be_bytes());
|
||||
serialized.extend(self.params.i.to_be_bytes());
|
||||
serialized.extend(self.secret_share.to_repr().as_ref());
|
||||
for l in 1 ..= self.params.n {
|
||||
serialized.extend(self.verification_shares[&l].to_bytes().as_ref());
|
||||
}
|
||||
serialized
|
||||
}
|
||||
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> Result<ThresholdCore<C>, DkgError> {
|
||||
{
|
||||
let missing = DkgError::InternalError("ThresholdCore serialization is missing its curve");
|
||||
let different = DkgError::InternalError("deserializing ThresholdCore for another curve");
|
||||
|
||||
let mut id_len = [0; 4];
|
||||
reader.read_exact(&mut id_len).map_err(|_| missing)?;
|
||||
if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len {
|
||||
Err(different)?;
|
||||
}
|
||||
|
||||
let mut id = vec![0; C::ID.len()];
|
||||
reader.read_exact(&mut id).map_err(|_| missing)?;
|
||||
if id != C::ID {
|
||||
Err(different)?;
|
||||
}
|
||||
}
|
||||
|
||||
let (t, n, i) = {
|
||||
let mut read_u16 = || {
|
||||
let mut value = [0; 2];
|
||||
reader
|
||||
.read_exact(&mut value)
|
||||
.map_err(|_| DkgError::InternalError("missing participant quantities"))?;
|
||||
Ok(u16::from_be_bytes(value))
|
||||
};
|
||||
(read_u16()?, read_u16()?, read_u16()?)
|
||||
};
|
||||
|
||||
let secret_share =
|
||||
C::read_F(reader).map_err(|_| DkgError::InternalError("invalid secret share"))?;
|
||||
|
||||
let mut verification_shares = HashMap::new();
|
||||
for l in 1 ..= n {
|
||||
verification_shares.insert(
|
||||
l,
|
||||
<C as Ciphersuite>::read_G(reader)
|
||||
.map_err(|_| DkgError::InternalError("invalid verification share"))?,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(ThresholdCore::new(
|
||||
ThresholdParams::new(t, n, i).map_err(|_| DkgError::InternalError("invalid parameters"))?,
|
||||
secret_share,
|
||||
verification_shares,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Threshold keys usable for signing.
|
||||
#[derive(Clone, Debug, Zeroize)]
|
||||
pub struct ThresholdKeys<C: Ciphersuite> {
|
||||
/// Core keys.
|
||||
#[zeroize(skip)]
|
||||
core: Arc<ThresholdCore<C>>,
|
||||
|
||||
/// Offset applied to these keys.
|
||||
pub(crate) offset: Option<C::F>,
|
||||
}
|
||||
|
||||
// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786
|
||||
impl<C: Ciphersuite> Drop for ThresholdKeys<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdKeys<C> {}
|
||||
|
||||
/// View of keys passed to algorithm implementations.
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub struct ThresholdView<C: Ciphersuite> {
|
||||
group_key: C::G,
|
||||
#[zeroize(skip)]
|
||||
included: Vec<u16>,
|
||||
secret_share: C::F,
|
||||
#[zeroize(skip)]
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Drop for ThresholdView<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for ThresholdView<C> {}
|
||||
|
||||
impl<C: Ciphersuite> ThresholdKeys<C> {
|
||||
pub fn new(core: ThresholdCore<C>) -> ThresholdKeys<C> {
|
||||
ThresholdKeys { core: Arc::new(core), offset: None }
|
||||
}
|
||||
|
||||
/// Offset the keys by a given scalar to allow for account and privacy schemes.
|
||||
/// This offset is ephemeral and will not be included when these keys are serialized.
|
||||
/// Keys offset multiple times will form a new offset of their sum.
|
||||
pub fn offset(&self, offset: C::F) -> ThresholdKeys<C> {
|
||||
let mut res = self.clone();
|
||||
// Carry any existing offset
|
||||
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
|
||||
// one-time-key offset
|
||||
res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
|
||||
res
|
||||
}
|
||||
|
||||
/// Returns the current offset in-use for these keys.
|
||||
pub fn current_offset(&self) -> Option<C::F> {
|
||||
self.offset
|
||||
}
|
||||
|
||||
pub fn params(&self) -> ThresholdParams {
|
||||
self.core.params
|
||||
}
|
||||
|
||||
pub fn secret_share(&self) -> C::F {
|
||||
self.core.secret_share
|
||||
}
|
||||
|
||||
/// Returns the group key with any offset applied.
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.core.group_key + (C::generator() * self.offset.unwrap_or_else(C::F::zero))
|
||||
}
|
||||
|
||||
/// Returns all participants' verification shares without any offsetting.
|
||||
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
|
||||
self.core.verification_shares()
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
self.core.serialize()
|
||||
}
|
||||
|
||||
pub fn view(&self, included: &[u16]) -> Result<ThresholdView<C>, DkgError> {
|
||||
if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len())
|
||||
{
|
||||
Err(DkgError::InvalidSigningSet)?;
|
||||
}
|
||||
|
||||
let offset_share = self.offset.unwrap_or_else(C::F::zero) *
|
||||
C::F::from(included.len().try_into().unwrap()).invert().unwrap();
|
||||
let offset_verification_share = C::generator() * offset_share;
|
||||
|
||||
Ok(ThresholdView {
|
||||
group_key: self.group_key(),
|
||||
secret_share: (self.secret_share() * lagrange::<C::F>(self.params().i, included)) +
|
||||
offset_share,
|
||||
verification_shares: self
|
||||
.verification_shares()
|
||||
.iter()
|
||||
.map(|(l, share)| {
|
||||
(*l, (*share * lagrange::<C::F>(*l, included)) + offset_verification_share)
|
||||
})
|
||||
.collect(),
|
||||
included: included.to_vec(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> ThresholdView<C> {
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.group_key
|
||||
}
|
||||
|
||||
pub fn included(&self) -> Vec<u16> {
|
||||
self.included.clone()
|
||||
}
|
||||
|
||||
pub fn secret_share(&self) -> C::F {
|
||||
self.secret_share
|
||||
}
|
||||
|
||||
pub fn verification_share(&self, l: u16) -> C::G {
|
||||
self.verification_shares[&l]
|
||||
}
|
||||
}
|
|
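The ephemeral `offset` functionality documented in the listing above can be illustrated with a short, hypothetical snippet (not in the diff). `keys` is assumed to be a `ThresholdKeys<Ristretto>` built from a completed `ThresholdCore`, and `ciphersuite` is assumed to be built with the `ristretto` feature; the domain-separation tag and account label are placeholders.

```rust
use ciphersuite::{Ciphersuite, Ristretto};
use dkg::ThresholdKeys;

// Derive per-account keys by tweaking the group key; the offset is ephemeral
// and is intentionally not included when the keys are serialized.
fn account_keys(keys: &ThresholdKeys<Ristretto>) -> ThresholdKeys<Ristretto> {
  let offset = Ristretto::hash_to_F(b"example-account-dst", b"account 0");
  let offset_keys = keys.offset(offset);
  // The returned keys report the offset group key.
  debug_assert_eq!(
    offset_keys.group_key(),
    keys.group_key() + (Ristretto::generator() * offset)
  );
  offset_keys
}
```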
@@ -9,47 +9,24 @@ use rand_core::{RngCore, CryptoRng};
|
|||
|
||||
use group::GroupEncoding;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
use dleq::DLEqProof;
|
||||
|
||||
use crate::{curve::Curve, FrostError, FrostCore, FrostKeys, validate_map};
|
||||
use crate::{DkgError, ThresholdCore, ThresholdKeys, validate_map};
|
||||
|
||||
/// Promote a set of keys to another Curve definition.
|
||||
pub trait CurvePromote<C2: Curve> {
|
||||
/// Promote a set of keys to another Ciphersuite definition.
|
||||
pub trait CiphersuitePromote<C2: Ciphersuite> {
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case)]
|
||||
fn _bound_C2(_c2: C2) {
|
||||
panic!()
|
||||
}
|
||||
|
||||
fn promote(self) -> FrostKeys<C2>;
|
||||
fn promote(self) -> ThresholdKeys<C2>;
|
||||
}
|
||||
|
||||
// Implement promotion to different ciphersuites, panicking if the generators are different
|
||||
// Commented due to lack of practical benefit. While it'd have interoperability benefits, those
|
||||
// would have their own DKG process which isn't compatible anyways. This becomes unsafe code
|
||||
// that'll never be used but we're bound to support
|
||||
/*
|
||||
impl<C1: Curve, C2: Curve> CurvePromote<C2> for FrostKeys<C1>
|
||||
where
|
||||
C2: Curve<F = C1::F, G = C1::G>,
|
||||
{
|
||||
fn promote(self) -> FrostKeys<C2> {
|
||||
assert_eq!(C::GENERATOR, C2::GENERATOR);
|
||||
|
||||
FrostKeys {
|
||||
core: Arc::new(FrostCore {
|
||||
params: self.core.params,
|
||||
secret_share: self.core.secret_share,
|
||||
group_key: self.core.group_key,
|
||||
verification_shares: self.core.verification_shares(),
|
||||
}),
|
||||
offset: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
fn transcript<G: GroupEncoding>(key: G, i: u16) -> RecommendedTranscript {
|
||||
let mut transcript = RecommendedTranscript::new(b"FROST Generator Update");
|
||||
transcript.append_message(b"group_key", key.to_bytes().as_ref());
|
||||
|
@ -59,40 +36,49 @@ fn transcript<G: GroupEncoding>(key: G, i: u16) -> RecommendedTranscript {
|
|||
|
||||
/// Proof of valid promotion to another generator.
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct GeneratorProof<C: Curve> {
|
||||
pub struct GeneratorProof<C: Ciphersuite> {
|
||||
share: C::G,
|
||||
proof: DLEqProof<C::G>,
|
||||
}
|
||||
|
||||
impl<C: Curve> GeneratorProof<C> {
|
||||
pub fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
impl<C: Ciphersuite> GeneratorProof<C> {
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.share.to_bytes().as_ref())?;
|
||||
self.proof.serialize(writer)
|
||||
}
|
||||
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
|
||||
Ok(GeneratorProof { share: C::read_G(reader)?, proof: DLEqProof::deserialize(reader)? })
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
|
||||
Ok(GeneratorProof {
|
||||
share: <C as Ciphersuite>::read_G(reader)?,
|
||||
proof: DLEqProof::deserialize(reader)?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
}
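// Editorial sketch of the intended round trip (not part of the library): serialize() is the
// in-memory helper over write(), and read() reverses it. With a hypothetical `proof:
// GeneratorProof<C>`:
//
//   let bytes = proof.serialize();
//   let parsed = GeneratorProof::<C>::read::<&[u8]>(&mut bytes.as_ref()).unwrap();
//
// parsed contains the same share and DLEq proof as the original.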
|
||||
|
||||
/// Promote a set of keys from one curve to another, where the elliptic curve is the same.
|
||||
/// Since the Curve trait additionally specifies a generator, this provides an O(n) way to update
|
||||
/// the generator used with keys. The key generation protocol itself is exponential.
|
||||
pub struct GeneratorPromotion<C1: Curve, C2: Curve> {
|
||||
base: FrostKeys<C1>,
|
||||
/// Since the Ciphersuite trait additionally specifies a generator, this provides an O(n) way to
|
||||
/// update the generator used with keys. The key generation protocol itself is exponential.
|
||||
pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
|
||||
base: ThresholdKeys<C1>,
|
||||
proof: GeneratorProof<C1>,
|
||||
_c2: PhantomData<C2>,
|
||||
}
|
||||
|
||||
impl<C1: Curve, C2: Curve> GeneratorPromotion<C1, C2>
|
||||
impl<C1: Ciphersuite, C2: Ciphersuite> GeneratorPromotion<C1, C2>
|
||||
where
|
||||
C2: Curve<F = C1::F, G = C1::G>,
|
||||
C2: Ciphersuite<F = C1::F, G = C1::G>,
|
||||
{
|
||||
/// Begin promoting keys from one curve to another. Returns a proof this share was properly
|
||||
/// promoted.
|
||||
pub fn promote<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
base: FrostKeys<C1>,
|
||||
base: ThresholdKeys<C1>,
|
||||
) -> (GeneratorPromotion<C1, C2>, GeneratorProof<C1>) {
|
||||
// Do a DLEqProof for the new generator
|
||||
let proof = GeneratorProof {
|
||||
|
@ -112,7 +98,7 @@ where
|
|||
pub fn complete(
|
||||
self,
|
||||
proofs: &HashMap<u16, GeneratorProof<C1>>,
|
||||
) -> Result<FrostKeys<C2>, FrostError> {
|
||||
) -> Result<ThresholdKeys<C2>, DkgError> {
|
||||
let params = self.base.params();
|
||||
validate_map(proofs, &(1 ..= params.n).collect::<Vec<_>>(), params.i)?;
|
||||
|
||||
|
@ -129,12 +115,12 @@ where
|
|||
&[C1::generator(), C2::generator()],
|
||||
&[original_shares[&i], proof.share],
|
||||
)
|
||||
.map_err(|_| FrostError::InvalidProofOfKnowledge(i))?;
|
||||
.map_err(|_| DkgError::InvalidProofOfKnowledge(i))?;
|
||||
verification_shares.insert(i, proof.share);
|
||||
}
|
||||
|
||||
Ok(FrostKeys {
|
||||
core: Arc::new(FrostCore::new(params, self.base.secret_share(), verification_shares)),
|
||||
Ok(ThresholdKeys {
|
||||
core: Arc::new(ThresholdCore::new(params, self.base.secret_share(), verification_shares)),
|
||||
offset: None,
|
||||
})
|
||||
}
|
81
crypto/dkg/src/tests/frost.rs
Normal file
|
@ -0,0 +1,81 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use crate::{
|
||||
Ciphersuite, ThresholdParams, ThresholdCore,
|
||||
frost::{SecretShare, Commitments, KeyGenMachine},
|
||||
tests::{THRESHOLD, PARTICIPANTS, clone_without},
|
||||
};
|
||||
|
||||
/// Fully perform the FROST key generation algorithm.
|
||||
pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
||||
rng: &mut R,
|
||||
) -> HashMap<u16, ThresholdCore<C>> {
|
||||
let mut machines = HashMap::new();
|
||||
let mut commitments = HashMap::new();
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
let machine = KeyGenMachine::<C>::new(
|
||||
ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(),
|
||||
"DKG Test Key Generation".to_string(),
|
||||
);
|
||||
let (machine, these_commitments) = machine.generate_coefficients(rng);
|
||||
machines.insert(i, machine);
|
||||
|
||||
commitments.insert(
|
||||
i,
|
||||
Commitments::read::<&[u8]>(
|
||||
&mut these_commitments.serialize().as_ref(),
|
||||
ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 },
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
|
||||
let mut secret_shares = HashMap::new();
|
||||
let mut machines = machines
|
||||
.drain()
|
||||
.map(|(l, machine)| {
|
||||
let (machine, mut shares) =
|
||||
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
|
||||
let shares = shares
|
||||
.drain()
|
||||
.map(|(l, share)| {
|
||||
(l, SecretShare::<C::F>::read::<&[u8]>(&mut share.serialize().as_ref()).unwrap())
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
secret_shares.insert(l, shares);
|
||||
(l, machine)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut verification_shares = None;
|
||||
let mut group_key = None;
|
||||
machines
|
||||
.drain()
|
||||
.map(|(i, machine)| {
|
||||
let mut our_secret_shares = HashMap::new();
|
||||
for (l, shares) in &secret_shares {
|
||||
if i == *l {
|
||||
continue;
|
||||
}
|
||||
our_secret_shares.insert(*l, shares[&i].clone());
|
||||
}
|
||||
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
|
||||
|
||||
// Verify the verification_shares are agreed upon
|
||||
if verification_shares.is_none() {
|
||||
verification_shares = Some(these_keys.verification_shares());
|
||||
}
|
||||
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
|
||||
|
||||
// Verify the group keys are agreed upon
|
||||
if group_key.is_none() {
|
||||
group_key = Some(these_keys.group_key());
|
||||
}
|
||||
assert_eq!(group_key.unwrap(), these_keys.group_key());
|
||||
|
||||
(i, these_keys)
|
||||
})
|
||||
.collect::<HashMap<_, _>>()
|
||||
}
|
69
crypto/dkg/src/tests/mod.rs
Normal file
|
@ -0,0 +1,69 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use group::ff::Field;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use crate::{ThresholdCore, ThresholdKeys, lagrange};
|
||||
|
||||
/// FROST generation test.
|
||||
pub mod frost;
|
||||
use frost::frost_gen;
|
||||
|
||||
// Promotion test.
|
||||
mod promote;
|
||||
use promote::test_generator_promotion;
|
||||
|
||||
/// Constant number of participants to use when testing.
|
||||
pub const PARTICIPANTS: u16 = 5;
|
||||
/// Constant threshold of participants to use when signing.
|
||||
pub const THRESHOLD: u16 = ((PARTICIPANTS / 3) * 2) + 1;
|
||||
|
||||
/// Clone a map without a specific value.
|
||||
pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
|
||||
map: &HashMap<K, V>,
|
||||
without: &K,
|
||||
) -> HashMap<K, V> {
|
||||
let mut res = map.clone();
|
||||
res.remove(without).unwrap();
|
||||
res
|
||||
}
|
||||
|
||||
/// Recover the secret from a collection of keys.
|
||||
pub fn recover_key<C: Ciphersuite>(keys: &HashMap<u16, ThresholdKeys<C>>) -> C::F {
|
||||
let first = keys.values().next().expect("no keys provided");
|
||||
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
|
||||
let included = keys.keys().cloned().collect::<Vec<_>>();
|
||||
|
||||
let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| {
|
||||
accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
|
||||
});
|
||||
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
|
||||
group_private
|
||||
}
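// Worked example of the interpolation used above (editorial comment only): for included = [1, 2],
// lagrange(1, included) = 2 / (2 - 1) = 2 and lagrange(2, included) = 1 / (1 - 2) = -1, so
// 2 * f(1) - f(2) = f(0) for any degree-1 polynomial f, recovering the group secret from two
// shares.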
|
||||
|
||||
/// Generate threshold keys for tests.
|
||||
pub fn key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
||||
rng: &mut R,
|
||||
) -> HashMap<u16, ThresholdKeys<C>> {
|
||||
let res = frost_gen(rng)
|
||||
.drain()
|
||||
.map(|(i, core)| {
|
||||
assert_eq!(
|
||||
&ThresholdCore::<C>::deserialize::<&[u8]>(&mut core.serialize().as_ref()).unwrap(),
|
||||
&core
|
||||
);
|
||||
(i, ThresholdKeys::new(core))
|
||||
})
|
||||
.collect();
|
||||
assert_eq!(C::generator() * recover_key(&res), res[&1].group_key());
|
||||
res
|
||||
}
|
||||
|
||||
/// Run the test suite on a ciphersuite.
|
||||
pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
|
||||
key_gen::<_, C>(rng);
|
||||
test_generator_promotion::<_, C>(rng);
|
||||
}
|
60
crypto/dkg/src/tests/promote.rs
Normal file
|
@ -0,0 +1,60 @@
|
|||
use std::{marker::PhantomData, collections::HashMap};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use group::Group;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use crate::{
|
||||
promote::{GeneratorPromotion, GeneratorProof},
|
||||
tests::{clone_without, key_gen, recover_key},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
struct AltGenerator<C: Ciphersuite> {
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
|
||||
type F = C::F;
|
||||
type G = C::G;
|
||||
type H = C::H;
|
||||
|
||||
const ID: &'static [u8] = b"Alternate Ciphersuite";
|
||||
|
||||
fn generator() -> Self::G {
|
||||
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
<C as Ciphersuite>::hash_to_F(dst, data)
|
||||
}
|
||||
}
|
||||
|
||||
// Test promotion of threshold keys to another generator
|
||||
pub(crate) fn test_generator_promotion<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
|
||||
let keys = key_gen::<_, C>(&mut *rng);
|
||||
|
||||
let mut promotions = HashMap::new();
|
||||
let mut proofs = HashMap::new();
|
||||
for (i, keys) in &keys {
|
||||
let (promotion, proof) =
|
||||
GeneratorPromotion::<_, AltGenerator<C>>::promote(&mut *rng, keys.clone());
|
||||
promotions.insert(*i, promotion);
|
||||
proofs.insert(*i, GeneratorProof::<C>::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap());
|
||||
}
|
||||
|
||||
let new_group_key = AltGenerator::<C>::generator() * recover_key(&keys);
|
||||
for (i, promoting) in promotions.drain() {
|
||||
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
|
||||
assert_eq!(keys[&i].params(), promoted.params());
|
||||
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
|
||||
assert_eq!(new_group_key, promoted.group_key());
|
||||
for (l, verification_share) in promoted.verification_shares() {
|
||||
assert_eq!(AltGenerator::<C>::generator() * keys[&l].secret_share(), verification_share);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,6 +1,12 @@
|
|||
#![no_std]
|
||||
|
||||
mod backend;
|
||||
|
||||
pub mod scalar;
|
||||
pub use scalar::Scalar;
|
||||
|
||||
pub mod field;
|
||||
pub use field::FieldElement;
|
||||
|
||||
pub mod point;
|
||||
pub use point::Point;
|
||||
|
|
|
@ -22,40 +22,37 @@ subtle = "2"
|
|||
|
||||
hex = "0.4"
|
||||
|
||||
sha2 = { version = "0.10", optional = true }
|
||||
sha3 = { version = "0.10", optional = true }
|
||||
digest = "0.10"
|
||||
|
||||
hkdf = "0.12"
|
||||
chacha20 = { version = "0.9", features = ["zeroize"] }
|
||||
|
||||
ff = "0.12"
|
||||
group = "0.12"
|
||||
|
||||
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true }
|
||||
|
||||
elliptic-curve = { version = "0.12", features = ["hash2curve"], optional = true }
|
||||
p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
|
||||
k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
|
||||
|
||||
minimal-ed448 = { path = "../ed448", version = "0.1", optional = true }
|
||||
|
||||
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] }
|
||||
|
||||
transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"], version = "^0.1.3" }
|
||||
|
||||
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
|
||||
|
||||
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.1.0" }
|
||||
dleq = { path = "../dleq", version = "^0.1.2", features = ["serialize"] }
|
||||
|
||||
dkg = { path = "../dkg", version = "0.1.0" }
|
||||
|
||||
[dev-dependencies]
|
||||
sha2 = "0.10"
|
||||
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" }
|
||||
serde_json = "1"
|
||||
|
||||
[features]
|
||||
dalek = ["sha2", "dalek-ff-group"]
|
||||
ed25519 = ["dalek"]
|
||||
ristretto = ["dalek"]
|
||||
ed25519 = ["dalek-ff-group", "ciphersuite/ed25519"]
|
||||
ristretto = ["dalek-ff-group", "ciphersuite/ristretto"]
|
||||
|
||||
kp256 = ["sha2", "elliptic-curve"]
|
||||
p256 = ["kp256", "dep:p256"]
|
||||
secp256k1 = ["kp256", "k256"]
|
||||
secp256k1 = ["ciphersuite/secp256k1"]
|
||||
p256 = ["ciphersuite/p256"]
|
||||
|
||||
ed448 = ["sha3", "minimal-ed448"]
|
||||
ed448 = ["minimal-ed448", "ciphersuite/ed448"]
|
||||
|
||||
tests = []
|
||||
tests = ["dkg/tests"]
|
||||
|
|
|
@ -10,4 +10,4 @@ integrating with existing systems.
|
|||
|
||||
This library offers ciphersuites compatible with the
|
||||
[IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version
|
||||
10 is supported.
|
||||
11 is supported.
|
||||
|
|
|
@ -3,27 +3,25 @@ use std::io::{self, Read, Write};
|
|||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
use crate::{Curve, FrostError, FrostView, schnorr};
|
||||
use crate::{Curve, FrostError, ThresholdView};
|
||||
pub use schnorr::SchnorrSignature;
|
||||
|
||||
/// Serialize an addendum to a writer.
|
||||
pub trait AddendumSerialize {
|
||||
/// Write an addendum to a writer.
|
||||
pub trait WriteAddendum {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
|
||||
}
|
||||
|
||||
impl AddendumSerialize for () {
|
||||
impl WriteAddendum for () {
|
||||
fn write<W: Write>(&self, _: &mut W) -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
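// Hypothetical example (not part of this crate) of a non-trivial addendum: an algorithm which
// broadcasts one extra group element per signer could implement WriteAddendum as below, assuming
// group::GroupEncoding is in scope. To satisfy the Addendum alias which follows, it'd also need
// Clone, PartialEq, and Debug.
//
//   struct PointAddendum<C: Curve>(C::G);
//   impl<C: Curve> WriteAddendum for PointAddendum<C> {
//     fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
//       writer.write_all(self.0.to_bytes().as_ref())
//     }
//   }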
|
||||
|
||||
/// Trait alias for the requirements to be used as an addendum.
|
||||
pub trait Addendum: Clone + PartialEq + Debug + Zeroize + AddendumSerialize {}
|
||||
impl<A: Clone + PartialEq + Debug + Zeroize + AddendumSerialize> Addendum for A {}
|
||||
pub trait Addendum: Clone + PartialEq + Debug + WriteAddendum {}
|
||||
impl<A: Clone + PartialEq + Debug + WriteAddendum> Addendum for A {}
|
||||
|
||||
/// Algorithm trait usable by the FROST signing machine to produce signatures.
|
||||
pub trait Algorithm<C: Curve>: Clone {
|
||||
|
@ -46,7 +44,7 @@ pub trait Algorithm<C: Curve>: Clone {
|
|||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
params: &FrostView<C>,
|
||||
params: &ThresholdView<C>,
|
||||
) -> Self::Addendum;
|
||||
|
||||
/// Read an addendum from a reader.
|
||||
|
@ -55,7 +53,7 @@ pub trait Algorithm<C: Curve>: Clone {
|
|||
/// Process the addendum for the specified participant. Guaranteed to be called in order.
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
params: &FrostView<C>,
|
||||
params: &ThresholdView<C>,
|
||||
l: u16,
|
||||
reader: Self::Addendum,
|
||||
) -> Result<(), FrostError>;
|
||||
|
@ -66,7 +64,7 @@ pub trait Algorithm<C: Curve>: Clone {
|
|||
/// The nonce will already have been processed into the combined form d + (e * p).
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
params: &FrostView<C>,
|
||||
params: &ThresholdView<C>,
|
||||
nonce_sums: &[Vec<C::G>],
|
||||
nonces: &[C::F],
|
||||
msg: &[u8],
|
||||
|
@ -149,44 +147,36 @@ impl<C: Curve, H: Hram<C>> Algorithm<C> for Schnorr<C, H> {
|
|||
vec![vec![C::generator()]]
|
||||
}
|
||||
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &FrostView<C>) {}
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(&mut self, _: &mut R, _: &ThresholdView<C>) {}
|
||||
|
||||
fn read_addendum<R: Read>(&self, _: &mut R) -> io::Result<Self::Addendum> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn process_addendum(&mut self, _: &FrostView<C>, _: u16, _: ()) -> Result<(), FrostError> {
|
||||
fn process_addendum(&mut self, _: &ThresholdView<C>, _: u16, _: ()) -> Result<(), FrostError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
params: &FrostView<C>,
|
||||
params: &ThresholdView<C>,
|
||||
nonce_sums: &[Vec<C::G>],
|
||||
nonces: &[C::F],
|
||||
msg: &[u8],
|
||||
) -> C::F {
|
||||
let c = H::hram(&nonce_sums[0][0], ¶ms.group_key(), msg);
|
||||
self.c = Some(c);
|
||||
schnorr::sign::<C>(params.secret_share(), nonces[0], c).s
|
||||
SchnorrSignature::<C>::sign(params.secret_share(), nonces[0], c).s
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn verify(&self, group_key: C::G, nonces: &[Vec<C::G>], sum: C::F) -> Option<Self::Signature> {
|
||||
let sig = SchnorrSignature { R: nonces[0][0], s: sum };
|
||||
if schnorr::verify::<C>(group_key, self.c.unwrap(), &sig) {
|
||||
Some(sig)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
Some(sig).filter(|sig| sig.verify(group_key, self.c.unwrap()))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn verify_share(&self, verification_share: C::G, nonces: &[Vec<C::G>], share: C::F) -> bool {
|
||||
schnorr::verify::<C>(
|
||||
verification_share,
|
||||
self.c.unwrap(),
|
||||
&SchnorrSignature { R: nonces[0][0], s: share },
|
||||
)
|
||||
SchnorrSignature::<C> { R: nonces[0][0], s: share }.verify(verification_share, self.c.unwrap())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
use zeroize::Zeroize;
|
||||
use digest::Digest;
|
||||
|
||||
use sha2::{Digest, Sha512};
|
||||
|
||||
use group::Group;
|
||||
use dalek_ff_group::Scalar;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use crate::{curve::Curve, algorithm::Hram};
|
||||
|
||||
macro_rules! dalek_curve {
|
||||
|
@ -13,49 +12,22 @@ macro_rules! dalek_curve {
|
|||
|
||||
$Curve: ident,
|
||||
$Hram: ident,
|
||||
$Point: ident,
|
||||
|
||||
$ID: literal,
|
||||
$CONTEXT: literal,
|
||||
$chal: literal,
|
||||
$chal: literal
|
||||
) => {
|
||||
use dalek_ff_group::$Point;
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct $Curve;
|
||||
impl $Curve {
|
||||
fn hash(dst: &[u8], data: &[u8]) -> Sha512 {
|
||||
Sha512::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat())
|
||||
}
|
||||
}
|
||||
pub use ciphersuite::$Curve;
|
||||
|
||||
impl Curve for $Curve {
|
||||
type F = Scalar;
|
||||
type G = $Point;
|
||||
|
||||
const ID: &'static [u8] = $ID;
|
||||
|
||||
fn generator() -> Self::G {
|
||||
$Point::generator()
|
||||
}
|
||||
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
|
||||
Self::hash(dst, data).finalize().to_vec()
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
Scalar::from_hash(Self::hash(dst, data))
|
||||
}
|
||||
const CONTEXT: &'static [u8] = $CONTEXT;
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct $Hram;
|
||||
impl Hram<$Curve> for $Hram {
|
||||
#[allow(non_snake_case)]
|
||||
fn hram(R: &$Point, A: &$Point, m: &[u8]) -> Scalar {
|
||||
let mut hash = Sha512::new();
|
||||
fn hram(R: &<$Curve as Ciphersuite>::G, A: &<$Curve as Ciphersuite>::G, m: &[u8]) -> Scalar {
|
||||
let mut hash = <$Curve as Ciphersuite>::H::new();
|
||||
if $chal.len() != 0 {
|
||||
hash.update(&[$CONTEXT.as_ref(), $chal].concat());
|
||||
}
|
||||
|
@ -67,24 +39,8 @@ macro_rules! dalek_curve {
|
|||
};
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "ristretto"))]
|
||||
dalek_curve!(
|
||||
"ristretto",
|
||||
Ristretto,
|
||||
IetfRistrettoHram,
|
||||
RistrettoPoint,
|
||||
b"ristretto",
|
||||
b"FROST-RISTRETTO255-SHA512-v11",
|
||||
b"chal",
|
||||
);
|
||||
#[cfg(feature = "ristretto")]
|
||||
dalek_curve!("ristretto", Ristretto, IetfRistrettoHram, b"FROST-RISTRETTO255-SHA512-v11", b"chal");
|
||||
|
||||
#[cfg(feature = "ed25519")]
|
||||
dalek_curve!(
|
||||
"ed25519",
|
||||
Ed25519,
|
||||
IetfEd25519Hram,
|
||||
EdwardsPoint,
|
||||
b"edwards25519",
|
||||
b"FROST-ED25519-SHA512-v11",
|
||||
b"",
|
||||
);
|
||||
dalek_curve!("ed25519", Ed25519, IetfEd25519Hram, b"FROST-ED25519-SHA512-v11", b"");
|
||||
|
|
|
@ -1,41 +1,17 @@
|
|||
use zeroize::Zeroize;
|
||||
use digest::Digest;
|
||||
|
||||
use sha3::{digest::ExtendableOutput, Shake256};
|
||||
use group::GroupEncoding;
|
||||
|
||||
use group::{Group, GroupEncoding};
|
||||
use minimal_ed448::{scalar::Scalar, point::Point};
|
||||
use minimal_ed448::{Scalar, Point};
|
||||
|
||||
pub use ciphersuite::{Shake256_114, Ed448};
|
||||
|
||||
use crate::{curve::Curve, algorithm::Hram};
|
||||
|
||||
const CONTEXT: &[u8] = b"FROST-ED448-SHAKE256-v11";
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Ed448;
|
||||
impl Ed448 {
|
||||
fn hash(prefix: &[u8], context: &[u8], dst: &[u8], data: &[u8]) -> [u8; 114] {
|
||||
let mut res = [0; 114];
|
||||
Shake256::digest_xof(&[prefix, context, dst, data].concat(), &mut res);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
impl Curve for Ed448 {
|
||||
type F = Scalar;
|
||||
type G = Point;
|
||||
|
||||
const ID: &'static [u8] = b"ed448";
|
||||
|
||||
fn generator() -> Self::G {
|
||||
Point::generator()
|
||||
}
|
||||
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
|
||||
Self::hash(b"", CONTEXT, dst, data).as_ref().to_vec()
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
Scalar::wide_reduce(Self::hash(b"", CONTEXT, dst, data))
|
||||
}
|
||||
const CONTEXT: &'static [u8] = CONTEXT;
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
@ -43,12 +19,19 @@ pub struct Ietf8032Ed448Hram;
|
|||
impl Ietf8032Ed448Hram {
|
||||
#[allow(non_snake_case)]
|
||||
pub fn hram(context: &[u8], R: &Point, A: &Point, m: &[u8]) -> Scalar {
|
||||
Scalar::wide_reduce(Ed448::hash(
|
||||
&[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(),
|
||||
context,
|
||||
b"",
|
||||
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
|
||||
))
|
||||
Scalar::wide_reduce(
|
||||
Shake256_114::digest(
|
||||
&[
|
||||
&[b"SigEd448".as_ref(), &[0, u8::try_from(context.len()).unwrap()]].concat(),
|
||||
context,
|
||||
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
|
||||
]
|
||||
.concat(),
|
||||
)
|
||||
.as_ref()
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
)
|
||||
}
|
||||
}
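// Editorial note: the prefix built above, b"SigEd448" || 0 || len(context), matches the
// dom4(phflag = 0, context) domain separator from RFC 8032, which is presumably why this HRAM is
// named Ietf8032Ed448Hram; with a matching context, its challenges line up with RFC 8032 Ed448.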
|
||||
|
||||
|
|
|
@ -1,17 +1,6 @@
|
|||
use zeroize::Zeroize;
|
||||
use group::GroupEncoding;
|
||||
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use elliptic_curve::{
|
||||
generic_array::GenericArray,
|
||||
bigint::{Encoding, U384},
|
||||
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
|
||||
};
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use crate::{curve::Curve, algorithm::Hram};
|
||||
|
||||
|
@ -19,87 +8,37 @@ macro_rules! kp_curve {
|
|||
(
|
||||
$feature: literal,
|
||||
|
||||
$lib: ident,
|
||||
$Curve: ident,
|
||||
$Hram: ident,
|
||||
|
||||
$ID: literal,
|
||||
$CONTEXT: literal
|
||||
) => {
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct $Curve;
|
||||
impl $Curve {
|
||||
fn hash(dst: &[u8], data: &[u8]) -> Sha256 {
|
||||
Sha256::new().chain_update(&[$CONTEXT.as_ref(), dst, data].concat())
|
||||
}
|
||||
}
|
||||
pub use ciphersuite::$Curve;
|
||||
|
||||
impl Curve for $Curve {
|
||||
type F = $lib::Scalar;
|
||||
type G = $lib::ProjectivePoint;
|
||||
|
||||
const ID: &'static [u8] = $ID;
|
||||
|
||||
fn generator() -> Self::G {
|
||||
$lib::ProjectivePoint::GENERATOR
|
||||
}
|
||||
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
|
||||
Self::hash(dst, data).finalize().to_vec()
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
|
||||
let mut dst = &[$CONTEXT, dst].concat();
|
||||
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat()).to_vec();
|
||||
if dst.len() > 255 {
|
||||
dst = &oversize;
|
||||
}
|
||||
|
||||
// While one of these two libraries does support directly hashing to the Scalar field, the
|
||||
// other doesn't. While that's probably an oversight, this is a universally working method
|
||||
let mut modulus = vec![0; 16];
|
||||
modulus.extend((Self::F::zero() - Self::F::one()).to_bytes());
|
||||
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
|
||||
|
||||
let mut unreduced = U384::from_be_bytes({
|
||||
let mut bytes = [0; 48];
|
||||
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
|
||||
bytes
|
||||
})
|
||||
.reduce(&modulus)
|
||||
.unwrap()
|
||||
.to_be_bytes();
|
||||
|
||||
let mut array = *GenericArray::from_slice(&unreduced[16 ..]);
|
||||
let res = $lib::Scalar::from_repr(array).unwrap();
|
||||
unreduced.zeroize();
|
||||
array.zeroize();
|
||||
res
|
||||
}
|
||||
const CONTEXT: &'static [u8] = $CONTEXT;
|
||||
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = $feature)))]
|
||||
#[derive(Clone)]
|
||||
pub struct $Hram;
|
||||
impl Hram<$Curve> for $Hram {
|
||||
#[allow(non_snake_case)]
|
||||
fn hram(R: &$lib::ProjectivePoint, A: &$lib::ProjectivePoint, m: &[u8]) -> $lib::Scalar {
|
||||
$Curve::hash_to_F(b"chal", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat())
|
||||
fn hram(
|
||||
R: &<$Curve as Ciphersuite>::G,
|
||||
A: &<$Curve as Ciphersuite>::G,
|
||||
m: &[u8],
|
||||
) -> <$Curve as Ciphersuite>::F {
|
||||
<$Curve as Curve>::hash_to_F(
|
||||
b"chal",
|
||||
&[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat(),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "p256")]
|
||||
kp_curve!("p256", p256, P256, IetfP256Hram, b"P-256", b"FROST-P256-SHA256-v11");
|
||||
kp_curve!("p256", P256, IetfP256Hram, b"FROST-P256-SHA256-v11");
|
||||
|
||||
#[cfg(feature = "secp256k1")]
|
||||
kp_curve!(
|
||||
"secp256k1",
|
||||
k256,
|
||||
Secp256k1,
|
||||
IetfSecp256k1Hram,
|
||||
b"secp256k1",
|
||||
b"FROST-secp256k1-SHA256-v11"
|
||||
);
|
||||
kp_curve!("secp256k1", Secp256k1, IetfSecp256k1Hram, b"FROST-secp256k1-SHA256-v11");
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use core::fmt::Debug;
|
||||
use std::io::{self, Read};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
@ -6,17 +5,23 @@ use rand_core::{RngCore, CryptoRng};
|
|||
use zeroize::Zeroize;
|
||||
use subtle::ConstantTimeEq;
|
||||
|
||||
use ff::{Field, PrimeField, PrimeFieldBits};
|
||||
use group::{Group, GroupOps, GroupEncoding, prime::PrimeGroup};
|
||||
use digest::Digest;
|
||||
|
||||
#[cfg(any(test, feature = "dalek"))]
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
Group,
|
||||
};
|
||||
|
||||
pub use ciphersuite::Ciphersuite;
|
||||
|
||||
#[cfg(any(feature = "ristretto", feature = "ed25519"))]
|
||||
mod dalek;
|
||||
#[cfg(any(test, feature = "ristretto"))]
|
||||
#[cfg(feature = "ristretto")]
|
||||
pub use dalek::{Ristretto, IetfRistrettoHram};
|
||||
#[cfg(feature = "ed25519")]
|
||||
pub use dalek::{Ed25519, IetfEd25519Hram};
|
||||
|
||||
#[cfg(feature = "kp256")]
|
||||
#[cfg(any(feature = "secp256k1", feature = "p256"))]
|
||||
mod kp256;
|
||||
#[cfg(feature = "secp256k1")]
|
||||
pub use kp256::{Secp256k1, IetfSecp256k1Hram};
|
||||
|
@ -28,33 +33,23 @@ mod ed448;
|
|||
#[cfg(feature = "ed448")]
|
||||
pub use ed448::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram};
|
||||
|
||||
/// Unified trait to manage an elliptic curve.
|
||||
// This should be moved into its own crate if the need for generic cryptography over ff/group
|
||||
// continues, which is the exact reason ff/group exists (to provide a generic interface)
|
||||
// elliptic-curve exists, yet it doesn't really serve the same role, nor does it use &[u8]/Vec<u8>
|
||||
// It uses GenericArray which will hopefully be deprecated as Rust evolves and doesn't offer enough
|
||||
// advantages in the modern day to be worth the hassle -- Kayaba
|
||||
pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
||||
/// Scalar field element type.
|
||||
// This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
|
||||
type F: PrimeField + PrimeFieldBits + Zeroize;
|
||||
/// Group element type.
|
||||
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq;
|
||||
|
||||
/// ID for this curve.
|
||||
const ID: &'static [u8];
|
||||
|
||||
/// Generator for the group.
|
||||
// While group does provide this in its API, privacy coins may want to use a custom basepoint
|
||||
fn generator() -> Self::G;
|
||||
/// FROST Ciphersuite, except for the signing algorithm specific H2, making this solely the curve,
|
||||
/// its associated hash function, and the functions derived from it.
|
||||
pub trait Curve: Ciphersuite {
|
||||
/// Context string for this curve.
|
||||
const CONTEXT: &'static [u8];
|
||||
|
||||
/// Hash the given dst and data to a byte vector. Used to instantiate H4 and H5.
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8>;
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
|
||||
Self::H::digest(&[Self::CONTEXT, dst, data].concat()).as_ref().to_vec()
|
||||
}
|
||||
|
||||
/// Field element from hash. Used during key gen and by other crates under Serai as a general
|
||||
/// utility. Used to instantiate H1 and H3.
|
||||
#[allow(non_snake_case)]
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
|
||||
<Self as Ciphersuite>::hash_to_F(&[Self::CONTEXT, dst].concat(), msg)
|
||||
}
|
||||
|
||||
/// Hash the message for the binding factor. H4 from the IETF draft.
|
||||
fn hash_msg(msg: &[u8]) -> Vec<u8> {
|
||||
|
@ -68,17 +63,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
|||
|
||||
/// Hash the commitments and message to calculate the binding factor. H1 from the IETF draft.
|
||||
fn hash_binding_factor(binding: &[u8]) -> Self::F {
|
||||
Self::hash_to_F(b"rho", binding)
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn random_F<R: RngCore + CryptoRng>(rng: &mut R) -> Self::F {
|
||||
let mut res;
|
||||
while {
|
||||
res = Self::F::random(&mut *rng);
|
||||
res.ct_eq(&Self::F::zero()).into()
|
||||
} {}
|
||||
res
|
||||
<Self as Curve>::hash_to_F(b"rho", binding)
|
||||
}
|
||||
|
||||
/// Securely generate a random nonce. H3 from the IETF draft.
|
||||
|
@ -92,7 +77,7 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
|||
let mut res;
|
||||
while {
|
||||
seed.extend(repr.as_ref());
|
||||
res = Self::hash_to_F(b"nonce", &seed);
|
||||
res = <Self as Curve>::hash_to_F(b"nonce", &seed);
|
||||
res.ct_eq(&Self::F::zero()).into()
|
||||
} {
|
||||
rng.fill_bytes(&mut seed);
|
||||
|
@ -106,40 +91,11 @@ pub trait Curve: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
|||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn F_len() -> usize {
|
||||
<Self::F as PrimeField>::Repr::default().as_ref().len()
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn G_len() -> usize {
|
||||
<Self::G as GroupEncoding>::Repr::default().as_ref().len()
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn read_F<R: Read>(r: &mut R) -> io::Result<Self::F> {
|
||||
let mut encoding = <Self::F as PrimeField>::Repr::default();
|
||||
r.read_exact(encoding.as_mut())?;
|
||||
|
||||
// ff mandates this is canonical
|
||||
let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
|
||||
for b in encoding.as_mut() {
|
||||
b.zeroize();
|
||||
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
|
||||
let res = <Self as Ciphersuite>::read_G(reader)?;
|
||||
if res.is_identity().into() {
|
||||
Err(io::Error::new(io::ErrorKind::Other, "identity point"))?;
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn read_G<R: Read>(r: &mut R) -> io::Result<Self::G> {
|
||||
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
|
||||
r.read_exact(encoding.as_mut())?;
|
||||
|
||||
let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
|
||||
// Ban the identity, per the FROST spec, and non-canonical points
|
||||
if (point.is_identity().into()) || (point.to_bytes().as_ref() != encoding.as_ref()) {
|
||||
Err(io::Error::new(io::ErrorKind::Other, "non-canonical or identity point"))?;
|
||||
}
|
||||
Ok(point)
|
||||
Ok(res)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,363 +0,0 @@
|
|||
use std::{
|
||||
marker::PhantomData,
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use multiexp::{multiexp_vartime, BatchVerifier};
|
||||
|
||||
use crate::{
|
||||
curve::Curve,
|
||||
FrostError, FrostParams, FrostCore,
|
||||
schnorr::{self, SchnorrSignature},
|
||||
validate_map,
|
||||
};
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn challenge<C: Curve>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
|
||||
const DST: &[u8] = b"FROST Schnorr Proof of Knowledge";
|
||||
|
||||
// Uses hash_msg to get a fixed size value out of the context string
|
||||
let mut transcript = C::hash_msg(context.as_bytes());
|
||||
transcript.extend(l.to_be_bytes());
|
||||
transcript.extend(R);
|
||||
transcript.extend(Am);
|
||||
C::hash_to_F(DST, &transcript)
|
||||
}
|
||||
|
||||
/// Commitments message to be broadcast to all other parties.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct Commitments<C: Curve>(Vec<C::G>, Vec<u8>, SchnorrSignature<C>);
|
||||
impl<C: Curve> Commitments<C> {
|
||||
pub fn read<R: Read>(reader: &mut R, params: FrostParams) -> io::Result<Self> {
|
||||
let mut commitments = Vec::with_capacity(params.t().into());
|
||||
let mut serialized = Vec::with_capacity(usize::from(params.t()) * C::G_len());
|
||||
for _ in 0 .. params.t() {
|
||||
let mut buf = <C::G as GroupEncoding>::Repr::default();
|
||||
reader.read_exact(buf.as_mut())?;
|
||||
|
||||
commitments.push(C::read_G(&mut buf.as_ref())?);
|
||||
serialized.extend(buf.as_ref());
|
||||
}
|
||||
|
||||
Ok(Commitments(commitments, serialized, SchnorrSignature::read(reader)?))
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(&self.1)?;
|
||||
self.2.write(writer)
|
||||
}
|
||||
}
|
||||
|
||||
// Implements steps 1 through 3 of round 1 of FROST DKG. Returns the coefficients, commitments, and
|
||||
// the commitments to be broadcasted over an authenticated channel to all parties
|
||||
fn generate_key_r1<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
) -> (Vec<C::F>, Vec<C::G>, Commitments<C>) {
|
||||
let t = usize::from(params.t);
|
||||
let mut coefficients = Vec::with_capacity(t);
|
||||
let mut commitments = Vec::with_capacity(t);
|
||||
let mut serialized = Vec::with_capacity(t * C::G_len());
|
||||
|
||||
for i in 0 .. t {
|
||||
// Step 1: Generate t random values to form a polynomial with
|
||||
coefficients.push(C::random_F(&mut *rng));
|
||||
// Step 3: Generate public commitments
|
||||
commitments.push(C::generator() * coefficients[i]);
|
||||
serialized.extend(commitments[i].to_bytes().as_ref());
|
||||
}
|
||||
|
||||
// Step 2: Provide a proof of knowledge
|
||||
let mut r = C::random_F(rng);
|
||||
let sig = schnorr::sign::<C>(
|
||||
coefficients[0],
|
||||
// This could be deterministic as the PoK is a singleton never opened up to cooperative
|
||||
// discussion
|
||||
// There's no reason to spend the time and effort to make this deterministic besides a
|
||||
// general obsession with canonicity and determinism though
|
||||
r,
|
||||
challenge::<C>(context, params.i(), (C::generator() * r).to_bytes().as_ref(), &serialized),
|
||||
);
|
||||
r.zeroize();
|
||||
|
||||
// Step 4: Broadcast
|
||||
(coefficients, commitments.clone(), Commitments(commitments, serialized, sig))
|
||||
}
|
||||
|
||||
// Verify the received data from the first round of key generation
|
||||
fn verify_r1<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
our_commitments: Vec<C::G>,
|
||||
mut msgs: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<HashMap<u16, Vec<C::G>>, FrostError> {
|
||||
validate_map(&msgs, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
let mut signatures = Vec::with_capacity(usize::from(params.n() - 1));
|
||||
let mut commitments = msgs
|
||||
.drain()
|
||||
.map(|(l, msg)| {
|
||||
// Step 5: Validate each proof of knowledge
|
||||
// This is solely the prep step for the later batch verification
|
||||
signatures.push((
|
||||
l,
|
||||
msg.0[0],
|
||||
challenge::<C>(context, l, msg.2.R.to_bytes().as_ref(), &msg.1),
|
||||
msg.2,
|
||||
));
|
||||
|
||||
(l, msg.0)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
schnorr::batch_verify(rng, &signatures).map_err(FrostError::InvalidProofOfKnowledge)?;
|
||||
|
||||
commitments.insert(params.i, our_commitments);
|
||||
Ok(commitments)
|
||||
}
|
||||
|
||||
fn polynomial<F: PrimeField>(coefficients: &[F], l: u16) -> F {
|
||||
let l = F::from(u64::from(l));
|
||||
let mut share = F::zero();
|
||||
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
|
||||
share += coefficient;
|
||||
if idx != (coefficients.len() - 1) {
|
||||
share *= l;
|
||||
}
|
||||
}
|
||||
share
|
||||
}
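// Worked example of the Horner evaluation above (editorial comment only): with
// coefficients = [c0, c1, c2] (constant term first) and l = 2, the reversed loop computes
// ((c2 * 2) + c1) * 2 + c0 = c0 + 2*c1 + 4*c2, i.e. the polynomial evaluated at x = 2, which is
// participant 2's secret share.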
|
||||
|
||||
/// Secret share, to be sent only to the party it's intended for, over an encrypted and
|
||||
/// authenticated channel.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct SecretShare<C: Curve>(C::F);
|
||||
impl<C: Curve> SecretShare<C> {
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(SecretShare(C::read_F(reader)?))
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.0.to_repr().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for SecretShare<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for SecretShare<C> {}
|
||||
|
||||
// Calls round 1, step 5 and implements round 2, step 1 of FROST key generation
|
||||
// Returns our secret share part, commitments for the next step, and a vector for each
|
||||
// counterparty to receive
|
||||
fn generate_key_r2<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: &FrostParams,
|
||||
context: &str,
|
||||
coefficients: &mut Vec<C::F>,
|
||||
our_commitments: Vec<C::G>,
|
||||
msgs: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(C::F, HashMap<u16, Vec<C::G>>, HashMap<u16, SecretShare<C>>), FrostError> {
|
||||
let commitments = verify_r1::<_, C>(rng, params, context, our_commitments, msgs)?;
|
||||
|
||||
// Step 1: Generate secret shares for all other parties
|
||||
let mut res = HashMap::new();
|
||||
for l in 1 ..= params.n() {
|
||||
// Don't insert our own shares to the byte buffer which is meant to be sent around
|
||||
// An app developer could accidentally send it. Best to keep this black boxed
|
||||
if l == params.i() {
|
||||
continue;
|
||||
}
|
||||
|
||||
res.insert(l, SecretShare(polynomial(coefficients, l)));
|
||||
}
|
||||
|
||||
// Calculate our own share
|
||||
let share = polynomial(coefficients, params.i());
|
||||
|
||||
coefficients.zeroize();
|
||||
|
||||
Ok((share, commitments, res))
|
||||
}
|
||||
|
||||
// Finishes round 2 and returns the keys.
|
||||
// This key MUST NOT be considered usable until all parties confirm they have completed the
|
||||
// protocol without issue.
|
||||
fn complete_r2<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
params: FrostParams,
|
||||
mut secret_share: C::F,
|
||||
commitments: &mut HashMap<u16, Vec<C::G>>,
|
||||
mut shares: HashMap<u16, SecretShare<C>>,
|
||||
) -> Result<FrostCore<C>, FrostError> {
|
||||
validate_map(&shares, &(1 ..= params.n()).collect::<Vec<_>>(), params.i())?;
|
||||
|
||||
// Calculate the exponent for a given participant and apply it to a series of commitments
|
||||
// Initially used with the actual commitments to verify the secret share, later used with stripes
|
||||
// to generate the verification shares
|
||||
let exponential = |i: u16, values: &[_]| {
|
||||
let i = C::F::from(i.into());
|
||||
let mut res = Vec::with_capacity(params.t().into());
|
||||
(0 .. usize::from(params.t())).into_iter().fold(C::F::one(), |exp, l| {
|
||||
res.push((exp, values[l]));
|
||||
exp * i
|
||||
});
|
||||
res
|
||||
};
|
||||
|
||||
let mut batch = BatchVerifier::new(shares.len());
|
||||
for (l, mut share) in shares.drain() {
|
||||
secret_share += share.0;
|
||||
|
||||
// This can be insecurely linearized from n * t to just n using the below sums for a given
|
||||
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
|
||||
// ensure that malleability isn't present is to use this n * t algorithm, which runs
|
||||
// per sender and not as an aggregate of all senders, which also enables blame
|
||||
let mut values = exponential(params.i, &commitments[&l]);
|
||||
values.push((-share.0, C::generator()));
|
||||
share.zeroize();
|
||||
|
||||
batch.queue(rng, l, values);
|
||||
}
|
||||
batch.verify_with_vartime_blame().map_err(FrostError::InvalidCommitment)?;
|
||||
|
||||
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on
|
||||
// these sums so preprocessing them is a massive speedup
|
||||
// If these weren't just sums, yet the tables used in multiexp, this would be further optimized
|
||||
// As of right now, each multiexp will regenerate them
|
||||
let mut stripes = Vec::with_capacity(usize::from(params.t()));
|
||||
for t in 0 .. usize::from(params.t()) {
|
||||
stripes.push(commitments.values().map(|commitments| commitments[t]).sum());
|
||||
}
|
||||
|
||||
// Calculate each user's verification share
|
||||
let mut verification_shares = HashMap::new();
|
||||
for i in 1 ..= params.n() {
|
||||
verification_shares.insert(i, multiexp_vartime(&exponential(i, &stripes)));
|
||||
}
|
||||
// Removing this check would enable optimizing the above from t + (n * t) to t + ((n - 1) * t)
|
||||
debug_assert_eq!(C::generator() * secret_share, verification_shares[¶ms.i()]);
|
||||
|
||||
Ok(FrostCore { params, secret_share, group_key: stripes[0], verification_shares })
|
||||
}
|
||||
|
||||
/// State machine to begin the key generation protocol.
|
||||
pub struct KeyGenMachine<C: Curve> {
|
||||
params: FrostParams,
|
||||
context: String,
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
|
||||
/// Advancement of the key generation state machine.
|
||||
#[derive(Zeroize)]
|
||||
pub struct SecretShareMachine<C: Curve> {
|
||||
#[zeroize(skip)]
|
||||
params: FrostParams,
|
||||
context: String,
|
||||
coefficients: Vec<C::F>,
|
||||
#[zeroize(skip)]
|
||||
our_commitments: Vec<C::G>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for SecretShareMachine<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for SecretShareMachine<C> {}
|
||||
|
||||
/// Final step of the key generation protocol.
|
||||
#[derive(Zeroize)]
|
||||
pub struct KeyMachine<C: Curve> {
|
||||
#[zeroize(skip)]
|
||||
params: FrostParams,
|
||||
secret: C::F,
|
||||
#[zeroize(skip)]
|
||||
commitments: HashMap<u16, Vec<C::G>>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for KeyMachine<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for KeyMachine<C> {}
|
||||
|
||||
impl<C: Curve> KeyGenMachine<C> {
|
||||
/// Creates a new machine to generate a key for the specified curve in the specified multisig.
|
||||
// The context string should be unique among multisigs.
|
||||
pub fn new(params: FrostParams, context: String) -> KeyGenMachine<C> {
|
||||
KeyGenMachine { params, context, _curve: PhantomData }
|
||||
}
|
||||
|
||||
/// Start generating a key according to the FROST DKG spec.
|
||||
/// Returns a commitments message to be sent to all parties over an authenticated
|
||||
/// channel. If any party submits multiple sets of commitments, they MUST be treated as
|
||||
/// malicious.
|
||||
pub fn generate_coefficients<R: RngCore + CryptoRng>(
|
||||
self,
|
||||
rng: &mut R,
|
||||
) -> (SecretShareMachine<C>, Commitments<C>) {
|
||||
let (coefficients, our_commitments, commitments) =
|
||||
generate_key_r1::<_, C>(rng, &self.params, &self.context);
|
||||
|
||||
(
|
||||
SecretShareMachine {
|
||||
params: self.params,
|
||||
context: self.context,
|
||||
coefficients,
|
||||
our_commitments,
|
||||
},
|
||||
commitments,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> SecretShareMachine<C> {
|
||||
/// Continue generating a key.
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of secret shares.
|
||||
/// These MUST be encrypted and only then sent to their respective participants.
|
||||
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
commitments: HashMap<u16, Commitments<C>>,
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, SecretShare<C>>), FrostError> {
|
||||
let (secret, commitments, shares) = generate_key_r2::<_, C>(
|
||||
rng,
|
||||
&self.params,
|
||||
&self.context,
|
||||
&mut self.coefficients,
|
||||
self.our_commitments.clone(),
|
||||
commitments,
|
||||
)?;
|
||||
Ok((KeyMachine { params: self.params, secret, commitments }, shares))
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> KeyMachine<C> {
|
||||
/// Complete key generation.
|
||||
/// Takes in everyone else's shares submitted to us. Returns a FrostCore object representing the
|
||||
/// generated keys. Successful protocol completion MUST be confirmed by all parties before these
|
||||
/// keys may be safely used.
|
||||
pub fn complete<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
shares: HashMap<u16, SecretShare<C>>,
|
||||
) -> Result<FrostCore<C>, FrostError> {
|
||||
complete_r2(rng, self.params, self.secret, &mut self.commitments, shares)
|
||||
}
|
||||
}
|
|
@ -11,31 +11,20 @@
|
|||
//!
|
||||
//! This library offers ciphersuites compatible with the
|
||||
//! [IETF draft](https://github.com/cfrg/draft-irtf-cfrg-frost). Currently, version
|
||||
//! 10 is supported.
|
||||
//! 11 is supported.
|
||||
|
||||
use core::fmt::{self, Debug};
|
||||
use std::{io::Read, sync::Arc, collections::HashMap};
|
||||
use core::fmt::Debug;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
mod schnorr;
|
||||
/// Distributed key generation protocol.
|
||||
pub use dkg::{self, ThresholdParams, ThresholdCore, ThresholdKeys, ThresholdView};
|
||||
|
||||
/// Curve trait and provided curves/HRAMs, forming various ciphersuites.
|
||||
pub mod curve;
|
||||
use curve::Curve;
|
||||
|
||||
/// Distributed key generation protocol.
|
||||
pub mod key_gen;
|
||||
/// Promote keys between curves.
|
||||
pub mod promote;
|
||||
|
||||
/// Algorithm for the signing process.
|
||||
pub mod algorithm;
|
||||
mod nonce;
|
||||
|
@ -72,59 +61,11 @@ pub(crate) fn validate_map<T>(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Parameters for a multisig.
|
||||
// These fields can not be made public as they should be static
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct FrostParams {
|
||||
/// Participants needed to sign on behalf of the group.
|
||||
t: u16,
|
||||
/// Amount of participants.
|
||||
n: u16,
|
||||
/// Index of the participant being acted for.
|
||||
i: u16,
|
||||
}
|
||||
|
||||
impl FrostParams {
|
||||
pub fn new(t: u16, n: u16, i: u16) -> Result<FrostParams, FrostError> {
|
||||
if (t == 0) || (n == 0) {
|
||||
Err(FrostError::ZeroParameter(t, n))?;
|
||||
}
|
||||
|
||||
// When t == n, this shouldn't be used (MuSig2 and other variants of MuSig exist for a reason),
|
||||
// but it's not invalid to do so
|
||||
if t > n {
|
||||
Err(FrostError::InvalidRequiredQuantity(t, n))?;
|
||||
}
|
||||
if (i == 0) || (i > n) {
|
||||
Err(FrostError::InvalidParticipantIndex(n, i))?;
|
||||
}
|
||||
|
||||
Ok(FrostParams { t, n, i })
|
||||
}
|
||||
|
||||
pub fn t(&self) -> u16 {
|
||||
self.t
|
||||
}
|
||||
pub fn n(&self) -> u16 {
|
||||
self.n
|
||||
}
|
||||
pub fn i(&self) -> u16 {
|
||||
self.i
|
||||
}
|
||||
}
|
||||
|
||||
/// Various errors possible during key generation/signing.
|
||||
/// Various errors possible during signing.
|
||||
#[derive(Copy, Clone, Error, Debug)]
|
||||
pub enum FrostError {
|
||||
#[error("a parameter was 0 (required {0}, participants {1})")]
|
||||
ZeroParameter(u16, u16),
|
||||
#[error("too many participants (max {1}, got {0})")]
|
||||
TooManyParticipants(usize, u16),
|
||||
#[error("invalid amount of required participants (max {1}, got {0})")]
|
||||
InvalidRequiredQuantity(u16, u16),
|
||||
#[error("invalid participant index (0 < index <= {0}, yet index is {1})")]
|
||||
InvalidParticipantIndex(u16, u16),
|
||||
|
||||
#[error("invalid signing set ({0})")]
|
||||
InvalidSigningSet(&'static str),
|
||||
#[error("invalid participant quantity (expected {0}, got {1})")]
|
||||
|
@ -133,10 +74,7 @@ pub enum FrostError {
|
|||
DuplicatedIndex(u16),
|
||||
#[error("missing participant {0}")]
|
||||
MissingParticipant(u16),
|
||||
#[error("invalid commitment (participant {0})")]
|
||||
InvalidCommitment(u16),
|
||||
#[error("invalid proof of knowledge (participant {0})")]
|
||||
InvalidProofOfKnowledge(u16),
|
||||
|
||||
#[error("invalid preprocess (participant {0})")]
|
||||
InvalidPreprocess(u16),
|
||||
#[error("invalid share (participant {0})")]
|
||||
|
@ -145,280 +83,3 @@ pub enum FrostError {
|
|||
#[error("internal error ({0})")]
|
||||
InternalError(&'static str),
|
||||
}
|
||||
|
||||
/// Calculate the lagrange coefficient for a signing set.
|
||||
pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
|
||||
let mut num = F::one();
|
||||
let mut denom = F::one();
|
||||
for l in included {
|
||||
if i == *l {
|
||||
continue;
|
||||
}
|
||||
|
||||
let share = F::from(u64::try_from(*l).unwrap());
|
||||
num *= share;
|
||||
denom *= share - F::from(u64::try_from(i).unwrap());
|
||||
}
|
||||
|
||||
// Safe as this will only be 0 if we're part of the above loop
|
||||
// (which we have an if case to avoid)
|
||||
num * denom.invert().unwrap()
|
||||
}
|
||||
|
||||
/// Core keys generated by performing a FROST keygen protocol.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
pub struct FrostCore<C: Curve> {
|
||||
/// FROST Parameters.
|
||||
#[zeroize(skip)]
|
||||
params: FrostParams,
|
||||
|
||||
/// Secret share key.
|
||||
secret_share: C::F,
|
||||
/// Group key.
|
||||
group_key: C::G,
|
||||
/// Verification shares.
|
||||
#[zeroize(skip)]
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for FrostCore<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for FrostCore<C> {}
|
||||
|
||||
impl<C: Curve> Debug for FrostCore<C> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("FrostCore")
|
||||
.field("params", &self.params)
|
||||
.field("group_key", &self.group_key)
|
||||
.field("verification_shares", &self.verification_shares)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> FrostCore<C> {
|
||||
pub(crate) fn new(
|
||||
params: FrostParams,
|
||||
secret_share: C::F,
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
) -> FrostCore<C> {
|
||||
#[cfg(debug_assertions)]
|
||||
validate_map(&verification_shares, &(0 ..= params.n).collect::<Vec<_>>(), 0).unwrap();
|
||||
|
||||
let t = (1 ..= params.t).collect::<Vec<_>>();
|
||||
FrostCore {
|
||||
params,
|
||||
secret_share,
|
||||
group_key: t.iter().map(|i| verification_shares[i] * lagrange::<C::F>(*i, &t)).sum(),
|
||||
verification_shares,
|
||||
}
|
||||
}
|
||||
pub fn params(&self) -> FrostParams {
|
||||
self.params
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "tests"))]
|
||||
pub(crate) fn secret_share(&self) -> C::F {
|
||||
self.secret_share
|
||||
}
|
||||
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.group_key
|
||||
}
|
||||
|
||||
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
|
||||
self.verification_shares.clone()
|
||||
}
|
||||
|
||||
pub fn serialized_len(n: u16) -> usize {
|
||||
8 + C::ID.len() + (3 * 2) + C::F_len() + C::G_len() + (usize::from(n) * C::G_len())
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut serialized = Vec::with_capacity(FrostCore::<C>::serialized_len(self.params.n));
|
||||
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
|
||||
serialized.extend(C::ID);
|
||||
serialized.extend(self.params.t.to_be_bytes());
|
||||
serialized.extend(self.params.n.to_be_bytes());
|
||||
serialized.extend(self.params.i.to_be_bytes());
|
||||
serialized.extend(self.secret_share.to_repr().as_ref());
|
||||
for l in 1 ..= self.params.n {
|
||||
serialized.extend(self.verification_shares[&l].to_bytes().as_ref());
|
||||
}
|
||||
serialized
|
||||
}
|
||||
|
||||
pub fn deserialize<R: Read>(cursor: &mut R) -> Result<FrostCore<C>, FrostError> {
|
||||
{
|
||||
let missing = FrostError::InternalError("FrostCore serialization is missing its curve");
|
||||
let different = FrostError::InternalError("deserializing FrostCore for another curve");
|
||||
|
||||
let mut id_len = [0; 4];
|
||||
cursor.read_exact(&mut id_len).map_err(|_| missing)?;
|
||||
if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len {
|
||||
Err(different)?;
|
||||
}
|
||||
|
||||
let mut id = vec![0; C::ID.len()];
|
||||
cursor.read_exact(&mut id).map_err(|_| missing)?;
|
||||
if id != C::ID {
|
||||
Err(different)?;
|
||||
}
|
||||
}
|
||||
|
||||
let (t, n, i) = {
|
||||
let mut read_u16 = || {
|
||||
let mut value = [0; 2];
|
||||
cursor
|
||||
.read_exact(&mut value)
|
||||
.map_err(|_| FrostError::InternalError("missing participant quantities"))?;
|
||||
Ok(u16::from_be_bytes(value))
|
||||
};
|
||||
(read_u16()?, read_u16()?, read_u16()?)
|
||||
};
|
||||
|
||||
let secret_share =
|
||||
C::read_F(cursor).map_err(|_| FrostError::InternalError("invalid secret share"))?;
|
||||
|
||||
let mut verification_shares = HashMap::new();
|
||||
for l in 1 ..= n {
|
||||
verification_shares.insert(
|
||||
l,
|
||||
C::read_G(cursor).map_err(|_| FrostError::InternalError("invalid verification share"))?,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(FrostCore::new(
|
||||
FrostParams::new(t, n, i).map_err(|_| FrostError::InternalError("invalid parameters"))?,
|
||||
secret_share,
|
||||
verification_shares,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// FROST keys usable for signing.
|
||||
#[derive(Clone, Debug, Zeroize)]
|
||||
pub struct FrostKeys<C: Curve> {
|
||||
/// Core keys.
|
||||
#[zeroize(skip)]
|
||||
core: Arc<FrostCore<C>>,
|
||||
|
||||
/// Offset applied to these keys.
|
||||
pub(crate) offset: Option<C::F>,
|
||||
}
|
||||
|
||||
// Manually implement Drop due to https://github.com/RustCrypto/utils/issues/786
|
||||
impl<C: Curve> Drop for FrostKeys<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for FrostKeys<C> {}
|
||||
|
||||
/// View of keys passed to algorithm implementations.
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub struct FrostView<C: Curve> {
|
||||
group_key: C::G,
|
||||
#[zeroize(skip)]
|
||||
included: Vec<u16>,
|
||||
secret_share: C::F,
|
||||
#[zeroize(skip)]
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Drop for FrostView<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for FrostView<C> {}
|
||||
|
||||
impl<C: Curve> FrostKeys<C> {
|
||||
pub fn new(core: FrostCore<C>) -> FrostKeys<C> {
|
||||
FrostKeys { core: Arc::new(core), offset: None }
|
||||
}
|
||||
|
||||
/// Offset the keys by a given scalar to allow for account and privacy schemes.
|
||||
/// This offset is ephemeral and will not be included when these keys are serialized.
|
||||
/// Keys offset multiple times will form a new offset of their sum.
|
||||
/// Not IETF compliant.
|
||||
pub fn offset(&self, offset: C::F) -> FrostKeys<C> {
|
||||
let mut res = self.clone();
|
||||
// Carry any existing offset
|
||||
// Enables schemes like Monero's subaddresses which have a per-subaddress offset and then a
|
||||
// one-time-key offset
|
||||
res.offset = Some(offset + res.offset.unwrap_or_else(C::F::zero));
|
||||
res
|
||||
}
|
||||
|
||||
pub fn params(&self) -> FrostParams {
|
||||
self.core.params
|
||||
}
|
||||
|
||||
pub(crate) fn secret_share(&self) -> C::F {
|
||||
self.core.secret_share
|
||||
}
|
||||
|
||||
/// Returns the group key with any offset applied.
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.core.group_key + (C::generator() * self.offset.unwrap_or_else(C::F::zero))
|
||||
}
|
||||
|
||||
/// Returns all participants' verification shares without any offsetting.
|
||||
pub(crate) fn verification_shares(&self) -> HashMap<u16, C::G> {
|
||||
self.core.verification_shares()
|
||||
}
|
||||
|
||||
pub fn serialized_len(n: u16) -> usize {
|
||||
FrostCore::<C>::serialized_len(n)
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
self.core.serialize()
|
||||
}
|
||||
|
||||
pub fn view(&self, included: &[u16]) -> Result<FrostView<C>, FrostError> {
|
||||
if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len())
|
||||
{
|
||||
Err(FrostError::InvalidSigningSet("invalid amount of participants included"))?;
|
||||
}
|
||||
|
||||
let offset_share = self.offset.unwrap_or_else(C::F::zero) *
|
||||
C::F::from(included.len().try_into().unwrap()).invert().unwrap();
|
||||
let offset_verification_share = C::generator() * offset_share;
|
||||
|
||||
Ok(FrostView {
|
||||
group_key: self.group_key(),
|
||||
secret_share: (self.secret_share() * lagrange::<C::F>(self.params().i, included)) +
|
||||
offset_share,
|
||||
verification_shares: self
|
||||
.verification_shares()
|
||||
.iter()
|
||||
.map(|(l, share)| {
|
||||
(*l, (*share * lagrange::<C::F>(*l, included)) + offset_verification_share)
|
||||
})
|
||||
.collect(),
|
||||
included: included.to_vec(),
|
||||
})
|
||||
}
|
||||
}
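// A hedged usage sketch of the offset behaviour documented above (not part of the library;
// it assumes the "ristretto" feature and the tests::key_gen helper). The offset group key is
// the original group key plus offset * G, and view() splits the offset evenly across the
// included signers so their summed shares still open to the offset key.
#[test]
fn offset_shifts_group_key() {
  use rand_core::OsRng;
  use crate::{curve::{Curve, Ristretto}, tests::key_gen};

  // Generate a fresh multisig for the Ristretto ciphersuite
  let keys = key_gen::<_, Ristretto>(&mut OsRng);
  let base = keys[&1].group_key();

  // Apply an arbitrary offset scalar
  let offset = <Ristretto as Curve>::hash_to_F(b"example dst", b"example offset");
  let offset_keys = keys[&1].offset(offset);

  // The offset key is the original group key shifted by offset * G
  assert_eq!(offset_keys.group_key(), base + (<Ristretto as Curve>::generator() * offset));
}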
|
||||
|
||||
impl<C: Curve> FrostView<C> {
|
||||
pub fn group_key(&self) -> C::G {
|
||||
self.group_key
|
||||
}
|
||||
|
||||
pub fn included(&self) -> Vec<u16> {
|
||||
self.included.clone()
|
||||
}
|
||||
|
||||
pub fn secret_share(&self) -> C::F {
|
||||
self.secret_share
|
||||
}
|
||||
|
||||
pub fn verification_share(&self, l: u16) -> C::G {
|
||||
self.verification_shares[&l]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@ use std::{
|
|||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||
|
||||
use transcript::Transcript;
|
||||
|
||||
|
@ -34,13 +34,19 @@ fn dleq_transcript<T: Transcript>() -> T {
|
|||
// This is considered a single nonce as r = d + be
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub(crate) struct Nonce<C: Curve>(pub(crate) [C::F; 2]);
|
||||
impl<C: Curve> Drop for Nonce<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
}
|
||||
}
|
||||
impl<C: Curve> ZeroizeOnDrop for Nonce<C> {}
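// Restating the relation referenced above in standard FROST notation (hedged): with binding
// factor b, the nonce pair (d, e) and commitment pair (D, E) = (d * G, e * G) collapse into
//   r = d + b * e
//   R = D + b * E
// which is why the pair is treated as a single nonce here.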
|
||||
|
||||
// Commitments to a specific generator for this nonce
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Zeroize)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct GeneratorCommitments<C: Curve>(pub(crate) [C::G; 2]);
|
||||
impl<C: Curve> GeneratorCommitments<C> {
|
||||
fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorCommitments<C>> {
|
||||
Ok(GeneratorCommitments([C::read_G(reader)?, C::read_G(reader)?]))
|
||||
Ok(GeneratorCommitments([<C as Curve>::read_G(reader)?, <C as Curve>::read_G(reader)?]))
|
||||
}
|
||||
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
|
@ -50,7 +56,7 @@ impl<C: Curve> GeneratorCommitments<C> {
|
|||
}
|
||||
|
||||
// A single nonce's commitments and relevant proofs
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub(crate) struct NonceCommitments<C: Curve> {
|
||||
// Called generators as these commitments are indexed by generator
|
||||
pub(crate) generators: Vec<GeneratorCommitments<C>>,
|
||||
|
@ -130,7 +136,7 @@ impl<C: Curve> NonceCommitments<C> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub(crate) struct Commitments<C: Curve> {
|
||||
// Called nonces as these commitments are indexed by nonce
|
||||
pub(crate) nonces: Vec<NonceCommitments<C>>,
|
||||
|
@ -165,7 +171,7 @@ impl<C: Curve> Commitments<C> {
|
|||
// committed to as their entire series per-nonce, not as isolates
|
||||
if let Some(dleqs) = &nonce.dleqs {
|
||||
let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
|
||||
let mut buf = Vec::with_capacity(C::G_len() + C::F_len());
|
||||
let mut buf = vec![];
|
||||
dleq.serialize(&mut buf).unwrap();
|
||||
t.append_message(label, &buf);
|
||||
};
|
||||
|
@ -194,7 +200,6 @@ impl<C: Curve> Commitments<C> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Zeroize)]
|
||||
pub(crate) struct IndividualBinding<C: Curve> {
|
||||
commitments: Commitments<C>,
|
||||
binding_factors: Option<Vec<C::F>>,
|
||||
|
@ -202,15 +207,6 @@ pub(crate) struct IndividualBinding<C: Curve> {
|
|||
|
||||
pub(crate) struct BindingFactor<C: Curve>(pub(crate) HashMap<u16, IndividualBinding<C>>);
|
||||
|
||||
impl<C: Curve> Zeroize for BindingFactor<C> {
|
||||
fn zeroize(&mut self) {
|
||||
for (mut validator, mut binding) in self.0.drain() {
|
||||
validator.zeroize();
|
||||
binding.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Curve> BindingFactor<C> {
|
||||
pub(crate) fn insert(&mut self, i: u16, commitments: Commitments<C>) {
|
||||
self.0.insert(i, IndividualBinding { commitments, binding_factors: None });
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
use std::io::{self, Read, Write};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use multiexp::BatchVerifier;
|
||||
|
||||
use crate::curve::Curve;
|
||||
|
||||
/// A Schnorr signature of the form (R, s) where s = r + cx.
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub struct SchnorrSignature<C: Curve> {
|
||||
pub R: C::G,
|
||||
pub s: C::F,
|
||||
}
|
||||
|
||||
impl<C: Curve> SchnorrSignature<C> {
|
||||
pub(crate) fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? })
|
||||
}
|
||||
|
||||
pub(crate) fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.R.to_bytes().as_ref())?;
|
||||
writer.write_all(self.s.to_repr().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn sign<C: Curve>(
|
||||
mut private_key: C::F,
|
||||
mut nonce: C::F,
|
||||
challenge: C::F,
|
||||
) -> SchnorrSignature<C> {
|
||||
let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) };
|
||||
private_key.zeroize();
|
||||
nonce.zeroize();
|
||||
res
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn verify<C: Curve>(
|
||||
public_key: C::G,
|
||||
challenge: C::F,
|
||||
signature: &SchnorrSignature<C>,
|
||||
) -> bool {
|
||||
(C::generator() * signature.s) == (signature.R + (public_key * challenge))
|
||||
}
|
||||
|
||||
pub(crate) fn batch_verify<C: Curve, R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
triplets: &[(u16, C::G, C::F, SchnorrSignature<C>)],
|
||||
) -> Result<(), u16> {
|
||||
let mut values = [(C::F::one(), C::generator()); 3];
|
||||
let mut batch = BatchVerifier::new(triplets.len());
|
||||
for triple in triplets {
|
||||
// s = r + ca
|
||||
// sG == R + cA
|
||||
// R + cA - sG == 0
|
||||
|
||||
// R
|
||||
values[0].1 = triple.3.R;
|
||||
// cA
|
||||
values[1] = (triple.2, triple.1);
|
||||
// -sG
|
||||
values[2].0 = -triple.3.s;
|
||||
|
||||
batch.queue(rng, triple.0, values);
|
||||
}
|
||||
|
||||
batch.verify_vartime_with_vartime_blame()
|
||||
}
|
|
@ -14,8 +14,8 @@ use group::{ff::PrimeField, GroupEncoding};
|
|||
|
||||
use crate::{
|
||||
curve::Curve,
|
||||
FrostError, FrostParams, FrostKeys, FrostView,
|
||||
algorithm::{AddendumSerialize, Addendum, Algorithm},
|
||||
FrostError, ThresholdParams, ThresholdKeys, ThresholdView,
|
||||
algorithm::{WriteAddendum, Addendum, Algorithm},
|
||||
validate_map,
|
||||
};
|
||||
|
||||
|
@ -24,6 +24,12 @@ pub(crate) use crate::nonce::*;
|
|||
/// Trait enabling writing preprocesses and signature shares.
|
||||
pub trait Writable {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
|
||||
|
||||
fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
}
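// Hedged illustration with a hypothetical type (not part of this crate): implementing Writable
// only requires write; the provided serialize default then buffers the output into a Vec<u8>.
#[cfg(test)]
struct RawBytes(Vec<u8>);
#[cfg(test)]
impl Writable for RawBytes {
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    // Emit the raw bytes as-is; serialize() collects them into a freshly allocated Vec<u8>
    writer.write_all(&self.0)
  }
}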
|
||||
|
||||
impl<T: Writable> Writable for Vec<T> {
|
||||
|
@ -35,18 +41,18 @@ impl<T: Writable> Writable for Vec<T> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Pairing of an Algorithm with a FrostKeys instance and this specific signing set.
|
||||
/// Pairing of an Algorithm with a ThresholdKeys instance and this specific signing set.
|
||||
#[derive(Clone)]
|
||||
pub struct Params<C: Curve, A: Algorithm<C>> {
|
||||
algorithm: A,
|
||||
keys: FrostKeys<C>,
|
||||
view: FrostView<C>,
|
||||
keys: ThresholdKeys<C>,
|
||||
view: ThresholdView<C>,
|
||||
}
|
||||
|
||||
impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
||||
pub fn new(
|
||||
algorithm: A,
|
||||
keys: FrostKeys<C>,
|
||||
keys: ThresholdKeys<C>,
|
||||
included: &[u16],
|
||||
) -> Result<Params<C, A>, FrostError> {
|
||||
let params = keys.params();
|
||||
|
@ -55,16 +61,16 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
|||
included.sort_unstable();
|
||||
|
||||
// Included < threshold
|
||||
if included.len() < usize::from(params.t) {
|
||||
if included.len() < usize::from(params.t()) {
|
||||
Err(FrostError::InvalidSigningSet("not enough signers"))?;
|
||||
}
|
||||
// Invalid index
|
||||
if included[0] == 0 {
|
||||
Err(FrostError::InvalidParticipantIndex(included[0], params.n))?;
|
||||
Err(FrostError::InvalidParticipantIndex(included[0], params.n()))?;
|
||||
}
|
||||
// OOB index
|
||||
if included[included.len() - 1] > params.n {
|
||||
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n))?;
|
||||
if included[included.len() - 1] > params.n() {
|
||||
Err(FrostError::InvalidParticipantIndex(included[included.len() - 1], params.n()))?;
|
||||
}
|
||||
// Same signer included multiple times
|
||||
for i in 0 .. (included.len() - 1) {
|
||||
|
@ -73,7 +79,7 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
|||
}
|
||||
}
|
||||
// Not included
|
||||
if !included.contains(¶ms.i) {
|
||||
if !included.contains(¶ms.i()) {
|
||||
Err(FrostError::InvalidSigningSet("signing despite not being included"))?;
|
||||
}
|
||||
|
||||
|
@ -81,17 +87,17 @@ impl<C: Curve, A: Algorithm<C>> Params<C, A> {
|
|||
Ok(Params { algorithm, view: keys.view(&included).unwrap(), keys })
|
||||
}
|
||||
|
||||
pub fn multisig_params(&self) -> FrostParams {
|
||||
pub fn multisig_params(&self) -> ThresholdParams {
|
||||
self.keys.params()
|
||||
}
|
||||
|
||||
pub fn view(&self) -> FrostView<C> {
|
||||
pub fn view(&self) -> ThresholdView<C> {
|
||||
self.view.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Preprocess for an instance of the FROST signing protocol.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct Preprocess<C: Curve, A: Addendum> {
|
||||
pub(crate) commitments: Commitments<C>,
|
||||
pub addendum: A,
|
||||
|
@ -107,6 +113,7 @@ impl<C: Curve, A: Addendum> Writable for Preprocess<C, A> {
|
|||
#[derive(Zeroize)]
|
||||
pub(crate) struct PreprocessData<C: Curve, A: Addendum> {
|
||||
pub(crate) nonces: Vec<Nonce<C>>,
|
||||
#[zeroize(skip)]
|
||||
pub(crate) preprocess: Preprocess<C, A>,
|
||||
}
|
||||
|
||||
|
@ -140,7 +147,7 @@ struct SignData<C: Curve> {
|
|||
}
|
||||
|
||||
/// Share of a signature produced via FROST.
|
||||
#[derive(Clone, PartialEq, Eq, Zeroize)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct SignatureShare<C: Curve>(C::F);
|
||||
impl<C: Curve> Writable for SignatureShare<C> {
|
||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
|
@ -158,7 +165,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
|
|||
msg: &[u8],
|
||||
) -> Result<(SignData<C>, SignatureShare<C>), FrostError> {
|
||||
let multisig_params = params.multisig_params();
|
||||
validate_map(&preprocesses, ¶ms.view.included, multisig_params.i)?;
|
||||
validate_map(&preprocesses, ¶ms.view.included(), multisig_params.i())?;
|
||||
|
||||
{
|
||||
// Domain separate FROST
|
||||
|
@ -167,10 +174,10 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
|
|||
|
||||
let nonces = params.algorithm.nonces();
|
||||
#[allow(non_snake_case)]
|
||||
let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included.len()));
|
||||
let mut B = BindingFactor(HashMap::<u16, _>::with_capacity(params.view.included().len()));
|
||||
{
|
||||
// Parse the preprocesses
|
||||
for l in ¶ms.view.included {
|
||||
for l in ¶ms.view.included() {
|
||||
{
|
||||
params
|
||||
.algorithm
|
||||
|
@ -178,7 +185,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
|
|||
.append_message(b"participant", C::F::from(u64::from(*l)).to_repr().as_ref());
|
||||
}
|
||||
|
||||
if *l == params.keys.params().i {
|
||||
if *l == params.keys.params().i() {
|
||||
let commitments = our_preprocess.preprocess.commitments.clone();
|
||||
commitments.transcript(params.algorithm.transcript());
|
||||
|
||||
|
@ -216,7 +223,7 @@ fn sign_with_share<C: Curve, A: Algorithm<C>>(
|
|||
// Include the offset, if one exists
|
||||
// While this isn't part of the FROST-expected rho transcript, the offset being here coincides
|
||||
// with another specification (despite the transcript format being distinct)
|
||||
if let Some(offset) = params.keys.offset {
|
||||
if let Some(offset) = params.keys.current_offset() {
|
||||
// Transcript as a point
|
||||
// Under a coordinated model, the coordinator can be the only party to know the discrete log
|
||||
// of the offset. This removes the ability for any signer to provide the discrete log,
|
||||
|
@ -262,7 +269,7 @@ fn complete<C: Curve, A: Algorithm<C>>(
|
|||
mut shares: HashMap<u16, SignatureShare<C>>,
|
||||
) -> Result<A::Signature, FrostError> {
|
||||
let params = sign_params.multisig_params();
|
||||
validate_map(&shares, &sign_params.view.included, params.i)?;
|
||||
validate_map(&shares, &sign_params.view.included(), params.i())?;
|
||||
|
||||
let mut responses = HashMap::new();
|
||||
responses.insert(params.i(), sign.share);
|
||||
|
@ -275,13 +282,14 @@ fn complete<C: Curve, A: Algorithm<C>>(
|
|||
// Perform signature validation instead of individual share validation
|
||||
// For the success route, which should be much more frequent, this should be faster
|
||||
// It also acts as an integrity check of this library's signing function
|
||||
if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key, &sign.Rs, sum) {
|
||||
if let Some(sig) = sign_params.algorithm.verify(sign_params.view.group_key(), &sign.Rs, sum) {
|
||||
return Ok(sig);
|
||||
}
|
||||
|
||||
// Find out who misbehaved. It may be beneficial to randomly sort this to have detection be
|
||||
// within n / 2 on average, and not gameable to n, though that should be minor
|
||||
for l in &sign_params.view.included {
|
||||
// TODO
|
||||
for l in &sign_params.view.included() {
|
||||
if !sign_params.algorithm.verify_share(
|
||||
sign_params.view.verification_share(*l),
|
||||
&sign.B.bound(*l),
|
||||
|
@ -367,7 +375,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
|
|||
/// Creates a new machine to generate a signature with the specified keys.
|
||||
pub fn new(
|
||||
algorithm: A,
|
||||
keys: FrostKeys<C>,
|
||||
keys: ThresholdKeys<C>,
|
||||
included: &[u16],
|
||||
) -> Result<AlgorithmMachine<C, A>, FrostError> {
|
||||
Ok(AlgorithmMachine { params: Params::new(algorithm, keys, included)? })
|
||||
|
|
|
@ -2,23 +2,7 @@ use rand_core::{RngCore, CryptoRng};
|
|||
|
||||
use group::Group;
|
||||
|
||||
use crate::{Curve, FrostCore, tests::core_gen};
|
||||
|
||||
// Test generation of FROST keys
|
||||
fn key_generation<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
// This alone verifies the verification shares and group key are agreed upon as expected
|
||||
core_gen::<_, C>(rng);
|
||||
}
|
||||
|
||||
// Test serialization of generated keys
|
||||
fn keys_serialization<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
for (_, keys) in core_gen::<_, C>(rng) {
|
||||
assert_eq!(
|
||||
&FrostCore::<C>::deserialize::<&[u8]>(&mut keys.serialize().as_ref()).unwrap(),
|
||||
&keys
|
||||
);
|
||||
}
|
||||
}
|
||||
use crate::Curve;
|
||||
|
||||
// Test successful multiexp, with enough pairs to trigger its variety of algorithms
|
||||
// Multiexp has its own tests, yet only against k256 and Ed25519 (which should be sufficient
|
||||
|
@ -28,7 +12,7 @@ pub fn test_multiexp<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
|||
let mut sum = C::G::identity();
|
||||
for _ in 0 .. 10 {
|
||||
for _ in 0 .. 100 {
|
||||
pairs.push((C::random_F(&mut *rng), C::generator() * C::random_F(&mut *rng)));
|
||||
pairs.push((C::random_nonzero_F(&mut *rng), C::generator() * C::random_nonzero_F(&mut *rng)));
|
||||
sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0;
|
||||
}
|
||||
assert_eq!(multiexp::multiexp(&pairs), sum);
|
||||
|
@ -40,8 +24,4 @@ pub fn test_curve<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
|||
// TODO: Test the Curve functions themselves
|
||||
|
||||
test_multiexp::<_, C>(rng);
|
||||
|
||||
// Test FROST key generation and serialization of FrostCore works as expected
|
||||
key_generation::<_, C>(rng);
|
||||
keys_serialization::<_, C>(rng);
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use crate::{
|
|||
tests::vectors::{Vectors, test_with_vectors},
|
||||
};
|
||||
|
||||
#[cfg(any(test, feature = "ristretto"))]
|
||||
#[cfg(feature = "ristretto")]
|
||||
#[test]
|
||||
fn ristretto_vectors() {
|
||||
test_with_vectors::<_, curve::Ristretto, curve::IetfRistrettoHram>(
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
use rand_core::OsRng;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use schnorr::SchnorrSignature;
|
||||
|
||||
use crate::{
|
||||
curve::{Curve, Ed448, Ietf8032Ed448Hram, IetfEd448Hram},
|
||||
schnorr::{SchnorrSignature, verify},
|
||||
curve::{Ed448, Ietf8032Ed448Hram, IetfEd448Hram},
|
||||
tests::vectors::{Vectors, test_with_vectors},
|
||||
};
|
||||
|
||||
|
@ -37,11 +40,9 @@ fn ed448_8032_vector() {
|
|||
let R = Ed448::read_G::<&[u8]>(&mut sig.as_ref()).unwrap();
|
||||
let s = Ed448::read_F::<&[u8]>(&mut &sig[57 ..]).unwrap();
|
||||
|
||||
assert!(verify(
|
||||
A,
|
||||
Ietf8032Ed448Hram::hram(&context, &R, &A, &msg),
|
||||
&SchnorrSignature::<Ed448> { R, s }
|
||||
));
|
||||
assert!(
|
||||
SchnorrSignature::<Ed448> { R, s }.verify(A, Ietf8032Ed448Hram::hram(&context, &R, &A, &msg))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#[cfg(any(test, feature = "dalek"))]
|
||||
#[cfg(any(feature = "ristretto", feature = "ed25519"))]
|
||||
mod dalek;
|
||||
#[cfg(feature = "kp256")]
|
||||
#[cfg(any(feature = "secp256k1", feature = "p256"))]
|
||||
mod kp256;
|
||||
#[cfg(feature = "ed448")]
|
||||
mod ed448;
|
||||
|
|
|
@ -2,21 +2,16 @@ use std::collections::HashMap;
|
|||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use group::ff::Field;
|
||||
pub use dkg::tests::{key_gen, recover_key};
|
||||
|
||||
use crate::{
|
||||
Curve, FrostParams, FrostCore, FrostKeys, lagrange,
|
||||
key_gen::{SecretShare, Commitments as KGCommitments, KeyGenMachine},
|
||||
Curve, ThresholdKeys,
|
||||
algorithm::Algorithm,
|
||||
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
|
||||
};
|
||||
|
||||
/// Curve tests.
|
||||
pub mod curve;
|
||||
/// Schnorr signature tests.
|
||||
pub mod schnorr;
|
||||
/// Promotion tests.
|
||||
pub mod promote;
|
||||
/// Vectorized test suite to ensure consistency.
|
||||
pub mod vectors;
|
||||
|
||||
|
@ -39,102 +34,11 @@ pub fn clone_without<K: Clone + std::cmp::Eq + std::hash::Hash, V: Clone>(
|
|||
res
|
||||
}
|
||||
|
||||
/// Generate FROST keys (as FrostCore objects) for tests.
|
||||
pub fn core_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostCore<C>> {
|
||||
let mut machines = HashMap::new();
|
||||
let mut commitments = HashMap::new();
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
let machine = KeyGenMachine::<C>::new(
|
||||
FrostParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(),
|
||||
"FROST Test key_gen".to_string(),
|
||||
);
|
||||
let (machine, these_commitments) = machine.generate_coefficients(rng);
|
||||
machines.insert(i, machine);
|
||||
|
||||
commitments.insert(i, {
|
||||
let mut buf = vec![];
|
||||
these_commitments.write(&mut buf).unwrap();
|
||||
KGCommitments::read::<&[u8]>(
|
||||
&mut buf.as_ref(),
|
||||
FrostParams { t: THRESHOLD, n: PARTICIPANTS, i: 1 },
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
}
|
||||
|
||||
let mut secret_shares = HashMap::new();
|
||||
let mut machines = machines
|
||||
.drain()
|
||||
.map(|(l, machine)| {
|
||||
let (machine, mut shares) =
|
||||
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
|
||||
let shares = shares
|
||||
.drain()
|
||||
.map(|(l, share)| {
|
||||
let mut buf = vec![];
|
||||
share.write(&mut buf).unwrap();
|
||||
(l, SecretShare::<C>::read::<&[u8]>(&mut buf.as_ref()).unwrap())
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
secret_shares.insert(l, shares);
|
||||
(l, machine)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut verification_shares = None;
|
||||
let mut group_key = None;
|
||||
machines
|
||||
.drain()
|
||||
.map(|(i, machine)| {
|
||||
let mut our_secret_shares = HashMap::new();
|
||||
for (l, shares) in &secret_shares {
|
||||
if i == *l {
|
||||
continue;
|
||||
}
|
||||
our_secret_shares.insert(*l, shares[&i].clone());
|
||||
}
|
||||
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
|
||||
|
||||
// Verify the verification_shares are agreed upon
|
||||
if verification_shares.is_none() {
|
||||
verification_shares = Some(these_keys.verification_shares());
|
||||
}
|
||||
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
|
||||
|
||||
// Verify the group keys are agreed upon
|
||||
if group_key.is_none() {
|
||||
group_key = Some(these_keys.group_key());
|
||||
}
|
||||
assert_eq!(group_key.unwrap(), these_keys.group_key());
|
||||
|
||||
(i, these_keys)
|
||||
})
|
||||
.collect::<HashMap<_, _>>()
|
||||
}
|
||||
|
||||
/// Generate FROST keys for tests.
|
||||
pub fn key_gen<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) -> HashMap<u16, FrostKeys<C>> {
|
||||
core_gen(rng).drain().map(|(i, core)| (i, FrostKeys::new(core))).collect()
|
||||
}
|
||||
|
||||
/// Recover the secret from a collection of keys.
|
||||
pub fn recover<C: Curve>(keys: &HashMap<u16, FrostKeys<C>>) -> C::F {
|
||||
let first = keys.values().next().expect("no keys provided");
|
||||
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
|
||||
let included = keys.keys().cloned().collect::<Vec<_>>();
|
||||
|
||||
let group_private = keys.iter().fold(C::F::zero(), |accum, (i, keys)| {
|
||||
accum + (keys.secret_share() * lagrange::<C::F>(*i, &included))
|
||||
});
|
||||
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
|
||||
group_private
|
||||
}
|
||||
|
||||
/// Spawn algorithm machines for a random selection of signers, each executing the given algorithm.
|
||||
pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
|
||||
rng: &mut R,
|
||||
algorithm: A,
|
||||
keys: &HashMap<u16, FrostKeys<C>>,
|
||||
keys: &HashMap<u16, ThresholdKeys<C>>,
|
||||
) -> HashMap<u16, AlgorithmMachine<C, A>> {
|
||||
let mut included = vec![];
|
||||
while included.len() < usize::from(keys[&1].params().t()) {
|
||||
|
|
|
@ -1,121 +0,0 @@
|
|||
use std::{marker::PhantomData, collections::HashMap};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use group::Group;
|
||||
|
||||
use crate::{
|
||||
Curve, // FrostKeys,
|
||||
promote::{GeneratorPromotion /* CurvePromote */},
|
||||
tests::{clone_without, key_gen, schnorr::sign_core},
|
||||
};
|
||||
|
||||
/*
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
struct AltFunctions<C: Curve> {
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Curve for AltFunctions<C> {
|
||||
type F = C::F;
|
||||
type G = C::G;
|
||||
|
||||
const ID: &'static [u8] = b"alt_functions";
|
||||
|
||||
fn generator() -> Self::G {
|
||||
C::generator()
|
||||
}
|
||||
|
||||
fn hash_msg(msg: &[u8]) -> Vec<u8> {
|
||||
C::hash_msg(&[msg, b"alt"].concat())
|
||||
}
|
||||
|
||||
fn hash_binding_factor(binding: &[u8]) -> Self::F {
|
||||
C::hash_to_F(b"rho_alt", binding)
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
|
||||
C::hash_to_F(&[dst, b"alt"].concat(), msg)
|
||||
}
|
||||
}
|
||||
|
||||
// Test promotion of FROST keys to another set of functions for interoperability
|
||||
fn test_ciphersuite_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
let keys = key_gen::<_, C>(&mut *rng);
|
||||
for keys in keys.values() {
|
||||
let promoted: FrostKeys<AltFunctions<C>> = keys.clone().promote();
|
||||
// Verify equivalence via their serializations, minus the ID's length and ID itself
|
||||
assert_eq!(
|
||||
keys.serialize()[(4 + C::ID.len()) ..],
|
||||
promoted.serialize()[(4 + AltFunctions::<C>::ID.len()) ..]
|
||||
);
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
struct AltGenerator<C: Curve> {
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
|
||||
impl<C: Curve> Curve for AltGenerator<C> {
|
||||
type F = C::F;
|
||||
type G = C::G;
|
||||
|
||||
const ID: &'static [u8] = b"alt_generator";
|
||||
|
||||
fn generator() -> Self::G {
|
||||
C::G::generator() * C::hash_to_F(b"FROST_tests", b"generator")
|
||||
}
|
||||
|
||||
fn hash_to_vec(dst: &[u8], data: &[u8]) -> Vec<u8> {
|
||||
C::hash_to_vec(&[b"FROST_tests_alt", dst].concat(), data)
|
||||
}
|
||||
|
||||
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
|
||||
C::hash_to_F(&[b"FROST_tests_alt", dst].concat(), data)
|
||||
}
|
||||
}
|
||||
|
||||
// Test promotion of FROST keys to another generator
|
||||
fn test_generator_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
// A seeded RNG can theoretically generate for C1 and C2, verifying promotion that way?
|
||||
// TODO
|
||||
let keys = key_gen::<_, C>(&mut *rng);
|
||||
|
||||
let mut promotions = HashMap::new();
|
||||
let mut proofs = HashMap::new();
|
||||
for (i, keys) in &keys {
|
||||
let promotion = GeneratorPromotion::<_, AltGenerator<C>>::promote(&mut *rng, keys.clone());
|
||||
promotions.insert(*i, promotion.0);
|
||||
proofs.insert(*i, promotion.1);
|
||||
}
|
||||
|
||||
let mut new_keys = HashMap::new();
|
||||
let mut group_key = None;
|
||||
let mut verification_shares = None;
|
||||
for (i, promoting) in promotions.drain() {
|
||||
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
|
||||
assert_eq!(keys[&i].params(), promoted.params());
|
||||
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
|
||||
|
||||
if group_key.is_none() {
|
||||
group_key = Some(keys[&i].group_key());
|
||||
verification_shares = Some(keys[&i].verification_shares());
|
||||
}
|
||||
assert_eq!(keys[&i].group_key(), group_key.unwrap());
|
||||
assert_eq!(&keys[&i].verification_shares(), verification_shares.as_ref().unwrap());
|
||||
|
||||
new_keys.insert(i, promoted);
|
||||
}
|
||||
|
||||
// Sign with the keys to ensure their integrity
|
||||
sign_core(rng, &new_keys);
|
||||
}
|
||||
|
||||
pub fn test_promotion<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
// test_ciphersuite_promotion::<_, C>(rng);
|
||||
test_generator_promotion::<_, C>(rng);
|
||||
}
|
|
@ -1,131 +0,0 @@
|
|||
use std::{marker::PhantomData, collections::HashMap};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use group::{ff::Field, Group, GroupEncoding};
|
||||
|
||||
use crate::{
|
||||
Curve, FrostKeys,
|
||||
schnorr::{self, SchnorrSignature},
|
||||
algorithm::{Hram, Schnorr},
|
||||
tests::{key_gen, algorithm_machines, sign as sign_test},
|
||||
};
|
||||
|
||||
pub(crate) fn core_sign<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
let private_key = C::random_F(&mut *rng);
|
||||
let nonce = C::random_F(&mut *rng);
|
||||
let challenge = C::random_F(rng); // Doesn't bother to craft an HRAm
|
||||
assert!(schnorr::verify::<C>(
|
||||
C::generator() * private_key,
|
||||
challenge,
|
||||
&schnorr::sign(private_key, nonce, challenge)
|
||||
));
|
||||
}
|
||||
|
||||
// The above sign function verifies signing works
|
||||
// This verifies invalid signatures don't pass, using zero signatures, which should effectively be
|
||||
// random
|
||||
pub(crate) fn core_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
assert!(!schnorr::verify::<C>(
|
||||
C::generator() * C::random_F(&mut *rng),
|
||||
C::random_F(rng),
|
||||
&SchnorrSignature { R: C::G::identity(), s: C::F::zero() }
|
||||
));
|
||||
}
|
||||
|
||||
pub(crate) fn core_batch_verify<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
// Create 5 signatures
|
||||
let mut keys = vec![];
|
||||
let mut challenges = vec![];
|
||||
let mut sigs = vec![];
|
||||
for i in 0 .. 5 {
|
||||
keys.push(C::random_F(&mut *rng));
|
||||
challenges.push(C::random_F(&mut *rng));
|
||||
sigs.push(schnorr::sign::<C>(keys[i], C::random_F(&mut *rng), challenges[i]));
|
||||
}
|
||||
|
||||
// Batch verify
|
||||
let triplets = (0 .. 5)
|
||||
.map(|i| (u16::try_from(i + 1).unwrap(), C::generator() * keys[i], challenges[i], sigs[i]))
|
||||
.collect::<Vec<_>>();
|
||||
schnorr::batch_verify(rng, &triplets).unwrap();
|
||||
|
||||
// Shift 1 from s from one to another and verify it fails
|
||||
// This test will fail if unique factors aren't used per-signature, hence its inclusion
|
||||
{
|
||||
let mut triplets = triplets.clone();
|
||||
triplets[1].3.s += C::F::one();
|
||||
triplets[2].3.s -= C::F::one();
|
||||
if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
|
||||
assert_eq!(blame, 2);
|
||||
} else {
|
||||
panic!("batch verification considered a malleated signature valid");
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure a completely invalid signature fails when included
|
||||
for i in 0 .. 5 {
|
||||
let mut triplets = triplets.clone();
|
||||
triplets[i].3.s = C::random_F(&mut *rng);
|
||||
if let Err(blame) = schnorr::batch_verify(rng, &triplets) {
|
||||
assert_eq!(blame, u16::try_from(i + 1).unwrap());
|
||||
} else {
|
||||
panic!("batch verification considered an invalid signature valid");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn sign_core<R: RngCore + CryptoRng, C: Curve>(
|
||||
rng: &mut R,
|
||||
keys: &HashMap<u16, FrostKeys<C>>,
|
||||
) {
|
||||
const MESSAGE: &[u8] = b"Hello, World!";
|
||||
|
||||
let machines = algorithm_machines(rng, Schnorr::<C, TestHram<C>>::new(), keys);
|
||||
let sig = sign_test(&mut *rng, machines, MESSAGE);
|
||||
|
||||
let group_key = keys[&1].group_key();
|
||||
assert!(schnorr::verify(group_key, TestHram::<C>::hram(&sig.R, &group_key, MESSAGE), &sig));
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TestHram<C: Curve> {
|
||||
_curve: PhantomData<C>,
|
||||
}
|
||||
impl<C: Curve> Hram<C> for TestHram<C> {
|
||||
#[allow(non_snake_case)]
|
||||
fn hram(R: &C::G, A: &C::G, m: &[u8]) -> C::F {
|
||||
C::hash_to_F(b"challenge", &[R.to_bytes().as_ref(), A.to_bytes().as_ref(), m].concat())
|
||||
}
|
||||
}
|
||||
|
||||
fn sign<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
let keys = key_gen::<_, C>(&mut *rng);
|
||||
sign_core(rng, &keys);
|
||||
}
|
||||
|
||||
fn sign_with_offset<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
let mut keys = key_gen::<_, C>(&mut *rng);
|
||||
let group_key = keys[&1].group_key();
|
||||
|
||||
let offset = C::hash_to_F(b"FROST Test sign_with_offset", b"offset");
|
||||
for i in 1 ..= u16::try_from(keys.len()).unwrap() {
|
||||
keys.insert(i, keys[&i].offset(offset));
|
||||
}
|
||||
let offset_key = group_key + (C::generator() * offset);
|
||||
assert_eq!(keys[&1].group_key(), offset_key);
|
||||
|
||||
sign_core(rng, &keys);
|
||||
}
|
||||
|
||||
pub fn test_schnorr<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
|
||||
// Test Schnorr signatures work as expected
|
||||
// This is a bit unnecessary, as they should for any valid curve, yet this establishes sanity
|
||||
core_sign::<_, C>(rng);
|
||||
core_verify::<_, C>(rng);
|
||||
core_batch_verify::<_, C>(rng);
|
||||
|
||||
// Test Schnorr signatures under FROST
|
||||
sign::<_, C>(rng);
|
||||
sign_with_offset::<_, C>(rng);
|
||||
}
|
|
@ -6,17 +6,17 @@ use rand_core::{RngCore, CryptoRng};
|
|||
|
||||
use group::{ff::PrimeField, GroupEncoding};
|
||||
|
||||
use dkg::tests::{test_ciphersuite as test_dkg};
|
||||
|
||||
use crate::{
|
||||
curve::Curve,
|
||||
FrostCore, FrostKeys,
|
||||
ThresholdCore, ThresholdKeys,
|
||||
algorithm::{Schnorr, Hram},
|
||||
sign::{
|
||||
Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess,
|
||||
PreprocessData, SignMachine, SignatureMachine, AlgorithmMachine,
|
||||
},
|
||||
tests::{
|
||||
clone_without, curve::test_curve, schnorr::test_schnorr, promote::test_promotion, recover,
|
||||
},
|
||||
tests::{clone_without, recover_key, curve::test_curve},
|
||||
};
|
||||
|
||||
pub struct Vectors {
|
||||
|
@ -76,8 +76,8 @@ impl From<serde_json::Value> for Vectors {
|
|||
}
|
||||
}
|
||||
|
||||
// Load these vectors into FrostKeys using a custom serialization it'll deserialize
|
||||
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKeys<C>> {
|
||||
// Load these vectors into ThresholdKeys using a custom serialization it'll deserialize
|
||||
fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, ThresholdKeys<C>> {
|
||||
let shares = vectors
|
||||
.shares
|
||||
.iter()
|
||||
|
@ -87,7 +87,7 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
|
|||
|
||||
let mut keys = HashMap::new();
|
||||
for i in 1 ..= u16::try_from(shares.len()).unwrap() {
|
||||
// Manually re-implement the serialization for FrostCore to import this data
|
||||
// Manually re-implement the serialization for ThresholdCore to import this data
|
||||
let mut serialized = vec![];
|
||||
serialized.extend(u32::try_from(C::ID.len()).unwrap().to_be_bytes());
|
||||
serialized.extend(C::ID);
|
||||
|
@ -99,13 +99,13 @@ fn vectors_to_multisig_keys<C: Curve>(vectors: &Vectors) -> HashMap<u16, FrostKe
|
|||
serialized.extend(share.to_bytes().as_ref());
|
||||
}
|
||||
|
||||
let these_keys = FrostCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
|
||||
let these_keys = ThresholdCore::<C>::deserialize::<&[u8]>(&mut serialized.as_ref()).unwrap();
|
||||
assert_eq!(these_keys.params().t(), vectors.threshold);
|
||||
assert_eq!(usize::from(these_keys.params().n()), shares.len());
|
||||
assert_eq!(these_keys.params().i(), i);
|
||||
assert_eq!(these_keys.secret_share(), shares[usize::from(i - 1)]);
|
||||
assert_eq!(hex::encode(these_keys.group_key().to_bytes().as_ref()), vectors.group_key);
|
||||
keys.insert(i, FrostKeys::new(these_keys));
|
||||
keys.insert(i, ThresholdKeys::new(these_keys));
|
||||
}
|
||||
|
||||
keys
|
||||
|
@ -117,17 +117,18 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
|
|||
) {
|
||||
// Do basic tests before trying the vectors
|
||||
test_curve::<_, C>(&mut *rng);
|
||||
test_schnorr::<_, C>(&mut *rng);
|
||||
test_promotion::<_, C>(rng);
|
||||
|
||||
// Test the DKG
|
||||
test_dkg::<_, C>(&mut *rng);
|
||||
|
||||
// Test against the vectors
|
||||
let keys = vectors_to_multisig_keys::<C>(&vectors);
|
||||
let group_key =
|
||||
C::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
|
||||
<C as Curve>::read_G::<&[u8]>(&mut hex::decode(&vectors.group_key).unwrap().as_ref()).unwrap();
|
||||
let secret =
|
||||
C::read_F::<&[u8]>(&mut hex::decode(&vectors.group_secret).unwrap().as_ref()).unwrap();
|
||||
assert_eq!(C::generator() * secret, group_key);
|
||||
assert_eq!(recover(&keys), secret);
|
||||
assert_eq!(recover_key(&keys), secret);
|
||||
|
||||
let mut machines = vec![];
|
||||
for i in &vectors.included {
|
||||
|
|
27
crypto/schnorr/Cargo.toml
Normal file
|
@ -0,0 +1,27 @@
|
|||
[package]
|
||||
name = "schnorr-signatures"
|
||||
version = "0.1.0"
|
||||
description = "Minimal Schnorr signatures crate hosting common code"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/schnorr"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||
keywords = ["schnorr", "ff", "group"]
|
||||
edition = "2021"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[dependencies]
|
||||
rand_core = "0.6"
|
||||
|
||||
zeroize = { version = "1.5", features = ["zeroize_derive"] }
|
||||
|
||||
group = "0.12"
|
||||
ciphersuite = { path = "../ciphersuite", version = "0.1" }
|
||||
|
||||
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
|
||||
|
||||
[dev-dependencies]
|
||||
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" }
|
||||
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["ristretto"] }
|
21
crypto/schnorr/LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2021-2022 Luke Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
86
crypto/schnorr/src/lib.rs
Normal file
|
@ -0,0 +1,86 @@
|
|||
use std::io::{self, Read, Write};
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use zeroize::Zeroize;
|
||||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
};
|
||||
|
||||
use multiexp::BatchVerifier;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// A Schnorr signature of the form (R, s) where s = r + cx.
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct SchnorrSignature<C: Ciphersuite> {
|
||||
pub R: C::G,
|
||||
pub s: C::F,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> SchnorrSignature<C> {
|
||||
/// Read a SchnorrSignature from something implementing Read.
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(SchnorrSignature { R: C::read_G(reader)?, s: C::read_F(reader)? })
|
||||
}
|
||||
|
||||
/// Write a SchnorrSignature to something implementing Write.
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.R.to_bytes().as_ref())?;
|
||||
writer.write_all(self.s.to_repr().as_ref())
|
||||
}
|
||||
|
||||
/// Serialize a SchnorrSignature, returning a Vec<u8>.
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
/// Sign a Schnorr signature with the given nonce for the specified challenge.
|
||||
pub fn sign(mut private_key: C::F, mut nonce: C::F, challenge: C::F) -> SchnorrSignature<C> {
|
||||
let res = SchnorrSignature { R: C::generator() * nonce, s: nonce + (private_key * challenge) };
|
||||
private_key.zeroize();
|
||||
nonce.zeroize();
|
||||
res
|
||||
}
|
||||
|
||||
/// Verify a Schnorr signature for the given key with the specified challenge.
|
||||
#[must_use]
|
||||
pub fn verify(&self, public_key: C::G, challenge: C::F) -> bool {
|
||||
(C::generator() * self.s) == (self.R + (public_key * challenge))
|
||||
}
|
||||
|
||||
/// Queue a signature for batch verification.
|
||||
pub fn batch_verify<R: RngCore + CryptoRng, I: Copy + Zeroize>(
|
||||
&self,
|
||||
rng: &mut R,
|
||||
batch: &mut BatchVerifier<I, C::G>,
|
||||
id: I,
|
||||
public_key: C::G,
|
||||
challenge: C::F,
|
||||
) {
|
||||
// s = r + ca
|
||||
// sG == R + cA
|
||||
// R + cA - sG == 0
|
||||
|
||||
batch.queue(
|
||||
rng,
|
||||
id,
|
||||
[
|
||||
// R
|
||||
(C::F::one(), self.R),
|
||||
// cA
|
||||
(challenge, public_key),
|
||||
// -sG
|
||||
(-self.s, C::generator()),
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
72
crypto/schnorr/src/tests.rs
Normal file
|
@ -0,0 +1,72 @@
|
|||
use rand_core::OsRng;
|
||||
|
||||
use group::{ff::Field, Group};
|
||||
|
||||
use multiexp::BatchVerifier;
|
||||
|
||||
use ciphersuite::{Ciphersuite, Ristretto};
|
||||
use crate::SchnorrSignature;
|
||||
|
||||
pub(crate) fn core_sign<C: Ciphersuite>() {
|
||||
let private_key = C::random_nonzero_F(&mut OsRng);
|
||||
let nonce = C::random_nonzero_F(&mut OsRng);
|
||||
let challenge = C::random_nonzero_F(&mut OsRng); // Doesn't bother to craft an HRAm
|
||||
assert!(SchnorrSignature::<C>::sign(private_key, nonce, challenge)
|
||||
.verify(C::generator() * private_key, challenge));
|
||||
}
|
||||
|
||||
// The above sign function verifies signing works
|
||||
// This verifies invalid signatures don't pass, using zero signatures, which should effectively be
|
||||
// random
|
||||
pub(crate) fn core_verify<C: Ciphersuite>() {
|
||||
assert!(!SchnorrSignature::<C> { R: C::G::identity(), s: C::F::zero() }
|
||||
.verify(C::generator() * C::random_nonzero_F(&mut OsRng), C::random_nonzero_F(&mut OsRng)));
|
||||
}
|
||||
|
||||
pub(crate) fn core_batch_verify<C: Ciphersuite>() {
|
||||
// Create 5 signatures
|
||||
let mut keys = vec![];
|
||||
let mut challenges = vec![];
|
||||
let mut sigs = vec![];
|
||||
for i in 0 .. 5 {
|
||||
keys.push(C::random_nonzero_F(&mut OsRng));
|
||||
challenges.push(C::random_nonzero_F(&mut OsRng));
|
||||
sigs.push(SchnorrSignature::<C>::sign(keys[i], C::random_nonzero_F(&mut OsRng), challenges[i]));
|
||||
}
|
||||
|
||||
// Batch verify
|
||||
{
|
||||
let mut batch = BatchVerifier::new(5);
|
||||
for (i, sig) in sigs.iter().enumerate() {
|
||||
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]);
|
||||
}
|
||||
batch.verify_with_vartime_blame().unwrap();
|
||||
}
|
||||
|
||||
// Shift 1 from one signature's s to another's and verify it fails
|
||||
// This test will fail if unique factors aren't used per-signature, hence its inclusion
|
||||
{
|
||||
let mut batch = BatchVerifier::new(5);
|
||||
for (i, mut sig) in sigs.clone().drain(..).enumerate() {
|
||||
if i == 1 {
|
||||
sig.s += C::F::one();
|
||||
}
|
||||
if i == 2 {
|
||||
sig.s -= C::F::one();
|
||||
}
|
||||
sig.batch_verify(&mut OsRng, &mut batch, i, C::generator() * keys[i], challenges[i]);
|
||||
}
|
||||
if let Err(blame) = batch.verify_with_vartime_blame() {
|
||||
assert!((blame == 1) || (blame == 2));
|
||||
} else {
|
||||
panic!("Batch verification considered malleated signatures valid");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
core_sign::<Ristretto>();
|
||||
core_verify::<Ristretto>();
|
||||
core_batch_verify::<Ristretto>();
|
||||
}
|
|
@ -4,7 +4,11 @@ use async_trait::async_trait;
|
|||
use thiserror::Error;
|
||||
|
||||
use transcript::RecommendedTranscript;
|
||||
use frost::{curve::Curve, FrostKeys, sign::PreprocessMachine};
|
||||
use frost::{
|
||||
curve::{Ciphersuite, Curve},
|
||||
ThresholdKeys,
|
||||
sign::PreprocessMachine,
|
||||
};
|
||||
|
||||
pub mod monero;
|
||||
pub use self::monero::Monero;
|
||||
|
@ -45,14 +49,14 @@ pub trait Coin {
|
|||
const MAX_OUTPUTS: usize; // TODO: Decide if this includes change or not
|
||||
|
||||
// Doesn't have to take self; this enables some level of caching, which is pleasant
|
||||
fn address(&self, key: <Self::Curve as Curve>::G) -> Self::Address;
|
||||
fn address(&self, key: <Self::Curve as Ciphersuite>::G) -> Self::Address;
|
||||
|
||||
async fn get_latest_block_number(&self) -> Result<usize, CoinError>;
|
||||
async fn get_block(&self, number: usize) -> Result<Self::Block, CoinError>;
|
||||
async fn get_outputs(
|
||||
&self,
|
||||
block: &Self::Block,
|
||||
key: <Self::Curve as Curve>::G,
|
||||
key: <Self::Curve as Ciphersuite>::G,
|
||||
) -> Result<Vec<Self::Output>, CoinError>;
|
||||
|
||||
// TODO: Remove
|
||||
|
@ -60,7 +64,7 @@ pub trait Coin {
|
|||
|
||||
async fn prepare_send(
|
||||
&self,
|
||||
keys: FrostKeys<Self::Curve>,
|
||||
keys: ThresholdKeys<Self::Curve>,
|
||||
transcript: RecommendedTranscript,
|
||||
block_number: usize,
|
||||
inputs: Vec<Self::Output>,
|
||||
|
|
|
@ -4,7 +4,7 @@ use curve25519_dalek::scalar::Scalar;
|
|||
|
||||
use dalek_ff_group as dfg;
|
||||
use transcript::RecommendedTranscript;
|
||||
use frost::{curve::Ed25519, FrostKeys};
|
||||
use frost::{curve::Ed25519, ThresholdKeys};
|
||||
|
||||
use monero_serai::{
|
||||
transaction::Transaction,
|
||||
|
@ -55,7 +55,7 @@ impl OutputTrait for Output {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct SignableTransaction {
|
||||
keys: FrostKeys<Ed25519>,
|
||||
keys: ThresholdKeys<Ed25519>,
|
||||
transcript: RecommendedTranscript,
|
||||
// Monero height, defined as the length of the chain
|
||||
height: usize,
|
||||
|
@ -157,7 +157,7 @@ impl Coin for Monero {
|
|||
|
||||
async fn prepare_send(
|
||||
&self,
|
||||
keys: FrostKeys<Ed25519>,
|
||||
keys: ThresholdKeys<Ed25519>,
|
||||
transcript: RecommendedTranscript,
|
||||
block_number: usize,
|
||||
mut inputs: Vec<Output>,
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::{marker::Send, collections::HashMap};
|
|||
use async_trait::async_trait;
|
||||
use thiserror::Error;
|
||||
|
||||
use frost::{curve::Curve, FrostError};
|
||||
use frost::{curve::Ciphersuite, FrostError};
|
||||
|
||||
mod coin;
|
||||
use coin::{CoinError, Coin};
|
||||
|
@ -35,6 +35,9 @@ pub enum SignError {
|
|||
// Doesn't consider the current group key to increase the simplicity of verifying Serai's status
|
||||
// Takes an index, k, to support protocols which use multiple secondary keys
|
||||
// Presumably a view key
|
||||
pub(crate) fn additional_key<C: Coin>(k: u64) -> <C::Curve as Curve>::F {
|
||||
C::Curve::hash_to_F(b"Serai DEX Additional Key", &[C::ID, &k.to_le_bytes()].concat())
|
||||
pub(crate) fn additional_key<C: Coin>(k: u64) -> <C::Curve as Ciphersuite>::F {
|
||||
<C::Curve as Ciphersuite>::hash_to_F(
|
||||
b"Serai DEX Additional Key",
|
||||
&[C::ID, &k.to_le_bytes()].concat(),
|
||||
)
|
||||
}
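// Hedged usage sketch (Monero being the one Coin implemented here): distinct k values derive
// independent, deterministic secondary scalars, e.g. per-purpose view keys.
//
// let view_key_0 = additional_key::<Monero>(0);
// let view_key_1 = additional_key::<Monero>(1);
// assert_ne!(view_key_0, view_key_1);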
|
||||
|
|
|
@ -6,8 +6,8 @@ use group::GroupEncoding;
|
|||
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
use frost::{
|
||||
curve::Curve,
|
||||
FrostError, FrostKeys,
|
||||
curve::{Ciphersuite, Curve},
|
||||
FrostError, ThresholdKeys,
|
||||
sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine},
|
||||
};
|
||||
|
||||
|
@ -17,12 +17,12 @@ use crate::{
|
|||
};
|
||||
|
||||
pub struct WalletKeys<C: Curve> {
|
||||
keys: FrostKeys<C>,
|
||||
keys: ThresholdKeys<C>,
|
||||
creation_block: usize,
|
||||
}
|
||||
|
||||
impl<C: Curve> WalletKeys<C> {
|
||||
pub fn new(keys: FrostKeys<C>, creation_block: usize) -> WalletKeys<C> {
|
||||
pub fn new(keys: ThresholdKeys<C>, creation_block: usize) -> WalletKeys<C> {
|
||||
WalletKeys { keys, creation_block }
|
||||
}
|
||||
|
||||
|
@ -34,13 +34,13 @@ impl<C: Curve> WalletKeys<C> {
|
|||
// system, there are potentially other benefits to binding this to a specific group key
|
||||
// It's no longer possible to influence group key gen to key cancel without breaking the hash
|
||||
// function as well, although that degree of influence means key gen is broken already
|
||||
fn bind(&self, chain: &[u8]) -> FrostKeys<C> {
|
||||
fn bind(&self, chain: &[u8]) -> ThresholdKeys<C> {
|
||||
const DST: &[u8] = b"Serai Processor Wallet Chain Bind";
|
||||
let mut transcript = RecommendedTranscript::new(DST);
|
||||
transcript.append_message(b"chain", chain);
|
||||
transcript.append_message(b"curve", C::ID);
|
||||
transcript.append_message(b"group_key", self.keys.group_key().to_bytes().as_ref());
|
||||
self.keys.offset(C::hash_to_F(DST, &transcript.challenge(b"offset")))
|
||||
self.keys.offset(<C as Ciphersuite>::hash_to_F(DST, &transcript.challenge(b"offset")))
|
||||
}
|
||||
}
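// Hedged sketch of the binding above (`keys` stands in for any existing ThresholdKeys):
// binding the same keys to two different chain IDs yields unrelated offsets, so the resulting
// group keys cannot be linked across chains.
//
// let wallet = WalletKeys::new(keys, 0);
// assert_ne!(wallet.bind(b"chain A").group_key(), wallet.bind(b"chain B").group_key());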
|
||||
|
||||
|
@ -203,8 +203,8 @@ fn select_inputs_outputs<C: Coin>(
|
|||
pub struct Wallet<D: CoinDb, C: Coin> {
|
||||
db: D,
|
||||
coin: C,
|
||||
keys: Vec<(FrostKeys<C::Curve>, Vec<C::Output>)>,
|
||||
pending: Vec<(usize, FrostKeys<C::Curve>)>,
|
||||
keys: Vec<(ThresholdKeys<C::Curve>, Vec<C::Output>)>,
|
||||
pending: Vec<(usize, ThresholdKeys<C::Curve>)>,
|
||||
}
|
||||
|
||||
impl<D: CoinDb, C: Coin> Wallet<D, C> {
|
||||
|
@ -344,11 +344,7 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
|
|||
|
||||
let (attempt, commitments) = attempt.preprocess(&mut OsRng);
|
||||
let commitments = network
|
||||
.round({
|
||||
let mut buf = vec![];
|
||||
commitments.write(&mut buf).unwrap();
|
||||
buf
|
||||
})
|
||||
.round(commitments.serialize())
|
||||
.await
|
||||
.map_err(SignError::NetworkError)?
|
||||
.drain()
|
||||
|
@ -364,11 +360,7 @@ impl<D: CoinDb, C: Coin> Wallet<D, C> {
|
|||
|
||||
let (attempt, share) = attempt.sign(commitments, b"").map_err(SignError::FrostError)?;
|
||||
let shares = network
|
||||
.round({
|
||||
let mut buf = vec![];
|
||||
share.write(&mut buf).unwrap();
|
||||
buf
|
||||
})
|
||||
.round(share.serialize())
|
||||
.await
|
||||
.map_err(SignError::NetworkError)?
|
||||
.drain()
|
||||
|
|