Add FROST Ed25519 test vectors

This commit is contained in:
Luke Parker 2022-06-03 03:56:17 -04:00
parent b4cd29f49a
commit f8d127bf8a
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
5 changed files with 102 additions and 22 deletions

View file

@ -40,4 +40,5 @@ experimental = []
multisig = ["ff", "group", "rand_chacha", "transcript", "frost", "dalek-ff-group"]
[dev-dependencies]
sha2 = "0.10"
tokio = { version = "1", features = ["full"] }

View file

@ -1,9 +1,10 @@
use core::convert::TryInto;
use core::{convert::TryInto, fmt::{Formatter, Debug}};
use std::marker::PhantomData;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use blake2::{digest::Update, Digest, Blake2b512};
use blake2::{digest::{generic_array::typenum::U64, Digest}, Blake2b512};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE as DTable,
@ -11,7 +12,7 @@ use curve25519_dalek::{
edwards::EdwardsPoint as DPoint
};
use ff::{Field, PrimeField};
use ff::PrimeField;
use group::Group;
use transcript::{Transcript as TranscriptTrait, DigestTranscript};
@ -32,9 +33,26 @@ pub enum MultisigError {
InvalidKeyImage(u16)
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Ed25519;
impl Curve for Ed25519 {
// Accept a parameterized hash function in order to check against the FROST vectors while still
// allowing Blake2b to be used with wide reduction in practice
pub struct Ed25519Internal<D: Digest<OutputSize = U64>, const WIDE: bool> {
_digest: PhantomData<D>
}
// Manual impls below so D itself isn't required to implement Clone/Copy/PartialEq/Eq/Debug
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Clone for Ed25519Internal<D, WIDE> {
fn clone(&self) -> Self { *self }
}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Copy for Ed25519Internal<D, WIDE> {}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> PartialEq for Ed25519Internal<D, WIDE> {
fn eq(&self, _: &Self) -> bool { true }
}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Eq for Ed25519Internal<D, WIDE> {}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Debug for Ed25519Internal<D, WIDE> {
fn fmt(&self, _: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { Ok(()) }
}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Curve for Ed25519Internal<D, WIDE> {
type F = dfg::Scalar;
type G = dfg::EdwardsPoint;
type T = &'static dfg::EdwardsBasepointTable;
@ -44,7 +62,7 @@ impl Curve for Ed25519 {
}
fn id() -> &'static [u8] {
b"Ed25519"
b"edwards25519"
}
fn generator() -> Self::G {
@ -59,15 +77,15 @@ impl Curve for Ed25519 {
true
}
fn random_nonce<R: RngCore + CryptoRng>(_secret: Self::F, rng: &mut R) -> Self::F {
dfg::Scalar::random(rng)
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
seed.extend(&secret.to_bytes());
Self::hash_to_F(b"nonce", &seed)
}
// This will already be a keccak256 hash in the case of CLSAG signing, making it fine to simply
// return as-is, yet this ensures it's fixed size (a security requirement) and unique regardless
// of how it's called/what it's called with
fn hash_msg(msg: &[u8]) -> Vec<u8> {
Blake2b512::digest(msg).to_vec()
D::digest(msg).to_vec()
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
@ -75,7 +93,12 @@ impl Curve for Ed25519 {
}
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
dfg::Scalar::from_hash(Blake2b512::new().chain(dst).chain(msg))
let digest = D::new().chain_update(dst).chain_update(msg);
if WIDE {
dfg::Scalar::from_hash(digest)
} else {
dfg::Scalar::from_bytes_mod_order(digest.finalize()[32 ..].try_into().unwrap())
}
}
fn F_len() -> usize {
@ -101,8 +124,8 @@ impl Curve for Ed25519 {
let point = dfg::CompressedEdwardsY::new(bytes).decompress();
if let Some(point) = point {
// Ban torsioned points
if !point.is_torsion_free() {
// Ban identity and torsioned points
if point.is_identity().into() || (!bool::from(point.is_torsion_free())) {
Err(CurveError::InvalidPoint)?;
}
// Ban points which weren't canonically encoded
@ -124,6 +147,8 @@ impl Curve for Ed25519 {
}
}
pub type Ed25519 = Ed25519Internal<Blake2b512, true>;
// Used to prove legitimacy of key images and nonces which both involve other basepoints
#[derive(Clone)]
pub struct DLEqProof {
@ -225,6 +250,7 @@ pub fn read_dleq(
xG: &DPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
// Not using G_from_slice here would enable non-canonical points and break blame
// This does also ban identity points, yet those should never be a concern
let other = <Ed25519 as Curve>::G_from_slice(
&serialized[(start + 0) .. (start + 32)]
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;

View file

@ -1,11 +1,67 @@
use rand::rngs::OsRng;
use frost::tests::{curve::test_curve, key_gen};
use sha2::Sha512;
use crate::frost::Ed25519;
use dalek_ff_group as dfg;
use frost::{Curve, algorithm::Hram, tests::{curve::test_curve, vectors::{Vectors, vectors}}};
use crate::frost::{Ed25519, Ed25519Internal};
#[test]
fn frost_ed25519() {
test_curve::<_, Ed25519>(&mut OsRng);
key_gen::<_, Ed25519>(&mut OsRng);
}
// Not spec-compliant, as this shouldn't use wide reduction
// It is compliant with the published test vectors, which is why the tests below pass
// See https://github.com/cfrg/draft-irtf-cfrg-frost/issues/204
//type TestEd25519 = Ed25519Internal<Sha512, false>;
// If this is kept, we can remove WIDE
type TestEd25519 = Ed25519Internal<Sha512, true>;
#[derive(Copy, Clone)]
struct IetfEd25519Hram {}
impl Hram<TestEd25519> for IetfEd25519Hram {
#[allow(non_snake_case)]
fn hram(R: &dfg::EdwardsPoint, A: &dfg::EdwardsPoint, m: &[u8]) -> dfg::Scalar {
TestEd25519::hash_to_F(
b"",
&[&R.compress().to_bytes(), &A.compress().to_bytes(), m].concat()
)
}
}
#[test]
fn frost_ed25519_vectors() {
vectors::<TestEd25519, IetfEd25519Hram>(
Vectors {
threshold: 2,
shares: &[
"929dcc590407aae7d388761cddb0c0db6f5627aea8e217f4a033f2ec83d93509",
"a91e66e012e4364ac9aaa405fcafd370402d9859f7b6685c07eed76bf409e80d",
"d3cb090a075eb154e82fdb4b3cb507f110040905468bb9c46da8bdea643a9a02"
],
group_secret: "7b1c33d3f5291d85de664833beb1ad469f7fb6025a0ec78b3a790c6e13a98304",
group_key: "15d21ccd7ee42959562fc8aa63224c8851fb3ec85a3faf66040d380fb9738673",
msg: "74657374",
included: &[1, 3],
nonces: &[
[
"8c76af04340e83bb5fc427c117d38347fc8ef86d5397feea9aa6412d96c05b0a",
"14a37ddbeae8d9e9687369e5eb3c6d54f03dc19d76bb54fb5425131bc37a600b"
],
[
"5ca39ebab6874f5e7b5089f3521819a2aa1e2cf738bae6974ee80555de2ef70e",
"0afe3650c4815ff37becd3c6948066e906e929ea9b8f546c74e10002dbcc150c"
]
],
sig_shares: &[
"4369474a398aa10357b60d683da91ea6a767dcf53fd541a8ed6b4d780827ea0a",
"32fcc690d926075e45d2dfb746bab71447943cddbefe80d122c39174aa2e1004"
],
sig: "2b8d9c6995333c5990e3a3dd6568785539d3322f7f0376452487ea35cfda587b".to_owned() +
"75650edb12b1a8619c88ed1f8463d6baeefb18d3fed3c279102fdfecb255fa0e"
}
);
}

View file

@ -179,7 +179,6 @@ fn p256_curve() {
test_curve::<_, P256>(&mut OsRng);
}
#[allow(non_snake_case)]
#[derive(Clone)]
pub struct IetfP256Hram {}
impl Hram<P256> for IetfP256Hram {
@ -217,7 +216,6 @@ fn p256_vectors() {
"486e2ee25a3fbc8e6399d748b077a2755fde99fa85cc24fa647ea4ebf5811a15"
]
],
binding: "cf7ffe4b8ad6edb6237efaa8cbfb2dfb2fd08d163b6ad9063720f14779a9e143",
sig_shares: &[
"9e4d8865faf8c7b3193a3b35eda3d9e12118447114b1e7d5b4809ea28067f8a9",
"b7d094eab6305ae74daeed1acd31abba9ab81f638d38b72c132cb25a5dfae1fc"

View file

@ -16,7 +16,6 @@ pub struct Vectors {
pub msg: &'static str,
pub included: &'static [u16],
pub nonces: &'static [[&'static str; 2]],
pub binding: &'static str,
pub sig_shares: &'static [&'static str],
pub sig: String
}