Add Ed25519 to FROST and remove expand_xmd in favor of elliptic-curve's

Doesn't fully utilize ec's hash2curve module as k256 Scalar doesn't have 
FromOkm for some reason. The previously present bigint reduction is 
preserved.

Updates ff/group to 0.12.

Premised on https://github.com/cfrg/draft-irtf-cfrg-frost/pull/205 being 
merged, as while this Ed25519 is vector compliant, it's technically not 
spec compliant due to that conflict.
This commit is contained in:
Luke Parker 2022-06-06 02:18:25 -04:00
parent 55a895d65a
commit e0ce6e5c12
No known key found for this signature in database
GPG key ID: F9F1386DB1E119B6
15 changed files with 189 additions and 266 deletions

View file

@ -16,16 +16,15 @@ rand = "0.8"
rand_distr = "0.4"
tiny-keccak = { version = "2", features = ["keccak"] }
blake2 = "0.10"
blake2 = { version = "0.10", optional = true }
curve25519-dalek = { version = "3", features = ["std"] }
ff = { version = "0.11", optional = true }
group = { version = "0.11", optional = true }
group = { version = "0.12", optional = true }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", optional = true }
transcript = { path = "../../crypto/transcript", optional = true }
frost = { path = "../../crypto/frost", optional = true }
frost = { path = "../../crypto/frost", features = ["ed25519"], optional = true }
monero = "0.16"
@ -37,7 +36,7 @@ reqwest = { version = "0.11", features = ["json"] }
[features]
experimental = []
multisig = ["ff", "group", "rand_chacha", "transcript", "frost", "dalek-ff-group"]
multisig = ["rand_chacha", "blake2", "group", "dalek-ff-group", "transcript", "frost"]
[dev-dependencies]
sha2 = "0.10"

View file

@ -1,22 +1,17 @@
use core::{convert::TryInto, fmt::{Formatter, Debug}};
use std::marker::PhantomData;
use core::convert::TryInto;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use blake2::{digest::{generic_array::typenum::U64, Digest}, Blake2b512};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE as DTable,
scalar::Scalar as DScalar,
edwards::EdwardsPoint as DPoint
};
use ff::PrimeField;
use group::Group;
use transcript::{Transcript as TranscriptTrait, DigestTranscript};
use frost::{CurveError, Curve};
use frost::Curve;
pub use frost::curves::ed25519::Ed25519;
use dalek_ff_group as dfg;
use crate::random_scalar;
@ -33,109 +28,6 @@ pub enum MultisigError {
InvalidKeyImage(u16)
}
// Accept a parameterized hash function in order to check against the FROST vectors while still
// allowing Blake2b to be used with wide reduction in practice
// Zero-sized marker type: D and WIDE are compile-time configuration only; PhantomData
// holds no runtime state.
pub struct Ed25519Internal<D: Digest<OutputSize = U64>, const WIDE: bool> {
_digest: PhantomData<D>
}
// Removed requirements for D to have all of these
// Manual impls so D itself needn't be Clone/Copy/Eq/Debug — no D value is ever stored.
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Clone for Ed25519Internal<D, WIDE> {
fn clone(&self) -> Self { *self }
}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Copy for Ed25519Internal<D, WIDE> {}
// All instances are identical, so equality is unconditionally true.
impl<D: Digest<OutputSize = U64>, const WIDE: bool> PartialEq for Ed25519Internal<D, WIDE> {
fn eq(&self, _: &Self) -> bool { true }
}
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Eq for Ed25519Internal<D, WIDE> {}
// Debug prints nothing; there is no state to show.
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Debug for Ed25519Internal<D, WIDE> {
fn fmt(&self, _: &mut Formatter<'_>) -> Result<(), core::fmt::Error> { Ok(()) }
}
// FROST Curve implementation for Ed25519, generic over the 64-byte-output digest D and a
// WIDE flag selecting wide (64-byte) vs half-digest scalar reduction in hash_to_F.
impl<D: Digest<OutputSize = U64>, const WIDE: bool> Curve for Ed25519Internal<D, WIDE> {
type F = dfg::Scalar;
type G = dfg::EdwardsPoint;
type T = &'static dfg::EdwardsBasepointTable;
// Ciphersuite identifier used for domain separation by the frost crate.
const ID: &'static [u8] = b"edwards25519";
const GENERATOR: Self::G = dfg::ED25519_BASEPOINT_POINT;
const GENERATOR_TABLE: Self::T = &dfg::ED25519_BASEPOINT_TABLE;
const LITTLE_ENDIAN: bool = true;
// Derive a nonce by hashing fresh RNG output concatenated with the secret scalar's bytes,
// so a weak RNG alone doesn't yield a predictable nonce.
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
seed.extend(&secret.to_bytes());
Self::hash_to_F(b"nonce", &seed)
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
D::digest(msg).to_vec()
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
Self::hash_to_F(b"rho", binding)
}
// Hash dst || msg with D. WIDE reduces the full 64-byte digest mod l; otherwise only the
// second 32 bytes of the digest are reduced.
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
let digest = D::new().chain_update(dst).chain_update(msg);
if WIDE {
dfg::Scalar::from_hash(digest)
} else {
dfg::Scalar::from_bytes_mod_order(digest.finalize()[32 ..].try_into().unwrap())
}
}
// Serialized scalar/point lengths are both 32 bytes for Ed25519.
fn F_len() -> usize {
32
}
fn G_len() -> usize {
32
}
// Deserialize a scalar, rejecting wrong lengths and non-canonical representations.
fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
let scalar = Self::F::from_repr(
slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
);
if scalar.is_some().unwrap_u8() == 0 {
Err(CurveError::InvalidScalar)?;
}
Ok(scalar.unwrap())
}
// Deserialize a point, rejecting the identity, torsioned points, and non-canonical
// encodings (re-compression must round-trip to the input bytes).
fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
let bytes = slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?;
let point = dfg::CompressedEdwardsY::new(bytes).decompress();
if let Some(point) = point {
// Ban identity and torsioned points
if point.is_identity().into() || (!bool::from(point.is_torsion_free())) {
Err(CurveError::InvalidPoint)?;
}
// Ban points which weren't canonically encoded
if point.compress().to_bytes() != bytes {
Err(CurveError::InvalidPoint)?;
}
Ok(point)
} else {
Err(CurveError::InvalidPoint)
}
}
fn F_to_bytes(f: &Self::F) -> Vec<u8> {
f.to_repr().to_vec()
}
fn G_to_bytes(g: &Self::G) -> Vec<u8> {
g.compress().to_bytes().to_vec()
}
}
// Instantiation used in practice: Blake2b-512 with wide reduction (see header comment).
pub type Ed25519 = Ed25519Internal<Blake2b512, true>;
// Used to prove legitimacy of key images and nonces which both involve other basepoints
#[derive(Clone)]
pub struct DLEqProof {

View file

@ -1,4 +1 @@
#[cfg(feature = "multisig")]
mod frost;
mod clsag;

View file

@ -12,7 +12,6 @@ digest = "0.10"
subtle = "2.4"
ff = "0.11"
group = "0.11"
group = "0.12"
curve25519-dalek = "3.2"

View file

@ -22,8 +22,7 @@ use dalek::{
}
};
use ff::{Field, PrimeField};
use group::Group;
use group::{ff::{Field, PrimeField}, Group};
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct Scalar(pub DScalar);

View file

@ -12,12 +12,15 @@ thiserror = "1"
rand_core = "0.6"
hex = "0.4"
ff = "0.11"
group = "0.11"
sha2 = { version = "0.10", optional = true }
p256 = { version = "0.10", optional = true }
k256 = { version = "0.10", optional = true }
ff = "0.12"
group = "0.12"
elliptic-curve = { version = "0.12", features = ["hash2curve"], optional = true }
p256 = { version = "0.11", features = ["arithmetic", "hash2curve"], optional = true }
k256 = { version = "0.11", features = ["arithmetic", "hash2curve"], optional = true }
dalek-ff-group = { path = "../dalek-ff-group", optional = true }
transcript = { path = "../transcript" }
@ -25,9 +28,14 @@ multiexp = { path = "../multiexp", features = ["batch"] }
[dev-dependencies]
rand = "0.8"
sha2 = "0.10"
p256 = { version = "0.10", features = ["arithmetic"] }
elliptic-curve = { version = "0.12", features = ["hash2curve"] }
p256 = { version = "0.11", features = ["arithmetic", "hash2curve"] }
[features]
p256 = ["sha2", "dep:p256"]
k256 = ["sha2", "dep:k256"]
curves = []
kp256 = ["elliptic-curve"]
p256 = ["curves", "kp256", "sha2", "dep:p256"]
k256 = ["curves", "kp256", "sha2", "dep:k256"]
ed25519 = ["curves", "sha2", "dalek-ff-group"]

View file

@ -0,0 +1,104 @@
use core::convert::TryInto;
use rand_core::{RngCore, CryptoRng};
use sha2::{Digest, Sha512};
use ff::PrimeField;
use group::Group;
use dalek_ff_group::{
EdwardsBasepointTable,
ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE,
Scalar, EdwardsPoint, CompressedEdwardsY
};
use crate::{CurveError, Curve, algorithm::Hram};
// FROST ciphersuite for Ed25519: SHA-512 with wide (64-byte) scalar reduction.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Ed25519;
impl Curve for Ed25519 {
type F = Scalar;
type G = EdwardsPoint;
type T = &'static EdwardsBasepointTable;
// Ciphersuite identifier used for domain separation.
const ID: &'static [u8] = b"edwards25519";
const GENERATOR: Self::G = ED25519_BASEPOINT_POINT;
const GENERATOR_TABLE: Self::T = &ED25519_BASEPOINT_TABLE;
const LITTLE_ENDIAN: bool = true;
// Derive a nonce by hashing fresh RNG output concatenated with the secret scalar's bytes,
// so a weak RNG alone doesn't yield a predictable nonce.
fn random_nonce<R: RngCore + CryptoRng>(secret: Self::F, rng: &mut R) -> Self::F {
let mut seed = vec![0; 32];
rng.fill_bytes(&mut seed);
seed.extend(&secret.to_bytes());
Self::hash_to_F(b"nonce", &seed)
}
fn hash_msg(msg: &[u8]) -> Vec<u8> {
Sha512::digest(msg).to_vec()
}
fn hash_binding_factor(binding: &[u8]) -> Self::F {
Self::hash_to_F(b"rho", binding)
}
// SHA-512(dst || msg) reduced mod l via wide (64-byte) reduction.
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
Scalar::from_hash(Sha512::new().chain_update(dst).chain_update(msg))
}
// Serialized scalar/point lengths are both 32 bytes for Ed25519.
fn F_len() -> usize {
32
}
fn G_len() -> usize {
32
}
// Deserialize a scalar, rejecting wrong lengths and non-canonical representations.
fn F_from_slice(slice: &[u8]) -> Result<Self::F, CurveError> {
let scalar = Self::F::from_repr(
slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?
);
if scalar.is_some().unwrap_u8() == 0 {
Err(CurveError::InvalidScalar)?;
}
Ok(scalar.unwrap())
}
// Deserialize a point, rejecting the identity, torsioned points, and non-canonical
// encodings (re-compression must round-trip to the input bytes).
fn G_from_slice(slice: &[u8]) -> Result<Self::G, CurveError> {
let bytes = slice.try_into().map_err(|_| CurveError::InvalidLength(32, slice.len()))?;
let point = CompressedEdwardsY::new(bytes).decompress();
if let Some(point) = point {
// Ban identity and torsioned points
if point.is_identity().into() || (!bool::from(point.is_torsion_free())) {
Err(CurveError::InvalidPoint)?;
}
// Ban points which weren't canonically encoded
if point.compress().to_bytes() != bytes {
Err(CurveError::InvalidPoint)?;
}
Ok(point)
} else {
Err(CurveError::InvalidPoint)
}
}
fn F_to_bytes(f: &Self::F) -> Vec<u8> {
f.to_repr().to_vec()
}
fn G_to_bytes(g: &Self::G) -> Vec<u8> {
g.compress().to_bytes().to_vec()
}
}
// IETF challenge for FROST(Ed25519): hashes the compressed R and A points followed by the
// message, with an empty domain-separation tag, via Ed25519::hash_to_F (SHA-512, wide
// reduction).
#[derive(Copy, Clone)]
pub struct IetfEd25519Hram;
impl Hram<Ed25519> for IetfEd25519Hram {
#[allow(non_snake_case)]
fn hram(R: &EdwardsPoint, A: &EdwardsPoint, m: &[u8]) -> Scalar {
Ed25519::hash_to_F(b"", &[&R.compress().to_bytes(), &A.compress().to_bytes(), m].concat())
}
}

View file

@ -2,28 +2,27 @@ use core::{marker::PhantomData, convert::TryInto};
use rand_core::{RngCore, CryptoRng};
use sha2::{digest::Update, Digest, Sha256};
use ff::{Field, PrimeField};
use group::{Group, GroupEncoding};
use sha2::{digest::Update, Digest, Sha256};
use elliptic_curve::{bigint::{Encoding, U384}, hash2curve::{Expander, ExpandMsg, ExpandMsgXmd}};
#[cfg(feature = "k256")]
use k256::elliptic_curve::bigint::{Encoding, U384};
#[cfg(all(not(feature = "k256"), any(test, feature = "p256")))]
use p256::elliptic_curve::bigint::{Encoding, U384};
use crate::{CurveError, Curve, curves::expand_message_xmd_sha256};
use crate::{CurveError, Curve};
#[cfg(any(test, feature = "p256"))]
use crate::algorithm::Hram;
#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct KP256<P: Group> {
_P: PhantomData<P>
pub struct KP256<G: Group> {
_G: PhantomData<G>
}
pub(crate) trait KP256Instance<P> {
pub(crate) trait KP256Instance<G> {
const CONTEXT: &'static [u8];
const ID: &'static [u8];
const GENERATOR: P;
const GENERATOR: G;
}
#[cfg(any(test, feature = "p256"))]
@ -44,19 +43,19 @@ impl KP256Instance<k256::ProjectivePoint> for K256 {
const GENERATOR: k256::ProjectivePoint = k256::ProjectivePoint::GENERATOR;
}
impl<P: Group + GroupEncoding> Curve for KP256<P> where
KP256<P>: KP256Instance<P>,
P::Scalar: PrimeField,
<P::Scalar as PrimeField>::Repr: From<[u8; 32]> + AsRef<[u8]>,
P::Repr: From<[u8; 33]> + AsRef<[u8]> {
type F = P::Scalar;
type G = P;
type T = P;
impl<G: Group + GroupEncoding> Curve for KP256<G> where
KP256<G>: KP256Instance<G>,
G::Scalar: PrimeField,
<G::Scalar as PrimeField>::Repr: From<[u8; 32]> + AsRef<[u8]>,
G::Repr: From<[u8; 33]> + AsRef<[u8]> {
type F = G::Scalar;
type G = G;
type T = G;
const ID: &'static [u8] = <Self as KP256Instance<P>>::ID;
const ID: &'static [u8] = <Self as KP256Instance<G>>::ID;
const GENERATOR: Self::G = <Self as KP256Instance<P>>::GENERATOR;
const GENERATOR_TABLE: Self::G = <Self as KP256Instance<P>>::GENERATOR;
const GENERATOR: Self::G = <Self as KP256Instance<G>>::GENERATOR;
const GENERATOR_TABLE: Self::G = <Self as KP256Instance<G>>::GENERATOR;
const LITTLE_ENDIAN: bool = false;
@ -81,13 +80,21 @@ impl<P: Group + GroupEncoding> Curve for KP256<P> where
}
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
let mut dst = dst;
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-", dst].concat());
if dst.len() > 255 {
dst = &oversize;
}
let mut modulus = vec![0; 16];
modulus.extend((Self::F::zero() - Self::F::one()).to_repr().as_ref());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
Self::F_from_slice(
&U384::from_be_slice(
&expand_message_xmd_sha256(dst, msg, 48).unwrap()
).reduce(&modulus).unwrap().to_be_bytes()[16 ..]
&U384::from_be_slice(&{
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
}).reduce(&modulus).unwrap().to_be_bytes()[16 ..]
).unwrap()
}
@ -131,3 +138,17 @@ impl<P: Group + GroupEncoding> Curve for KP256<P> where
g.to_bytes().as_ref().to_vec()
}
}
// IETF challenge for FROST(P-256): hash_to_F with CONTEXT || "chal" as the
// domain-separation tag over the serialized R, A, and message.
#[cfg(any(test, feature = "p256"))]
#[derive(Clone)]
pub struct IetfP256Hram;
#[cfg(any(test, feature = "p256"))]
impl Hram<P256> for IetfP256Hram {
#[allow(non_snake_case)]
fn hram(R: &p256::ProjectivePoint, A: &p256::ProjectivePoint, m: &[u8]) -> p256::Scalar {
P256::hash_to_F(
&[P256::CONTEXT, b"chal"].concat(),
&[&P256::G_to_bytes(R), &P256::G_to_bytes(A), m].concat()
)
}
}

View file

@ -1,48 +1,5 @@
use sha2::{Digest, Sha256};
#[cfg(any(test, feature = "kp256"))]
pub mod kp256;
// TODO: Actually make proper or replace with something from another crate
// expand_message_xmd with SHA-256: expands msg under domain-separation tag dst into `len`
// output bytes, returning None if that requires more than 255 hash blocks.
// NOTE(review): `len + OUTPUT_SIZE` can overflow u16 (panics in debug builds) for
// len > 65503, before the 255-block cap is applied.
pub(crate) fn expand_message_xmd_sha256(dst: &[u8], msg: &[u8], len: u16) -> Option<Vec<u8>> {
const OUTPUT_SIZE: u16 = 32;
const BLOCK_SIZE: u16 = 64;
// ell = ceil(len / 32)
let blocks = ((len + OUTPUT_SIZE) - 1) / OUTPUT_SIZE;
if blocks > 255 {
return None;
}
let blocks = blocks as u8;
// Oversized DSTs (> 255 bytes) are replaced by H("H2C-OVERSIZE-DST-" || dst).
// The digest is computed unconditionally so its backing storage outlives the borrow.
let mut dst = dst;
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-", dst].concat());
if dst.len() > 255 {
dst = &oversize;
}
// DST' = DST || len(DST)
let dst_prime = &[dst, &[dst.len() as u8]].concat();
// msg' = Z_pad (one zeroed 64-byte hash block) || msg || len (2-byte BE) || 0x00 || DST'
let mut msg_prime = vec![0; BLOCK_SIZE.into()];
msg_prime.extend(msg);
msg_prime.extend(len.to_be_bytes());
msg_prime.push(0);
msg_prime.extend(dst_prime);
// b_0 = H(msg'); b_1 = H(b_0 || 0x01 || DST')
let mut b = vec![Sha256::digest(&msg_prime).to_vec()];
{
let mut b1 = b[0].clone();
b1.push(1);
b1.extend(dst_prime);
b.push(Sha256::digest(&b1).to_vec());
}
// b_i = H((b_0 XOR b_(i-1)) || i || DST') for i in 2 ..= ell
for i in 2 ..= blocks {
let mut msg = b[0]
.iter().zip(b[usize::from(i) - 1].iter())
.map(|(a, b)| *a ^ b).collect::<Vec<_>>();
msg.push(i);
msg.extend(dst_prime);
b.push(Sha256::digest(msg).to_vec());
}
// Output is b_1 || b_2 || ... truncated to len bytes; b_0 is excluded.
Some(b[1 ..].concat()[.. usize::from(len)].to_vec())
}
#[cfg(feature = "ed25519")]
pub mod ed25519;

View file

@ -13,7 +13,7 @@ mod schnorr;
pub mod key_gen;
pub mod algorithm;
pub mod sign;
#[cfg(any(test, feature = "p256", feature = "k256"))]
#[cfg(any(test, feature = "curves"))]
pub mod curves;
pub mod tests;

View file

@ -1,48 +1,23 @@
use rand::rngs::OsRng;
use sha2::Sha512;
use dalek_ff_group as dfg;
use frost::{
Curve,
algorithm::Hram,
use crate::{
curves::ed25519::{Ed25519, IetfEd25519Hram},
tests::{curve::test_curve, schnorr::test_schnorr, vectors::{Vectors, vectors}}
};
use crate::frost::{Ed25519, Ed25519Internal};
#[test]
fn frost_ed25519_curve() {
fn ed25519_curve() {
test_curve::<_, Ed25519>(&mut OsRng);
}
#[test]
fn frost_ed25519_schnorr() {
fn ed25519_schnorr() {
test_schnorr::<_, Ed25519>(&mut OsRng);
}
// Not spec-compliant, as this shouldn't use wide reduction
// Is vectors compliant, which is why the below tests pass
// See https://github.com/cfrg/draft-irtf-cfrg-frost/issues/204
//type TestEd25519 = Ed25519Internal<Sha512, false>;
// If this is kept, we can remove WIDE
type TestEd25519 = Ed25519Internal<Sha512, true>;
// Challenge function for the vectors test below: H(R || A || m) with an empty
// domain-separation tag, over the TestEd25519 instantiation.
#[derive(Copy, Clone)]
struct IetfEd25519Hram {}
impl Hram<TestEd25519> for IetfEd25519Hram {
#[allow(non_snake_case)]
fn hram(R: &dfg::EdwardsPoint, A: &dfg::EdwardsPoint, m: &[u8]) -> dfg::Scalar {
TestEd25519::hash_to_F(
b"",
&[&R.compress().to_bytes(), &A.compress().to_bytes(), m].concat()
)
}
}
#[test]
fn frost_ed25519_vectors() {
vectors::<TestEd25519, IetfEd25519Hram>(
fn ed25519_vectors() {
vectors::<Ed25519, IetfEd25519Hram>(
Vectors {
threshold: 2,
shares: &[

View file

@ -1,15 +0,0 @@
use crate::curves::expand_message_xmd_sha256;
#[test]
fn test_xmd_sha256() {
// Checks expand_message_xmd_sha256 for DST "QUUX-V01-CS02-with-expander", empty message,
// and 0x80 output bytes — presumably the hash-to-curve draft's expander test vector;
// TODO confirm against the draft's appendix.
assert_eq!(
hex::encode(expand_message_xmd_sha256(b"QUUX-V01-CS02-with-expander", b"", 0x80).unwrap()),
(
"8bcffd1a3cae24cf9cd7ab85628fd111bb17e3739d3b53f8".to_owned() +
"9580d217aa79526f1708354a76a402d3569d6a9d19ef3de4d0b991" +
"e4f54b9f20dcde9b95a66824cbdf6c1a963a1913d43fd7ac443a02" +
"fc5d9d8d77e2071b86ab114a9f34150954a7531da568a1ea8c7608" +
"61c0cde2005afc2c114042ee7b5848f5303f0611cf297f"
)
);
}

View file

@ -1,9 +1,7 @@
use rand::rngs::OsRng;
use crate::{
Curve,
curves::kp256::{KP256Instance, P256},
algorithm::Hram,
curves::kp256::{P256, IetfP256Hram},
tests::{curve::test_curve, schnorr::test_schnorr, vectors::{Vectors, vectors}}
};
@ -20,18 +18,6 @@ fn p256_schnorr() {
test_schnorr::<_, P256>(&mut OsRng);
}
// IETF challenge for FROST(P-256): hash_to_F with CONTEXT || "chal" as the
// domain-separation tag over the serialized R, A, and message.
#[derive(Clone)]
pub struct IetfP256Hram;
impl Hram<P256> for IetfP256Hram {
#[allow(non_snake_case)]
fn hram(R: &p256::ProjectivePoint, A: &p256::ProjectivePoint, m: &[u8]) -> p256::Scalar {
P256::hash_to_F(
&[P256::CONTEXT, b"chal"].concat(),
&[&P256::G_to_bytes(R), &P256::G_to_bytes(A), m].concat()
)
}
}
#[test]
fn p256_vectors() {
vectors::<P256, IetfP256Hram>(

View file

@ -1,2 +1,3 @@
mod expand_message;
mod kp256;
#[cfg(feature = "ed25519")]
mod ed25519;

View file

@ -7,7 +7,7 @@ authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[dependencies]
group = "0.11"
group = "0.12"
rand_core = { version = "0.6", optional = true }