Mirror of https://github.com/serai-dex/serai.git, synced 2025-04-23 14:38:14 +00:00

Commit ce5b5f2b37: Merge https://github.com/serai-dex/serai into develop
72 changed files with 2330 additions and 699 deletions

Changed paths:
.github
Cargo.lock
Cargo.toml
README.md
coins
crypto
  ciphersuite
  dalek-ff-group
  dkg
  dleq/src
  ed448
  ff-group-tests
  frost/src
  transcript/src
docs/cryptography
processor/src/tests
substrate
.github/actions/build-dependencies/action.yml

@@ -2,6 +2,11 @@ name: build-dependencies
description: Installs build dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    require: true
    default:

  rust-toolchain:
    description: "Rust toolchain to install"
    required: false

@@ -17,6 +22,8 @@ runs:
  steps:
    - name: Install Protobuf
      uses: arduino/setup-protoc@v1
      with:
        repo-token: ${{ inputs.github-token }}

    - name: Install solc
      shell: bash
7  .github/actions/test-dependencies/action.yml (vendored)

@@ -2,6 +2,11 @@ name: test-dependencies
description: Installs test dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    require: true
    default:

  monero-version:
    description: "Monero version to download and run as a regtest node"
    required: false

@@ -12,6 +17,8 @@ runs:
  steps:
    - name: Install Build Dependencies
      uses: ./.github/actions/build-dependencies
      with:
        github-token: ${{ inputs.github-token }}

    - name: Install Foundry
      uses: foundry-rs/foundry-toolchain@v1
3  .github/workflows/monero-tests.yaml (vendored)

@@ -20,6 +20,8 @@ jobs:
      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Unit Tests Without Features
        run: cargo test --package monero-serai --lib

@@ -39,6 +41,7 @@ jobs:
      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          monero-version: ${{ matrix.version }}

      - name: Run Integration Tests Without Features
5  .github/workflows/tests.yml (vendored)

@@ -20,12 +20,13 @@ jobs:
      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          # Clippy requires nightly due to serai-runtime requiring it
          rust-toolchain: ${{ steps.nightly.outputs.version }}
          rust-components: clippy

      - name: Run Clippy
-       run: cargo clippy --all-features -- -D warnings -A dead_code
+       run: cargo clippy --all-features --tests -- -D warnings -A dead_code

  deny:
    runs-on: ubuntu-latest

@@ -57,6 +58,8 @@ jobs:
      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: cargo test --all-features
365  Cargo.lock (generated)
@ -18,7 +18,16 @@ version = "0.17.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
|
||||
dependencies = [
|
||||
"gimli",
|
||||
"gimli 0.26.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
version = "0.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
|
||||
dependencies = [
|
||||
"gimli 0.27.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -113,9 +122,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.66"
|
||||
version = "1.0.68"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
|
||||
checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"
|
||||
|
||||
[[package]]
|
||||
name = "approx"
|
||||
|
@ -308,9 +317,9 @@ checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524"
|
|||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.59"
|
||||
version = "0.1.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "31e6e93155431f3931513b243d371981bb2770112b370c82745a1d19d2f99364"
|
||||
checksum = "677d1d8ab452a3936018a687b20e6f7cf5363d713b732b8884001317b0e48aa3"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -390,16 +399,16 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
|||
|
||||
[[package]]
|
||||
name = "backtrace"
|
||||
version = "0.3.66"
|
||||
version = "0.3.67"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
|
||||
checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"
|
||||
dependencies = [
|
||||
"addr2line",
|
||||
"addr2line 0.19.0",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"miniz_oxide 0.5.4",
|
||||
"object",
|
||||
"miniz_oxide",
|
||||
"object 0.30.0",
|
||||
"rustc-demangle",
|
||||
]
|
||||
|
||||
|
@ -553,9 +562,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "blake2"
|
||||
version = "0.10.5"
|
||||
version = "0.10.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
|
||||
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
|
||||
dependencies = [
|
||||
"digest 0.10.6",
|
||||
]
|
||||
|
@ -752,7 +761,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa"
|
|||
dependencies = [
|
||||
"camino",
|
||||
"cargo-platform",
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
@ -765,7 +774,7 @@ checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
|
|||
dependencies = [
|
||||
"camino",
|
||||
"cargo-platform",
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror",
|
||||
|
@ -773,9 +782,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.77"
|
||||
version = "1.0.78"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
|
||||
checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"
|
||||
dependencies = [
|
||||
"jobserver",
|
||||
]
|
||||
|
@ -902,7 +911,9 @@ dependencies = [
|
|||
"digest 0.10.6",
|
||||
"elliptic-curve",
|
||||
"ff",
|
||||
"ff-group-tests",
|
||||
"group",
|
||||
"hex",
|
||||
"k256",
|
||||
"minimal-ed448",
|
||||
"p256",
|
||||
|
@ -943,9 +954,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.0.29"
|
||||
version = "4.0.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4d63b9e9c07271b9957ad22c173bae2a4d9a81127680962039296abcd2f8251d"
|
||||
checksum = "656ad1e55e23d287773f7d8192c300dc715c3eeded93b3da651d11c42cfd74d2"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"clap_derive 4.0.21",
|
||||
|
@ -1207,7 +1218,7 @@ dependencies = [
|
|||
"cranelift-codegen-shared",
|
||||
"cranelift-entity",
|
||||
"cranelift-isle",
|
||||
"gimli",
|
||||
"gimli 0.26.2",
|
||||
"log",
|
||||
"regalloc2",
|
||||
"smallvec",
|
||||
|
@ -1439,22 +1450,23 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "curve25519-dalek"
|
||||
version = "4.0.0-pre.1"
|
||||
version = "4.0.0-pre.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4033478fbf70d6acf2655ac70da91ee65852d69daf7a67bf7a2f518fb47aafcf"
|
||||
checksum = "67bc65846be335cb20f4e52d49a437b773a2c1fdb42b19fc84e79e6f6771536f"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"digest 0.9.0",
|
||||
"rand_core 0.6.4",
|
||||
"cfg-if",
|
||||
"fiat-crypto",
|
||||
"packed_simd_2",
|
||||
"platforms 3.0.2",
|
||||
"subtle",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cxx"
|
||||
version = "1.0.83"
|
||||
version = "1.0.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bdf07d07d6531bfcdbe9b8b739b104610c6508dcc4d63b410585faf338241daf"
|
||||
checksum = "5add3fc1717409d029b20c5b6903fc0c0b02fa6741d820054f4a2efa5e5816fd"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cxxbridge-flags",
|
||||
|
@ -1464,9 +1476,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cxx-build"
|
||||
version = "1.0.83"
|
||||
version = "1.0.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2eb5b96ecdc99f72657332953d4d9c50135af1bac34277801cc3937906ebd39"
|
||||
checksum = "b4c87959ba14bc6fbc61df77c3fcfe180fc32b93538c4f1031dd802ccb5f2ff0"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"codespan-reporting",
|
||||
|
@ -1479,15 +1491,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "cxxbridge-flags"
|
||||
version = "1.0.83"
|
||||
version = "1.0.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac040a39517fd1674e0f32177648334b0f4074625b5588a64519804ba0553b12"
|
||||
checksum = "69a3e162fde4e594ed2b07d0f83c6c67b745e7f28ce58c6df5e6b6bef99dfb59"
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-macro"
|
||||
version = "1.0.83"
|
||||
version = "1.0.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1362b0ddcfc4eb0a1f57b68bd77dd99f0e826958a96abd0ae9bd092e114ffed6"
|
||||
checksum = "3e7e2adeb6a0d4a282e581096b06e1791532b7d576dcde5ccd9382acf55db8e6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1502,6 +1514,7 @@ dependencies = [
|
|||
"curve25519-dalek 3.2.0",
|
||||
"digest 0.10.6",
|
||||
"ff",
|
||||
"ff-group-tests",
|
||||
"group",
|
||||
"rand_core 0.6.4",
|
||||
"subtle",
|
||||
|
@ -1732,9 +1745,9 @@ checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650"
|
|||
|
||||
[[package]]
|
||||
name = "dtoa"
|
||||
version = "1.0.4"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f8a6eee2d5d0d113f015688310da018bd1d864d86bd567c8fca9c266889e1bfa"
|
||||
checksum = "c00704156a7de8df8da0911424e30c2049957b0a714542a44e05fe693dd85313"
|
||||
|
||||
[[package]]
|
||||
name = "dunce"
|
||||
|
@ -1765,9 +1778,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "dyn-clone"
|
||||
version = "1.0.9"
|
||||
version = "1.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4f94fa09c2aeea5b8839e414b7b841bf429fd25b9c522116ac97ee87856d88b2"
|
||||
checksum = "c9b0705efd4599c15a38151f4721f7bc388306f61084d3bfd50bd07fbca5cb60"
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
|
@ -2136,7 +2149,7 @@ dependencies = [
|
|||
"ethers-core",
|
||||
"getrandom 0.2.8",
|
||||
"reqwest",
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
"serde",
|
||||
"serde-aux",
|
||||
"serde_json",
|
||||
|
@ -2242,7 +2255,7 @@ dependencies = [
|
|||
"path-slash",
|
||||
"rayon",
|
||||
"regex",
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"solang-parser",
|
||||
|
@ -2321,6 +2334,21 @@ dependencies = [
|
|||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ff-group-tests"
|
||||
version = "0.12.0"
|
||||
dependencies = [
|
||||
"group",
|
||||
"k256",
|
||||
"p256",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fiat-crypto"
|
||||
version = "0.1.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a214f5bb88731d436478f3ae1f8a277b62124089ba9fb67f4f93fb100ef73c90"
|
||||
|
||||
[[package]]
|
||||
name = "file-per-thread-logger"
|
||||
version = "0.1.5"
|
||||
|
@ -2385,7 +2413,7 @@ checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
|
|||
dependencies = [
|
||||
"crc32fast",
|
||||
"libz-sys",
|
||||
"miniz_oxide 0.6.2",
|
||||
"miniz_oxide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2481,7 +2509,7 @@ dependencies = [
|
|||
"Inflector",
|
||||
"array-bytes",
|
||||
"chrono",
|
||||
"clap 4.0.29",
|
||||
"clap 4.0.30",
|
||||
"comfy-table",
|
||||
"frame-benchmarking",
|
||||
"frame-support",
|
||||
|
@ -2736,9 +2764,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "futures-locks"
|
||||
version = "0.7.0"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3eb42d4fb72227be5778429f9ef5240a38a358925a49f05b5cf702ce7c7e558a"
|
||||
checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06"
|
||||
dependencies = [
|
||||
"futures-channel",
|
||||
"futures-task",
|
||||
|
@ -2887,6 +2915,12 @@ dependencies = [
|
|||
"stable_deref_trait",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gimli"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.0"
|
||||
|
@ -2950,9 +2984,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "handlebars"
|
||||
version = "4.3.5"
|
||||
version = "4.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "433e4ab33f1213cdc25b5fa45c76881240cfe79284cf2b395e8b9e312a30a2fd"
|
||||
checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pest",
|
||||
|
@ -3166,9 +3200,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "hyper-rustls"
|
||||
version = "0.23.1"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "59df7c4e19c950e6e0e868dcc0a300b09a9b88e9ec55bd879ca819087a77355d"
|
||||
checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c"
|
||||
dependencies = [
|
||||
"http",
|
||||
"hyper",
|
||||
|
@ -3585,15 +3619,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.6.0"
|
||||
version = "2.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec947b7a4ce12e3b87e353abae7ce124d025b6c7d6c5aea5cc0bcf92e9510ded"
|
||||
checksum = "11b0d96e660696543b251e58030cf9787df56da39dab19ad60eae7353040917e"
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.1"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "927609f78c2913a6f6ac3c27a4fe87f43e2a35367c0c4b0f8265e8f49a104330"
|
||||
checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189"
|
||||
dependencies = [
|
||||
"hermit-abi 0.2.6",
|
||||
"io-lifetimes 1.0.3",
|
||||
|
@ -3612,9 +3646,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.4"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
||||
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
|
||||
|
||||
[[package]]
|
||||
name = "jobserver"
|
||||
|
@ -3869,6 +3903,12 @@ dependencies = [
|
|||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a"
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.2.6"
|
||||
|
@ -4284,9 +4324,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "link-cplusplus"
|
||||
version = "1.0.7"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
|
||||
checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5"
|
||||
dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
@ -4324,9 +4364,9 @@ checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d"
|
|||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.1.3"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
|
||||
checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
|
@ -4542,10 +4582,10 @@ dependencies = [
|
|||
"dalek-ff-group",
|
||||
"digest 0.10.6",
|
||||
"ff",
|
||||
"ff-group-tests",
|
||||
"generic-array 0.14.6",
|
||||
"group",
|
||||
"hex",
|
||||
"hex-literal",
|
||||
"lazy_static",
|
||||
"rand_core 0.6.4",
|
||||
"subtle",
|
||||
|
@ -4558,15 +4598,6 @@ version = "0.2.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
|
||||
dependencies = [
|
||||
"adler",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.6.2"
|
||||
|
@ -5007,16 +5038,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"libm",
|
||||
"libm 0.2.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "1.14.0"
|
||||
version = "1.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
|
||||
checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
|
||||
dependencies = [
|
||||
"hermit-abi 0.1.19",
|
||||
"hermit-abi 0.2.6",
|
||||
"libc",
|
||||
]
|
||||
|
||||
|
@ -5038,6 +5069,15 @@ dependencies = [
|
|||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.30.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "239da7f290cfa979f43f85a8efeee9a8a76d0827c356d37f9d3d7254d6b537fb"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.16.0"
|
||||
|
@ -5083,9 +5123,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "openssl"
|
||||
version = "0.10.44"
|
||||
version = "0.10.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29d971fd5722fec23977260f6e81aa67d2f22cadbdc2aa049f1022d9a3be1566"
|
||||
checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"cfg-if",
|
||||
|
@ -5115,9 +5155,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
|
|||
|
||||
[[package]]
|
||||
name = "openssl-sys"
|
||||
version = "0.9.79"
|
||||
version = "0.9.80"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5454462c0eced1e97f2ec09036abc8da362e66802f66fd20f86854d9d8cbcbc4"
|
||||
checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"cc",
|
||||
|
@ -5143,6 +5183,16 @@ dependencies = [
|
|||
"sha2 0.10.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packed_simd_2"
|
||||
version = "0.3.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1914cd452d8fccd6f9db48147b29fd4ae05bea9dc5d9ad578509f72415de282"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libm 0.1.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pallet-balances"
|
||||
version = "4.0.0-dev"
|
||||
|
@ -5417,7 +5467,7 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
|
|||
dependencies = [
|
||||
"instant",
|
||||
"lock_api",
|
||||
"parking_lot_core 0.8.5",
|
||||
"parking_lot_core 0.8.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5432,9 +5482,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
|
||||
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"instant",
|
||||
|
@ -5470,9 +5520,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.9"
|
||||
version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
|
||||
checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
|
||||
|
||||
[[package]]
|
||||
name = "path-slash"
|
||||
|
@ -5697,10 +5747,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "e8d0eef3571242013a0d5dc84861c3ae4a652e56e12adf8bdc26ff5f8cb34c94"
|
||||
|
||||
[[package]]
|
||||
name = "polling"
|
||||
version = "2.5.1"
|
||||
name = "platforms"
|
||||
version = "3.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "166ca89eb77fd403230b9c156612965a81e094ec6ec3aa13663d4c8b113fa748"
|
||||
checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630"
|
||||
|
||||
[[package]]
|
||||
name = "polling"
|
||||
version = "2.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"cfg-if",
|
||||
|
@ -5777,9 +5833,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "prettyplease"
|
||||
version = "0.1.21"
|
||||
version = "0.1.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c142c0e46b57171fe0c528bee8c5b7569e80f0c17e377cd0e30ea57dbc11bb51"
|
||||
checksum = "2c8992a85d8e93a28bdf76137db888d3874e3b230dee5ed8bebac4c9f7617773"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"syn",
|
||||
|
@ -5836,15 +5892,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro-hack"
|
||||
version = "0.5.19"
|
||||
version = "0.5.20+deprecated"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
|
||||
checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.47"
|
||||
version = "1.0.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
|
||||
checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
@ -5888,9 +5944,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "prost"
|
||||
version = "0.11.3"
|
||||
version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c0b18e655c21ff5ac2084a5ad0611e827b3f92badf79f4910b5a5c58f4d87ff0"
|
||||
checksum = "c01db6702aa05baa3f57dec92b8eeeeb4cb19e894e73996b32a4093289e54592"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost-derive",
|
||||
|
@ -5898,9 +5954,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "prost-build"
|
||||
version = "0.11.3"
|
||||
version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e330bf1316db56b12c2bcfa399e8edddd4821965ea25ddb2c134b610b1c1c604"
|
||||
checksum = "cb5320c680de74ba083512704acb90fe00f28f79207286a848e730c45dd73ed6"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"heck",
|
||||
|
@ -5933,9 +5989,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "prost-derive"
|
||||
version = "0.11.2"
|
||||
version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "164ae68b6587001ca506d3bf7f1000bfa248d0e1217b618108fba4ec1d0cc306"
|
||||
checksum = "c8842bad1a5419bca14eac663ba798f6bc19c413c2fdceb5f3ba3b0932d96720"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"itertools",
|
||||
|
@ -5946,9 +6002,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "prost-types"
|
||||
version = "0.11.2"
|
||||
version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "747761bc3dc48f9a34553bf65605cf6cb6288ba219f3450b4275dbd81539551a"
|
||||
checksum = "017f79637768cde62820bc2d4fe0e45daaa027755c323ad077767c6c5f173091"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost",
|
||||
|
@ -5982,9 +6038,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.21"
|
||||
version = "1.0.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
||||
checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
@ -6109,11 +6165,10 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
|
|||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.6.0"
|
||||
version = "1.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
|
||||
checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"either",
|
||||
"rayon-core",
|
||||
]
|
||||
|
@ -6152,18 +6207,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ref-cast"
|
||||
version = "1.0.13"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "53b15debb4f9d60d767cd8ca9ef7abb2452922f3214671ff052defc7f3502c44"
|
||||
checksum = "8c78fb8c9293bcd48ef6fce7b4ca950ceaf21210de6e105a883ee280c0f7b9ed"
|
||||
dependencies = [
|
||||
"ref-cast-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ref-cast-impl"
|
||||
version = "1.0.13"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abfa8511e9e94fd3de6585a3d3cd00e01ed556dc9814829280af0e8dc72a8f36"
|
||||
checksum = "9f9c0c92af03644e4806106281fe2e068ac5bc0ae74a707266d06ea27bccee5f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -6410,7 +6465,7 @@ version = "0.4.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
|
||||
dependencies = [
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -6437,7 +6492,7 @@ dependencies = [
|
|||
"errno",
|
||||
"io-lifetimes 1.0.3",
|
||||
"libc",
|
||||
"linux-raw-sys 0.1.3",
|
||||
"linux-raw-sys 0.1.4",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
|
@ -6476,9 +6531,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rustversion"
|
||||
version = "1.0.9"
|
||||
version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8"
|
||||
checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70"
|
||||
|
||||
[[package]]
|
||||
name = "rw-stream-sink"
|
||||
|
@ -6493,9 +6548,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.11"
|
||||
version = "1.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
|
||||
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
|
||||
|
||||
[[package]]
|
||||
name = "safe-mix"
|
||||
|
@ -6609,7 +6664,7 @@ source = "git+https://github.com/serai-dex/substrate#881cfbc59c8b65bcccc9fa6187e
|
|||
dependencies = [
|
||||
"array-bytes",
|
||||
"chrono",
|
||||
"clap 4.0.29",
|
||||
"clap 4.0.30",
|
||||
"fdlimit",
|
||||
"futures",
|
||||
"libp2p",
|
||||
|
@ -7398,9 +7453,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "scale-info"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88d8a765117b237ef233705cc2cc4c6a27fccd46eea6ef0c8c6dae5f3ef407f8"
|
||||
checksum = "001cf62ece89779fd16105b5f515ad0e5cedcd5440d3dd806bb067978e7c3608"
|
||||
dependencies = [
|
||||
"bitvec 1.0.1",
|
||||
"cfg-if",
|
||||
|
@ -7412,9 +7467,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "scale-info-derive"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cdcd47b380d8c4541044e341dcd9475f55ba37ddc50c908d945fc036a8642496"
|
||||
checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c"
|
||||
dependencies = [
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
|
@ -7472,9 +7527,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
|||
|
||||
[[package]]
|
||||
name = "scratch"
|
||||
version = "1.0.2"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
|
||||
checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2"
|
||||
|
||||
[[package]]
|
||||
name = "scrypt"
|
||||
|
@ -7582,9 +7637,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "1.0.14"
|
||||
version = "1.0.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
|
||||
checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
@ -7630,7 +7685,7 @@ name = "serai-node"
|
|||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"clap 4.0.29",
|
||||
"clap 4.0.30",
|
||||
"frame-benchmarking",
|
||||
"frame-benchmarking-cli",
|
||||
"frame-system",
|
||||
|
@ -7730,9 +7785,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.149"
|
||||
version = "1.0.151"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "256b9932320c590e707b94576e3cc1f7c9024d0ee6612dfbcf1cb106cbe8e055"
|
||||
checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
@ -7749,9 +7804,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.149"
|
||||
version = "1.0.151"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4eae9b04cbffdfd550eb462ed33bc6a1b68c935127d008b27444d08380f94e4"
|
||||
checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -7760,9 +7815,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.89"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
|
||||
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
|
@ -7958,7 +8013,7 @@ dependencies = [
|
|||
"aes-gcm",
|
||||
"blake2",
|
||||
"chacha20poly1305",
|
||||
"curve25519-dalek 4.0.0-pre.1",
|
||||
"curve25519-dalek 4.0.0-pre.5",
|
||||
"rand_core 0.6.4",
|
||||
"ring",
|
||||
"rustc_version 0.4.0",
|
||||
|
@ -8664,7 +8719,7 @@ dependencies = [
|
|||
"cfg_aliases",
|
||||
"libc",
|
||||
"parking_lot 0.11.2",
|
||||
"parking_lot_core 0.8.5",
|
||||
"parking_lot_core 0.8.6",
|
||||
"static_init_macro",
|
||||
"winapi",
|
||||
]
|
||||
|
@ -8754,7 +8809,7 @@ name = "substrate-build-script-utils"
|
|||
version = "3.0.0"
|
||||
source = "git+https://github.com/serai-dex/substrate#881cfbc59c8b65bcccc9fa6187e5096ac3594e3a"
|
||||
dependencies = [
|
||||
"platforms",
|
||||
"platforms 2.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -8833,7 +8888,7 @@ dependencies = [
|
|||
"once_cell",
|
||||
"rand 0.8.5",
|
||||
"reqwest",
|
||||
"semver 1.0.14",
|
||||
"semver 1.0.16",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2 0.9.9",
|
||||
|
@ -8847,9 +8902,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.105"
|
||||
version = "1.0.107"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
|
||||
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -8972,18 +9027,18 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
|
|||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.37"
|
||||
version = "1.0.38"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
|
||||
checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.37"
|
||||
version = "1.0.38"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
|
||||
checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -9186,9 +9241,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7"
|
||||
checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
@ -9359,9 +9414,9 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
|
|||
|
||||
[[package]]
|
||||
name = "tt-call"
|
||||
version = "1.0.8"
|
||||
version = "1.0.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e66dcbec4290c69dd03c57e76c2469ea5c7ce109c6dd4351c13055cf71ea055"
|
||||
checksum = "f4f195fd851901624eee5a58c4bb2b4f06399148fcd0ed336e6f1cb60a9881df"
|
||||
|
||||
[[package]]
|
||||
name = "twox-hash"
|
||||
|
@ -9416,9 +9471,9 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
|
|||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.5"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
||||
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
|
@ -9733,7 +9788,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "57d20cb3c59b788653d99541c646c561c9dd26506f25c0cebfe810659c54c6d7"
|
||||
dependencies = [
|
||||
"downcast-rs",
|
||||
"libm",
|
||||
"libm 0.2.6",
|
||||
"memory_units",
|
||||
"num-rational",
|
||||
"num-traits",
|
||||
|
@ -9760,7 +9815,7 @@ dependencies = [
|
|||
"indexmap",
|
||||
"libc",
|
||||
"log",
|
||||
"object",
|
||||
"object 0.29.0",
|
||||
"once_cell",
|
||||
"paste",
|
||||
"psm",
|
||||
|
@ -9817,9 +9872,9 @@ dependencies = [
|
|||
"cranelift-frontend",
|
||||
"cranelift-native",
|
||||
"cranelift-wasm",
|
||||
"gimli",
|
||||
"gimli 0.26.2",
|
||||
"log",
|
||||
"object",
|
||||
"object 0.29.0",
|
||||
"target-lexicon",
|
||||
"thiserror",
|
||||
"wasmparser",
|
||||
|
@ -9834,10 +9889,10 @@ checksum = "ebb881c61f4f627b5d45c54e629724974f8a8890d455bcbe634330cc27309644"
|
|||
dependencies = [
|
||||
"anyhow",
|
||||
"cranelift-entity",
|
||||
"gimli",
|
||||
"gimli 0.26.2",
|
||||
"indexmap",
|
||||
"log",
|
||||
"object",
|
||||
"object 0.29.0",
|
||||
"serde",
|
||||
"target-lexicon",
|
||||
"thiserror",
|
||||
|
@ -9851,14 +9906,14 @@ version = "1.0.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1985c628011fe26adf5e23a5301bdc79b245e0e338f14bb58b39e4e25e4d8681"
|
||||
dependencies = [
|
||||
"addr2line",
|
||||
"addr2line 0.17.0",
|
||||
"anyhow",
|
||||
"bincode",
|
||||
"cfg-if",
|
||||
"cpp_demangle",
|
||||
"gimli",
|
||||
"gimli 0.26.2",
|
||||
"log",
|
||||
"object",
|
||||
"object 0.29.0",
|
||||
"rustc-demangle",
|
||||
"rustix 0.35.13",
|
||||
"serde",
|
||||
|
@ -9876,7 +9931,7 @@ version = "1.0.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f671b588486f5ccec8c5a3dba6b4c07eac2e66ab8c60e6f4e53717c77f709731"
|
||||
dependencies = [
|
||||
"object",
|
||||
"object 0.29.0",
|
||||
"once_cell",
|
||||
"rustix 0.35.13",
|
||||
]
|
||||
|
@ -9940,9 +9995,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.22.5"
|
||||
version = "0.22.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be"
|
||||
checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87"
|
||||
dependencies = [
|
||||
"webpki",
|
||||
]
|
||||
|
|
|
Cargo.toml

@@ -4,6 +4,7 @@ members = [
    "crypto/transcript",

+   "crypto/ff-group-tests",
    "crypto/dalek-ff-group",
    "crypto/ed448",
    "crypto/ciphersuite",
README.md

@@ -1,9 +1,8 @@
# Serai

Serai is a new DEX, built from the ground up, initially planning on listing
-Bitcoin, Ethereum, Monero, DAI, and USDC, offering a liquidity pool trading
-experience. Funds are stored in an economically secured threshold multisig
-wallet.
+Bitcoin, Ethereum, Monero, DAI, offering a liquidity pool trading experience.
+Funds are stored in an economically secured threshold multisig wallet.

[Getting Started](docs/Getting%20Started.md)
@@ -46,7 +46,7 @@ async fn test_ecrecover_hack() {
  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&1].group_key();

-  const MESSAGE: &'static [u8] = b"Hello, World!";
+  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
@@ -17,7 +17,7 @@ fn test_ecrecover() {
  let private = SigningKey::random(&mut OsRng);
  let public = VerifyingKey::from(&private);

-  const MESSAGE: &'static [u8] = b"Hello, World!";
+  const MESSAGE: &[u8] = b"Hello, World!";
  let sig: Signature = private.sign(MESSAGE);
  public.verify(MESSAGE, &sig).unwrap();
@@ -38,12 +38,12 @@ fn test_signing() {
  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let _group_key = keys[&1].group_key();

-  const MESSAGE: &'static [u8] = b"Hello, World!";
+  const MESSAGE: &[u8] = b"Hello, World!";

  let algo = Schnorr::<Secp256k1, EthereumHram>::new();
  let _sig = sign(
    &mut OsRng,
-    algo.clone(),
+    algo,
    keys.clone(),
    algorithm_machines(&mut OsRng, Schnorr::<Secp256k1, EthereumHram>::new(), &keys),
    MESSAGE,
@@ -64,7 +64,7 @@ fn test_ecrecover_hack() {
  let group_key_compressed = group_key_encoded.as_ref();
  let group_key_x = Scalar::from_uint_reduced(U256::from_be_slice(&group_key_compressed[1 .. 33]));

-  const MESSAGE: &'static [u8] = b"Hello, World!";
+  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);
  let chain_id = U256::ONE;
@@ -44,15 +44,14 @@ fn generators(prefix: &'static str, path: &str) {
      lazy_static! {{
        pub static ref GENERATORS: Generators = Generators {{
          G: [
-            {}
+            {G_str}
          ],
          H: [
-            {}
+            {H_str}
          ],
        }};
      }}
      ",
-      G_str, H_str,
    )
    .as_bytes(),
  )
@@ -131,6 +131,6 @@ pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  // This library acknowledges its practical impossibility of it occurring, and doesn't bother to
  // code in logic to handle it. That said, if it ever occurs, something must happen in order to
  // not generate/verify a proof we believe to be valid when it isn't
-  assert!(scalar != Scalar::zero(), "ZERO HASH: {:?}", data);
+  assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
  scalar
}
@@ -83,7 +83,7 @@ pub struct ClsagAddendum {
impl WriteAddendum for ClsagAddendum {
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
-    self.dleq.serialize(writer)
+    self.dleq.write(writer)
  }
}

@@ -197,7 +197,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
      Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
    }

-    Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::deserialize(reader)? })
+    Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
  }

  fn process_addendum(
@@ -12,30 +12,30 @@ use crate::{
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");

-const STANDARD: &'static str =
+const STANDARD: &str =
  "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";

const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
-const INTEGRATED: &'static str =
+const INTEGRATED: &str =
  "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
  pXSn88oBX35";

const SUB_SPEND: [u8; 32] =
  hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
-const SUBADDRESS: &'static str =
+const SUBADDRESS: &str =
  "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";

-const FEATURED_JSON: &'static str = include_str!("vectors/featured_addresses.json");
+const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");

#[test]
fn standard_address() {
  let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Standard);
-  assert_eq!(addr.meta.kind.subaddress(), false);
+  assert!(!addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), None);
-  assert_eq!(addr.meta.kind.guaranteed(), false);
+  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SPEND);
  assert_eq!(addr.view.compress().to_bytes(), VIEW);
  assert_eq!(addr.to_string(), STANDARD);

@@ -46,9 +46,9 @@ fn integrated_address() {
  let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
-  assert_eq!(addr.meta.kind.subaddress(), false);
+  assert!(!addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
-  assert_eq!(addr.meta.kind.guaranteed(), false);
+  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SPEND);
  assert_eq!(addr.view.compress().to_bytes(), VIEW);
  assert_eq!(addr.to_string(), INTEGRATED);

@@ -59,9 +59,9 @@ fn subaddress() {
  let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Subaddress);
-  assert_eq!(addr.meta.kind.subaddress(), true);
+  assert!(addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), None);
-  assert_eq!(addr.meta.kind.guaranteed(), false);
+  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
  assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
  assert_eq!(addr.to_string(), SUBADDRESS);
@@ -43,7 +43,7 @@ fn clsag() {
      let dest = Zeroizing::new(random_scalar(&mut OsRng));
      let mask = random_scalar(&mut OsRng);
      let amount;
-      if i == u64::from(real) {
+      if i == real {
        secrets = (dest.clone(), mask);
        amount = AMOUNT;
      } else {
@@ -7,7 +7,7 @@ use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwar
use crate::{
  Commitment,
  serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
-  transaction::{Timelock, Transaction},
+  transaction::{Input, Timelock, Transaction},
  block::Block,
  rpc::{Rpc, RpcError},
  wallet::{PaymentId, Extra, Scanner, uniqueness, shared_key, amount_decryption, commitment_mask},

@@ -373,18 +373,22 @@ impl Scanner {
    };

    let mut res = vec![];
-    for (i, tx) in txs.drain(..).enumerate() {
+    for tx in txs.drain(..) {
      if let Some(timelock) = map(self.scan_transaction(&tx), index) {
        res.push(timelock);
      }
-      index += tx
-        .prefix
-        .outputs
-        .iter()
-        // Filter to miner TX outputs/0-amount outputs since we're tacking the 0-amount index
-        .filter_map(|output| Some(1).filter(|_| (i == 0) || (output.amount == 0)))
-        // Since we can't get the length of an iterator, map each value to 1 and sum
-        .sum::<u64>();
+      index += u64::try_from(
+        tx.prefix
+          .outputs
+          .iter()
+          // Filter to miner TX outputs/0-amount outputs since we're tacking the 0-amount index
+          // This will fail to scan blocks containing pre-RingCT miner TXs
+          .filter(|output| {
+            matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..))) || (output.amount == 0)
+          })
+          .count(),
+      )
+      .unwrap()
    }
    Ok(res)
  }
@@ -4,7 +4,6 @@ use std::{
  collections::HashMap,
};

-use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

@@ -217,18 +216,14 @@ impl SignMachine<Transaction> for TransactionSignMachine {
  type SignatureShare = Vec<SignatureShare<Ed25519>>;
  type SignatureMachine = TransactionSignatureMachine;

-  fn cache(self) -> Zeroizing<CachedPreprocess> {
+  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

-  fn from_cache(
-    _: (),
-    _: ThresholdKeys<Ed25519>,
-    _: Zeroizing<CachedPreprocess>,
-  ) -> Result<Self, FrostError> {
+  fn from_cache(_: (), _: ThresholdKeys<Ed25519>, _: CachedPreprocess) -> Result<Self, FrostError> {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
@@ -6,8 +6,7 @@ test!(
  add_single_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
-      // make a data that is less than 255 bytes
-      let arbitrary_data = Vec::from("this is an arbitrary data less than 255 bytes");
+      let arbitrary_data = vec![b'\0', 254];

      // make sure we can add to tx
      let result = builder.add_data(arbitrary_data.clone());

@@ -19,7 +18,7 @@ test!(
    |_, tx: Transaction, mut scanner: Scanner, state: (Vec<u8>,)| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
-      assert_eq!(output.arbitrary_data()[0], state.0);
+      assert_eq!(output.arbitrary_data()[0], data.0);
    },
  ),
);

@@ -28,25 +27,21 @@ test!(
  add_multiple_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
-      // make a data that is less than 255 bytes
-      let arbitrary_data = Vec::from("this is an arbitrary data less than 255 bytes");
+      let data = vec![b'\0', 254];

-      // add tx multiple times
+      // Add tx multiple times
      for _ in 0 .. 5 {
-        let result = builder.add_data(arbitrary_data.clone());
+        let result = builder.add_data(data.clone());
        assert!(result.is_ok());
      }

      builder.add_payment(addr, 5);
-      (builder.build().unwrap(), (arbitrary_data,))
+      (builder.build().unwrap(), data)
    },
    |_, tx: Transaction, mut scanner: Scanner, state: (Vec<u8>,)| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
-      let data = output.arbitrary_data();
-      for i in 0 .. 5 {
-        assert_eq!(data[i], state.0);
-      }
+      assert_eq!(output.arbitrary_data(), vec![data; 5]);
    },
  ),
);

@@ -55,23 +50,24 @@ test!(
  add_single_data_more_than_255,
  (
    |_, mut builder: Builder, addr| async move {
-      // make a data that is bigger than 255 bytes
-      let mut arbitrary_data = vec![];
-      for _ in 0 .. 256 {
-        arbitrary_data.push(b'a');
-      }
+      // Make a data that is bigger than 255 bytes
+      let mut data = vec![b'a'; 256];

-      // make sure we get an error if we try to add it to tx
-      let mut result = builder.add_payment(addr, 5).add_data(arbitrary_data.clone());
-      assert_eq!(result, Err(TransactionError::TooMuchData));
+      // Make sure we get an error if we try to add it to the TX
+      assert_eq!(builder.add_data(data.clone()), Err(TransactionError::TooMuchData));

-      // reduce data size and re-try
-      arbitrary_data.swap_remove(0);
-      result = builder.add_data(arbitrary_data);
+      // Reduce data size and retry. The data will now be 255 bytes long, exactly
+      data.pop();
+      assert!(builder.add_data(data.clone()).is_ok());

-      assert!(result.is_ok());
-      (builder.build().unwrap(), ())
+      builder.add_payment(addr, 5);
+      (builder.build().unwrap(), data)
    },
+    |rpc: Rpc, signed: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
+      let tx = rpc.get_transaction(signed.hash()).await.unwrap();
+      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
+      assert_eq!(output.commitment().amount, 5);
+      assert_eq!(output.arbitrary_data(), vec![data]);
+    },
-    |_, _, _, _| async move {},
  ),
);
@@ -1,5 +1,5 @@
use core::ops::Deref;
-use std::{sync::Mutex, collections::HashSet};
+use std::collections::HashSet;

use lazy_static::lazy_static;

@@ -8,6 +8,8 @@ use rand_core::OsRng;

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

+use tokio::sync::Mutex;
+
use monero_serai::{
  Protocol, random_scalar,
  wallet::{

@@ -100,7 +102,7 @@ macro_rules! async_sequential {
    $(
      #[tokio::test]
      async fn $name() {
-        let guard = runner::SEQUENTIAL.lock().unwrap();
+        let guard = runner::SEQUENTIAL.lock().await;
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {

@@ -159,6 +161,7 @@ macro_rules! test {
      type Builder = SignableTransactionBuilder;

      // Run each function as both a single signer and as a multisig
+      #[allow(clippy::redundant_closure_call)]
      for multisig in [false, true] {
        // Only run the multisig variant if multisig is enabled
        if multisig {

@@ -222,7 +225,7 @@ macro_rules! test {
            );
          }

-          frost::tests::sign_without_caching(&mut OsRng, machines, &vec![])
+          frost::tests::sign_without_caching(&mut OsRng, machines, &[])
        }
      }
    }
@@ -33,6 +33,11 @@ k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], opti

minimal-ed448 = { path = "../ed448", version = "^0.1.2", optional = true }

+[dev-dependencies]
+hex = "0.4"
+
+ff-group-tests = { version = "0.12", path = "../ff-group-tests" }
+
[features]
std = []
@@ -1,3 +1,35 @@
# Ciphersuite

Ciphersuites for elliptic curves premised on ff/group.

### Secp256k1/P-256

Secp256k1 and P-256 are offered via [k256](https://crates.io/crates/k256) and
[p256](https://crates.io/crates/p256), two libraries maintained by
[RustCrypto](https://github.com/RustCrypto).

Their `hash_to_F` is the
[IETF's hash to curve](https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html),
yet applied to their scalar field.

### Ed25519/Ristretto

Ed25519/Ristretto are offered via
[dalek-ff-group](https://crates.io/crates/dalek-ff-group), an ff/group wrapper
around [curve25519-dalek](https://crates.io/crates/curve25519-dalek).

Their `hash_to_F` is the wide reduction of SHA2-512, as used in
[RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). This is also compliant with
the draft
[RFC-RISTRETTO](https://www.ietf.org/archive/id/draft-irtf-cfrg-ristretto255-decaf448-05.html).
The domain-separation tag is naively prefixed to the message.

### Ed448

Ed448 is offered via [minimal-ed448](https://crates.io/crates/minimal-ed448), an
explicitly not recommended, unaudited Ed448 implementation, limited to its
prime-order subgroup.

Its `hash_to_F` is the wide reduction of SHAKE256, with a 114-byte output, as
used in [RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). The
domain-separation tag is naively prefixed to the message.
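The "naive DST prefix, then wide reduction" behavior this README describes for Ed25519/Ristretto can be sketched directly against curve25519-dalek and sha2. This is an illustrative sketch of the pattern only, not the crate's actual `hash_to_F` implementation; the helper name below is hypothetical.

```rust
// Sketch: prefix the domain-separation tag, hash with SHA2-512, then reduce the
// 64-byte digest modulo the prime group order (RFC 8032's wide reduction).
// `hash_to_scalar_wide` is a hypothetical name, shown for illustration only.
use curve25519_dalek::scalar::Scalar;
use sha2::{Digest, Sha512};

fn hash_to_scalar_wide(dst: &[u8], data: &[u8]) -> Scalar {
  // The domain-separation tag is naively prefixed to the message
  let digest = Sha512::digest(&[dst, data].concat());
  // Wide-reduce the 64-byte output into a scalar
  Scalar::from_bytes_mod_order_wide(digest.as_slice().try_into().unwrap())
}
```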
@@ -39,6 +39,48 @@ macro_rules! dalek_curve {

#[cfg(any(test, feature = "ristretto"))]
dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto");
#[cfg(any(test, feature = "ristretto"))]
#[test]
fn test_ristretto() {
  ff_group_tests::group::test_prime_group_bits::<RistrettoPoint>();

  assert_eq!(
    Ristretto::hash_to_F(
      b"FROST-RISTRETTO255-SHA512-v11nonce",
      &hex::decode(
        "\
81800157bb554f299fe0b6bd658e4c4591d74168b5177bf55e8dceed59dc80c7\
5c3430d391552f6e60ecdc093ff9f6f4488756aa6cebdbad75a768010b8f830e"
      )
      .unwrap()
    )
    .to_bytes()
    .as_ref(),
    &hex::decode("40f58e8df202b21c94f826e76e4647efdb0ea3ca7ae7e3689bc0cbe2e2f6660c").unwrap()
  );
}

#[cfg(feature = "ed25519")]
dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519");
#[cfg(feature = "ed25519")]
#[test]
fn test_ed25519() {
  ff_group_tests::group::test_prime_group_bits::<EdwardsPoint>();

  // Ideally, a test vector from RFC-8032 (not FROST) would be here
  // Unfortunately, the IETF draft doesn't provide any vectors for the derived challenges
  assert_eq!(
    Ed25519::hash_to_F(
      b"FROST-ED25519-SHA512-v11nonce",
      &hex::decode(
        "\
9d06a6381c7a4493929761a73692776772b274236fb5cfcc7d1b48ac3a9c249f\
929dcc590407aae7d388761cddb0c0db6f5627aea8e217f4a033f2ec83d93509"
      )
      .unwrap()
    )
    .to_bytes()
    .as_ref(),
    &hex::decode("70652da3e8d7533a0e4b9e9104f01b48c396b5b553717784ed8d05c6a36b9609").unwrap()
  );
}
@ -65,3 +65,38 @@ impl Ciphersuite for Ed448 {
|
|||
Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_ref().try_into().unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ed448() {
|
||||
use ff::PrimeField;
|
||||
|
||||
// TODO: Enable once ed448 passes these tests
|
||||
//ff_group_tests::group::test_prime_group_bits::<Point>();
|
||||
|
||||
// Ideally, a test vector from RFC-8032 (not FROST) would be here
|
||||
// Unfortunately, the IETF draft doesn't provide any vectors for the derived challenges
|
||||
assert_eq!(
|
||||
Ed448::hash_to_F(
|
||||
b"FROST-ED448-SHAKE256-v11nonce",
|
||||
&hex::decode(
|
||||
"\
|
||||
89bf16040081ff2990336b200613787937ebe1f024b8cdff90eb6f1c741d91c1\
|
||||
4a2b2f5858a932ad3d3b18bd16e76ced3070d72fd79ae4402df201f5\
|
||||
25e754716a1bc1b87a502297f2a99d89ea054e0018eb55d39562fd01\
|
||||
00"
|
||||
)
|
||||
.unwrap()
|
||||
)
|
||||
.to_repr()
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
hex::decode(
|
||||
"\
|
||||
67a6f023e77361707c6e894c625e809e80f33fdb310810053ae29e28\
|
||||
e7011f3193b9020e73c183a98cc3a519160ed759376dd92c94831622\
|
||||
00"
|
||||
)
|
||||
.unwrap()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -65,8 +65,54 @@ macro_rules! kp_curve {
|
|||
};
|
||||
}
|
||||
|
||||
#[cfg(feature = "p256")]
|
||||
kp_curve!("p256", p256, P256, b"P-256");
|
||||
|
||||
#[cfg(feature = "secp256k1")]
|
||||
kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1");
|
||||
#[cfg(feature = "secp256k1")]
|
||||
#[test]
|
||||
fn test_secp256k1() {
|
||||
ff_group_tests::group::test_prime_group_bits::<k256::ProjectivePoint>();
|
||||
|
||||
// Ideally, a test vector from hash to field (not FROST) would be here
|
||||
// Unfortunately, the IETF draft only provides vectors for field elements, not scalars
|
||||
assert_eq!(
|
||||
Secp256k1::hash_to_F(
|
||||
b"FROST-secp256k1-SHA256-v11nonce",
|
||||
&hex::decode(
|
||||
"\
|
||||
80cbea5e405d169999d8c4b30b755fedb26ab07ec8198cda4873ed8ce5e16773\
|
||||
08f89ffe80ac94dcb920c26f3f46140bfc7f95b493f8310f5fc1ea2b01f4254c"
|
||||
)
|
||||
.unwrap()
|
||||
)
|
||||
.to_repr()
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
hex::decode("acc83278035223c1ba464e2d11bfacfc872b2b23e1041cf5f6130da21e4d8068").unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(feature = "p256")]
|
||||
kp_curve!("p256", p256, P256, b"P-256");
|
||||
#[cfg(feature = "p256")]
|
||||
#[test]
|
||||
fn test_p256() {
|
||||
ff_group_tests::group::test_prime_group_bits::<p256::ProjectivePoint>();
|
||||
|
||||
assert_eq!(
|
||||
P256::hash_to_F(
|
||||
b"FROST-P256-SHA256-v11nonce",
|
||||
&hex::decode(
|
||||
"\
|
||||
f4e8cf80aec3f888d997900ac7e3e349944b5a6b47649fc32186d2f1238103c6\
|
||||
0c9c1a0fe806c184add50bbdcac913dda73e482daf95dcb9f35dbb0d8a9f7731"
|
||||
)
|
||||
.unwrap()
|
||||
)
|
||||
.to_repr()
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
hex::decode("f871dfcf6bcd199342651adc361b92c941cb6a0d8c8c1a3b91d79e2c1bf3722d").unwrap()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -58,7 +58,14 @@ pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
|
|||
// While group does provide this in its API, privacy coins may want to use a custom basepoint
|
||||
fn generator() -> Self::G;
|
||||
|
||||
/// Hash the provided dst and message to a scalar.
|
||||
/// Hash the provided domain-separation tag and message to a scalar. Ciphersuites MAY naively
|
||||
/// prefix the tag to the message, enabling transposition between the two. Accordingly, this
|
||||
/// function should NOT be used in any scheme where one tag is a valid substring of another
|
||||
/// UNLESS the specific Ciphersuite is verified to handle the DST securely.
|
||||
///
|
||||
/// Verifying specific ciphersuites have secure tag handling is not recommended, due to it
|
||||
/// breaking the intended modularity of ciphersuites. Instead, component-specific tags with
|
||||
/// further purpose tags are recommended ("Schnorr-nonce", "Schnorr-chal").
|
||||
#[allow(non_snake_case)]
|
||||
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
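
To illustrate the recommendation above, a hedged sketch of component-specific tags with further purpose tags; the helper function and its name are hypothetical, only the `Ciphersuite` trait and the "Schnorr-nonce"/"Schnorr-chal" tags come from the documentation above:

```rust
// Derive distinct scalars for distinct purposes under fixed, non-overlapping tags,
// rather than building one tag as a prefix or substring of another
fn schnorr_scalars<C: Ciphersuite>(binding: &[u8]) -> (C::F, C::F) {
  let nonce = C::hash_to_F(b"Schnorr-nonce", binding);
  let chal = C::hash_to_F(b"Schnorr-chal", binding);
  (nonce, chal)
}
```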
|
||||
|
||||
|
|
|
@ -24,3 +24,6 @@ group = "0.12"
|
|||
|
||||
crypto-bigint = "0.4"
|
||||
curve25519-dalek = "3.2"
|
||||
|
||||
[dev-dependencies]
|
||||
ff-group-tests = { path = "../ff-group-tests" }
|
||||
|
|
|
@ -2,5 +2,3 @@
|
|||
|
||||
ff/group bindings around curve25519-dalek with a from_hash/random function based
|
||||
around modern dependencies.
|
||||
|
||||
Some functions currently remain unimplemented.
|
||||
|
|
|
@ -13,7 +13,7 @@ use ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
|
|||
|
||||
use crate::{constant_time, math, from_uint};
|
||||
|
||||
const FIELD_MODULUS: U256 =
|
||||
const MODULUS: U256 =
|
||||
U256::from_be_hex("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed");
|
||||
|
||||
const WIDE_MODULUS: U512 = U512::from_be_hex(concat!(
|
||||
|
@ -25,7 +25,7 @@ const WIDE_MODULUS: U512 = U512::from_be_hex(concat!(
|
|||
pub struct FieldElement(U256);
|
||||
|
||||
pub const MOD_3_8: FieldElement =
|
||||
FieldElement(FIELD_MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8)));
|
||||
FieldElement(MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8)));
|
||||
|
||||
pub const MOD_5_8: FieldElement = FieldElement(MOD_3_8.0.saturating_sub(&U256::ONE));
|
||||
|
||||
|
@ -45,8 +45,8 @@ constant_time!(FieldElement, U256);
|
|||
math!(
|
||||
FieldElement,
|
||||
FieldElement,
|
||||
|x, y| U256::add_mod(&x, &y, &FIELD_MODULUS),
|
||||
|x, y| U256::sub_mod(&x, &y, &FIELD_MODULUS),
|
||||
|x, y| U256::add_mod(&x, &y, &MODULUS),
|
||||
|x, y| U256::sub_mod(&x, &y, &MODULUS),
|
||||
|x, y| {
|
||||
let wide = U256::mul_wide(&x, &y);
|
||||
reduce(U512::from((wide.1, wide.0)))
|
||||
|
@ -57,7 +57,7 @@ from_uint!(FieldElement, U256);
|
|||
impl Neg for FieldElement {
|
||||
type Output = Self;
|
||||
fn neg(self) -> Self::Output {
|
||||
Self(self.0.neg_mod(&FIELD_MODULUS))
|
||||
Self(self.0.neg_mod(&MODULUS))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -85,28 +85,19 @@ impl Field for FieldElement {
|
|||
FieldElement(reduce(self.0.square()))
|
||||
}
|
||||
fn double(&self) -> Self {
|
||||
FieldElement((self.0 << 1).reduce(&FIELD_MODULUS).unwrap())
|
||||
FieldElement((self.0 << 1).reduce(&MODULUS).unwrap())
|
||||
}
|
||||
|
||||
fn invert(&self) -> CtOption<Self> {
|
||||
const NEG_2: FieldElement = FieldElement(FIELD_MODULUS.saturating_sub(&U256::from_u8(2)));
|
||||
const NEG_2: FieldElement = FieldElement(MODULUS.saturating_sub(&U256::from_u8(2)));
|
||||
CtOption::new(self.pow(NEG_2), !self.is_zero())
|
||||
}
|
||||
|
||||
fn sqrt(&self) -> CtOption<Self> {
|
||||
let tv1 = self.pow(MOD_3_8);
|
||||
let tv2 = tv1 * SQRT_M1;
|
||||
CtOption::new(Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self)), 1.into())
|
||||
}
|
||||
|
||||
fn is_zero(&self) -> Choice {
|
||||
self.0.ct_eq(&U256::ZERO)
|
||||
}
|
||||
fn cube(&self) -> Self {
|
||||
self.square() * self
|
||||
}
|
||||
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self {
|
||||
unimplemented!()
|
||||
let candidate = Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self));
|
||||
CtOption::new(candidate, candidate.square().ct_eq(self))
|
||||
}
|
||||
}
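
The `sqrt` above follows the textbook method for a prime field with p ≡ 5 (mod 8); this is background, not part of this commit:

```latex
t_1 = x^{(p+3)/8}, \quad t_1^2 = \pm x \text{ for a square } x, \qquad
\sqrt{x} =
\begin{cases}
  t_1 & \text{if } t_1^2 = x \\
  t_1 \cdot \sqrt{-1} & \text{if } t_1^2 = -x
\end{cases}
```

The returned `CtOption` is `Some` precisely when the selected candidate squares back to the input, i.e. when the input is a quadratic residue.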
|
||||
|
||||
|
@ -116,7 +107,7 @@ impl PrimeField for FieldElement {
|
|||
const CAPACITY: u32 = 254;
|
||||
fn from_repr(bytes: [u8; 32]) -> CtOption<Self> {
|
||||
let res = Self(U256::from_le_bytes(bytes));
|
||||
CtOption::new(res, res.0.ct_lt(&FIELD_MODULUS))
|
||||
CtOption::new(res, res.0.ct_lt(&MODULUS))
|
||||
}
|
||||
fn to_repr(&self) -> [u8; 32] {
|
||||
self.0.to_le_bytes()
|
||||
|
@ -144,7 +135,7 @@ impl PrimeFieldBits for FieldElement {
|
|||
}
|
||||
|
||||
fn char_le_bits() -> FieldBits<Self::ReprBits> {
|
||||
FIELD_MODULUS.to_le_bytes().into()
|
||||
MODULUS.to_le_bytes().into()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -155,13 +146,13 @@ impl FieldElement {
|
|||
}
|
||||
|
||||
pub fn pow(&self, other: FieldElement) -> FieldElement {
|
||||
let mut table = [FieldElement(U256::ONE); 16];
|
||||
let mut table = [FieldElement::one(); 16];
|
||||
table[1] = *self;
|
||||
for i in 2 .. 16 {
|
||||
table[i] = table[i - 1] * self;
|
||||
}
|
||||
|
||||
let mut res = FieldElement(U256::ONE);
|
||||
let mut res = FieldElement::one();
|
||||
let mut bits = 0;
|
||||
for (i, bit) in other.to_le_bits().iter().rev().enumerate() {
|
||||
bits <<= 1;
|
||||
|
@ -203,80 +194,6 @@ impl FieldElement {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_conditional_negate() {
|
||||
let one = FieldElement::one();
|
||||
let true_choice = 1.into();
|
||||
let false_choice = 0.into();
|
||||
|
||||
let mut var = one;
|
||||
|
||||
var.conditional_negate(false_choice);
|
||||
assert_eq!(var, FieldElement::one());
|
||||
|
||||
var.conditional_negate(true_choice);
|
||||
assert_eq!(var, -FieldElement::one());
|
||||
|
||||
var.conditional_negate(false_choice);
|
||||
assert_eq!(var, -FieldElement::one());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_edwards_d() {
|
||||
// TODO: Generate the constant with this when const fn mul_mod is available, removing the need
|
||||
// for this test
|
||||
let a = -FieldElement::from(121665u32);
|
||||
let b = FieldElement::from(121666u32);
|
||||
assert_eq!(EDWARDS_D, a * b.invert().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_odd() {
|
||||
assert_eq!(0, FieldElement::zero().is_odd().unwrap_u8());
|
||||
assert_eq!(1, FieldElement::one().is_odd().unwrap_u8());
|
||||
assert_eq!(0, FieldElement::one().double().is_odd().unwrap_u8());
|
||||
|
||||
// 0 is even, yet the modulus is odd
|
||||
// -1 moves to the even value before the modulus
|
||||
assert_eq!(0, (-FieldElement::one()).is_odd().unwrap_u8());
|
||||
assert_eq!(1, (-FieldElement::one().double()).is_odd().unwrap_u8());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mul() {
|
||||
assert_eq!(FieldElement(FIELD_MODULUS) * FieldElement::one(), FieldElement::zero());
|
||||
assert_eq!(FieldElement(FIELD_MODULUS) * FieldElement::one().double(), FieldElement::zero());
|
||||
assert_eq!(SQRT_M1.square(), -FieldElement::one());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sqrt_ratio_i() {
|
||||
let zero = FieldElement::zero();
|
||||
let one = FieldElement::one();
|
||||
let two = one + one;
|
||||
let three = two + one;
|
||||
|
||||
let (choice, sqrt) = FieldElement::sqrt_ratio_i(zero, zero);
|
||||
assert_eq!(sqrt, zero);
|
||||
assert_eq!(sqrt.is_odd().unwrap_u8(), 0);
|
||||
assert_eq!(choice.unwrap_u8(), 1);
|
||||
|
||||
let (choice, sqrt) = FieldElement::sqrt_ratio_i(one, zero);
|
||||
assert_eq!(sqrt, zero);
|
||||
assert_eq!(sqrt.is_odd().unwrap_u8(), 0);
|
||||
assert_eq!(choice.unwrap_u8(), 0);
|
||||
|
||||
let (choice, sqrt) = FieldElement::sqrt_ratio_i(two, one);
|
||||
assert_eq!(sqrt.square(), two * SQRT_M1);
|
||||
assert_eq!(sqrt.is_odd().unwrap_u8(), 0);
|
||||
assert_eq!(choice.unwrap_u8(), 0);
|
||||
|
||||
let (choice, sqrt) = FieldElement::sqrt_ratio_i(three, one);
|
||||
assert_eq!(sqrt.square(), three);
|
||||
assert_eq!(sqrt.is_odd().unwrap_u8(), 0);
|
||||
assert_eq!(choice.unwrap_u8(), 1);
|
||||
|
||||
let (choice, sqrt) = FieldElement::sqrt_ratio_i(one, three);
|
||||
assert_eq!(sqrt.square() * three, one);
|
||||
assert_eq!(sqrt.is_odd().unwrap_u8(), 0);
|
||||
assert_eq!(choice.unwrap_u8(), 1);
|
||||
fn test_field() {
|
||||
ff_group_tests::prime_field::test_prime_field_bits::<FieldElement>();
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![no_std]
|
||||
|
||||
use core::{
|
||||
|
@ -14,6 +15,7 @@ use digest::{consts::U64, Digest, HashMarker};
|
|||
|
||||
use subtle::{Choice, CtOption};
|
||||
|
||||
use crypto_bigint::{Encoding, U256};
|
||||
pub use curve25519_dalek as dalek;
|
||||
|
||||
use dalek::{
|
||||
|
@ -175,7 +177,37 @@ constant_time!(Scalar, DScalar);
|
|||
math_neg!(Scalar, Scalar, DScalar::add, DScalar::sub, DScalar::mul);
|
||||
from_uint!(Scalar, DScalar);
|
||||
|
||||
const MODULUS: U256 =
|
||||
U256::from_be_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed");
|
||||
|
||||
impl Scalar {
|
||||
pub fn pow(&self, other: Scalar) -> Scalar {
|
||||
let mut table = [Scalar::one(); 16];
|
||||
table[1] = *self;
|
||||
for i in 2 .. 16 {
|
||||
table[i] = table[i - 1] * self;
|
||||
}
|
||||
|
||||
let mut res = Scalar::one();
|
||||
let mut bits = 0;
|
||||
for (i, bit) in other.to_le_bits().iter().rev().enumerate() {
|
||||
bits <<= 1;
|
||||
let bit = u8::from(*bit);
|
||||
bits |= bit;
|
||||
|
||||
if ((i + 1) % 4) == 0 {
|
||||
if i != 3 {
|
||||
for _ in 0 .. 4 {
|
||||
res *= res;
|
||||
}
|
||||
}
|
||||
res *= table[usize::from(bits)];
|
||||
bits = 0;
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
/// Perform wide reduction on a 64-byte array to create a Scalar without bias.
|
||||
pub fn from_bytes_mod_order_wide(bytes: &[u8; 64]) -> Scalar {
|
||||
Self(DScalar::from_bytes_mod_order_wide(bytes))
|
||||
|
@ -214,16 +246,16 @@ impl Field for Scalar {
|
|||
CtOption::new(Self(self.0.invert()), !self.is_zero())
|
||||
}
|
||||
fn sqrt(&self) -> CtOption<Self> {
|
||||
unimplemented!()
|
||||
}
|
||||
fn is_zero(&self) -> Choice {
|
||||
self.0.ct_eq(&DScalar::zero())
|
||||
}
|
||||
fn cube(&self) -> Self {
|
||||
*self * self * self
|
||||
}
|
||||
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self {
|
||||
unimplemented!()
|
||||
let mod_3_8 = MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8));
|
||||
let mod_3_8 = Scalar::from_repr(mod_3_8.to_le_bytes()).unwrap();
|
||||
|
||||
let sqrt_m1 = MODULUS.saturating_sub(&U256::from_u8(1)).wrapping_div(&U256::from_u8(4));
|
||||
let sqrt_m1 = Scalar::one().double().pow(Scalar::from_repr(sqrt_m1.to_le_bytes()).unwrap());
|
||||
|
||||
let tv1 = self.pow(mod_3_8);
|
||||
let tv2 = tv1 * sqrt_m1;
|
||||
let candidate = Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self));
|
||||
CtOption::new(candidate, candidate.square().ct_eq(self))
|
||||
}
|
||||
}
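
The same trick applies in the scalar field, as background reasoning rather than anything in this commit: the Ed25519 group order is also 5 mod 8, and since 2 is a non-residue modulo it, a power of 2 yields the needed square root of -1.

```latex
\ell \equiv 5 \pmod 8, \qquad \sqrt{-1} \equiv 2^{(\ell-1)/4} \pmod{\ell}
```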
|
||||
|
||||
|
@ -233,7 +265,7 @@ impl PrimeField for Scalar {
|
|||
const CAPACITY: u32 = 252;
|
||||
fn from_repr(bytes: [u8; 32]) -> CtOption<Self> {
|
||||
let scalar = DScalar::from_canonical_bytes(bytes);
|
||||
// TODO: This unwrap_or isn't constant time, yet do we have an alternative?
|
||||
// TODO: This unwrap_or isn't constant time, yet we don't exactly have an alternative...
|
||||
CtOption::new(Scalar(scalar.unwrap_or_else(DScalar::zero)), choice(scalar.is_some()))
|
||||
}
|
||||
fn to_repr(&self) -> [u8; 32] {
|
||||
|
@ -248,7 +280,11 @@ impl PrimeField for Scalar {
|
|||
2u64.into()
|
||||
}
|
||||
fn root_of_unity() -> Self {
|
||||
unimplemented!()
|
||||
const ROOT: [u8; 32] = [
|
||||
212, 7, 190, 235, 223, 117, 135, 190, 254, 131, 206, 66, 83, 86, 240, 14, 122, 194, 193, 171,
|
||||
96, 109, 61, 125, 231, 129, 121, 224, 16, 115, 74, 9,
|
||||
];
|
||||
Scalar::from_repr(ROOT).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -396,3 +432,13 @@ dalek_group!(
|
|||
RISTRETTO_BASEPOINT_POINT,
|
||||
RISTRETTO_BASEPOINT_TABLE
|
||||
);
|
||||
|
||||
#[test]
|
||||
fn test_ed25519_group() {
|
||||
ff_group_tests::group::test_prime_group_bits::<EdwardsPoint>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ristretto_group() {
|
||||
ff_group_tests::group::test_prime_group_bits::<RistrettoPoint>();
|
||||
}
|
||||
|
|
|
@ -32,5 +32,8 @@ ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] }
|
|||
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" }
|
||||
dleq = { path = "../dleq", version = "0.2", features = ["serialize"] }
|
||||
|
||||
[dev-dependencies]
|
||||
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std", "ristretto"] }
|
||||
|
||||
[features]
|
||||
tests = []
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
use core::{hash::Hash, fmt::Debug};
|
||||
use core::fmt::Debug;
|
||||
use std::{
|
||||
ops::Deref,
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use zeroize::{Zeroize, Zeroizing};
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
|
@ -13,12 +15,17 @@ use chacha20::{
|
|||
Key as Cc20Key, Nonce as Cc20Iv, ChaCha20,
|
||||
};
|
||||
|
||||
use group::GroupEncoding;
|
||||
|
||||
use ciphersuite::Ciphersuite;
|
||||
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
|
||||
#[cfg(test)]
|
||||
use group::ff::Field;
|
||||
use group::GroupEncoding;
|
||||
use ciphersuite::Ciphersuite;
|
||||
use multiexp::BatchVerifier;
|
||||
|
||||
use schnorr::SchnorrSignature;
|
||||
use dleq::DLEqProof;
|
||||
|
||||
use crate::ThresholdParams;
|
||||
|
||||
pub trait ReadWrite: Sized {
|
||||
|
@ -35,6 +42,7 @@ pub trait ReadWrite: Sized {
|
|||
pub trait Message: Clone + PartialEq + Eq + Debug + Zeroize + ReadWrite {}
|
||||
impl<M: Clone + PartialEq + Eq + Debug + Zeroize + ReadWrite> Message for M {}
|
||||
|
||||
/// Wraps a message with a key to use for encryption in the future.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct EncryptionKeyMessage<C: Ciphersuite, M: Message> {
|
||||
msg: M,
|
||||
|
@ -57,20 +65,115 @@ impl<C: Ciphersuite, M: Message> EncryptionKeyMessage<C, M> {
|
|||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
// Used by tests
|
||||
pub(crate) fn enc_key(&self) -> C::G {
|
||||
self.enc_key
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Encryptable: Clone + AsMut<[u8]> + Zeroize + ReadWrite {}
|
||||
impl<E: Clone + AsMut<[u8]> + Zeroize + ReadWrite> Encryptable for E {}
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub struct EncryptedMessage<E: Encryptable>(Zeroizing<E>);
|
||||
pub trait Encryptable: Clone + AsRef<[u8]> + AsMut<[u8]> + Zeroize + ReadWrite {}
|
||||
impl<E: Clone + AsRef<[u8]> + AsMut<[u8]> + Zeroize + ReadWrite> Encryptable for E {}
|
||||
|
||||
impl<E: Encryptable> EncryptedMessage<E> {
|
||||
/// An encrypted message, with a per-message encryption key so that specific messages can be
/// revealed without side effects.
|
||||
#[derive(Clone, Zeroize)]
|
||||
pub struct EncryptedMessage<C: Ciphersuite, E: Encryptable> {
|
||||
key: C::G,
|
||||
// Also include a proof-of-possession for the key.
|
||||
// If this proof-of-possession wasn't here, Eve could observe Alice encrypt to Bob with key X,
|
||||
// then send Bob a message also claiming to use X.
|
||||
// While Eve's message would fail to meaningfully decrypt, Bob would then use this to create a
|
||||
// blame argument against Eve. When they do, they'd reveal bX, revealing Alice's message to Bob.
|
||||
// This is a massive side effect which could break some protocols, in the worst case.
|
||||
// While Eve can still reuse their own keys, causing Bob to leak all messages by revealing for
|
||||
// any single one, that's effectively Eve revealing themselves, and not considered relevant.
|
||||
pop: SchnorrSignature<C>,
|
||||
msg: Zeroizing<E>,
|
||||
}
|
||||
|
||||
fn ecdh<C: Ciphersuite>(private: &Zeroizing<C::F>, public: C::G) -> Zeroizing<C::G> {
|
||||
Zeroizing::new(public * private.deref())
|
||||
}
|
||||
|
||||
fn cipher<C: Ciphersuite>(dst: &'static [u8], ecdh: &Zeroizing<C::G>) -> ChaCha20 {
|
||||
// Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly
|
||||
// TODO: https://github.com/serai-dex/serai/issues/151
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0.2");
|
||||
transcript.domain_separate(dst);
|
||||
|
||||
let mut ecdh = ecdh.to_bytes();
|
||||
transcript.append_message(b"shared_key", ecdh.as_ref());
|
||||
ecdh.as_mut().zeroize();
|
||||
|
||||
let zeroize = |buf: &mut [u8]| buf.zeroize();
|
||||
|
||||
let mut key = Cc20Key::default();
|
||||
let mut challenge = transcript.challenge(b"key");
|
||||
key.copy_from_slice(&challenge[.. 32]);
|
||||
zeroize(challenge.as_mut());
|
||||
|
||||
// The RecommendedTranscript isn't vulnerable to length extension attacks, yet if it was,
|
||||
// it'd make sense to clone it (and fork it) just to hedge against that
|
||||
let mut iv = Cc20Iv::default();
|
||||
let mut challenge = transcript.challenge(b"iv");
|
||||
iv.copy_from_slice(&challenge[.. 12]);
|
||||
zeroize(challenge.as_mut());
|
||||
|
||||
// Same commentary as the transcript regarding ZAlloc
|
||||
// TODO: https://github.com/serai-dex/serai/issues/151
|
||||
let res = ChaCha20::new(&key, &iv);
|
||||
zeroize(key.as_mut());
|
||||
zeroize(iv.as_mut());
|
||||
res
|
||||
}
|
||||
|
||||
fn encrypt<R: RngCore + CryptoRng, C: Ciphersuite, E: Encryptable>(
|
||||
rng: &mut R,
|
||||
dst: &'static [u8],
|
||||
from: u16,
|
||||
to: C::G,
|
||||
mut msg: Zeroizing<E>,
|
||||
) -> EncryptedMessage<C, E> {
|
||||
/*
|
||||
The following code could be used to replace the requirement on an RNG here.
|
||||
It's just currently not an issue to require taking in an RNG here.
|
||||
let last = self.last_enc_key.to_bytes();
|
||||
self.last_enc_key = C::hash_to_F(b"encryption_base", last.as_ref());
|
||||
let key = C::hash_to_F(b"encryption_key", last.as_ref());
|
||||
last.as_mut().zeroize();
|
||||
*/
|
||||
|
||||
let key = Zeroizing::new(C::random_nonzero_F(rng));
|
||||
cipher::<C>(dst, &ecdh::<C>(&key, to)).apply_keystream(msg.as_mut().as_mut());
|
||||
|
||||
let pub_key = C::generator() * key.deref();
|
||||
let nonce = Zeroizing::new(C::random_nonzero_F(rng));
|
||||
let pub_nonce = C::generator() * nonce.deref();
|
||||
EncryptedMessage {
|
||||
key: pub_key,
|
||||
pop: SchnorrSignature::sign(
|
||||
&key,
|
||||
nonce,
|
||||
pop_challenge::<C>(pub_nonce, pub_key, from, msg.deref().as_ref()),
|
||||
),
|
||||
msg,
|
||||
}
|
||||
}
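
Schematically, `encrypt` above performs the following, restated here for readability with KDF standing for the transcript-based derivation in `cipher`:

```latex
k \xleftarrow{\$} \mathbb{F}, \quad K = k \cdot G, \quad
\mathsf{ct} = \mathrm{ChaCha20}_{\mathrm{KDF}(k \cdot P_{\mathrm{to}})}(\mathsf{msg}), \quad
\mathsf{pop} = \mathrm{Schnorr}_{k}\big(H(R, K, \mathrm{from}, \mathsf{ct})\big)
```

The recipient recomputes the keystream from $\mathrm{KDF}(b \cdot K)$, where $b$ is their long-term encryption key and $b \cdot G = P_{\mathrm{to}}$.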
|
||||
|
||||
impl<C: Ciphersuite, E: Encryptable> EncryptedMessage<C, E> {
|
||||
pub fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
|
||||
Ok(Self(Zeroizing::new(E::read(reader, params)?)))
|
||||
Ok(Self {
|
||||
key: C::read_G(reader)?,
|
||||
pop: SchnorrSignature::<C>::read(reader)?,
|
||||
msg: Zeroizing::new(E::read(reader, params)?),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
self.0.write(writer)
|
||||
writer.write_all(self.key.to_bytes().as_ref())?;
|
||||
self.pop.write(writer)?;
|
||||
self.msg.write(writer)
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
|
@ -78,17 +181,150 @@ impl<E: Encryptable> EncryptedMessage<E> {
|
|||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_pop(&mut self) {
|
||||
self.pop.s += C::F::one();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_msg<R: RngCore + CryptoRng>(&mut self, rng: &mut R, from: u16) {
|
||||
// Invalidate the message by specifying a new key/Schnorr PoP
|
||||
// This will cause all initial checks to pass, yet decryption to produce gibberish
|
||||
let key = Zeroizing::new(C::random_nonzero_F(rng));
|
||||
let pub_key = C::generator() * key.deref();
|
||||
let nonce = Zeroizing::new(C::random_nonzero_F(rng));
|
||||
let pub_nonce = C::generator() * nonce.deref();
|
||||
self.key = pub_key;
|
||||
self.pop = SchnorrSignature::sign(
|
||||
&key,
|
||||
nonce,
|
||||
pop_challenge::<C>(pub_nonce, pub_key, from, self.msg.deref().as_ref()),
|
||||
);
|
||||
}
|
||||
|
||||
// Assumes the encrypted message is a secret share.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_share_serialization<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
dst: &'static [u8],
|
||||
from: u16,
|
||||
to: C::G,
|
||||
) {
|
||||
use group::ff::PrimeField;
|
||||
|
||||
let mut repr = <C::F as PrimeField>::Repr::default();
|
||||
for b in repr.as_mut().iter_mut() {
|
||||
*b = 255;
|
||||
}
|
||||
// Tries to guarantee the above assumption.
|
||||
assert_eq!(repr.as_ref().len(), self.msg.as_ref().len());
|
||||
// Checks that this isn't over a field where this is somehow valid
|
||||
assert!(!bool::from(C::F::from_repr(repr).is_some()));
|
||||
|
||||
self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref());
|
||||
*self = encrypt(rng, dst, from, to, self.msg.clone());
|
||||
}
|
||||
|
||||
// Assumes the encrypted message is a secret share.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_share_value<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
dst: &'static [u8],
|
||||
from: u16,
|
||||
to: C::G,
|
||||
) {
|
||||
use group::ff::PrimeField;
|
||||
|
||||
// Assumes the share isn't randomly 1
|
||||
let repr = C::F::one().to_repr();
|
||||
self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref());
|
||||
*self = encrypt(rng, dst, from, to, self.msg.clone());
|
||||
}
|
||||
}
|
||||
|
||||
/// A proof that the provided point is the legitimately derived shared key for some message.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct EncryptionKeyProof<C: Ciphersuite> {
|
||||
key: Zeroizing<C::G>,
|
||||
dleq: DLEqProof<C::G>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> EncryptionKeyProof<C> {
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Ok(Self { key: Zeroizing::new(C::read_G(reader)?), dleq: DLEqProof::read(reader)? })
|
||||
}
|
||||
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.key.to_bytes().as_ref())?;
|
||||
self.dleq.write(writer)
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = vec![];
|
||||
self.write(&mut buf).unwrap();
|
||||
buf
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_key(&mut self) {
|
||||
*self.key += C::generator();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn invalidate_dleq(&mut self) {
|
||||
let mut buf = vec![];
|
||||
self.dleq.write(&mut buf).unwrap();
|
||||
// Adds one to c since this is serialized c, s
|
||||
// Adding one to c will leave a validly serialized c
|
||||
// Adding one to s may leave an invalidly serialized s
|
||||
buf[0] = buf[0].wrapping_add(1);
|
||||
self.dleq = DLEqProof::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
// This doesn't need to take the msg. It just doesn't hurt as an extra layer.
|
||||
// This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no
|
||||
// root of trust other than their existence in the assumed-to-exist external authenticated channel.
|
||||
fn pop_challenge<C: Ciphersuite>(nonce: C::G, key: C::G, sender: u16, msg: &[u8]) -> C::F {
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2");
|
||||
transcript.append_message(b"nonce", nonce.to_bytes());
|
||||
transcript.append_message(b"key", key.to_bytes());
|
||||
// This is sufficient to prevent the attack this is meant to stop
|
||||
transcript.append_message(b"sender", sender.to_le_bytes());
|
||||
// This, as written above, doesn't hurt
|
||||
transcript.append_message(b"message", msg);
|
||||
// While this is a PoK and a PoP, it's called a PoP here since the important part is its owner
|
||||
// Elsewhere, where we use the term PoK, the important part is that it isn't some inverse, with
// a discrete log unknown to anyone, breaking the system
|
||||
C::hash_to_F(b"DKG-encryption-proof_of_possession", &transcript.challenge(b"schnorr"))
|
||||
}
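
For context, the proof of possession produced against this challenge is checked with the standard Schnorr relation (not introduced by this commit):

```latex
c = H_{\mathrm{pop}}(R, K, \mathrm{sender}, \mathsf{msg}), \qquad s \cdot G \stackrel{?}{=} R + c \cdot K
```

A message claiming key $K$ therefore only verifies if its sender actually knows $\log_G K$, which is what blocks the key-reuse misattribution described in the comments on `EncryptedMessage`.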
|
||||
|
||||
fn encryption_key_transcript() -> RecommendedTranscript {
|
||||
RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2")
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
|
||||
pub(crate) enum DecryptionError {
|
||||
#[error("accused provided an invalid signature")]
|
||||
InvalidSignature,
|
||||
#[error("accuser provided an invalid decryption key")]
|
||||
InvalidProof,
|
||||
}
|
||||
|
||||
// A simple box for managing encryption.
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct Encryption<Id: Eq + Hash, C: Ciphersuite> {
|
||||
pub(crate) struct Encryption<C: Ciphersuite> {
|
||||
dst: &'static [u8],
|
||||
i: u16,
|
||||
enc_key: Zeroizing<C::F>,
|
||||
enc_pub_key: C::G,
|
||||
enc_keys: HashMap<Id, C::G>,
|
||||
enc_keys: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<Id: Eq + Hash, C: Ciphersuite> Zeroize for Encryption<Id, C> {
|
||||
impl<C: Ciphersuite> Zeroize for Encryption<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.enc_key.zeroize();
|
||||
self.enc_pub_key.zeroize();
|
||||
|
@ -98,10 +334,16 @@ impl<Id: Eq + Hash, C: Ciphersuite> Zeroize for Encryption<Id, C> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<Id: Eq + Hash, C: Ciphersuite> Encryption<Id, C> {
|
||||
pub(crate) fn new<R: RngCore + CryptoRng>(dst: &'static [u8], rng: &mut R) -> Self {
|
||||
impl<C: Ciphersuite> Encryption<C> {
|
||||
pub(crate) fn new<R: RngCore + CryptoRng>(dst: &'static [u8], i: u16, rng: &mut R) -> Self {
|
||||
let enc_key = Zeroizing::new(C::random_nonzero_F(rng));
|
||||
Self { dst, enc_pub_key: C::generator() * enc_key.deref(), enc_key, enc_keys: HashMap::new() }
|
||||
Self {
|
||||
dst,
|
||||
i,
|
||||
enc_pub_key: C::generator() * enc_key.deref(),
|
||||
enc_key,
|
||||
enc_keys: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
|
||||
|
@ -110,7 +352,7 @@ impl<Id: Eq + Hash, C: Ciphersuite> Encryption<Id, C> {
|
|||
|
||||
pub(crate) fn register<M: Message>(
|
||||
&mut self,
|
||||
participant: Id,
|
||||
participant: u16,
|
||||
msg: EncryptionKeyMessage<C, M>,
|
||||
) -> M {
|
||||
if self.enc_keys.contains_key(&participant) {
|
||||
|
@ -120,62 +362,81 @@ impl<Id: Eq + Hash, C: Ciphersuite> Encryption<Id, C> {
|
|||
msg.msg
|
||||
}
|
||||
|
||||
fn cipher(&self, participant: Id, encrypt: bool) -> ChaCha20 {
|
||||
// Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly
|
||||
// TODO: https://github.com/serai-dex/serai/issues/151
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0");
|
||||
transcript.domain_separate(self.dst);
|
||||
pub(crate) fn encrypt<R: RngCore + CryptoRng, E: Encryptable>(
|
||||
&self,
|
||||
rng: &mut R,
|
||||
participant: u16,
|
||||
msg: Zeroizing<E>,
|
||||
) -> EncryptedMessage<C, E> {
|
||||
encrypt(rng, self.dst, self.i, self.enc_keys[&participant], msg)
|
||||
}
|
||||
|
||||
let other = self.enc_keys[&participant];
|
||||
if encrypt {
|
||||
transcript.append_message(b"sender", self.enc_pub_key.to_bytes());
|
||||
transcript.append_message(b"receiver", other.to_bytes());
|
||||
} else {
|
||||
transcript.append_message(b"sender", other.to_bytes());
|
||||
transcript.append_message(b"receiver", self.enc_pub_key.to_bytes());
|
||||
pub(crate) fn decrypt<R: RngCore + CryptoRng, I: Copy + Zeroize, E: Encryptable>(
|
||||
&self,
|
||||
rng: &mut R,
|
||||
batch: &mut BatchVerifier<I, C::G>,
|
||||
// Uses a distinct batch ID so if this batch verifier is reused, we know it's the PoP aspect
|
||||
// which failed, and therefore to use None for the blame
|
||||
batch_id: I,
|
||||
from: u16,
|
||||
mut msg: EncryptedMessage<C, E>,
|
||||
) -> (Zeroizing<E>, EncryptionKeyProof<C>) {
|
||||
msg.pop.batch_verify(
|
||||
rng,
|
||||
batch,
|
||||
batch_id,
|
||||
msg.key,
|
||||
pop_challenge::<C>(msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
|
||||
);
|
||||
|
||||
let key = ecdh::<C>(&self.enc_key, msg.key);
|
||||
cipher::<C>(self.dst, &key).apply_keystream(msg.msg.as_mut().as_mut());
|
||||
(
|
||||
msg.msg,
|
||||
EncryptionKeyProof {
|
||||
key,
|
||||
dleq: DLEqProof::prove(
|
||||
rng,
|
||||
&mut encryption_key_transcript(),
|
||||
&[C::generator(), msg.key],
|
||||
&self.enc_key,
|
||||
),
|
||||
},
|
||||
)
|
||||
}
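
The proof returned alongside the plaintext is a discrete-log-equality statement, restating the `DLEqProof::prove` call above: with $b$ the decryptor's encryption key, $B = b \cdot G$ their registered public key, and $K$ the message's per-message key,

```latex
\mathrm{DLEq}\{(G, B), (K, b \cdot K)\}: \quad \log_G B = \log_K (b \cdot K) = b
```

so anyone holding the registered keys can check that the revealed shared secret $b \cdot K$, and therefore the published decryption, is legitimate without the decryptor revealing $b$ itself.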
|
||||
|
||||
// Given a message, the intended decryptor, and a proof for its key, decrypt the message.
|
||||
// Returns None if the key was wrong.
|
||||
pub(crate) fn decrypt_with_proof<E: Encryptable>(
|
||||
&self,
|
||||
from: u16,
|
||||
decryptor: u16,
|
||||
mut msg: EncryptedMessage<C, E>,
|
||||
// There's no encryption key proof if the accusation is of an invalid signature
|
||||
proof: Option<EncryptionKeyProof<C>>,
|
||||
) -> Result<Zeroizing<E>, DecryptionError> {
|
||||
if !msg
|
||||
.pop
|
||||
.verify(msg.key, pop_challenge::<C>(msg.pop.R, msg.key, from, msg.msg.deref().as_ref()))
|
||||
{
|
||||
Err(DecryptionError::InvalidSignature)?;
|
||||
}
|
||||
|
||||
let mut shared = Zeroizing::new(other * self.enc_key.deref()).deref().to_bytes();
|
||||
transcript.append_message(b"shared_key", shared.as_ref());
|
||||
shared.as_mut().zeroize();
|
||||
if let Some(proof) = proof {
|
||||
// Verify this is the decryption key for this message
|
||||
proof
|
||||
.dleq
|
||||
.verify(
|
||||
&mut encryption_key_transcript(),
|
||||
&[C::generator(), msg.key],
|
||||
&[self.enc_keys[&decryptor], *proof.key],
|
||||
)
|
||||
.map_err(|_| DecryptionError::InvalidProof)?;
|
||||
|
||||
let zeroize = |buf: &mut [u8]| buf.zeroize();
|
||||
|
||||
let mut key = Cc20Key::default();
|
||||
let mut challenge = transcript.challenge(b"key");
|
||||
key.copy_from_slice(&challenge[.. 32]);
|
||||
zeroize(challenge.as_mut());
|
||||
|
||||
// The RecommendedTranscript isn't vulnerable to length extension attacks, yet if it was,
|
||||
// it'd make sense to clone it (and fork it) just to hedge against that
|
||||
let mut iv = Cc20Iv::default();
|
||||
let mut challenge = transcript.challenge(b"iv");
|
||||
iv.copy_from_slice(&challenge[.. 12]);
|
||||
zeroize(challenge.as_mut());
|
||||
|
||||
// Same commentary as the transcript regarding ZAlloc
|
||||
// TODO: https://github.com/serai-dex/serai/issues/151
|
||||
let res = ChaCha20::new(&key, &iv);
|
||||
zeroize(key.as_mut());
|
||||
zeroize(iv.as_mut());
|
||||
res
|
||||
}
|
||||
|
||||
pub(crate) fn encrypt<E: Encryptable>(
|
||||
&self,
|
||||
participant: Id,
|
||||
mut msg: Zeroizing<E>,
|
||||
) -> EncryptedMessage<E> {
|
||||
self.cipher(participant, true).apply_keystream(msg.as_mut().as_mut());
|
||||
EncryptedMessage(msg)
|
||||
}
|
||||
|
||||
pub(crate) fn decrypt<E: Encryptable>(
|
||||
&self,
|
||||
participant: Id,
|
||||
mut msg: EncryptedMessage<E>,
|
||||
) -> Zeroizing<E> {
|
||||
self.cipher(participant, false).apply_keystream(msg.0.as_mut().as_mut());
|
||||
msg.0
|
||||
cipher::<C>(self.dst, &proof.key).apply_keystream(msg.msg.as_mut().as_mut());
|
||||
Ok(msg.msg)
|
||||
} else {
|
||||
Err(DecryptionError::InvalidProof)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
use std::{
|
||||
use core::{
|
||||
marker::PhantomData,
|
||||
ops::Deref,
|
||||
fmt::{Debug, Formatter},
|
||||
};
|
||||
use std::{
|
||||
io::{self, Read, Write},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
@ -13,7 +16,7 @@ use transcript::{Transcript, RecommendedTranscript};
|
|||
|
||||
use group::{
|
||||
ff::{Field, PrimeField},
|
||||
GroupEncoding,
|
||||
Group, GroupEncoding,
|
||||
};
|
||||
use ciphersuite::Ciphersuite;
|
||||
use multiexp::{multiexp_vartime, BatchVerifier};
|
||||
|
@ -22,21 +25,31 @@ use schnorr::SchnorrSignature;
|
|||
|
||||
use crate::{
|
||||
DkgError, ThresholdParams, ThresholdCore, validate_map,
|
||||
encryption::{ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption},
|
||||
encryption::{
|
||||
ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, EncryptionKeyProof,
|
||||
DecryptionError,
|
||||
},
|
||||
};
|
||||
|
||||
type FrostError<C> = DkgError<EncryptionKeyProof<C>>;
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
fn challenge<C: Ciphersuite>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG FROST v0");
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2");
|
||||
transcript.domain_separate(b"Schnorr Proof of Knowledge");
|
||||
transcript.append_message(b"context", context.as_bytes());
|
||||
transcript.append_message(b"participant", l.to_le_bytes());
|
||||
transcript.append_message(b"nonce", R);
|
||||
transcript.append_message(b"commitments", Am);
|
||||
C::hash_to_F(b"PoK 0", &transcript.challenge(b"challenge"))
|
||||
C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr"))
|
||||
}
|
||||
|
||||
/// Commitments message to be broadcast to all other parties.
|
||||
/// The commitments message, intended to be broadcast to all other parties.
|
||||
/// Every participant should only provide one set of commitments to all parties.
|
||||
/// If any participant sends multiple sets of commitments, they are faulty and should be presumed
|
||||
/// malicious.
|
||||
/// As this library does not handle networking, it is also unable to detect if any participant is
|
||||
/// so faulty. That responsibility lies with the caller.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Commitments<C: Ciphersuite> {
|
||||
commitments: Vec<C::G>,
|
||||
|
@ -119,7 +132,7 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
|
|||
);
|
||||
|
||||
// Additionally create an encryption mechanism to protect the secret shares
|
||||
let encryption = Encryption::new(b"FROST", rng);
|
||||
let encryption = Encryption::new(b"FROST", self.params.i, rng);
|
||||
|
||||
// Step 4: Broadcast
|
||||
let msg =
|
||||
|
@ -149,19 +162,39 @@ fn polynomial<F: PrimeField + Zeroize>(coefficients: &[Zeroizing<F>], l: u16) ->
|
|||
share
|
||||
}
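
For background, this is standard Shamir/FROST key generation rather than anything introduced here: with coefficients $a_0, \dots, a_{t-1}$, the share sent to participant $l$ and the broadcast commitments are

```latex
f(l) = \sum_{j=0}^{t-1} a_j \, l^j, \qquad A_j = a_j \cdot G
```

with $a_0$ acting as the dealer's secret contribution.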
|
||||
|
||||
/// Secret share to be sent to the party it's intended for over an authenticated channel.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
/// The secret share message, to be sent to the party it's intended for over an authenticated
|
||||
/// channel.
|
||||
/// If any participant sends multiple secret shares to another participant, they are faulty.
|
||||
// This should presumably be written as SecretShare(Zeroizing<F::Repr>).
|
||||
// It's unfortunately not possible as F::Repr doesn't have Zeroize as a bound.
|
||||
// The encryption system also explicitly uses Zeroizing<M> so it can ensure anything being
|
||||
// encrypted is within Zeroizing. Accordingly, internally having Zeroizing would be redundant.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct SecretShare<F: PrimeField>(F::Repr);
|
||||
impl<F: PrimeField> AsRef<[u8]> for SecretShare<F> {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
self.0.as_ref()
|
||||
}
|
||||
}
|
||||
impl<F: PrimeField> AsMut<[u8]> for SecretShare<F> {
|
||||
fn as_mut(&mut self) -> &mut [u8] {
|
||||
self.0.as_mut()
|
||||
}
|
||||
}
|
||||
impl<F: PrimeField> Debug for SecretShare<F> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt.debug_struct("SecretShare").finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
impl<F: PrimeField> Zeroize for SecretShare<F> {
|
||||
fn zeroize(&mut self) {
|
||||
self.0.as_mut().zeroize()
|
||||
}
|
||||
}
|
||||
// Still manually implement ZeroizeOnDrop to ensure these don't stick around.
|
||||
// We could replace Zeroizing<M> with a bound M: ZeroizeOnDrop.
|
||||
// Doing so would potentially fail to highlight the expected behavior with these and remove a layer
|
||||
// of depth.
|
||||
impl<F: PrimeField> Drop for SecretShare<F> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize();
|
||||
|
@ -188,7 +221,7 @@ pub struct SecretShareMachine<C: Ciphersuite> {
|
|||
context: String,
|
||||
coefficients: Vec<Zeroizing<C::F>>,
|
||||
our_commitments: Vec<C::G>,
|
||||
encryption: Encryption<u16, C>,
|
||||
encryption: Encryption<C>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> SecretShareMachine<C> {
|
||||
|
@ -198,7 +231,7 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
|
|||
&mut self,
|
||||
rng: &mut R,
|
||||
mut commitments: HashMap<u16, EncryptionKeyMessage<C, Commitments<C>>>,
|
||||
) -> Result<HashMap<u16, Vec<C::G>>, DkgError> {
|
||||
) -> Result<HashMap<u16, Vec<C::G>>, FrostError<C>> {
|
||||
validate_map(&commitments, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;
|
||||
|
||||
let mut batch = BatchVerifier::<u16, C::G>::new(commitments.len());
|
||||
|
@ -221,21 +254,23 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
|
|||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
batch.verify_with_vartime_blame().map_err(DkgError::InvalidProofOfKnowledge)?;
|
||||
batch.verify_with_vartime_blame().map_err(FrostError::InvalidProofOfKnowledge)?;
|
||||
|
||||
commitments.insert(self.params.i, self.our_commitments.drain(..).collect());
|
||||
Ok(commitments)
|
||||
}
|
||||
|
||||
/// Continue generating a key.
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of secret shares to be sent over
|
||||
/// authenticated channels to their relevant counterparties.
|
||||
/// Takes in everyone else's commitments. Returns a HashMap of encrypted secret shares to be sent
|
||||
/// over authenticated channels to their relevant counterparties.
|
||||
/// If any participant sends multiple secret shares to another participant, they are faulty.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn generate_secret_shares<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
commitments: HashMap<u16, EncryptionKeyMessage<C, Commitments<C>>>,
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, EncryptedMessage<SecretShare<C::F>>>), DkgError> {
|
||||
) -> Result<(KeyMachine<C>, HashMap<u16, EncryptedMessage<C, SecretShare<C::F>>>), FrostError<C>>
|
||||
{
|
||||
let commitments = self.verify_r1(&mut *rng, commitments)?;
|
||||
|
||||
// Step 1: Generate secret shares for all other parties
|
||||
|
@ -250,7 +285,7 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
|
|||
let mut share = polynomial(&self.coefficients, l);
|
||||
let share_bytes = Zeroizing::new(SecretShare::<C::F>(share.to_repr()));
|
||||
share.zeroize();
|
||||
res.insert(l, self.encryption.encrypt(l, share_bytes));
|
||||
res.insert(l, self.encryption.encrypt(rng, l, share_bytes));
|
||||
}
|
||||
|
||||
// Calculate our own share
|
||||
|
@ -264,13 +299,18 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Final step of the key generation protocol.
|
||||
/// Advancement of the secret share state machine protocol.
/// This machine will 'complete' the protocol, from a local perspective, and can be the last
/// interactive component. In order to be secure, the parties must confirm having successfully
/// completed the protocol (an effort out of scope for this library), yet this is modelled by one
/// more state transition.
|
||||
pub struct KeyMachine<C: Ciphersuite> {
|
||||
params: ThresholdParams,
|
||||
secret: Zeroizing<C::F>,
|
||||
commitments: HashMap<u16, Vec<C::G>>,
|
||||
encryption: Encryption<u16, C>,
|
||||
encryption: Encryption<C>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.params.zeroize();
|
||||
|
@ -281,59 +321,84 @@ impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
|
|||
self.encryption.zeroize();
|
||||
}
|
||||
}
|
||||
impl<C: Ciphersuite> Drop for KeyMachine<C> {
|
||||
fn drop(&mut self) {
|
||||
self.zeroize()
|
||||
}
|
||||
|
||||
// Calculate the exponent for a given participant and apply it to a series of commitments
|
||||
// Initially used with the actual commitments to verify the secret share, later used with
|
||||
// stripes to generate the verification shares
|
||||
fn exponential<C: Ciphersuite>(i: u16, values: &[C::G]) -> Vec<(C::F, C::G)> {
|
||||
let i = C::F::from(i.into());
|
||||
let mut res = Vec::with_capacity(values.len());
|
||||
(0 .. values.len()).fold(C::F::one(), |exp, l| {
|
||||
res.push((exp, values[l]));
|
||||
exp * i
|
||||
});
|
||||
res
|
||||
}
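
`exponential` prepares the left-hand side of the standard share check in the exponent, restated here rather than changed:

```latex
\sum_{j} i^j \cdot A_j \stackrel{?}{=} f(i) \cdot G
```

`share_verification_statements` below appends $(1, -f(i) \cdot G)$, so the whole multiexp evaluates to the identity exactly when the received share matches the sender's commitments.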
|
||||
|
||||
fn share_verification_statements<C: Ciphersuite>(
|
||||
target: u16,
|
||||
commitments: &[C::G],
|
||||
mut share: Zeroizing<C::F>,
|
||||
) -> Vec<(C::F, C::G)> {
|
||||
// This can be insecurely linearized from n * t to just n using the below sums for a given
|
||||
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
|
||||
// ensure that malleability isn't present is to use this n * t algorithm, which runs
|
||||
// per sender and not as an aggregate of all senders, which also enables blame
|
||||
let mut values = exponential::<C>(target, commitments);
|
||||
|
||||
// Perform the share multiplication outside of the multiexp to minimize stack copying
|
||||
// While the multiexp BatchVerifier does zeroize its flattened multiexp, and itself, it still
|
||||
// converts whatever we give to an iterator and then builds a Vec internally, welcoming copies
|
||||
let neg_share_pub = C::generator() * -*share;
|
||||
share.zeroize();
|
||||
values.push((C::F::one(), neg_share_pub));
|
||||
|
||||
values
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Hash, Debug, Zeroize)]
|
||||
enum BatchId {
|
||||
Decryption(u16),
|
||||
Share(u16),
|
||||
}
|
||||
impl<C: Ciphersuite> ZeroizeOnDrop for KeyMachine<C> {}
|
||||
|
||||
impl<C: Ciphersuite> KeyMachine<C> {
|
||||
/// Complete key generation.
|
||||
/// Takes in everyone elses' shares submitted to us. Returns a ThresholdCore object representing
|
||||
/// the generated keys. Successful protocol completion MUST be confirmed by all parties before
|
||||
/// these keys may be safely used.
|
||||
pub fn complete<R: RngCore + CryptoRng>(
|
||||
/// Calculate our share given the shares sent to us.
|
||||
/// Returns a BlameMachine usable to determine if faults in the protocol occurred.
|
||||
/// Will error on, and return a blame proof for, the first-observed case of faulty behavior.
|
||||
pub fn calculate_share<R: RngCore + CryptoRng>(
|
||||
mut self,
|
||||
rng: &mut R,
|
||||
mut shares: HashMap<u16, EncryptedMessage<SecretShare<C::F>>>,
|
||||
) -> Result<ThresholdCore<C>, DkgError> {
|
||||
mut shares: HashMap<u16, EncryptedMessage<C, SecretShare<C::F>>>,
|
||||
) -> Result<BlameMachine<C>, FrostError<C>> {
|
||||
validate_map(&shares, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;
|
||||
|
||||
// Calculate the exponent for a given participant and apply it to a series of commitments
|
||||
// Initially used with the actual commitments to verify the secret share, later used with
|
||||
// stripes to generate the verification shares
|
||||
let exponential = |i: u16, values: &[_]| {
|
||||
let i = C::F::from(i.into());
|
||||
let mut res = Vec::with_capacity(self.params.t().into());
|
||||
(0 .. usize::from(self.params.t())).into_iter().fold(C::F::one(), |exp, l| {
|
||||
res.push((exp, values[l]));
|
||||
exp * i
|
||||
});
|
||||
res
|
||||
};
|
||||
|
||||
let mut batch = BatchVerifier::new(shares.len());
|
||||
let mut blames = HashMap::new();
|
||||
for (l, share_bytes) in shares.drain() {
|
||||
let mut share_bytes = self.encryption.decrypt(l, share_bytes);
|
||||
let mut share = Zeroizing::new(
|
||||
Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or(DkgError::InvalidShare(l))?,
|
||||
);
|
||||
let (mut share_bytes, blame) =
|
||||
self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes);
|
||||
let share =
|
||||
Zeroizing::new(Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| {
|
||||
FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) }
|
||||
})?);
|
||||
share_bytes.zeroize();
|
||||
*self.secret += share.deref();
|
||||
|
||||
// This can be insecurely linearized from n * t to just n using the below sums for a given
|
||||
// stripe. Doing so uses naive addition which is subject to malleability. The only way to
|
||||
// ensure that malleability isn't present is to use this n * t algorithm, which runs
|
||||
// per sender and not as an aggregate of all senders, which also enables blame
|
||||
let mut values = exponential(self.params.i, &self.commitments[&l]);
|
||||
// multiexp will Zeroize this when it's done with it
|
||||
values.push((-*share.deref(), C::generator()));
|
||||
share.zeroize();
|
||||
|
||||
batch.queue(rng, l, values);
|
||||
blames.insert(l, blame);
|
||||
batch.queue(
|
||||
rng,
|
||||
BatchId::Share(l),
|
||||
share_verification_statements::<C>(self.params.i(), &self.commitments[&l], share),
|
||||
);
|
||||
}
|
||||
batch.verify_with_vartime_blame().map_err(DkgError::InvalidShare)?;
|
||||
batch.verify_with_vartime_blame().map_err(|id| {
|
||||
let (l, blame) = match id {
|
||||
BatchId::Decryption(l) => (l, None),
|
||||
BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())),
|
||||
};
|
||||
FrostError::InvalidShare { participant: l, blame }
|
||||
})?;
|
||||
|
||||
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on
|
||||
// these sums so preprocessing them is a massive speedup
|
||||
|
@ -352,16 +417,136 @@ impl<C: Ciphersuite> KeyMachine<C> {
|
|||
if i == self.params.i() {
|
||||
C::generator() * self.secret.deref()
|
||||
} else {
|
||||
multiexp_vartime(&exponential(i, &stripes))
|
||||
multiexp_vartime(&exponential::<C>(i, &stripes))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Ok(ThresholdCore {
|
||||
params: self.params,
|
||||
secret_share: self.secret.clone(),
|
||||
group_key: stripes[0],
|
||||
verification_shares,
|
||||
let KeyMachine { commitments, encryption, params, secret } = self;
|
||||
Ok(BlameMachine {
|
||||
commitments,
|
||||
encryption,
|
||||
result: ThresholdCore {
|
||||
params,
|
||||
secret_share: secret,
|
||||
group_key: stripes[0],
|
||||
verification_shares,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BlameMachine<C: Ciphersuite> {
|
||||
commitments: HashMap<u16, Vec<C::G>>,
|
||||
encryption: Encryption<C>,
|
||||
result: ThresholdCore<C>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Zeroize for BlameMachine<C> {
|
||||
fn zeroize(&mut self) {
|
||||
for (_, commitments) in self.commitments.iter_mut() {
|
||||
commitments.zeroize();
|
||||
}
|
||||
self.encryption.zeroize();
|
||||
self.result.zeroize();
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> BlameMachine<C> {
|
||||
/// Mark the protocol as having been successfully completed, returning the generated keys.
|
||||
/// This should only be called after having confirmed, with all participants, successful
|
||||
/// completion.
|
||||
///
|
||||
/// Confirming successful completion is not necessarily as simple as everyone reporting their
|
||||
/// completion. Everyone must also receive everyone's report of completion, entering into the
|
||||
/// territory of consensus protocols. This library does not handle that nor does it provide any
|
||||
/// tooling to do so. This function is solely intended to force users to acknowledge they're
|
||||
/// completing the protocol, not processing any blame.
|
||||
pub fn complete(self) -> ThresholdCore<C> {
|
||||
self.result
|
||||
}
|
||||
|
||||
fn blame_internal(
|
||||
&self,
|
||||
sender: u16,
|
||||
recipient: u16,
|
||||
msg: EncryptedMessage<C, SecretShare<C::F>>,
|
||||
proof: Option<EncryptionKeyProof<C>>,
|
||||
) -> u16 {
|
||||
let share_bytes = match self.encryption.decrypt_with_proof(sender, recipient, msg, proof) {
|
||||
Ok(share_bytes) => share_bytes,
|
||||
// If there's an invalid signature, the sender did not send a properly formed message
|
||||
Err(DecryptionError::InvalidSignature) => return sender,
|
||||
// Decryption will fail if the provided ECDH key wasn't correct for the given message
|
||||
Err(DecryptionError::InvalidProof) => return recipient,
|
||||
};
|
||||
|
||||
let share = match Option::<C::F>::from(C::F::from_repr(share_bytes.0)) {
|
||||
Some(share) => share,
|
||||
// If this isn't a valid scalar, the sender is faulty
|
||||
None => return sender,
|
||||
};
|
||||
|
||||
// If this isn't a valid share, the sender is faulty
|
||||
if !bool::from(
|
||||
multiexp_vartime(&share_verification_statements::<C>(
|
||||
recipient,
|
||||
&self.commitments[&sender],
|
||||
Zeroizing::new(share),
|
||||
))
|
||||
.is_identity(),
|
||||
) {
|
||||
return sender;
|
||||
}
|
||||
|
||||
// The share was canonical and valid
|
||||
recipient
|
||||
}
|
||||
|
||||
/// Given an accusation of fault, determine the faulty party (either the sender, who sent an
|
||||
/// invalid secret share, or the receiver, who claimed a valid secret share was invalid). No
|
||||
/// matter which, prevent completion of the machine, forcing an abort of the protocol.
|
||||
///
|
||||
/// The message should be a copy of the encrypted secret share from the accused sender to the
|
||||
/// accusing recipient. This message must have been authenticated as actually having come from
|
||||
/// the sender in question.
|
||||
///
|
||||
/// In order to enable detecting multiple faults, an `AdditionalBlameMachine` is returned, which
|
||||
/// can be used to determine further blame. These machines will process the same blame statements
|
||||
/// multiple times, always identifying blame. It is the caller's job to ensure they're unique in
|
||||
/// order to prevent multiple instances of blame over a single incident.
|
||||
pub fn blame(
|
||||
self,
|
||||
sender: u16,
|
||||
recipient: u16,
|
||||
msg: EncryptedMessage<C, SecretShare<C::F>>,
|
||||
proof: Option<EncryptionKeyProof<C>>,
|
||||
) -> (AdditionalBlameMachine<C>, u16) {
|
||||
let faulty = self.blame_internal(sender, recipient, msg, proof);
|
||||
(AdditionalBlameMachine(self), faulty)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Zeroize)]
|
||||
pub struct AdditionalBlameMachine<C: Ciphersuite>(BlameMachine<C>);
|
||||
impl<C: Ciphersuite> AdditionalBlameMachine<C> {
|
||||
/// Given an accusation of fault, determine the faulty party (either the sender, who sent an
|
||||
/// invalid secret share, or the receiver, who claimed a valid secret share was invalid).
|
||||
///
|
||||
/// The message should be a copy of the encrypted secret share from the accused sender to the
|
||||
/// accusing recipient. This message must have been authenticated as actually having come from
|
||||
/// the sender in question.
|
||||
///
|
||||
/// This will process the same blame statement multiple times, always identifying blame. It is
|
||||
/// the caller's job to ensure they're unique in order to prevent multiple instances of blame
|
||||
/// over a single incident.
|
||||
pub fn blame(
|
||||
self,
|
||||
sender: u16,
|
||||
recipient: u16,
|
||||
msg: EncryptedMessage<C, SecretShare<C::F>>,
|
||||
proof: Option<EncryptionKeyProof<C>>,
|
||||
) -> u16 {
|
||||
self.0.blame_internal(sender, recipient, msg, proof)
|
||||
}
|
||||
}
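
A hedged usage sketch of the blame flow; the wrapper function and its name are hypothetical, while the types and methods are the ones defined above and assumed to be in scope:

```rust
// On receiving an accusation: replay the authenticated copy of the encrypted share (and the
// accuser's key proof, if the accusation concerns the share rather than the signature) through
// the machine, and treat the returned participant index as the faulty party
fn handle_accusation<C: Ciphersuite>(
  machine: BlameMachine<C>,
  sender: u16,
  recipient: u16,
  msg: EncryptedMessage<C, SecretShare<C::F>>,
  proof: Option<EncryptionKeyProof<C>>,
) -> (AdditionalBlameMachine<C>, u16) {
  // The returned AdditionalBlameMachine may process further, distinct accusations
  machine.blame(sender, recipient, msg, proof)
}
```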
|
||||
|
|
|
@ -6,7 +6,10 @@
|
|||
//! Additional utilities around them, such as promotion from one generator to another, are also
|
||||
//! provided.
|
||||
|
||||
use core::{fmt::Debug, ops::Deref};
|
||||
use core::{
|
||||
fmt::{Debug, Formatter},
|
||||
ops::Deref,
|
||||
};
|
||||
use std::{io::Read, sync::Arc, collections::HashMap};
|
||||
|
||||
use thiserror::Error;
|
||||
|
@ -34,8 +37,8 @@ pub mod promote;
|
|||
pub mod tests;
|
||||
|
||||
/// Various errors possible during key generation/signing.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
|
||||
pub enum DkgError {
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||
pub enum DkgError<B: Clone + PartialEq + Eq + Debug> {
|
||||
#[error("a parameter was 0 (required {0}, participants {1})")]
|
||||
ZeroParameter(u16, u16),
|
||||
#[error("invalid amount of required participants (max {1}, got {0})")]
|
||||
|
@ -54,19 +57,19 @@ pub enum DkgError {
|
|||
|
||||
#[error("invalid proof of knowledge (participant {0})")]
|
||||
InvalidProofOfKnowledge(u16),
|
||||
#[error("invalid share (participant {0})")]
|
||||
InvalidShare(u16),
|
||||
#[error("invalid share (participant {participant}, blame {blame})")]
|
||||
InvalidShare { participant: u16, blame: Option<B> },
|
||||
|
||||
#[error("internal error ({0})")]
|
||||
InternalError(&'static str),
|
||||
}
|
||||
|
||||
// Validate a map of values to have the expected included participants
|
||||
pub(crate) fn validate_map<T>(
|
||||
pub(crate) fn validate_map<T, B: Clone + PartialEq + Eq + Debug>(
|
||||
map: &HashMap<u16, T>,
|
||||
included: &[u16],
|
||||
ours: u16,
|
||||
) -> Result<(), DkgError> {
|
||||
) -> Result<(), DkgError<B>> {
|
||||
if (map.len() + 1) != included.len() {
|
||||
Err(DkgError::InvalidParticipantQuantity(included.len(), map.len() + 1))?;
|
||||
}
|
||||
|
@ -100,7 +103,7 @@ pub struct ThresholdParams {
|
|||
}
|
||||
|
||||
impl ThresholdParams {
|
||||
pub fn new(t: u16, n: u16, i: u16) -> Result<ThresholdParams, DkgError> {
|
||||
pub fn new(t: u16, n: u16, i: u16) -> Result<ThresholdParams, DkgError<()>> {
|
||||
if (t == 0) || (n == 0) {
|
||||
Err(DkgError::ZeroParameter(t, n))?;
|
||||
}
|
||||
|
@ -149,7 +152,7 @@ pub fn lagrange<F: PrimeField>(i: u16, included: &[u16]) -> F {
|
|||
|
||||
/// Keys and verification shares generated by a DKG.
|
||||
/// Called core as they're expected to be wrapped into an Arc before usage in various operations.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct ThresholdCore<C: Ciphersuite> {
|
||||
/// Threshold Parameters.
|
||||
params: ThresholdParams,
|
||||
|
@ -162,6 +165,17 @@ pub struct ThresholdCore<C: Ciphersuite> {
|
|||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Debug for ThresholdCore<C> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||
fmt
|
||||
.debug_struct("ThresholdCore")
|
||||
.field("params", &self.params)
|
||||
.field("group_key", &self.group_key)
|
||||
.field("verification_shares", &self.verification_shares)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Zeroize for ThresholdCore<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.params.zeroize();
|
||||
|
@ -179,8 +193,12 @@ impl<C: Ciphersuite> ThresholdCore<C> {
|
|||
secret_share: Zeroizing<C::F>,
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
) -> ThresholdCore<C> {
|
||||
#[cfg(debug_assertions)]
|
||||
validate_map(&verification_shares, &(0 ..= params.n).collect::<Vec<_>>(), 0).unwrap();
|
||||
debug_assert!(validate_map::<_, ()>(
|
||||
&verification_shares,
|
||||
&(0 ..= params.n).collect::<Vec<_>>(),
|
||||
0
|
||||
)
|
||||
.is_ok());
|
||||
|
||||
let t = (1 ..= params.t).collect::<Vec<_>>();
|
||||
ThresholdCore {
|
||||
|
@ -220,15 +238,15 @@ impl<C: Ciphersuite> ThresholdCore<C> {
|
|||
serialized
|
||||
}
|
||||
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> Result<ThresholdCore<C>, DkgError> {
|
||||
pub fn deserialize<R: Read>(reader: &mut R) -> Result<ThresholdCore<C>, DkgError<()>> {
|
||||
{
|
||||
let missing = DkgError::InternalError("ThresholdCore serialization is missing its curve");
|
||||
let different = DkgError::InternalError("deserializing ThresholdCore for another curve");
|
||||
|
||||
let mut id_len = [0; 4];
|
||||
reader.read_exact(&mut id_len).map_err(|_| missing)?;
|
||||
reader.read_exact(&mut id_len).map_err(|_| missing.clone())?;
|
||||
if u32::try_from(C::ID.len()).unwrap().to_be_bytes() != id_len {
|
||||
Err(different)?;
|
||||
Err(different.clone())?;
|
||||
}
|
||||
|
||||
let mut id = vec![0; C::ID.len()];
|
||||
|
@ -273,27 +291,42 @@ impl<C: Ciphersuite> ThresholdCore<C> {
|
|||
/// Threshold keys usable for signing.
|
||||
#[derive(Clone, Debug, Zeroize)]
|
||||
pub struct ThresholdKeys<C: Ciphersuite> {
|
||||
/// Core keys.
|
||||
// Core keys.
|
||||
// If this is the last reference, the underlying keys will be dropped. When that happens, the
|
||||
// private key present within it will be zeroed out (as it's within Zeroizing).
|
||||
#[zeroize(skip)]
|
||||
core: Arc<ThresholdCore<C>>,
|
||||
|
||||
/// Offset applied to these keys.
|
||||
// Offset applied to these keys.
|
||||
pub(crate) offset: Option<C::F>,
|
||||
}
|
||||
|
||||
/// View of keys passed to algorithm implementations.
|
||||
#[derive(Clone, Zeroize)]
|
||||
#[derive(Clone)]
|
||||
pub struct ThresholdView<C: Ciphersuite> {
|
||||
offset: C::F,
|
||||
group_key: C::G,
|
||||
included: Vec<u16>,
|
||||
secret_share: Zeroizing<C::F>,
|
||||
#[zeroize(skip)]
|
||||
original_verification_shares: HashMap<u16, C::G>,
|
||||
#[zeroize(skip)]
|
||||
verification_shares: HashMap<u16, C::G>,
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> Zeroize for ThresholdView<C> {
|
||||
fn zeroize(&mut self) {
|
||||
self.offset.zeroize();
|
||||
self.group_key.zeroize();
|
||||
self.included.zeroize();
|
||||
self.secret_share.zeroize();
|
||||
for (_, share) in self.original_verification_shares.iter_mut() {
|
||||
share.zeroize();
|
||||
}
|
||||
for (_, share) in self.verification_shares.iter_mut() {
|
||||
share.zeroize();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Ciphersuite> ThresholdKeys<C> {
|
||||
pub fn new(core: ThresholdCore<C>) -> ThresholdKeys<C> {
|
||||
ThresholdKeys { core: Arc::new(core), offset: None }
|
||||
|
@ -338,7 +371,7 @@ impl<C: Ciphersuite> ThresholdKeys<C> {
|
|||
self.core.serialize()
|
||||
}
|
||||
|
||||
pub fn view(&self, included: &[u16]) -> Result<ThresholdView<C>, DkgError> {
|
||||
pub fn view(&self, included: &[u16]) -> Result<ThresholdView<C>, DkgError<()>> {
|
||||
if (included.len() < self.params().t.into()) || (usize::from(self.params().n) < included.len())
|
||||
{
|
||||
Err(DkgError::InvalidSigningSet)?;
|
||||
|
|
|
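The `InvalidShare { participant, blame }` variant introduced above is what feeds the blame flow in the FROST module. A hedged sketch of how a participant might surface it, using the same Ristretto ciphersuite the tests later in this commit use (`accusation_to_publish` is an illustrative name, not part of the crate):

use dkg::{DkgError, encryption::EncryptionKeyProof};
use ciphersuite::Ristretto;

type Blame = EncryptionKeyProof<Ristretto>;

// On InvalidShare, hand back the accused participant and the optional proof so the caller can
// broadcast an accusation for everyone to feed into BlameMachine::blame. Any other error is
// returned unchanged, as it isn't attributable to a specific remote participant this way.
fn accusation_to_publish(
  err: DkgError<Blame>,
) -> Result<(u16, Option<Blame>), DkgError<Blame>> {
  match err {
    DkgError::InvalidShare { participant, blame } => Ok((participant, blame)),
    other => Err(other),
  }
}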
@ -28,7 +28,7 @@ pub trait CiphersuitePromote<C2: Ciphersuite> {
|
|||
}
|
||||
|
||||
fn transcript<G: GroupEncoding>(key: G, i: u16) -> RecommendedTranscript {
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG Generator Promotion v0");
|
||||
let mut transcript = RecommendedTranscript::new(b"DKG Generator Promotion v0.2");
|
||||
transcript.append_message(b"group_key", key.to_bytes());
|
||||
transcript.append_message(b"participant", i.to_be_bytes());
|
||||
transcript
|
||||
|
@ -44,13 +44,13 @@ pub struct GeneratorProof<C: Ciphersuite> {
|
|||
impl<C: Ciphersuite> GeneratorProof<C> {
|
||||
pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||
writer.write_all(self.share.to_bytes().as_ref())?;
|
||||
self.proof.serialize(writer)
|
||||
self.proof.write(writer)
|
||||
}
|
||||
|
||||
pub fn read<R: Read>(reader: &mut R) -> io::Result<GeneratorProof<C>> {
|
||||
Ok(GeneratorProof {
|
||||
share: <C as Ciphersuite>::read_G(reader)?,
|
||||
proof: DLEqProof::deserialize(reader)?,
|
||||
proof: DLEqProof::read(reader)?,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -98,7 +98,7 @@ where
|
|||
pub fn complete(
|
||||
self,
|
||||
proofs: &HashMap<u16, GeneratorProof<C1>>,
|
||||
) -> Result<ThresholdKeys<C2>, DkgError> {
|
||||
) -> Result<ThresholdKeys<C2>, DkgError<()>> {
|
||||
let params = self.base.params();
|
||||
validate_map(proofs, &(1 ..= params.n).collect::<Vec<_>>(), params.i)?;
|
||||
|
||||
|
|
|
@ -4,17 +4,23 @@ use rand_core::{RngCore, CryptoRng};
|
|||
|
||||
use crate::{
|
||||
Ciphersuite, ThresholdParams, ThresholdCore,
|
||||
frost::KeyGenMachine,
|
||||
frost::{KeyGenMachine, SecretShare, KeyMachine},
|
||||
encryption::{EncryptionKeyMessage, EncryptedMessage},
|
||||
tests::{THRESHOLD, PARTICIPANTS, clone_without},
|
||||
};
|
||||
|
||||
/// Fully perform the FROST key generation algorithm.
|
||||
pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
||||
// Needed so rustfmt doesn't fail to format on line length issues
|
||||
type FrostEncryptedMessage<C> = EncryptedMessage<C, SecretShare<<C as Ciphersuite>::F>>;
|
||||
type FrostSecretShares<C> = HashMap<u16, FrostEncryptedMessage<C>>;
|
||||
|
||||
// Commit, then return enc key and shares
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn commit_enc_keys_and_shares<R: RngCore + CryptoRng, C: Ciphersuite>(
|
||||
rng: &mut R,
|
||||
) -> HashMap<u16, ThresholdCore<C>> {
|
||||
) -> (HashMap<u16, KeyMachine<C>>, HashMap<u16, C::G>, HashMap<u16, FrostSecretShares<C>>) {
|
||||
let mut machines = HashMap::new();
|
||||
let mut commitments = HashMap::new();
|
||||
let mut enc_keys = HashMap::new();
|
||||
for i in 1 ..= PARTICIPANTS {
|
||||
let machine = KeyGenMachine::<C>::new(
|
||||
ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap(),
|
||||
|
@ -31,10 +37,11 @@ pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
|||
)
|
||||
.unwrap(),
|
||||
);
|
||||
enc_keys.insert(i, commitments[&i].enc_key());
|
||||
}
|
||||
|
||||
let mut secret_shares = HashMap::new();
|
||||
let mut machines = machines
|
||||
let machines = machines
|
||||
.drain()
|
||||
.map(|(l, machine)| {
|
||||
let (machine, mut shares) =
|
||||
|
@ -57,19 +64,36 @@ pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
|||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
(machines, enc_keys, secret_shares)
|
||||
}
|
||||
|
||||
fn generate_secret_shares<C: Ciphersuite>(
|
||||
shares: &HashMap<u16, FrostSecretShares<C>>,
|
||||
recipient: u16,
|
||||
) -> FrostSecretShares<C> {
|
||||
let mut our_secret_shares = HashMap::new();
|
||||
for (i, shares) in shares {
|
||||
if recipient == *i {
|
||||
continue;
|
||||
}
|
||||
our_secret_shares.insert(*i, shares[&recipient].clone());
|
||||
}
|
||||
our_secret_shares
|
||||
}
|
||||
|
||||
/// Fully perform the FROST key generation algorithm.
|
||||
pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
||||
rng: &mut R,
|
||||
) -> HashMap<u16, ThresholdCore<C>> {
|
||||
let (mut machines, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng);
|
||||
|
||||
let mut verification_shares = None;
|
||||
let mut group_key = None;
|
||||
machines
|
||||
.drain()
|
||||
.map(|(i, machine)| {
|
||||
let mut our_secret_shares = HashMap::new();
|
||||
for (l, shares) in &secret_shares {
|
||||
if i == *l {
|
||||
continue;
|
||||
}
|
||||
our_secret_shares.insert(*l, shares[&i].clone());
|
||||
}
|
||||
let these_keys = machine.complete(rng, our_secret_shares).unwrap();
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete();
|
||||
|
||||
// Verify the verification_shares are agreed upon
|
||||
if verification_shares.is_none() {
|
||||
|
@ -87,3 +111,188 @@ pub fn frost_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
|
|||
})
|
||||
.collect::<HashMap<_, _>>()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod literal {
|
||||
use rand_core::OsRng;
|
||||
|
||||
use ciphersuite::Ristretto;
|
||||
|
||||
use crate::{DkgError, encryption::EncryptionKeyProof, frost::BlameMachine};
|
||||
|
||||
use super::*;
|
||||
|
||||
fn test_blame(
|
||||
machines: Vec<BlameMachine<Ristretto>>,
|
||||
msg: FrostEncryptedMessage<Ristretto>,
|
||||
blame: Option<EncryptionKeyProof<Ristretto>>,
|
||||
) {
|
||||
for machine in machines {
|
||||
let (additional, blamed) = machine.blame(1, 2, msg.clone(), blame.clone());
|
||||
assert_eq!(blamed, 1);
|
||||
// Verify additional blame also works
|
||||
assert_eq!(additional.blame(1, 2, msg.clone(), blame.clone()), 1);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Write a macro which expands to the following
|
||||
#[test]
|
||||
fn invalid_encryption_pop_blame() {
|
||||
let (mut machines, _, mut secret_shares) =
|
||||
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
|
||||
|
||||
// Mutate the PoP of the encrypted message from 1 to 2
|
||||
secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_pop();
|
||||
|
||||
let mut blame = None;
|
||||
let machines = machines
|
||||
.drain()
|
||||
.filter_map(|(i, machine)| {
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
|
||||
if i == 2 {
|
||||
assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: 1, blame: None }));
|
||||
// Explicitly declare we have a blame object, which happens to be None since invalid PoP
|
||||
// is self-explanatory
|
||||
blame = Some(None);
|
||||
None
|
||||
} else {
|
||||
Some(machine.unwrap())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_ecdh_blame() {
|
||||
let (mut machines, _, mut secret_shares) =
|
||||
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
|
||||
|
||||
// Mutate the share to trigger a blame event
|
||||
// Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass
|
||||
// While 2 is the malicious party here, this is so that 1 creates the blame proof
|
||||
// We then malleate 1's blame proof, so 1 ends up malicious
|
||||
// Doesn't simply invalidate the PoP as that won't have a blame statement
|
||||
// By mutating the encrypted data, we do ensure a blame statement is created
|
||||
secret_shares.get_mut(&2).unwrap().get_mut(&1).unwrap().invalidate_msg(&mut OsRng, 2);
|
||||
|
||||
let mut blame = None;
|
||||
let machines = machines
|
||||
.drain()
|
||||
.filter_map(|(i, machine)| {
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
|
||||
if i == 1 {
|
||||
blame = Some(match machine.err() {
|
||||
Some(DkgError::InvalidShare { participant: 2, blame: Some(blame) }) => Some(blame),
|
||||
_ => panic!(),
|
||||
});
|
||||
None
|
||||
} else {
|
||||
Some(machine.unwrap())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
blame.as_mut().unwrap().as_mut().unwrap().invalidate_key();
|
||||
test_blame(machines, secret_shares[&2][&1].clone(), blame.unwrap());
|
||||
}
|
||||
|
||||
// This should be largely equivalent to the prior test
|
||||
#[test]
|
||||
fn invalid_dleq_blame() {
|
||||
let (mut machines, _, mut secret_shares) =
|
||||
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
|
||||
|
||||
secret_shares.get_mut(&2).unwrap().get_mut(&1).unwrap().invalidate_msg(&mut OsRng, 2);
|
||||
|
||||
let mut blame = None;
|
||||
let machines = machines
|
||||
.drain()
|
||||
.filter_map(|(i, machine)| {
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
|
||||
if i == 1 {
|
||||
blame = Some(match machine.err() {
|
||||
Some(DkgError::InvalidShare { participant: 2, blame: Some(blame) }) => Some(blame),
|
||||
_ => panic!(),
|
||||
});
|
||||
None
|
||||
} else {
|
||||
Some(machine.unwrap())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq();
|
||||
test_blame(machines, secret_shares[&2][&1].clone(), blame.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_share_serialization_blame() {
|
||||
let (mut machines, enc_keys, mut secret_shares) =
|
||||
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
|
||||
|
||||
secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_share_serialization(
|
||||
&mut OsRng,
|
||||
b"FROST",
|
||||
1,
|
||||
enc_keys[&2],
|
||||
);
|
||||
|
||||
let mut blame = None;
|
||||
let machines = machines
|
||||
.drain()
|
||||
.filter_map(|(i, machine)| {
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
|
||||
if i == 2 {
|
||||
blame = Some(match machine.err() {
|
||||
Some(DkgError::InvalidShare { participant: 1, blame: Some(blame) }) => Some(blame),
|
||||
_ => panic!(),
|
||||
});
|
||||
None
|
||||
} else {
|
||||
Some(machine.unwrap())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_share_value_blame() {
|
||||
let (mut machines, enc_keys, mut secret_shares) =
|
||||
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
|
||||
|
||||
secret_shares.get_mut(&1).unwrap().get_mut(&2).unwrap().invalidate_share_value(
|
||||
&mut OsRng,
|
||||
b"FROST",
|
||||
1,
|
||||
enc_keys[&2],
|
||||
);
|
||||
|
||||
let mut blame = None;
|
||||
let machines = machines
|
||||
.drain()
|
||||
.filter_map(|(i, machine)| {
|
||||
let our_secret_shares = generate_secret_shares(&secret_shares, i);
|
||||
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
|
||||
if i == 2 {
|
||||
blame = Some(match machine.err() {
|
||||
Some(DkgError::InvalidShare { participant: 1, blame: Some(blame) }) => Some(blame),
|
||||
_ => panic!(),
|
||||
});
|
||||
None
|
||||
} else {
|
||||
Some(machine.unwrap())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
test_blame(machines, secret_shares[&1][&2].clone(), blame.unwrap());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -68,3 +68,8 @@ pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
|
|||
key_gen::<_, C>(rng);
|
||||
test_generator_promotion::<_, C>(rng);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_ristretto() {
|
||||
test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng);
|
||||
}
|
||||
|
|
|
@ -210,7 +210,7 @@ where
|
|||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub(crate) fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
pub(crate) fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
#[allow(non_snake_case)]
|
||||
match self.Re_0 {
|
||||
Re::R(R0, R1) => {
|
||||
|
@ -230,7 +230,7 @@ where
|
|||
|
||||
#[allow(non_snake_case)]
|
||||
#[cfg(feature = "serialize")]
|
||||
pub(crate) fn deserialize<R: Read>(r: &mut R, mut Re_0: Re<G0, G1>) -> std::io::Result<Self> {
|
||||
pub(crate) fn read<R: Read>(r: &mut R, mut Re_0: Re<G0, G1>) -> std::io::Result<Self> {
|
||||
match Re_0 {
|
||||
Re::R(ref mut R0, ref mut R1) => {
|
||||
*R0 = read_point(r)?;
|
||||
|
|
|
@ -166,17 +166,17 @@ where
|
|||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub(crate) fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
pub(crate) fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
w.write_all(self.commitments.0.to_bytes().as_ref())?;
|
||||
w.write_all(self.commitments.1.to_bytes().as_ref())?;
|
||||
self.signature.serialize(w)
|
||||
self.signature.write(w)
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub(crate) fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Self> {
|
||||
pub(crate) fn read<R: Read>(r: &mut R) -> std::io::Result<Self> {
|
||||
Ok(Bits {
|
||||
commitments: (read_point(r)?, read_point(r)?),
|
||||
signature: Aos::deserialize(r, BitSignature::from(SIGNATURE).aos_form())?,
|
||||
signature: Aos::read(r, BitSignature::from(SIGNATURE).aos_form())?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -367,36 +367,32 @@ where
|
|||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
for bit in &self.bits {
|
||||
bit.serialize(w)?;
|
||||
bit.write(w)?;
|
||||
}
|
||||
if let Some(bit) = &self.remainder {
|
||||
bit.serialize(w)?;
|
||||
bit.write(w)?;
|
||||
}
|
||||
self.poks.0.serialize(w)?;
|
||||
self.poks.1.serialize(w)
|
||||
self.poks.0.write(w)?;
|
||||
self.poks.1.write(w)
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<Self> {
|
||||
pub fn read<R: Read>(r: &mut R) -> std::io::Result<Self> {
|
||||
let capacity = usize::try_from(G0::Scalar::CAPACITY.min(G1::Scalar::CAPACITY)).unwrap();
|
||||
let bits_per_group = BitSignature::from(SIGNATURE).bits();
|
||||
|
||||
let mut bits = Vec::with_capacity(capacity / bits_per_group);
|
||||
for _ in 0 .. (capacity / bits_per_group) {
|
||||
bits.push(Bits::deserialize(r)?);
|
||||
bits.push(Bits::read(r)?);
|
||||
}
|
||||
|
||||
let mut remainder = None;
|
||||
if (capacity % bits_per_group) != 0 {
|
||||
remainder = Some(Bits::deserialize(r)?);
|
||||
remainder = Some(Bits::read(r)?);
|
||||
}
|
||||
|
||||
Ok(__DLEqProof {
|
||||
bits,
|
||||
remainder,
|
||||
poks: (SchnorrPoK::deserialize(r)?, SchnorrPoK::deserialize(r)?),
|
||||
})
|
||||
Ok(__DLEqProof { bits, remainder, poks: (SchnorrPoK::read(r)?, SchnorrPoK::read(r)?) })
|
||||
}
|
||||
}
|
||||
|
|
|
@ -79,13 +79,13 @@ where
|
|||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn serialize<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
w.write_all(self.R.to_bytes().as_ref())?;
|
||||
w.write_all(self.s.to_repr().as_ref())
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn deserialize<R: Read>(r: &mut R) -> std::io::Result<SchnorrPoK<G>> {
|
||||
pub fn read<R: Read>(r: &mut R) -> std::io::Result<SchnorrPoK<G>> {
|
||||
Ok(SchnorrPoK { R: read_point(r)?, s: read_scalar(r)? })
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,22 +28,55 @@ pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F {
|
|||
// and loading it in
|
||||
// 3: Iterating over each byte and manually doubling/adding. This is simplest
|
||||
|
||||
// Get a wide amount of bytes to safely reduce without bias
|
||||
let target = ((usize::try_from(F::NUM_BITS).unwrap() + 7) / 8) * 2;
|
||||
let mut challenge_bytes = transcript.challenge(b"challenge").as_ref().to_vec();
|
||||
while challenge_bytes.len() < target {
|
||||
// Secure given transcripts updating on challenge
|
||||
challenge_bytes.extend(transcript.challenge(b"challenge_extension").as_ref());
|
||||
}
|
||||
challenge_bytes.truncate(target);
|
||||
|
||||
let mut challenge = F::zero();
|
||||
for b in challenge_bytes {
|
||||
for _ in 0 .. 8 {
|
||||
challenge = challenge.double();
|
||||
|
||||
// Get a wide amount of bytes to safely reduce without bias
|
||||
// In most cases, <=1.5x bytes is enough. 2x is still standard, and there are some theoretical
|
||||
// groups which may technically require more than 1.5x bytes for this to work as intended
|
||||
let target_bytes = ((usize::try_from(F::NUM_BITS).unwrap() + 7) / 8) * 2;
|
||||
let mut challenge_bytes = transcript.challenge(b"challenge");
|
||||
let challenge_bytes_len = challenge_bytes.as_ref().len();
|
||||
// If the challenge is 32 bytes, and we need 64, we need two challenges
|
||||
let needed_challenges = (target_bytes + (challenge_bytes_len - 1)) / challenge_bytes_len;
|
||||
|
||||
// The following algorithm should be equivalent to a wide reduction of the challenges,
|
||||
// interpreted as a concatenated, big-endian byte string
|
||||
let mut handled_bytes = 0;
|
||||
'outer: for _ in 0 ..= needed_challenges {
|
||||
// Cursor of which byte of the challenge to use next
|
||||
let mut b = 0;
|
||||
while b < challenge_bytes_len {
|
||||
// Get the next amount of bytes to attempt
|
||||
// Only grabs the needed amount of bytes, up to 8 at a time (u64), so long as they're
|
||||
// available in the challenge
|
||||
let chunk_bytes = (target_bytes - handled_bytes).min(8).min(challenge_bytes_len - b);
|
||||
|
||||
let mut chunk = 0;
|
||||
for _ in 0 .. chunk_bytes {
|
||||
chunk <<= 8;
|
||||
chunk |= u64::from(challenge_bytes.as_ref()[b]);
|
||||
b += 1;
|
||||
}
|
||||
// Add this chunk
|
||||
challenge += F::from(chunk);
|
||||
|
||||
handled_bytes += chunk_bytes;
|
||||
// If we've reached the target amount of bytes, break
|
||||
if handled_bytes == target_bytes {
|
||||
break 'outer;
|
||||
}
|
||||
|
||||
// Shift over by however many bits will be in the next chunk
|
||||
let next_chunk_bytes = (target_bytes - handled_bytes).min(8).min(challenge_bytes_len);
|
||||
for _ in 0 .. (next_chunk_bytes * 8) {
|
||||
challenge = challenge.double();
|
||||
}
|
||||
}
|
||||
challenge += F::from(u64::from(b));
|
||||
|
||||
// Secure thanks to the Transcript trait having a bound of updating on challenge
|
||||
challenge_bytes = transcript.challenge(b"challenge_extension");
|
||||
}
|
||||
|
||||
challenge
|
||||
}
|
||||
|
||||
|
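Restating the comment above in math (a description of the intended behavior, not additional code): with the concatenated challenge bytes b_0, ..., b_{n-1} (big-endian) split into limbs c_0, ..., c_{k-1} of l_0, ..., l_{k-1} bytes each (at most 8), the Horner-style loop is meant to compute

\mathrm{challenge} \;\equiv\; \Big(\cdots\big(c_0 \cdot 2^{8 l_1} + c_1\big) \cdot 2^{8 l_2} + \cdots\Big) \cdot 2^{8 l_{k-1}} + c_{k-1} \;\equiv\; \sum_{i=0}^{n-1} b_i \cdot 256^{\,n-1-i} \pmod{p}

i.e. the concatenation interpreted as a big-endian integer, reduced modulo the field's order p. Since n is 2 * ceil(NUM_BITS / 8) bytes, the statistical bias of that reduction is on the order of p / 2^{8n} <= 2^{-NUM_BITS}, which is the "safely reduce without bias" claim.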
@ -90,10 +123,12 @@ impl<G: PrimeGroup> DLEqProof<G> {
|
|||
|
||||
transcript.domain_separate(b"dleq");
|
||||
for generator in generators {
|
||||
// R, A
|
||||
Self::transcript(transcript, *generator, *generator * r.deref(), *generator * scalar.deref());
|
||||
}
|
||||
|
||||
let c = challenge(transcript);
|
||||
// r + ca
|
||||
let s = (c * scalar.deref()) + r.deref();
|
||||
|
||||
DLEqProof { c, s }
|
||||
|
@ -111,6 +146,9 @@ impl<G: PrimeGroup> DLEqProof<G> {
|
|||
|
||||
transcript.domain_separate(b"dleq");
|
||||
for (generator, point) in generators.iter().zip(points) {
|
||||
// s = r + ca
|
||||
// sG - cA = R
|
||||
// R, A
|
||||
Self::transcript(transcript, *generator, (*generator * self.s) - (*point * self.c), *point);
|
||||
}
|
||||
|
||||
|
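The "r + ca" and "sG - cA = R" comments above are the standard Schnorr relation, written out here for one generator/point pair (a restatement of what the code transcripts, not new behavior):

s = r + c \cdot a
\quad\Longrightarrow\quad
s \cdot G_i - c \cdot A_i = (r + c a)\,G_i - c\,(a\,G_i) = r\,G_i = R_i

so the verifier, re-deriving the challenge c over every reconstructed (G_i, R_i, A_i) triple, accepts exactly when the same discrete logarithm a was used across all generators.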
@ -122,13 +160,20 @@ impl<G: PrimeGroup> DLEqProof<G> {
|
|||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
w.write_all(self.c.to_repr().as_ref())?;
|
||||
w.write_all(self.s.to_repr().as_ref())
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn deserialize<R: Read>(r: &mut R) -> io::Result<DLEqProof<G>> {
|
||||
pub fn read<R: Read>(r: &mut R) -> io::Result<DLEqProof<G>> {
|
||||
Ok(DLEqProof { c: read_scalar(r)?, s: read_scalar(r)? })
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
let mut res = vec![];
|
||||
self.write(&mut res).unwrap();
|
||||
res
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,8 +13,8 @@ use crate::{
|
|||
#[cfg(feature = "serialize")]
|
||||
fn test_aos_serialization<const RING_LEN: usize>(proof: Aos<G0, G1, RING_LEN>, Re_0: Re<G0, G1>) {
|
||||
let mut buf = vec![];
|
||||
proof.serialize(&mut buf).unwrap();
|
||||
let deserialized = Aos::deserialize(&mut std::io::Cursor::new(buf), Re_0).unwrap();
|
||||
proof.write(&mut buf).unwrap();
|
||||
let deserialized = Aos::read::<&[u8]>(&mut buf.as_ref(), Re_0).unwrap();
|
||||
assert_eq!(proof, deserialized);
|
||||
}
|
||||
|
||||
|
@ -31,14 +31,14 @@ fn test_aos<const RING_LEN: usize>(default: Re<G0, G1>) {
|
|||
ring[i] = (generators.0.alt * ring_keys[i].0, generators.1.alt * ring_keys[i].1);
|
||||
}
|
||||
|
||||
for actual in 0 .. RING_LEN {
|
||||
for (actual, key) in ring_keys.iter_mut().enumerate() {
|
||||
let proof = Aos::<_, _, RING_LEN>::prove(
|
||||
&mut OsRng,
|
||||
transcript(),
|
||||
generators,
|
||||
&ring,
|
||||
actual,
|
||||
&mut ring_keys[actual],
|
||||
key,
|
||||
default.clone(),
|
||||
);
|
||||
|
||||
|
|
|
@ -60,8 +60,8 @@ macro_rules! verify_and_deserialize {
|
|||
#[cfg(feature = "serialize")]
|
||||
{
|
||||
let mut buf = vec![];
|
||||
$proof.serialize(&mut buf).unwrap();
|
||||
let deserialized = <$type>::deserialize(&mut std::io::Cursor::new(&buf)).unwrap();
|
||||
$proof.write(&mut buf).unwrap();
|
||||
let deserialized = <$type>::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
||||
assert_eq!($proof, deserialized);
|
||||
}
|
||||
};
|
||||
|
@ -96,7 +96,7 @@ macro_rules! test_dleq {
|
|||
#[cfg(feature = "serialize")]
|
||||
{
|
||||
let mut buf = vec![];
|
||||
proofs[0].serialize(&mut buf).unwrap();
|
||||
proofs[0].write(&mut buf).unwrap();
|
||||
println!("{} had a proof size of {} bytes", $str, buf.len());
|
||||
}
|
||||
}
|
||||
|
@ -177,7 +177,7 @@ fn test_remainder() {
|
|||
// This will ignore any unused bits, ensuring every remaining one is set
|
||||
let keys = mutual_scalar_from_bytes::<Scalar, Scalar>(&[0xFF; 32]);
|
||||
let keys = (Zeroizing::new(keys.0), Zeroizing::new(keys.1));
|
||||
assert_eq!(Scalar::one() + keys.0.deref(), Scalar::from(2u64).pow_vartime(&[255]));
|
||||
assert_eq!(Scalar::one() + keys.0.deref(), Scalar::from(2u64).pow_vartime([255]));
|
||||
assert_eq!(keys.0, keys.1);
|
||||
|
||||
let (proof, res) = ConciseLinearDLEq::prove_without_bias(
|
||||
|
|
|
@ -25,10 +25,10 @@ fn test_scalar() {
|
|||
let (k, ed) = scalar_normalize::<_, DalekScalar>(initial);
|
||||
|
||||
// The initial scalar should equal the new scalar with Ed25519's capacity
|
||||
let mut initial_bytes = (&initial.to_repr()).to_vec();
|
||||
let mut initial_bytes = initial.to_repr().to_vec();
|
||||
// Drop the first 4 bits to hit 252
|
||||
initial_bytes[0] = initial_bytes[0] & 0b00001111;
|
||||
let k_bytes = (&k.to_repr()).to_vec();
|
||||
initial_bytes[0] &= 0b00001111;
|
||||
let k_bytes = k.to_repr().to_vec();
|
||||
assert_eq!(initial_bytes, k_bytes);
|
||||
|
||||
let mut ed_bytes = ed.to_repr().as_ref().to_vec();
|
||||
|
|
|
@ -52,13 +52,44 @@ fn test_dleq() {
|
|||
keys[k] = generators[k] * key.deref();
|
||||
}
|
||||
proof.verify(&mut transcript(), &generators[.. i], &keys[.. i]).unwrap();
|
||||
// Different challenge
|
||||
assert!(proof
|
||||
.verify(
|
||||
&mut RecommendedTranscript::new(b"different challenge"),
|
||||
&generators[.. i],
|
||||
&keys[.. i]
|
||||
)
|
||||
.is_err());
|
||||
|
||||
// We could edit these tests to always test with at least two generators
|
||||
// Then we don't test proofs with zero/one generator(s)
|
||||
// While those are stupid, and pointless, and potentially point to a failure in the caller,
|
||||
// it could also be part of a dynamic system which deals with variable amounts of generators
|
||||
// Not panicking in such use cases, even if they're inefficient, provides seamless behavior
|
||||
if i >= 2 {
|
||||
// Different generators
|
||||
assert!(proof
|
||||
.verify(
|
||||
&mut transcript(),
|
||||
generators[.. i].iter().cloned().rev().collect::<Vec<_>>().as_ref(),
|
||||
&keys[.. i]
|
||||
)
|
||||
.is_err());
|
||||
// Different keys
|
||||
assert!(proof
|
||||
.verify(
|
||||
&mut transcript(),
|
||||
&generators[.. i],
|
||||
keys[.. i].iter().cloned().rev().collect::<Vec<_>>().as_ref()
|
||||
)
|
||||
.is_err());
|
||||
}
|
||||
|
||||
#[cfg(feature = "serialize")]
|
||||
{
|
||||
let mut buf = vec![];
|
||||
proof.serialize(&mut buf).unwrap();
|
||||
let deserialized =
|
||||
DLEqProof::<ProjectivePoint>::deserialize(&mut std::io::Cursor::new(&buf)).unwrap();
|
||||
proof.write(&mut buf).unwrap();
|
||||
let deserialized = DLEqProof::<ProjectivePoint>::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
||||
assert_eq!(proof, deserialized);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,5 +30,6 @@ crypto-bigint = { version = "0.4", features = ["zeroize"] }
|
|||
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2" }
|
||||
|
||||
[dev-dependencies]
|
||||
hex-literal = "0.3"
|
||||
hex = "0.4"
|
||||
|
||||
ff-group-tests = { path = "../ff-group-tests" }
|
||||
|
|
|
@ -3,7 +3,8 @@
|
|||
Inefficient, barebones implementation of Ed448 bound to the ff/group API,
|
||||
rejecting torsion to achieve a PrimeGroup definition. This likely should not be
|
||||
used and was only done so another library under Serai could confirm its
|
||||
completion. It is minimally tested, yet should be correct for what it has.
|
||||
Multiple functions remain unimplemented.
|
||||
completion. It is minimally tested, yet should be correct for what it has. The
|
||||
functions it doesn't have are marked `unimplemented!()`. This has not undergone
|
||||
auditing.
|
||||
|
||||
constant time and no_std.
|
||||
|
|
|
@ -38,7 +38,7 @@ macro_rules! field {
|
|||
impl Neg for $FieldName {
|
||||
type Output = $FieldName;
|
||||
fn neg(self) -> $FieldName {
|
||||
$MODULUS - self
|
||||
Self(self.0.neg_mod(&$MODULUS.0))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -104,17 +104,10 @@ macro_rules! field {
|
|||
}
|
||||
|
||||
fn sqrt(&self) -> CtOption<Self> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn is_zero(&self) -> Choice {
|
||||
self.0.ct_eq(&U512::ZERO)
|
||||
}
|
||||
fn cube(&self) -> Self {
|
||||
self.square() * self
|
||||
}
|
||||
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self {
|
||||
unimplemented!()
|
||||
const MOD_1_4: $FieldName =
|
||||
Self($MODULUS.0.saturating_add(&U512::from_u8(1)).wrapping_div(&U512::from_u8(4)));
|
||||
let res = self.pow(MOD_1_4);
|
||||
CtOption::new(res, res.square().ct_eq(self))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
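The new `sqrt` relies on the modulus being congruent to 3 mod 4, which holds for the Ed448 field prime p = 2^448 - 2^224 - 1 (and is assumed here for any other modulus this macro is instantiated with). The identity behind it, for a quadratic residue a:

\left(a^{(p+1)/4}\right)^2 \;=\; a^{(p+1)/2} \;=\; a \cdot a^{(p-1)/2} \;=\; a

by Euler's criterion (a^{(p-1)/2} = 1 for residues). For a non-residue, the square of the candidate is -a instead, which is exactly what the `res.square().ct_eq(self)` check rejects, making the returned CtOption None.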
@ -30,30 +30,7 @@ pub(crate) const Q_4: FieldElement =
|
|||
field!(FieldElement, MODULUS, WIDE_MODULUS, 448);
|
||||
|
||||
#[test]
|
||||
fn repr() {
|
||||
assert_eq!(FieldElement::from_repr(FieldElement::one().to_repr()).unwrap(), FieldElement::one());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn one_two() {
|
||||
assert_eq!(FieldElement::one() * FieldElement::one().double(), FieldElement::from(2u8));
|
||||
assert_eq!(
|
||||
FieldElement::from_repr(FieldElement::from(2u8).to_repr()).unwrap(),
|
||||
FieldElement::from(2u8)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pow() {
|
||||
assert_eq!(FieldElement::one().pow(FieldElement::one()), FieldElement::one());
|
||||
let two = FieldElement::one().double();
|
||||
assert_eq!(two.pow(two), two.double());
|
||||
|
||||
let three = two + FieldElement::one();
|
||||
assert_eq!(three.pow(three), three * three * three);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invert() {
|
||||
assert_eq!(FieldElement::one().invert().unwrap(), FieldElement::one());
|
||||
fn test_field() {
|
||||
// TODO: Move to test_prime_field_bits once the impl is finished
|
||||
ff_group_tests::prime_field::test_prime_field::<FieldElement>();
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
#![no_std]
|
||||
|
||||
mod backend;
|
||||
|
|
|
@ -309,31 +309,38 @@ impl GroupEncoding for Point {
|
|||
impl PrimeGroup for Point {}
|
||||
|
||||
#[test]
|
||||
fn identity() {
|
||||
assert_eq!(Point::from_bytes(&Point::identity().to_bytes()).unwrap(), Point::identity());
|
||||
assert_eq!(Point::identity() + Point::identity(), Point::identity());
|
||||
fn test_group() {
|
||||
// TODO: Move to test_prime_group_bits once the impl is finished
|
||||
use ff_group_tests::group::*;
|
||||
|
||||
test_eq::<Point>();
|
||||
test_identity::<Point>();
|
||||
test_generator::<Point>();
|
||||
test_double::<Point>();
|
||||
test_add::<Point>();
|
||||
test_sum::<Point>();
|
||||
test_neg::<Point>();
|
||||
test_sub::<Point>();
|
||||
test_mul::<Point>();
|
||||
test_order::<Point>();
|
||||
|
||||
test_encoding::<Point>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn addition_multiplication_serialization() {
|
||||
let mut accum = Point::identity();
|
||||
for x in 1 .. 10 {
|
||||
accum += Point::generator();
|
||||
let mul = Point::generator() * Scalar::from(u8::try_from(x).unwrap());
|
||||
assert_eq!(accum, mul);
|
||||
assert_eq!(Point::from_bytes(&mul.to_bytes()).unwrap(), mul);
|
||||
}
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
#[test]
|
||||
fn torsion() {
|
||||
use generic_array::GenericArray;
|
||||
|
||||
// Uses the originally suggested generator which had torsion
|
||||
let old_y = FieldElement::from_repr(
|
||||
hex_literal::hex!(
|
||||
"12796c1532041525945f322e414d434467cfd5c57c9a9af2473b27758c921c4828b277ca5f2891fc4f3d79afdf29a64c72fb28b59c16fa5100"
|
||||
).into(),
|
||||
)
|
||||
let old_y = FieldElement::from_repr(*GenericArray::from_slice(
|
||||
&hex::decode(
|
||||
"\
|
||||
12796c1532041525945f322e414d434467cfd5c57c9a9af2473b2775\
|
||||
8c921c4828b277ca5f2891fc4f3d79afdf29a64c72fb28b59c16fa51\
|
||||
00",
|
||||
)
|
||||
.unwrap(),
|
||||
))
|
||||
.unwrap();
|
||||
let old = Point { x: -recover_x(old_y).unwrap(), y: old_y, z: FieldElement::one() };
|
||||
assert!(bool::from(!old.is_torsion_free()));
|
||||
|
@ -382,6 +389,7 @@ a401cd9df24632adfe6b418dc942d8a091817dd8bd70e1c72ba52f3c\
|
|||
);
|
||||
}
|
||||
|
||||
// Checks random won't infinitely loop
|
||||
#[test]
|
||||
fn random() {
|
||||
Point::random(&mut rand_core::OsRng);
|
||||
|
|
|
@ -33,6 +33,7 @@ impl Scalar {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn invert() {
|
||||
assert_eq!(Scalar::one().invert().unwrap(), Scalar::one());
|
||||
fn test_scalar_field() {
|
||||
// TODO: Move to test_prime_field_bits once the impl is finished
|
||||
ff_group_tests::prime_field::test_prime_field::<Scalar>();
|
||||
}
|
||||
|
|
20
crypto/ff-group-tests/Cargo.toml
Normal file
|
@ -0,0 +1,20 @@
|
|||
[package]
|
||||
name = "ff-group-tests"
|
||||
version = "0.12.0"
|
||||
description = "A collection of sanity tests for implementors of ff/group APIs"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ff-group-tests"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||
keywords = ["ff", "group", "ecc"]
|
||||
edition = "2021"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[dependencies]
|
||||
group = "0.12"
|
||||
|
||||
[dev-dependencies]
|
||||
k256 = { version = "0.11", features = ["bits"] }
|
||||
p256 = { version = "0.11", features = ["bits"] }
|
21
crypto/ff-group-tests/LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2022 Luke Parker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
5
crypto/ff-group-tests/README.md
Normal file
|
@ -0,0 +1,5 @@
|
|||
# FF/Group Tests
|
||||
|
||||
A series of sanity checks for implementors of the ff/group APIs. Implementors
|
||||
are assumed to be of a non-trivial size. These tests do not attempt to check if
|
||||
constant time implementations are used.
|
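A hedged example of how an implementor might wire these checks into their own test suite, mirroring the k256/p256 self-tests this crate includes (`MyPoint` and `MyScalar` are placeholders for the implementor's types, not items this crate provides):

// In the implementor's crate, under #[cfg(test)]
#[test]
fn sanity_check_my_group() {
  // Covers the Group, GroupEncoding, PrimeField, and PrimeFieldBits suites for the point
  // type and its scalar field (MyPoint: PrimeGroup with MyPoint::Scalar: PrimeFieldBits)
  ff_group_tests::group::test_prime_group_bits::<MyPoint>();
}

#[test]
fn sanity_check_my_scalar_field() {
  // Field/PrimeField/PrimeFieldBits checks alone, for a scalar field by itself
  ff_group_tests::prime_field::test_prime_field_bits::<MyScalar>();
}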
122
crypto/ff-group-tests/src/field.rs
Normal file
|
@ -0,0 +1,122 @@
|
|||
use group::ff::Field;
|
||||
|
||||
/// Perform basic tests on equality.
|
||||
pub fn test_eq<F: Field>() {
|
||||
let zero = F::zero();
|
||||
let one = F::one();
|
||||
|
||||
assert!(zero != one, "0 == 1");
|
||||
assert!(!bool::from(zero.ct_eq(&one)), "0 ct_eq 1");
|
||||
|
||||
assert_eq!(zero, F::zero(), "0 != 0");
|
||||
assert!(bool::from(zero.ct_eq(&F::zero())), "0 !ct_eq 0");
|
||||
|
||||
assert_eq!(one, F::one(), "1 != 1");
|
||||
assert!(bool::from(one.ct_eq(&F::one())), "1 !ct_eq 1");
|
||||
}
|
||||
|
||||
/// Verify conditional selection works. Doesn't verify it's actually constant time.
|
||||
pub fn test_conditional_select<F: Field>() {
|
||||
let zero = F::zero();
|
||||
let one = F::one();
|
||||
assert_eq!(F::conditional_select(&zero, &one, 0.into()), zero, "couldn't select when false");
|
||||
assert_eq!(F::conditional_select(&zero, &one, 1.into()), one, "couldn't select when true");
|
||||
}
|
||||
|
||||
/// Perform basic tests on addition.
|
||||
pub fn test_add<F: Field>() {
|
||||
assert_eq!(F::zero() + F::zero(), F::zero(), "0 + 0 != 0");
|
||||
assert_eq!(F::zero() + F::one(), F::one(), "0 + 1 != 1");
|
||||
assert_eq!(F::one() + F::zero(), F::one(), "1 + 0 != 1");
|
||||
// Only PrimeField offers From<u64>
|
||||
// Accordingly, we assume either double or addition is correct
|
||||
// They either have to be matchingly correct or matchingly incorrect, yet we can't
|
||||
// reliably determine that here
|
||||
assert_eq!(F::one() + F::one(), F::one().double(), "1 + 1 != 2");
|
||||
}
|
||||
|
||||
/// Perform basic tests on subtraction.
|
||||
pub fn test_sub<F: Field>() {
|
||||
assert_eq!(F::zero() - F::zero(), F::zero(), "0 - 0 != 0");
|
||||
assert_eq!(F::one() - F::zero(), F::one(), "1 - 0 != 1");
|
||||
assert_eq!(F::one() - F::one(), F::zero(), "1 - 1 != 0");
|
||||
}
|
||||
|
||||
/// Perform basic tests on negation.
|
||||
pub fn test_neg<F: Field>() {
|
||||
assert_eq!(-F::zero(), F::zero(), "-0 != 0");
|
||||
assert_eq!(-(-F::one()), F::one(), "-(-1) != 1");
|
||||
assert_eq!(F::one() + (-F::one()), F::zero(), "1 + -1 != 0");
|
||||
assert_eq!(F::one() - (-F::one()), F::one().double(), "1 - -1 != 2");
|
||||
}
|
||||
|
||||
/// Perform basic tests on multiplication.
|
||||
pub fn test_mul<F: Field>() {
|
||||
assert_eq!(F::zero() * F::zero(), F::zero(), "0 * 0 != 0");
|
||||
assert_eq!(F::one() * F::zero(), F::zero(), "1 * 0 != 0");
|
||||
assert_eq!(F::one() * F::one(), F::one(), "1 * 1 != 1");
|
||||
let two = F::one().double();
|
||||
assert_eq!(two * (two + F::one()), two + two + two, "2 * 3 != 6");
|
||||
}
|
||||
|
||||
/// Perform basic tests on the square function.
|
||||
pub fn test_square<F: Field>() {
|
||||
assert_eq!(F::zero().square(), F::zero(), "0^2 != 0");
|
||||
assert_eq!(F::one().square(), F::one(), "1^2 != 1");
|
||||
let two = F::one().double();
|
||||
assert_eq!(two.square(), two + two, "2^2 != 4");
|
||||
let three = two + F::one();
|
||||
assert_eq!(three.square(), three * three, "3^2 != 9");
|
||||
}
|
||||
|
||||
/// Perform basic tests on the invert function.
|
||||
pub fn test_invert<F: Field>() {
|
||||
assert!(bool::from(F::zero().invert().is_none()), "0.invert() is some");
|
||||
assert_eq!(F::one().invert().unwrap(), F::one(), "1.invert() != 1");
|
||||
|
||||
let two = F::one().double();
|
||||
let three = two + F::one();
|
||||
assert_eq!(two * three.invert().unwrap() * three, two, "2 * 3.invert() * 3 != 2");
|
||||
}
|
||||
|
||||
/// Perform basic tests on the sqrt function.
|
||||
pub fn test_sqrt<F: Field>() {
|
||||
assert_eq!(F::zero().sqrt().unwrap(), F::zero(), "sqrt(0) != 0");
|
||||
assert_eq!(F::one().sqrt().unwrap(), F::one(), "sqrt(1) != 1");
|
||||
|
||||
let mut has_root = F::one().double();
|
||||
while bool::from(has_root.sqrt().is_none()) {
|
||||
has_root += F::one();
|
||||
}
|
||||
let root = has_root.sqrt().unwrap();
|
||||
assert_eq!(root * root, has_root, "sqrt(x)^2 != x");
|
||||
}
|
||||
|
||||
/// Perform basic tests on the is_zero functions.
|
||||
pub fn test_is_zero<F: Field>() {
|
||||
assert!(bool::from(F::zero().is_zero()), "0 is not 0");
|
||||
assert!(F::zero().is_zero_vartime(), "0 is not 0");
|
||||
}
|
||||
|
||||
/// Perform basic tests on the cube function.
|
||||
pub fn test_cube<F: Field>() {
|
||||
assert_eq!(F::zero().cube(), F::zero(), "0^3 != 0");
|
||||
assert_eq!(F::one().cube(), F::one(), "1^3 != 1");
|
||||
let two = F::one().double();
|
||||
assert_eq!(two.cube(), two * two * two, "2^3 != 8");
|
||||
}
|
||||
|
||||
/// Run all tests on fields implementing Field.
|
||||
pub fn test_field<F: Field>() {
|
||||
test_eq::<F>();
|
||||
test_conditional_select::<F>();
|
||||
test_add::<F>();
|
||||
test_sub::<F>();
|
||||
test_neg::<F>();
|
||||
test_mul::<F>();
|
||||
test_square::<F>();
|
||||
test_invert::<F>();
|
||||
test_sqrt::<F>();
|
||||
test_is_zero::<F>();
|
||||
test_cube::<F>();
|
||||
}
|
168
crypto/ff-group-tests/src/group.rs
Normal file
|
@ -0,0 +1,168 @@
|
|||
use group::{
|
||||
ff::{Field, PrimeFieldBits},
|
||||
Group,
|
||||
prime::PrimeGroup,
|
||||
};
|
||||
|
||||
use crate::prime_field::{test_prime_field, test_prime_field_bits};
|
||||
|
||||
/// Test equality.
|
||||
pub fn test_eq<G: Group>() {
|
||||
assert_eq!(G::identity(), G::identity(), "identity != identity");
|
||||
assert_eq!(G::generator(), G::generator(), "generator != generator");
|
||||
assert!(G::identity() != G::generator(), "identity == generator");
|
||||
}
|
||||
|
||||
/// Test identity.
|
||||
pub fn test_identity<G: Group>() {
|
||||
assert!(bool::from(G::identity().is_identity()), "identity wasn't identity");
|
||||
assert!(
|
||||
bool::from((G::identity() + G::identity()).is_identity()),
|
||||
"identity + identity wasn't identity"
|
||||
);
|
||||
assert!(
|
||||
bool::from((G::generator() - G::generator()).is_identity()),
|
||||
"generator - generator wasn't identity"
|
||||
);
|
||||
assert!(!bool::from(G::generator().is_identity()), "is_identity claimed generator was identity");
|
||||
}
|
||||
|
||||
/// Sanity check the generator.
|
||||
pub fn test_generator<G: Group>() {
|
||||
assert!(G::generator() != G::identity(), "generator was identity");
|
||||
assert!(
|
||||
(G::generator() + G::generator()) != G::generator(),
|
||||
"generator added to itself identity"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test doubling of group elements.
|
||||
pub fn test_double<G: Group>() {
|
||||
assert!(bool::from(G::identity().double().is_identity()), "identity.double() wasn't identity");
|
||||
assert_eq!(
|
||||
G::generator() + G::generator(),
|
||||
G::generator().double(),
|
||||
"generator + generator != generator.double()"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test addition.
|
||||
pub fn test_add<G: Group>() {
|
||||
assert_eq!(G::identity() + G::identity(), G::identity(), "identity + identity != identity");
|
||||
assert_eq!(G::identity() + G::generator(), G::generator(), "identity + generator != generator");
|
||||
assert_eq!(G::generator() + G::identity(), G::generator(), "generator + identity != generator");
|
||||
|
||||
let two = G::generator().double();
|
||||
assert_eq!(G::generator() + G::generator(), two, "generator + generator != two");
|
||||
let four = two.double();
|
||||
assert_eq!(
|
||||
G::generator() + G::generator() + G::generator() + G::generator(),
|
||||
four,
|
||||
"generator + generator + generator + generator != four"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test summation.
|
||||
pub fn test_sum<G: Group>() {
|
||||
assert_eq!(
|
||||
[G::generator(), G::generator()].iter().sum::<G>(),
|
||||
G::generator().double(),
|
||||
"[generator, generator].sum() != two"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test negation.
|
||||
pub fn test_neg<G: Group>() {
|
||||
assert_eq!(G::identity(), G::identity().neg(), "identity != -identity");
|
||||
assert_eq!(
|
||||
G::generator() + G::generator().neg(),
|
||||
G::identity(),
|
||||
"generator + -generator != identity"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test subtraction.
|
||||
pub fn test_sub<G: Group>() {
|
||||
assert_eq!(G::generator() - G::generator(), G::identity(), "generator - generator != identity");
|
||||
let two = G::generator() + G::generator();
|
||||
assert_eq!(two - G::generator(), G::generator(), "two - one != one");
|
||||
}
|
||||
|
||||
/// Test scalar multiplication
|
||||
pub fn test_mul<G: Group>() {
|
||||
assert_eq!(G::generator() * G::Scalar::from(0), G::identity(), "generator * 0 != identity");
|
||||
assert_eq!(G::generator() * G::Scalar::from(1), G::generator(), "generator * 1 != generator");
|
||||
assert_eq!(
|
||||
G::generator() * G::Scalar::from(2),
|
||||
G::generator() + G::generator(),
|
||||
"generator * 2 != generator + generator"
|
||||
);
|
||||
assert_eq!(G::identity() * G::Scalar::from(2), G::identity(), "identity * 2 != identity");
|
||||
}
|
||||
|
||||
/// Test `((order - 1) * G) + G == identity`.
|
||||
pub fn test_order<G: Group>() {
|
||||
let minus_one = G::generator() * (G::Scalar::zero() - G::Scalar::one());
|
||||
assert!(minus_one != G::identity(), "(modulus - 1) * G was identity");
|
||||
assert_eq!(minus_one + G::generator(), G::identity(), "((modulus - 1) * G) + G wasn't identity");
|
||||
}
|
||||
|
||||
/// Run all tests on groups implementing Group.
|
||||
pub fn test_group<G: Group>() {
|
||||
test_prime_field::<G::Scalar>();
|
||||
|
||||
test_eq::<G>();
|
||||
test_identity::<G>();
|
||||
test_generator::<G>();
|
||||
test_double::<G>();
|
||||
test_add::<G>();
|
||||
test_sum::<G>();
|
||||
test_neg::<G>();
|
||||
test_sub::<G>();
|
||||
test_mul::<G>();
|
||||
test_order::<G>();
|
||||
}
|
||||
|
||||
/// Test encoding and decoding of group elements.
|
||||
pub fn test_encoding<G: PrimeGroup>() {
|
||||
let test = |point: G, msg| {
|
||||
let bytes = point.to_bytes();
|
||||
let mut repr = G::Repr::default();
|
||||
repr.as_mut().copy_from_slice(bytes.as_ref());
|
||||
assert_eq!(point, G::from_bytes(&repr).unwrap(), "{msg} couldn't be encoded and decoded");
|
||||
assert_eq!(
|
||||
point,
|
||||
G::from_bytes_unchecked(&repr).unwrap(),
|
||||
"{msg} couldn't be encoded and decoded",
|
||||
);
|
||||
};
|
||||
test(G::identity(), "identity");
|
||||
test(G::generator(), "generator");
|
||||
test(G::generator() + G::generator(), "(generator * 2)");
|
||||
}
|
||||
|
||||
/// Run all tests on groups implementing PrimeGroup (Group + GroupEncoding).
|
||||
pub fn test_prime_group<G: PrimeGroup>() {
|
||||
test_group::<G>();
|
||||
|
||||
test_encoding::<G>();
|
||||
}
|
||||
|
||||
/// Run all tests offered by this crate on the group.
|
||||
pub fn test_prime_group_bits<G: PrimeGroup>()
|
||||
where
|
||||
G::Scalar: PrimeFieldBits,
|
||||
{
|
||||
test_prime_field_bits::<G::Scalar>();
|
||||
test_prime_group::<G>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_k256_group_encoding() {
|
||||
test_prime_group_bits::<k256::ProjectivePoint>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_p256_group_encoding() {
|
||||
test_prime_group_bits::<p256::ProjectivePoint>();
|
||||
}
|
9
crypto/ff-group-tests/src/lib.rs
Normal file
|
@ -0,0 +1,9 @@
|
|||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||
|
||||
/// Tests for the Field trait.
|
||||
pub mod field;
|
||||
/// Tests for the PrimeField and PrimeFieldBits traits.
|
||||
pub mod prime_field;
|
||||
|
||||
/// Tests for the Group and GroupEncoding traits.
|
||||
pub mod group;
|
290
crypto/ff-group-tests/src/prime_field.rs
Normal file
|
@ -0,0 +1,290 @@
|
|||
use group::ff::{PrimeField, PrimeFieldBits};
|
||||
|
||||
use crate::field::test_field;
|
||||
|
||||
// Ideally, this and test_one would be under Field, yet these tests require access to From<u64>
|
||||
/// Test zero returns F::from(0).
|
||||
pub fn test_zero<F: PrimeField>() {
|
||||
assert_eq!(F::zero(), F::from(0u64), "0 != 0");
|
||||
}
|
||||
|
||||
/// Test one returns F::from(1).
|
||||
pub fn test_one<F: PrimeField>() {
|
||||
assert_eq!(F::one(), F::from(1u64), "1 != 1");
|
||||
}
|
||||
|
||||
/// Test From<u64> for F works.
|
||||
pub fn test_from_u64<F: PrimeField>() {
|
||||
assert_eq!(F::one().double(), F::from(2u64), "2 != 2");
|
||||
}
|
||||
|
||||
/// Test is_odd/is_even works.
|
||||
/// This test assumes an odd modulus with oddness being determined by the least-significant bit.
|
||||
/// Accordingly, this test doesn't support fields alternatively defined.
|
||||
/// TODO: Improve in the future.
|
||||
pub fn test_is_odd<F: PrimeField>() {
|
||||
assert_eq!(F::zero().is_odd().unwrap_u8(), 0, "0 was odd");
|
||||
assert_eq!(F::zero().is_even().unwrap_u8(), 1, "0 wasn't even");
|
||||
|
||||
assert_eq!(F::one().is_odd().unwrap_u8(), 1, "1 was even");
|
||||
assert_eq!(F::one().is_even().unwrap_u8(), 0, "1 wasn't odd");
|
||||
|
||||
let neg_one = -F::one();
|
||||
assert_eq!(neg_one.is_odd().unwrap_u8(), 0, "-1 was odd");
|
||||
assert_eq!(neg_one.is_even().unwrap_u8(), 1, "-1 wasn't even");
|
||||
|
||||
assert_eq!(neg_one.double().is_odd().unwrap_u8(), 1, "(-1).double() was even");
|
||||
assert_eq!(neg_one.double().is_even().unwrap_u8(), 0, "(-1).double() wasn't odd");
|
||||
}
|
||||
|
||||
/// Test encoding and decoding of field elements.
|
||||
pub fn test_encoding<F: PrimeField>() {
|
||||
let test = |scalar: F, msg| {
|
||||
let bytes = scalar.to_repr();
|
||||
let mut repr = F::Repr::default();
|
||||
repr.as_mut().copy_from_slice(bytes.as_ref());
|
||||
assert_eq!(scalar, F::from_repr(repr).unwrap(), "{msg} couldn't be encoded and decoded");
|
||||
assert_eq!(
|
||||
scalar,
|
||||
F::from_repr_vartime(repr).unwrap(),
|
||||
"{msg} couldn't be encoded and decoded",
|
||||
);
|
||||
};
|
||||
test(F::zero(), "0");
|
||||
test(F::one(), "1");
|
||||
test(F::one() + F::one(), "2");
|
||||
test(-F::one(), "-1");
|
||||
}
|
||||
|
||||
/// Run all tests on fields implementing PrimeField.
|
||||
pub fn test_prime_field<F: PrimeField>() {
|
||||
test_field::<F>();
|
||||
|
||||
test_zero::<F>();
|
||||
test_one::<F>();
|
||||
test_from_u64::<F>();
|
||||
test_is_odd::<F>();
|
||||
|
||||
// Do a sanity check on the CAPACITY. A full test can't be done at this time
|
||||
assert!(F::CAPACITY <= F::NUM_BITS, "capacity exceeded number of bits");
|
||||
|
||||
test_encoding::<F>();
|
||||
}
|
||||
|
||||
/// Test to_le_bits returns the little-endian bits of a value.
|
||||
// This test assumes that the modulus is at least 4.
|
||||
pub fn test_to_le_bits<F: PrimeField + PrimeFieldBits>() {
|
||||
{
|
||||
let bits = F::zero().to_le_bits();
|
||||
assert_eq!(bits.iter().filter(|bit| **bit).count(), 0, "0 had bits set");
|
||||
}
|
||||
|
||||
{
|
||||
let bits = F::one().to_le_bits();
|
||||
assert!(bits[0], "1 didn't have its least significant bit set");
|
||||
assert_eq!(bits.iter().filter(|bit| **bit).count(), 1, "1 had multiple bits set");
|
||||
}
|
||||
|
||||
{
|
||||
let bits = F::from(2).to_le_bits();
|
||||
assert!(bits[1], "2 didn't have its second bit set");
|
||||
assert_eq!(bits.iter().filter(|bit| **bit).count(), 1, "2 had multiple bits set");
|
||||
}
|
||||
|
||||
{
|
||||
let bits = F::from(3).to_le_bits();
|
||||
assert!(bits[0], "3 didn't have its first bit set");
|
||||
assert!(bits[1], "3 didn't have its second bit set");
|
||||
assert_eq!(bits.iter().filter(|bit| **bit).count(), 2, "3 didn't have two bits set");
|
||||
}
|
||||
}
|
||||
|
||||
/// Test char_le_bits returns the bits of the modulus.
|
||||
pub fn test_char_le_bits<F: PrimeField + PrimeFieldBits>() {
|
||||
// A field with a modulus of 0 may be technically valid? Yet these tests assume some basic
|
||||
// functioning.
|
||||
assert!(F::char_le_bits().iter().any(|bit| *bit), "char_le_bits contained 0");
|
||||
|
||||
// Test this is the bit pattern of the modulus by reconstructing the modulus from it
|
||||
let mut bit = F::one();
|
||||
let mut modulus = F::zero();
|
||||
for set in F::char_le_bits() {
|
||||
if set {
|
||||
modulus += bit;
|
||||
}
|
||||
bit = bit.double();
|
||||
}
|
||||
assert_eq!(modulus, F::zero(), "char_le_bits did not contain the field's modulus");
|
||||
}
|
||||
|
||||
/// Test NUM_BITS is accurate.
|
||||
pub fn test_num_bits<F: PrimeField + PrimeFieldBits>() {
|
||||
let mut val = F::one();
|
||||
let mut bit = 0;
|
||||
while ((bit + 1) < val.to_le_bits().len()) && val.double().to_le_bits()[bit + 1] {
|
||||
val = val.double();
|
||||
bit += 1;
|
||||
}
|
||||
assert_eq!(
|
||||
F::NUM_BITS,
|
||||
u32::try_from(bit + 1).unwrap(),
|
||||
"NUM_BITS was incorrect. it should be {}",
|
||||
bit + 1
|
||||
);
|
||||
}
|
||||
|
||||
/// Test CAPACITY is accurate.
|
||||
pub fn test_capacity<F: PrimeField + PrimeFieldBits>() {
|
||||
assert!(F::CAPACITY <= F::NUM_BITS, "capacity exceeded number of bits");
|
||||
|
||||
let mut val = F::one();
|
||||
assert!(val.to_le_bits()[0], "1 didn't have its least significant bit set");
|
||||
for b in 1 .. F::CAPACITY {
|
||||
val = val.double();
|
||||
val += F::one();
|
||||
for i in 0 ..= b {
|
||||
assert!(
|
||||
val.to_le_bits()[usize::try_from(i).unwrap()],
|
||||
"couldn't set a bit within the capacity",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// If the field has a modulus which is a power of 2, NUM_BITS should equal CAPACITY
|
||||
// Adding one would also be sufficient to trigger an overflow
|
||||
if F::char_le_bits().iter().filter(|bit| **bit).count() == 1 {
|
||||
assert_eq!(
|
||||
F::NUM_BITS,
|
||||
F::CAPACITY,
|
||||
"field has a power of two modulus yet CAPACITY doesn't equal NUM_BITS",
|
||||
);
|
||||
assert_eq!(val + F::one(), F::zero());
|
||||
return;
|
||||
}
|
||||
|
||||
assert_eq!(F::NUM_BITS - 1, F::CAPACITY, "capacity wasn't NUM_BITS - 1");
|
||||
}
|
||||
|
||||
fn pow<F: PrimeFieldBits>(base: F, exp: F) -> F {
|
||||
let mut res = F::one();
|
||||
for bit in exp.to_le_bits().iter().rev() {
|
||||
res *= res;
|
||||
if *bit {
|
||||
res *= base;
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
// Ideally, this would be under field.rs, yet the above pow function requires PrimeFieldBits
|
||||
/// Perform basic tests on the pow functions, even when passed non-canonical inputs.
|
||||
pub fn test_pow<F: PrimeFieldBits>() {
|
||||
// Sanity check the local pow algorithm. Does not have assert messages as these shouldn't fail
|
||||
assert_eq!(pow(F::one(), F::zero()), F::one());
|
||||
assert_eq!(pow(F::one().double(), F::zero()), F::one());
|
||||
assert_eq!(pow(F::one(), F::one()), F::one());
|
||||
|
||||
let two = F::one().double();
|
||||
assert_eq!(pow(two, F::one()), two);
|
||||
assert_eq!(pow(two, two), two.double());
|
||||
let three = two + F::one();
|
||||
assert_eq!(pow(three, F::one()), three);
|
||||
assert_eq!(pow(three, two), three * three);
|
||||
assert_eq!(pow(three, three), three * three * three);
|
||||
|
||||
// TODO: Test against Field::pow once updated to ff 0.13
|
||||
|
||||
// Choose a small base without a notably uniform bit pattern
|
||||
let bit_0 = F::one();
|
||||
let base = {
|
||||
let bit_1 = bit_0.double();
|
||||
let bit_2 = bit_1.double();
|
||||
let bit_3 = bit_2.double();
|
||||
let bit_4 = bit_3.double();
|
||||
let bit_5 = bit_4.double();
|
||||
let bit_6 = bit_5.double();
|
||||
let bit_7 = bit_6.double();
|
||||
bit_7 + bit_6 + bit_5 + bit_2 + bit_0
|
||||
};
|
||||
|
||||
// Ensure pow_vartime returns 1 when the base is raised to 0, handling malleated inputs
|
||||
assert_eq!(base.pow_vartime([]), F::one(), "pow_vartime x^0 ([]) != 1");
|
||||
assert_eq!(base.pow_vartime([0]), F::one(), "pow_vartime x^0 ([0]) != 1");
|
||||
assert_eq!(base.pow_vartime([0, 0]), F::one(), "pow_vartime x^0 ([0, 0]) != 1");
|
||||
|
||||
// Ensure pow_vartime returns the base when raised to 1, handling malleated inputs
|
||||
assert_eq!(base.pow_vartime([1]), base, "pow_vartime x^1 ([1]) != x");
|
||||
assert_eq!(base.pow_vartime([1, 0]), base, "pow_vartime x^1 ([1, 0]) != x");
|
||||
|
||||
// Ensure pow_vartime can handle multiple u64s properly
|
||||
// Create a scalar which exceeds u64
|
||||
let mut bit_64 = bit_0;
|
||||
for _ in 0 .. 64 {
|
||||
bit_64 = bit_64.double();
|
||||
}
|
||||
// Run the tests
|
||||
assert_eq!(base.pow_vartime([0, 1]), pow(base, bit_64), "pow_vartime x^(2^64) != x^(2^64)");
|
||||
assert_eq!(
|
||||
base.pow_vartime([1, 1]),
|
||||
pow(base, bit_64 + F::one()),
|
||||
"pow_vartime x^(2^64 + 1) != x^(2^64 + 1)"
|
||||
);
|
||||
}
|
||||
|
||||
/// Test S is correct.
|
||||
pub fn test_s<F: PrimeFieldBits>() {
|
||||
// "This is the number of leading zero bits in the little-endian bit representation of
|
||||
// `modulus - 1`."
|
||||
let mut s = 0;
|
||||
for b in (F::zero() - F::one()).to_le_bits() {
|
||||
if b {
|
||||
break;
|
||||
}
|
||||
s += 1;
|
||||
}
|
||||
assert_eq!(s, F::S, "incorrect S");
|
||||
}
|
||||
|
||||
// Test the root of unity is correct for the given multiplicative generator.
|
||||
pub fn test_root_of_unity<F: PrimeFieldBits>() {
|
||||
// "It can be calculated by exponentiating `Self::multiplicative_generator` by `t`, where
|
||||
// `t = (modulus - 1) >> Self::S`."
|
||||
|
||||
// Get the bytes to shift
|
||||
let mut bits = (F::zero() - F::one()).to_le_bits().iter().map(|bit| *bit).collect::<Vec<_>>();
|
||||
for _ in 0 .. F::S {
|
||||
bits.remove(0);
|
||||
}
|
||||
|
||||
// Construct t
|
||||
let mut bit = F::one();
|
||||
let mut t = F::zero();
|
||||
for set in bits {
|
||||
if set {
|
||||
t += bit;
|
||||
}
|
||||
bit = bit.double();
|
||||
}
|
||||
|
||||
assert_eq!(pow(F::multiplicative_generator(), t), F::root_of_unity(), "incorrect root of unity");
|
||||
assert_eq!(
|
||||
pow(F::root_of_unity(), pow(F::from(2u64), F::from(F::S.into()))),
|
||||
F::one(),
|
||||
"root of unity raised to 2^S wasn't 1"
|
||||
);
|
||||
}
|
||||
|
||||
/// Run all tests on fields implementing PrimeFieldBits.
|
||||
pub fn test_prime_field_bits<F: PrimeFieldBits>() {
|
||||
test_prime_field::<F>();
|
||||
|
||||
test_to_le_bits::<F>();
|
||||
test_char_le_bits::<F>();
|
||||
|
||||
test_pow::<F>();
|
||||
test_s::<F>();
|
||||
test_root_of_unity::<F>();
|
||||
|
||||
test_num_bits::<F>();
|
||||
test_capacity::<F>();
|
||||
}
|
|
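As a usage sketch (not part of this commit), a downstream curve crate's test could run the whole suite against its scalar field. The `ff_group_tests::prime_field` module path and the k256 `bits` feature are assumptions for illustration, not taken from this diff.

```rust
// Hedged example: run the PrimeFieldBits suite against an assumed field type.
// Assumes k256 is built with its "bits" feature so Scalar implements PrimeFieldBits,
// and that this crate is exposed as `ff_group_tests` with a `prime_field` module.
#[test]
fn k256_scalar_prime_field_bits() {
  ff_group_tests::prime_field::test_prime_field_bits::<k256::Scalar>();
}
```
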
@ -108,6 +108,7 @@ impl Transcript for IetfTranscript {
    self.0.clone()
  }

  // FROST won't use this and this shouldn't be used outside of FROST
  fn rng_seed(&mut self, _: &'static [u8]) -> [u8; 32] {
    unimplemented!()
  }

@ -119,7 +119,7 @@ impl<C: Curve> NonceCommitments<C> {
    let mut dleqs = None;
    if generators.len() >= 2 {
      let mut verify = |i| -> io::Result<_> {
        let dleq = DLEqProof::deserialize(reader)?;
        let dleq = DLEqProof::read(reader)?;
        dleq
          .verify(
            &mut dleq_transcript::<T>(context),

@ -140,8 +140,8 @@ impl<C: Curve> NonceCommitments<C> {
      generator.write(writer)?;
    }
    if let Some(dleqs) = &self.dleqs {
      dleqs[0].serialize(writer)?;
      dleqs[1].serialize(writer)?;
      dleqs[0].write(writer)?;
      dleqs[1].write(writer)?;
    }
    Ok(())
  }

@ -184,7 +184,7 @@ impl<C: Curve> Commitments<C> {
      if let Some(dleqs) = &nonce.dleqs {
        let mut transcript_dleq = |label, dleq: &DLEqProof<C::G>| {
          let mut buf = vec![];
          dleq.serialize(&mut buf).unwrap();
          dleq.write(&mut buf).unwrap();
          t.append_message(label, &buf);
        };
        transcript_dleq(b"dleq_D", &dleqs[0]);

@ -7,11 +7,14 @@ use std::{
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use zeroize::{Zeroize, Zeroizing};

use transcript::Transcript;

use group::{ff::PrimeField, GroupEncoding};
use group::{
  ff::{Field, PrimeField},
  GroupEncoding,
};
use multiexp::BatchVerifier;

use crate::{

@ -46,6 +49,7 @@ impl<T: Writable> Writable for Vec<T> {
/// Pairing of an Algorithm with a ThresholdKeys instance and this specific signing set.
#[derive(Clone, Zeroize)]
pub struct Params<C: Curve, A: Algorithm<C>> {
  // Skips the algorithm due to being too large a bound to feasibly enforce on users
  #[zeroize(skip)]
  algorithm: A,
  keys: ThresholdKeys<C>,

@ -78,8 +82,11 @@ impl<C: Curve, A: Addendum> Writable for Preprocess<C, A> {
/// A cached preprocess. A preprocess MUST only be used once. Reuse will enable third-party
/// recovery of your private key share. Additionally, this MUST be handled with the same security
/// as your private key share, as knowledge of it also enables recovery.
#[derive(Zeroize, ZeroizeOnDrop)]
pub struct CachedPreprocess(pub [u8; 32]);
// Directly exposes the [u8; 32] member to avoid needing to route through std::io interfaces.
// Still uses Zeroizing internally so when users grab it, they have a higher likelihood of
// appreciating how to handle it and don't immediately start copying it just by grabbing it.
#[derive(Zeroize)]
pub struct CachedPreprocess(pub Zeroizing<[u8; 32]>);

/// Trait for the initial state machine of a two-round signing protocol.
pub trait PreprocessMachine {

@ -110,11 +117,11 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {

  fn seeded_preprocess(
    self,
    seed: Zeroizing<CachedPreprocess>,
    seed: CachedPreprocess,
  ) -> (AlgorithmSignMachine<C, A>, Preprocess<C, A::Addendum>) {
    let mut params = self.params;

    let mut rng = ChaCha20Rng::from_seed(seed.0);
    let mut rng = ChaCha20Rng::from_seed(*seed.0);
    // Get a challenge to the existing transcript for use when proving for the commitments
    let commitments_challenge = params.algorithm.transcript().challenge(b"commitments");
    let (nonces, commitments) = Commitments::new::<_, A::Transcript>(

@ -153,7 +160,7 @@ impl<C: Curve, A: Algorithm<C>> AlgorithmMachine<C, A> {
        commitments_challenge: self.params.algorithm.transcript().challenge(b"commitments"),

        params: self.params,
        seed: Zeroizing::new(CachedPreprocess([0; 32])),
        seed: CachedPreprocess(Zeroizing::new([0; 32])),

        nonces,
        preprocess,

@ -174,7 +181,7 @@ impl<C: Curve, A: Algorithm<C>> PreprocessMachine for AlgorithmMachine<C, A> {
    self,
    rng: &mut R,
  ) -> (Self::SignMachine, Preprocess<C, A::Addendum>) {
    let mut seed = Zeroizing::new(CachedPreprocess([0; 32]));
    let mut seed = CachedPreprocess(Zeroizing::new([0; 32]));
    rng.fill_bytes(seed.0.as_mut());
    self.seeded_preprocess(seed)
  }

@ -188,6 +195,12 @@ impl<C: Curve> Writable for SignatureShare<C> {
    writer.write_all(self.0.to_repr().as_ref())
  }
}
#[cfg(any(test, feature = "tests"))]
impl<C: Curve> SignatureShare<C> {
  pub(crate) fn invalidate(&mut self) {
    self.0 += C::F::one();
  }
}

/// Trait for the second machine of a two-round signing protocol.
pub trait SignMachine<S>: Sized {

@ -206,14 +219,14 @@ pub trait SignMachine<S>: Sized {
  /// of it enables recovery of your private key share. Third-party recovery of a cached preprocess
  /// also enables recovery of your private key share, so this MUST be treated with the same
  /// security as your private key share.
  fn cache(self) -> Zeroizing<CachedPreprocess>;
  fn cache(self) -> CachedPreprocess;

  /// Create a sign machine from a cached preprocess. After this, the preprocess should be fully
  /// deleted, as it must never be reused. It is
  fn from_cache(
    params: Self::Params,
    keys: Self::Keys,
    cache: Zeroizing<CachedPreprocess>,
    cache: CachedPreprocess,
  ) -> Result<Self, FrostError>;

  /// Read a Preprocess message. Despite taking self, this does not save the preprocess.

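As a usage sketch (not part of this commit), the intended single-use flow with the reworked `CachedPreprocess` looks roughly like the following; the import paths and the storage step are assumptions for illustration, while the types and signatures mirror the post-change code above.

```rust
// Hedged sketch: pause a signer by caching its preprocess, then rebuild it exactly once.
// Import paths are assumed; treat the cache with the same security as a key share.
use frost::{
  curve::Curve,
  ThresholdKeys, FrostError,
  algorithm::Algorithm,
  sign::{CachedPreprocess, SignMachine, AlgorithmSignMachine},
};

fn pause_and_resume<C: Curve, A: Algorithm<C>>(
  algorithm: A,
  keys: ThresholdKeys<C>,
  machine: AlgorithmSignMachine<C, A>,
) -> Result<AlgorithmSignMachine<C, A>, FrostError> {
  // Consume the machine into its cache
  let cache: CachedPreprocess = machine.cache();
  // ... persist cache.0 to secure storage, restart, and load it back exactly once ...
  // Rebuilding consumes the cache; it must never be used to build a second machine
  AlgorithmSignMachine::from_cache(algorithm, keys, cache)
}
```
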
@ -235,10 +248,11 @@ pub trait SignMachine<S>: Sized {
#[derive(Zeroize)]
pub struct AlgorithmSignMachine<C: Curve, A: Algorithm<C>> {
  params: Params<C, A>,
  seed: Zeroizing<CachedPreprocess>,
  seed: CachedPreprocess,

  commitments_challenge: <A::Transcript as Transcript>::Challenge,
  pub(crate) nonces: Vec<Nonce<C>>,
  // Skips the preprocess due to being too large a bound to feasibly enforce on users
  #[zeroize(skip)]
  pub(crate) preprocess: Preprocess<C, A::Addendum>,
  pub(crate) blame_entropy: [u8; 32],

@ -251,14 +265,14 @@ impl<C: Curve, A: Algorithm<C>> SignMachine<A::Signature> for AlgorithmSignMachi
  type SignatureShare = SignatureShare<C>;
  type SignatureMachine = AlgorithmSignatureMachine<C, A>;

  fn cache(self) -> Zeroizing<CachedPreprocess> {
  fn cache(self) -> CachedPreprocess {
    self.seed
  }

  fn from_cache(
    algorithm: A,
    keys: ThresholdKeys<C>,
    cache: Zeroizing<CachedPreprocess>,
    cache: CachedPreprocess,
  ) -> Result<Self, FrostError> {
    let (machine, _) = AlgorithmMachine::new(algorithm, keys)?.seeded_preprocess(cache);
    Ok(machine)

@ -1,27 +0,0 @@
use rand_core::{RngCore, CryptoRng};

use group::Group;

use crate::Curve;

// Test successful multiexp, with enough pairs to trigger its variety of algorithms
// Multiexp has its own tests, yet only against k256 and Ed25519 (which should be sufficient
// as-is to prove multiexp), and this doesn't hurt
pub fn test_multiexp<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
  let mut pairs = Vec::with_capacity(1000);
  let mut sum = C::G::identity();
  for _ in 0 .. 10 {
    for _ in 0 .. 100 {
      pairs.push((C::random_nonzero_F(&mut *rng), C::generator() * C::random_nonzero_F(&mut *rng)));
      sum += pairs[pairs.len() - 1].1 * pairs[pairs.len() - 1].0;
    }
    assert_eq!(multiexp::multiexp(&pairs), sum);
    assert_eq!(multiexp::multiexp_vartime(&pairs), sum);
  }
}

pub fn test_curve<R: RngCore + CryptoRng, C: Curve>(rng: &mut R) {
  // TODO: Test the Curve functions themselves

  test_multiexp::<_, C>(rng);
}

@ -10,8 +10,6 @@ use crate::{
  sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine, AlgorithmMachine},
};

/// Curve tests.
pub mod curve;
/// Vectorized test suite to ensure consistency.
pub mod vectors;

@ -61,7 +59,9 @@ pub fn algorithm_machines<R: RngCore, C: Curve, A: Algorithm<C>>(
    .collect()
}

fn sign_internal<
// Run the commit step and generate signature shares
#[allow(clippy::type_complexity)]
pub(crate) fn commit_and_shares<
  R: RngCore + CryptoRng,
  M: PreprocessMachine,
  F: FnMut(&mut R, &mut HashMap<u16, M::SignMachine>),

@ -70,7 +70,10 @@ fn sign_internal<
  mut machines: HashMap<u16, M>,
  mut cache: F,
  msg: &[u8],
) -> M::Signature {
) -> (
  HashMap<u16, <M::SignMachine as SignMachine<M::Signature>>::SignatureMachine>,
  HashMap<u16, <M::SignMachine as SignMachine<M::Signature>>::SignatureShare>,
) {
  let mut commitments = HashMap::new();
  let mut machines = machines
    .drain()

@ -88,7 +91,7 @@ fn sign_internal<
  cache(rng, &mut machines);

  let mut shares = HashMap::new();
  let mut machines = machines
  let machines = machines
    .drain()
    .map(|(i, machine)| {
      let (machine, share) = machine.sign(clone_without(&commitments, &i), msg).unwrap();

@ -101,6 +104,21 @@ fn sign_internal<
    })
    .collect::<HashMap<_, _>>();

  (machines, shares)
}

fn sign_internal<
  R: RngCore + CryptoRng,
  M: PreprocessMachine,
  F: FnMut(&mut R, &mut HashMap<u16, M::SignMachine>),
>(
  rng: &mut R,
  machines: HashMap<u16, M>,
  cache: F,
  msg: &[u8],
) -> M::Signature {
  let (mut machines, shares) = commit_and_shares(rng, machines, cache, msg);

  let mut signature = None;
  for (i, machine) in machines.drain() {
    let sig = machine.complete(clone_without(&shares, &i)).unwrap();

@ -137,7 +155,7 @@ pub fn sign<R: RngCore + CryptoRng, M: PreprocessMachine>(
    machines,
    |rng, machines| {
      // Cache and rebuild half of the machines
      let mut included = machines.keys().into_iter().cloned().collect::<Vec<_>>();
      let mut included = machines.keys().cloned().collect::<Vec<_>>();
      for i in included.drain(..) {
        if (rng.next_u64() % 2) == 0 {
          let cache = machines.remove(&i).unwrap().cache();

@ -9,17 +9,17 @@ use rand_core::{RngCore, CryptoRng};

use group::{ff::PrimeField, GroupEncoding};

use dkg::tests::{key_gen, test_ciphersuite as test_dkg};
use dkg::tests::key_gen;

use crate::{
  curve::Curve,
  ThresholdCore, ThresholdKeys,
  ThresholdCore, ThresholdKeys, FrostError,
  algorithm::{Schnorr, Hram},
  sign::{
    Nonce, GeneratorCommitments, NonceCommitments, Commitments, Writable, Preprocess, SignMachine,
    SignatureMachine, AlgorithmMachine,
  },
  tests::{clone_without, recover_key, algorithm_machines, sign, curve::test_curve},
  tests::{clone_without, recover_key, algorithm_machines, commit_and_shares, sign},
};

pub struct Vectors {

@ -56,7 +56,7 @@ impl From<serde_json::Value> for Vectors {

      msg: to_str(&value["inputs"]["message"]),
      included: to_str(&value["round_one_outputs"]["participant_list"])
        .split(",")
        .split(',')
        .map(u16::from_str)
        .collect::<Result<_, _>>()
        .unwrap(),

@ -118,12 +118,6 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
  rng: &mut R,
  vectors: Vectors,
) {
  // Do basic tests before trying the vectors
  test_curve::<_, C>(&mut *rng);

  // Test the DKG
  test_dkg::<_, C>(&mut *rng);

  // Test a basic Schnorr signature
  {
    let keys = key_gen(&mut *rng);

@ -133,6 +127,27 @@ pub fn test_with_vectors<R: RngCore + CryptoRng, C: Curve, H: Hram<C>>(
    assert!(sig.verify(keys[&1].group_key(), H::hram(&sig.R, &keys[&1].group_key(), MSG)));
  }

  // Test blame on an invalid Schnorr signature share
  {
    let keys = key_gen(&mut *rng);
    let machines = algorithm_machines(&mut *rng, Schnorr::<C, H>::new(), &keys);
    const MSG: &[u8] = b"Hello, World!";

    let (mut machines, mut shares) = commit_and_shares(&mut *rng, machines, |_, _| {}, MSG);
    let faulty = *shares.keys().next().unwrap();
    shares.get_mut(&faulty).unwrap().invalidate();

    for (i, machine) in machines.drain() {
      if i == faulty {
        continue;
      }
      assert_eq!(
        machine.complete(clone_without(&shares, &i)).err(),
        Some(FrostError::InvalidShare(faulty))
      );
    }
  }

  // Test against the vectors
  let keys = vectors_to_multisig_keys::<C>(&vectors);
  let group_key =

@ -7,7 +7,7 @@ pub struct MerlinTranscript(pub merlin::Transcript);
// Merlin doesn't implement Debug so provide a stub which won't panic
impl Debug for MerlinTranscript {
  fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt.debug_struct("MerlinTranscript").finish()
    fmt.debug_struct("MerlinTranscript").finish_non_exhaustive()
  }
}

@ -9,7 +9,27 @@ This results in a two-round protocol.
### Encryption

In order to protect the secret shares during communication, the `dkg` library
additionally sends an encryption key. These encryption keys are used in an ECDH
to derive a shared key. This key is then hashed to obtain two keys and IVs, one
for sending and one for receiving, with the given counterparty. Chacha20 is used
as the stream cipher.
establishes a public key for encryption at the start of a given protocol.
Every encrypted message (such as the secret shares) then includes a per-message
encryption key. These two keys are used in an Elliptic-curve Diffie-Hellman
handshake to derive a shared key. This shared key is then hashed to obtain a key
and IV for use in a ChaCha20 stream cipher instance, which is xor'd against a
message to encrypt it.

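As a hedged illustration of that flow (not the `dkg` crate's actual API), the derivation and encryption step could look roughly like this, assuming the ECDH shared secret has already been serialized to bytes and that the hash output is split into a 32-byte key and a 12-byte IV:

```rust
// Hedged sketch, not taken from the dkg crate: hash an ECDH shared secret into a ChaCha20
// key/IV and xor the resulting keystream against the message in place.
use chacha20::{
  cipher::{KeyIvInit, StreamCipher},
  ChaCha20,
};
use sha2::{Digest, Sha512};

fn encrypt_in_place(ecdh_shared_secret: &[u8], msg: &mut [u8]) {
  // Derive key material from the shared secret (the exact split is an assumption)
  let digest = Sha512::digest(ecdh_shared_secret);
  let mut key = [0u8; 32];
  key.copy_from_slice(&digest[.. 32]);
  let mut iv = [0u8; 12];
  iv.copy_from_slice(&digest[32 .. 44]);
  // ChaCha20 is a stream cipher: encryption is xoring its keystream against the message
  ChaCha20::new(&key.into(), &iv.into()).apply_keystream(msg);
}
```
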
### Blame

Since each message has a distinct key attached, and accordingly a distinct
shared key, it's possible to reveal the shared key for a specific message
without revealing any other message's decryption keys. This is utilized when a
participant misbehaves. A participant who receives an invalid encrypted message
publishes its key, which it can do without concern for side effects. With the
key published, all participants can decrypt the message in order to decide
blame.

While key reuse by a participant is considered as them revealing the messages
themselves, and therefore out of scope, there is an attack where a malicious
adversary claims another participant's encryption key. They'll fail to encrypt
their message, and the recipient will issue a blame statement. This blame
statement, intended to reveal the malicious adversary, also reveals the message
by the participant whose keys were co-opted. To resolve this, a
proof-of-possession is also included with encrypted messages, ensuring only
those actually with per-message keys can claim to use them.

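Continuing the earlier sketch, and still only as an assumed illustration: because the stream cipher simply xors a keystream, publishing the per-message shared secret lets every participant run the identical derivation to recover the plaintext and judge the disputed share for themselves.

```rust
// Hedged sketch: decryption is the same xor as encryption, so a revealed shared secret
// lets any participant recover the disputed message and decide blame from its contents.
// Reuses the encrypt_in_place sketch above.
fn reveal_and_decrypt(revealed_shared_secret: &[u8], mut ciphertext: Vec<u8>) -> Vec<u8> {
  encrypt_in_place(revealed_shared_secret, &mut ciphertext);
  ciphertext
}
```
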
@ -18,6 +18,7 @@ struct LocalNetwork {
  i: u16,
  size: u16,
  round: usize,
  #[allow(clippy::type_complexity)]
  rounds: Arc<RwLock<Vec<HashMap<u16, Vec<u8>>>>>,
}

@ -11,7 +11,7 @@ use serai_runtime::{
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;

fn insecure_pair_from_name(name: &'static str) -> Pair {
  Pair::from_string(&format!("//{}", name), None).unwrap()
  Pair::from_string(&format!("//{name}"), None).unwrap()
}

fn account_id_from_name(name: &'static str) -> AccountId {

@ -90,6 +90,6 @@ pub fn inherent_benchmark_data() -> Result<InherentData> {
  let mut inherent_data = InherentData::new();
  sp_timestamp::InherentDataProvider::new(Duration::from_millis(0).into())
    .provide_inherent_data(&mut inherent_data)
    .map_err(|e| format!("creating inherent data: {:?}", e))?;
    .map_err(|e| format!("creating inherent data: {e:?}"))?;
  Ok(inherent_data)
}

@ -121,7 +121,7 @@ where
      return Ok((block, None));
    }

    self.check(&mut block).await.map_err(|e| format!("{}", e))?;
    self.check(&mut block).await.map_err(|e| format!("{e}"))?;
    Ok((block, None))
  }
}

@ -38,7 +38,7 @@ const PROTOCOL_NAME: &str = "/tendermint/1";
pub fn protocol_name<Hash: AsRef<[u8]>>(genesis: Hash, fork: Option<&str>) -> ProtocolName {
  let mut name = format!("/{}", hex::encode(genesis.as_ref()));
  if let Some(fork) = fork {
    name += &format!("/{}", fork);
    name += &format!("/{fork}");
  }
  name += PROTOCOL_NAME;
  name.into()

@ -44,7 +44,7 @@ impl SignatureScheme for TestSignatureScheme {

  #[must_use]
  fn verify(&self, validator: u16, msg: &[u8], sig: &[u8; 32]) -> bool {
    (sig[.. 2] == validator.to_le_bytes()) && (&sig[2 ..] == &[msg, &[0; 30]].concat()[.. 30])
    (sig[.. 2] == validator.to_le_bytes()) && (sig[2 ..] == [msg, &[0; 30]].concat()[.. 30])
  }

  fn aggregate(sigs: &[[u8; 32]]) -> Vec<[u8; 32]> {

@ -96,6 +96,7 @@ impl Block for TestBlock {
  }
}

#[allow(clippy::type_complexity)]
struct TestNetwork(u16, Arc<RwLock<Vec<(MessageSender<Self>, StepSender<Self>)>>>);

#[async_trait]