Mirror of https://github.com/serai-dex/serai.git (synced 2025-03-24 08:08:51 +00:00)

Merge branch 'develop' into crypto-tweaks
Commit caf37527eb: 173 changed files with 29638 additions and 3517 deletions
.github/actions/LICENSE (new file, 21 lines)

MIT License

Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
.github/actions/bitcoin/action.yml (new file, 41 lines)

name: bitcoin-regtest
description: Spawns a regtest Bitcoin daemon

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: 24.0.1

runs:
  using: "composite"
  steps:
    - name: Bitcoin Daemon Cache
      id: cache-bitcoind
      uses: actions/cache@v3
      with:
        path: bitcoind
        key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Bitcoin Daemon
      if: steps.cache-bitcoind.outputs.cache-hit != 'true'
      shell: bash
      run: |
        RUNNER_OS=linux
        RUNNER_ARCH=x86_64

        BASE=bitcoin-${{ inputs.version }}
        FILE=$BASE-$RUNNER_ARCH-$RUNNER_OS-gnu.tar.gz
        wget https://bitcoincore.org/bin/bitcoin-core-${{ inputs.version }}/$FILE
        tar xzvf $FILE

        cd bitcoin-${{ inputs.version }}
        sudo mv bin/* /bin && sudo mv lib/* /lib

    - name: Bitcoin Regtest Daemon
      shell: bash
      run: |
        RPC_USER=serai
        RPC_PASS=seraidex

        bitcoind -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS -daemon
The next three hunks modify an existing composite action (the one installing Protobuf, solc, and the Rust toolchains); its file path was not captured in this extract.

@@ -21,7 +21,7 @@ runs:
   using: "composite"
   steps:
     - name: Install Protobuf
-      uses: arduino/setup-protoc@v1
+      uses: arduino/setup-protoc@master
       with:
        repo-token: ${{ inputs.github-token }}

@@ -33,7 +33,7 @@ runs:
        solc-select use 0.8.16

    - name: Install Rust
-     uses: ./.github/actions/cached-rust
+     uses: dtolnay/rust-toolchain@master
      with:
        toolchain: ${{ inputs.rust-toolchain }}
        components: ${{ inputs.rust-components }}

@@ -44,8 +44,7 @@ runs:
      run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

    - name: Install WASM toolchain
-     uses: actions-rs/toolchain@v1
+     uses: dtolnay/rust-toolchain@master
      with:
        toolchain: ${{ steps.nightly.outputs.version }}
-       profile: minimal
-       target: wasm32-unknown-unknown
+       targets: wasm32-unknown-unknown
.github/actions/cached-rust/action.yml (deleted, 34 lines removed)

name: cached-rust
description: Installs Rust, caching ~/.cargo and ./target

inputs:
  toolchain:
    description: "Toolchain to install"
    required: false
    default: stable

  components:
    description: "Components to install"
    required: false
    default:

runs:
  using: "composite"
  steps:
    - name: Rust Cache
      uses: actions/cache@v3
      with:
        path: |
          ~/.cargo/registry
          ~/.cargo/git
          ./target
        key: ${{ runner.os }}-${{ runner.arch }}-rust-${{ steps.install-rust.outputs.rustc_hash }}-${{ hashFiles('**/Cargo.lock') }}
        restore-keys: ${{ runner.os }}-${{ runner.arch }}-rust-${{ steps.install-rust.outputs.rustc_hash }}-

    - name: Install Rust
      uses: actions-rs/toolchain@v1
      with:
        toolchain: ${{ inputs.toolchain }}
        profile: minimal
        default: true
        components: ${{ inputs.components }}
.github/actions/monero-wallet-rpc/action.yml (new file, 44 lines)

name: monero-wallet-rpc
description: Spawns a Monero Wallet-RPC.

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: v0.18.1.2

runs:
  using: "composite"
  steps:
    - name: Monero Wallet RPC Cache
      id: cache-monero-wallet-rpc
      uses: actions/cache@v3
      with:
        path: monero-wallet-rpc
        key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Monero Wallet RPC
      if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
      # Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
      # to the contained folder not following the same naming scheme and
      # requiring further expansion not worth doing right now
      shell: bash
      run: |
        RUNNER_OS=${{ runner.os }}
        RUNNER_ARCH=${{ runner.arch }}

        RUNNER_OS=${RUNNER_OS,,}
        RUNNER_ARCH=${RUNNER_ARCH,,}

        RUNNER_OS=linux
        RUNNER_ARCH=x64

        FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
        wget https://downloads.getmonero.org/cli/$FILE
        tar -xvf $FILE

        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc

    - name: Monero Wallet RPC
      shell: bash
      run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach
.github/actions/monero/action.yml (2 lines changed)

@@ -5,7 +5,7 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: v0.18.0.0
+    default: v0.18.1.2

 runs:
   using: "composite"
.github/actions/test-dependencies/action.yml (28 lines changed)

@@ -12,6 +12,16 @@ inputs:
     required: false
     default: v0.18.0.0

+  bitcoin-version:
+    description: "Bitcoin version to download and run as a regtest node"
+    required: false
+    default: 24.0.1
+
+  serai:
+    description: "Run a Serai development node in the background"
+    required: false
+    default: false
+
 runs:
   using: "composite"
   steps:

@@ -29,3 +39,21 @@
       uses: ./.github/actions/monero
       with:
         version: ${{ inputs.monero-version }}
+
+    - name: Run a Bitcoin Regtest Node
+      uses: ./.github/actions/bitcoin
+      with:
+        version: ${{ inputs.bitcoin-version }}
+
+    - name: Run a Monero Wallet-RPC
+      uses: ./.github/actions/monero-wallet-rpc
+
+    - name: Run a Serai Development Node
+      if: ${{ inputs.serai }}
+      shell: bash
+      run: |
+        cd substrate/node
+        cargo build
+        cd ../..
+
+        ./target/debug/serai-node --dev &
.github/nightly-version (2 lines changed)

@@ -1 +1 @@
-nightly-2022-12-01
+nightly-2023-02-01
.github/workflows/daily-deny.yml (5 lines changed)

@@ -18,10 +18,7 @@ jobs:
           key: rust-advisory-db

       - name: Install cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          profile: minimal
+        uses: dtolnay/rust-toolchain@stable

       - name: Install cargo deny
         run: cargo install --locked cargo-deny
.github/workflows/monero-tests.yaml (4 lines changed)

@@ -33,7 +33,7 @@ jobs:
       # Test against all supported protocol versions
       strategy:
         matrix:
-          version: [v0.17.3.2, v0.18.0.0]
+          version: [v0.17.3.2, v0.18.1.2]

       steps:
         - uses: actions/checkout@v3

@@ -50,7 +50,7 @@ jobs:

       - name: Run Integration Tests
         # Don't run if the the tests workflow also will
-        if: ${{ matrix.version != 'v0.18.0.0' }}
+        if: ${{ matrix.version != 'v0.18.1.2' }}
         run: |
           cargo test --package monero-serai --all-features --test '*'
           cargo test --package serai-processor monero
.github/workflows/tests.yml (18 lines changed)

@@ -26,7 +26,7 @@ jobs:
           rust-components: clippy

       - name: Run Clippy
-        run: cargo clippy --all-features --tests -- -D warnings -A dead_code
+        run: cargo clippy --all-features --all-targets -- -D warnings -A dead_code

   deny:
     runs-on: ubuntu-latest

@@ -40,10 +40,7 @@ jobs:
           key: rust-advisory-db

       - name: Install cargo
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          profile: minimal
+        uses: dtolnay/rust-toolchain@stable

       - name: Install cargo deny
         run: cargo install --locked cargo-deny

@@ -61,8 +58,13 @@ jobs:
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}

+      - name: Build node
+        run: |
+          cd substrate/node
+          cargo build
+
       - name: Run Tests
-        run: cargo test --all-features
+        run: GITHUB_CI=true cargo test --all-features

   fmt:
     runs-on: ubuntu-latest

@@ -73,12 +75,10 @@ jobs:
         id: nightly
         run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

-      # Doesn't grab the cache as it's not needed
       - name: Install rustfmt
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: ${{ steps.nightly.outputs.version }}
-          profile: minimal
           components: rustfmt

       - name: Run rustfmt
Cargo.lock (generated, 3954 lines changed; diff suppressed because it is too large)
Cargo.toml (24 lines changed)

@@ -22,6 +22,19 @@ members = [

   "processor",

+  "substrate/serai/primitives",
+  "substrate/serai/client",
+
+  "substrate/tokens/primitives",
+  "substrate/tokens/pallet",
+
+  "substrate/in-instructions/primitives",
+  "substrate/in-instructions/pallet",
+  "substrate/in-instructions/client",
+
+  "substrate/validator-sets/primitives",
+  "substrate/validator-sets/pallet",
+
   "substrate/tendermint/machine",
   "substrate/tendermint/primitives",
   "substrate/tendermint/client",

@@ -29,13 +42,10 @@ members = [

   "substrate/runtime",
   "substrate/node",
-
-  "contracts/extension",
-  "contracts/multisig",
 ]

 # Always compile Monero (and a variety of dependencies) with optimizations due
-# to the unoptimized performance of Bulletproofs
+# to the extensive operations required for Bulletproofs
 [profile.dev.package]
 subtle = { opt-level = 3 }
 curve25519-dalek = { opt-level = 3 }

@@ -54,7 +64,7 @@ monero-serai = { opt-level = 3 }
 [profile.release]
 panic = "unwind"

+# Required for subxt
 [patch.crates-io]
-# array-bytes 4.1.0 is GPL-3.0.
-# array-bytes git, which has no code changes, includes a dual-license under Apache-2.0.
-array-bytes = { git = "https://github.com/hack-ink/array-bytes", rev = "994cd29b66bd2ab5c8c15f0b15a1618d4bb2d94c" }
+sp-core = { git = "https://github.com/serai-dex/substrate" }
+sp-runtime = { git = "https://github.com/serai-dex/substrate" }
LICENSE (2 lines changed)

@@ -4,3 +4,5 @@ depending on the crate in question. Each crate declares their license in their
 a full copy of the AGPL-3.0 License is included in the root of this repository
 as a reference text. This copy should be provided with any distribution of a
 crate licensed under the AGPL-3.0, as per its terms.
+
+The GitHub actions (`.github/actions`) are licensed under the MIT license.
README.md (13 lines changed)

@@ -1,8 +1,9 @@
 # Serai

 Serai is a new DEX, built from the ground up, initially planning on listing
-Bitcoin, Ethereum, Monero, DAI, offering a liquidity pool trading experience.
-Funds are stored in an economically secured threshold multisig wallet.
+Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
+experience. Funds are stored in an economically secured threshold-multisig
+wallet.

 [Getting Started](docs/Getting%20Started.md)

@@ -10,6 +11,9 @@ Funds are stored in an economically secured threshold multisig wallet.

 - `docs`: Documentation on the Serai protocol.

+- `common`: Crates containing utilities common to a variety of areas under
+  Serai, none neatly fitting under another category.
+
 - `crypto`: A series of composable cryptographic libraries built around the
   `ff`/`group` APIs achieving a variety of tasks. These range from generic
   infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as

@@ -22,13 +26,14 @@ Funds are stored in an economically secured threshold multisig wallet.
 - `processor`: A generic chain processor to process data for Serai and process
   events from Serai, executing transactions as expected and needed.

-- `contracts`: Smart Contracts implementing Serai's functionality.
-
 - `substrate`: Substrate crates used to instantiate the Serai network.

+- `deploy`: Scripts to deploy a Serai node/test environment.
+
 ### Links

 - [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
+- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
 - [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
 - [Matrix](https://matrix.to/#/#serai:matrix.org):
   https://matrix.to/#/#serai:matrix.org
coins/bitcoin/Cargo.toml (new file, 31 lines)

[package]
name = "bitcoin-serai"
version = "0.1.0"
description = "A Bitcoin library for FROST-signing transactions"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021"

[dependencies]
lazy_static = "1"
thiserror = "1"

rand_core = "0.6"

sha2 = "0.10"

secp256k1 = { version = "0.24", features = ["global-context"] }
bitcoin = { version = "0.29", features = ["serde"] }

k256 = { version = "0.11", features = ["arithmetic"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.2", features = ["recommended"] }
frost = { version = "0.5", package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1"] }

hex = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.11", features = ["json"] }

[dev-dependencies]
frost = { version = "0.5", package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
coins/bitcoin/LICENSE (new file, 21 lines)

MIT License, Copyright (c) 2022-2023 Luke Parker; the text is identical to the .github/actions/LICENSE file above.
coins/bitcoin/src/crypto.rs (new file, 61 lines)

use lazy_static::lazy_static;

use sha2::{Digest, Sha256};

use k256::{
  elliptic_curve::{
    ops::Reduce,
    sec1::{Tag, ToEncodedPoint},
  },
  U256, Scalar, ProjectivePoint,
};

use bitcoin::XOnlyPublicKey;

use frost::{algorithm::Hram, curve::Secp256k1};

/// Get the x coordinate of a non-infinity, even point. Panics on invalid input.
pub fn x(key: &ProjectivePoint) -> [u8; 32] {
  let encoded = key.to_encoded_point(true);
  assert_eq!(encoded.tag(), Tag::CompressedEvenY);
  (*encoded.x().expect("point at infinity")).into()
}

/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
  XOnlyPublicKey::from_slice(&x(key)).unwrap()
}

/// Make a point even, returning the even version and the offset required for it to be even.
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
  let mut c = 0;
  while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
    key += ProjectivePoint::GENERATOR;
    c += 1;
  }
  (key, c)
}

/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
#[derive(Clone, Copy, Debug)]
pub struct BitcoinHram {}

lazy_static! {
  static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
}

#[allow(non_snake_case)]
impl Hram<Secp256k1> for BitcoinHram {
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let (R, _) = make_even(*R);

    let mut data = Sha256::new();
    data.update(*TAG_HASH);
    data.update(*TAG_HASH);
    data.update(x(&R));
    data.update(x(A));
    data.update(m);

    Scalar::from_uint_reduced(U256::from_be_slice(&data.finalize()))
  }
}
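The doubled TAG_HASH update above is BIP-340's "tagged hash": SHA256(SHA256(tag) || SHA256(tag) || data), with R.x || A.x || m as the data under the "BIP0340/challenge" tag. A minimal standalone sketch of that construction follows; the helper name and sample input are illustrative, not part of the crate.

use sha2::{Digest, Sha256};

// SHA256(SHA256(tag) || SHA256(tag) || data), per BIP-340's tagged-hash definition.
fn tagged_hash(tag: &[u8], data: &[u8]) -> [u8; 32] {
  let tag_hash = Sha256::digest(tag);
  let mut hasher = Sha256::new();
  hasher.update(tag_hash);
  hasher.update(tag_hash);
  hasher.update(data);
  hasher.finalize().into()
}

fn main() {
  // BitcoinHram feeds R.x || A.x || m as the data for the "BIP0340/challenge" tag.
  let challenge = tagged_hash(b"BIP0340/challenge", b"example data");
  println!("{:x?}", challenge);
}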
coins/bitcoin/src/lib.rs (new file, 9 lines)

/// Cryptographic helpers.
pub mod crypto;
/// Wallet functionality to create transactions.
pub mod wallet;
/// A minimal async RPC.
pub mod rpc;

#[cfg(test)]
mod tests;
coins/bitcoin/src/rpc.rs (new file, 80 lines)

use core::fmt::Debug;

use thiserror::Error;

use serde::{Deserialize, de::DeserializeOwned};
use serde_json::json;

use bitcoin::{
  hashes::hex::{FromHex, ToHex},
  consensus::encode,
  Txid, Transaction, BlockHash, Block,
};

#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub(crate) enum RpcResponse<T> {
  Ok { result: T },
  Err { error: String },
}

#[derive(Clone, Debug)]
pub struct Rpc(String);

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
  #[error("couldn't connect to node")]
  ConnectionError,
  #[error("request had an error: {0}")]
  RequestError(String),
  #[error("node sent an invalid response")]
  InvalidResponse,
}

impl Rpc {
  pub fn new(url: String) -> Rpc {
    Rpc(url)
  }

  pub async fn rpc_call<Response: DeserializeOwned + Debug>(
    &self,
    method: &str,
    params: serde_json::Value,
  ) -> Result<Response, RpcError> {
    let client = reqwest::Client::new();
    let res = client
      .post(&self.0)
      .json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
      .send()
      .await
      .map_err(|_| RpcError::ConnectionError)?
      .text()
      .await
      .map_err(|_| RpcError::ConnectionError)?;

    let res: RpcResponse<Response> =
      serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
    match res {
      RpcResponse::Ok { result } => Ok(result),
      RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
    }
  }

  pub async fn get_latest_block_number(&self) -> Result<usize, RpcError> {
    self.rpc_call("getblockcount", json!([])).await
  }

  pub async fn get_block_hash(&self, number: usize) -> Result<BlockHash, RpcError> {
    self.rpc_call("getblockhash", json!([number])).await
  }

  pub async fn get_block(&self, block_hash: &BlockHash) -> Result<Block, RpcError> {
    let hex = self.rpc_call::<String>("getblock", json!([block_hash.to_hex(), 0])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
    encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)
  }

  pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
    self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await
  }
}
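A possible way to exercise this RPC against the regtest node spawned by the new bitcoin action might look like the sketch below. The URL, port, and in-URL credentials are assumptions for illustration (they mirror the action's serai/seraidex settings; how authentication is actually supplied depends on the deployment), and tokio is assumed to be available as the async runtime.

use bitcoin_serai::rpc::Rpc;

#[tokio::main]
async fn main() {
  // Hypothetical endpoint; regtest bitcoind listens on port 18443 by default.
  let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string());

  let height = rpc.get_latest_block_number().await.expect("couldn't reach the node");
  let hash = rpc.get_block_hash(height).await.unwrap();
  let block = rpc.get_block(&hash).await.unwrap();
  println!("block {height} contains {} transactions", block.txdata.len());
}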
coins/bitcoin/src/tests/mod.rs (new file, 47 lines)

use rand_core::OsRng;

use sha2::{Digest, Sha256};

use secp256k1::{SECP256K1, Message, schnorr::Signature};
use bitcoin::hashes::{Hash as HashTrait, sha256::Hash};

use k256::Scalar;
use frost::{
  curve::Secp256k1,
  algorithm::Schnorr,
  tests::{algorithm_machines, key_gen, sign},
};

use crate::crypto::{BitcoinHram, x_only, make_even};

#[test]
fn test_signing() {
  let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
  const MESSAGE: &[u8] = b"Hello, World!";

  for (_, keys) in keys.iter_mut() {
    let (_, offset) = make_even(keys.group_key());
    *keys = keys.offset(Scalar::from(offset));
  }

  let algo = Schnorr::<Secp256k1, BitcoinHram>::new();
  let mut sig = sign(
    &mut OsRng,
    algo,
    keys.clone(),
    algorithm_machines(&mut OsRng, Schnorr::<Secp256k1, BitcoinHram>::new(), &keys),
    &Sha256::digest(MESSAGE),
  );

  let offset;
  (sig.R, offset) = make_even(sig.R);
  sig.s += Scalar::from(offset);

  SECP256K1
    .verify_schnorr(
      &Signature::from_slice(&sig.serialize()[1 .. 65]).unwrap(),
      &Message::from(Hash::hash(MESSAGE)),
      &x_only(&keys[&1].group_key()),
    )
    .unwrap()
}
coins/bitcoin/src/wallet.rs (new file, 327 lines)

use std::{
  io::{self, Read, Write},
  collections::HashMap,
};

use rand_core::RngCore;

use transcript::{Transcript, RecommendedTranscript};

use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
use frost::{
  curve::{Ciphersuite, Secp256k1},
  ThresholdKeys, FrostError,
  algorithm::Schnorr,
  sign::*,
};

use bitcoin::{
  hashes::Hash,
  consensus::encode::{Decodable, serialize},
  util::sighash::{SchnorrSighashType, SighashCache, Prevouts},
  OutPoint, Script, Sequence, Witness, TxIn, TxOut, PackedLockTime, Transaction, Address,
};

use crate::crypto::{BitcoinHram, make_even};

/// A spendable output.
#[derive(Clone, Debug)]
pub struct SpendableOutput {
  /// The scalar offset to obtain the key usable to spend this output.
  /// Enables HDKD systems.
  pub offset: Scalar,
  /// The output to spend.
  pub output: TxOut,
  /// The TX ID and vout of the output to spend.
  pub outpoint: OutPoint,
}

impl SpendableOutput {
  /// Obtain a unique ID for this output.
  pub fn id(&self) -> [u8; 36] {
    serialize(&self.outpoint).try_into().unwrap()
  }

  /// Read a SpendableOutput from a generic satisfying Read.
  pub fn read<R: Read>(r: &mut R) -> io::Result<SpendableOutput> {
    Ok(SpendableOutput {
      offset: Secp256k1::read_F(r)?,
      output: TxOut::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
      outpoint: OutPoint::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
    })
  }

  /// Write a SpendableOutput to a generic satisfying Write.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.offset.to_bytes())?;
    w.write_all(&serialize(&self.output))?;
    w.write_all(&serialize(&self.outpoint))
  }

  /// Serialize a SpendableOutput to a Vec<u8>.
  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    self.write(&mut res).unwrap();
    res
  }
}

/// A signable transaction, clone-able across attempts.
#[derive(Clone, Debug)]
pub struct SignableTransaction(Transaction, Vec<Scalar>, Vec<TxOut>);

impl SignableTransaction {
  fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
    let mut tx = Transaction {
      version: 2,
      lock_time: PackedLockTime::ZERO,
      input: vec![
        TxIn {
          previous_output: OutPoint::default(),
          script_sig: Script::new(),
          sequence: Sequence::MAX,
          witness: Witness::from_vec(vec![vec![0; 64]])
        };
        inputs
      ],
      output: payments
        .iter()
        .map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
        .collect(),
    };
    if let Some(change) = change {
      tx.output.push(TxOut { value: 0, script_pubkey: change.script_pubkey() });
    }
    u64::try_from(tx.weight()).unwrap()
  }

  /// Create a new signable-transaction.
  pub fn new(
    mut inputs: Vec<SpendableOutput>,
    payments: &[(Address, u64)],
    change: Option<Address>,
    fee: u64,
  ) -> Option<SignableTransaction> {
    let input_sat = inputs.iter().map(|input| input.output.value).sum::<u64>();
    let offsets = inputs.iter().map(|input| input.offset).collect();
    let tx_ins = inputs
      .iter()
      .map(|input| TxIn {
        previous_output: input.outpoint,
        script_sig: Script::new(),
        sequence: Sequence::MAX,
        witness: Witness::new(),
      })
      .collect::<Vec<_>>();

    let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
    let mut tx_outs = payments
      .iter()
      .map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
      .collect::<Vec<_>>();

    let actual_fee = fee * Self::calculate_weight(tx_ins.len(), payments, None);
    if payment_sat > (input_sat - actual_fee) {
      return None;
    }

    // If there's a change address, check if there's a meaningful change
    if let Some(change) = change.as_ref() {
      let fee_with_change = fee * Self::calculate_weight(tx_ins.len(), payments, Some(change));
      // If there's a non-zero change, add it
      if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
        tx_outs.push(TxOut { value, script_pubkey: change.script_pubkey() });
      }
    }

    // TODO: Drop outputs which BTC will consider spam (outputs worth less than the cost to spend
    // them)

    Some(SignableTransaction(
      Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: tx_ins, output: tx_outs },
      offsets,
      inputs.drain(..).map(|input| input.output).collect(),
    ))
  }

  /// Create a multisig machine for this transaction.
  pub async fn multisig(
    self,
    keys: ThresholdKeys<Secp256k1>,
    mut transcript: RecommendedTranscript,
  ) -> Result<TransactionMachine, FrostError> {
    transcript.domain_separate(b"bitcoin_transaction");
    transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());

    // Transcript the inputs and outputs
    let tx = &self.0;
    for input in &tx.input {
      transcript.append_message(b"input_hash", input.previous_output.txid.as_hash().into_inner());
      transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
    }
    for payment in &tx.output {
      transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
      transcript.append_message(b"output_amount", payment.value.to_le_bytes());
    }

    let mut sigs = vec![];
    for i in 0 .. tx.input.len() {
      // TODO: Use the above transcript here
      sigs.push(
        AlgorithmMachine::new(
          Schnorr::<Secp256k1, BitcoinHram>::new(),
          keys.clone().offset(self.1[i]),
        )
        .unwrap(),
      );
    }

    Ok(TransactionMachine { tx: self, transcript, sigs })
  }
}

/// A FROST signing machine to produce a Bitcoin transaction.
pub struct TransactionMachine {
  tx: SignableTransaction,
  transcript: RecommendedTranscript,
  sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<Secp256k1, BitcoinHram>>>,
}

impl PreprocessMachine for TransactionMachine {
  type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
  type Signature = Transaction;
  type SignMachine = TransactionSignMachine;

  fn preprocess<R: RngCore + rand_core::CryptoRng>(
    mut self,
    rng: &mut R,
  ) -> (Self::SignMachine, Self::Preprocess) {
    let mut preprocesses = Vec::with_capacity(self.sigs.len());
    let sigs = self
      .sigs
      .drain(..)
      .map(|sig| {
        let (sig, preprocess) = sig.preprocess(rng);
        preprocesses.push(preprocess);
        sig
      })
      .collect();

    (TransactionSignMachine { tx: self.tx, transcript: self.transcript, sigs }, preprocesses)
  }
}

pub struct TransactionSignMachine {
  tx: SignableTransaction,
  transcript: RecommendedTranscript,
  sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<Secp256k1, BitcoinHram>>>,
}

impl SignMachine<Transaction> for TransactionSignMachine {
  type Params = ();
  type Keys = ThresholdKeys<Secp256k1>;
  type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
  type SignatureShare = Vec<SignatureShare<Secp256k1>>;
  type SignatureMachine = TransactionSignatureMachine;

  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "Bitcoin transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn from_cache(
    _: (),
    _: ThresholdKeys<Secp256k1>,
    _: CachedPreprocess,
  ) -> Result<Self, FrostError> {
    unimplemented!(
      "Bitcoin transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
    self.sigs.iter().map(|sig| sig.read_preprocess(reader)).collect()
  }

  fn sign(
    mut self,
    commitments: HashMap<u16, Self::Preprocess>,
    msg: &[u8],
  ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
    if !msg.is_empty() {
      Err(FrostError::InternalError(
        "message was passed to the TransactionMachine when it generates its own",
      ))?;
    }

    let commitments = (0 .. self.sigs.len())
      .map(|c| {
        commitments
          .iter()
          .map(|(l, commitments)| (*l, commitments[c].clone()))
          .collect::<HashMap<_, _>>()
      })
      .collect::<Vec<_>>();

    let mut cache = SighashCache::new(&self.tx.0);
    let prevouts = Prevouts::All(&self.tx.2);

    let mut shares = Vec::with_capacity(self.sigs.len());
    let sigs = self
      .sigs
      .drain(..)
      .enumerate()
      .map(|(i, sig)| {
        let tx_sighash = cache
          .taproot_key_spend_signature_hash(i, &prevouts, SchnorrSighashType::Default)
          .unwrap();

        let (sig, share) = sig.sign(commitments[i].clone(), &tx_sighash)?;
        shares.push(share);
        Ok(sig)
      })
      .collect::<Result<_, _>>()?;

    Ok((TransactionSignatureMachine { tx: self.tx.0, sigs }, shares))
  }
}

pub struct TransactionSignatureMachine {
  tx: Transaction,
  sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<Secp256k1, BitcoinHram>>>,
}

impl SignatureMachine<Transaction> for TransactionSignatureMachine {
  type SignatureShare = Vec<SignatureShare<Secp256k1>>;

  fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
    self.sigs.iter().map(|sig| sig.read_share(reader)).collect()
  }

  fn complete(
    mut self,
    mut shares: HashMap<u16, Self::SignatureShare>,
  ) -> Result<Transaction, FrostError> {
    for (input, schnorr) in self.tx.input.iter_mut().zip(self.sigs.drain(..)) {
      let mut sig = schnorr.complete(
        shares.iter_mut().map(|(l, shares)| (*l, shares.remove(0))).collect::<HashMap<_, _>>(),
      )?;

      // TODO: Implement BitcoinSchnorr Algorithm to handle this
      let offset;
      (sig.R, offset) = make_even(sig.R);
      sig.s += Scalar::from(offset);

      let mut witness: Witness = Witness::new();
      witness.push(&sig.serialize()[1 .. 65]);
      input.witness = witness;
    }

    Ok(self.tx)
  }
}
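A small round-trip sketch for the SpendableOutput read/write/serialize helpers above; the offset, output value, and outpoint below are placeholders rather than anything a real scanner would produce.

use k256::Scalar;
use bitcoin::{OutPoint, Script, TxOut};
use bitcoin_serai::wallet::SpendableOutput;

fn main() {
  let output = SpendableOutput {
    // Placeholder HDKD offset and output; a wallet would derive these while scanning.
    offset: Scalar::from(1u64),
    output: TxOut { value: 10_000, script_pubkey: Script::new() },
    outpoint: OutPoint::default(),
  };

  // write/serialize and read are inverses of each other.
  let bytes = output.serialize();
  let read = SpendableOutput::read(&mut bytes.as_slice()).unwrap();
  assert_eq!(read.id(), output.id());
}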
The remaining hunks modify existing files whose paths were not captured in this extract; each is identified by its content.

(AGPL license header)

@@ -1,6 +1,6 @@
 AGPL-3.0-only license

-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License Version 3 as
(crate manifest for monero-serai)

@@ -14,6 +14,7 @@ rustdoc-args = ["--cfg", "docsrs"]
 [dependencies]
 lazy_static = "1"
 thiserror = "1"
+crc = "3"

 rand_core = "0.6"
 rand_chacha = { version = "0.3", optional = true }

@@ -24,11 +25,10 @@ zeroize = { version = "^1.5", features = ["zeroize_derive"] }
 subtle = "^2.4"

 sha3 = "0.10"
-blake2 = { version = "0.10", optional = true }

 curve25519-dalek = { version = "^3.2", features = ["std"] }

-group = { version = "0.12" }
+group = "0.12"
 dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
 multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] }

@@ -56,8 +56,9 @@ monero-generators = { path = "generators", version = "0.1" }
 hex-literal = "0.3"

 tokio = { version = "1", features = ["full"] }
+monero-rpc = "0.3"

 frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519", "tests"] }

 [features]
-multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]
+multisig = ["rand_chacha", "transcript", "frost", "dleq"]
(the same copyright-year bump, applied identically to two MIT LICENSE files)

@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
(varint serialization helpers)

@@ -1,7 +1,7 @@
-use std::io;
+use std::io::{self, Write};

 const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
-pub(crate) fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
+pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
   let mut varint = *varint;
   while {
     let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
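For reference, the varint format this module writes is the usual little-endian base-128 encoding: seven payload bits per byte, with the high bit flagging continuation. A standalone sketch, where the function name is illustrative rather than the crate's internal write_varint:

// Encode a u64 as a little-endian base-128 varint.
fn encode_varint(mut n: u64) -> Vec<u8> {
  let mut out = vec![];
  loop {
    let mut byte = (n & 0x7f) as u8;
    n >>= 7;
    if n != 0 {
      byte |= 0x80; // continuation bit: more bytes follow
    }
    out.push(byte);
    if n == 0 {
      break;
    }
  }
  out
}

fn main() {
  // Values under 128 fit in a single byte.
  assert_eq!(encode_varint(0x2c), vec![0x2c]);
  // 300 = 0b1_0010_1100: low 7 bits plus continuation (0xAC), then 0x02.
  assert_eq!(encode_varint(300), vec![0xac, 0x02]);
}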
(block serialization)

@@ -1,3 +1,5 @@
+use std::io::{self, Read, Write};
+
 use crate::{serialize::*, transaction::Transaction};

 #[derive(Clone, PartialEq, Eq, Debug)]

@@ -10,7 +12,7 @@ pub struct BlockHeader {
 }

 impl BlockHeader {
-  pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
     write_varint(&self.major_version, w)?;
     write_varint(&self.minor_version, w)?;
     write_varint(&self.timestamp, w)?;

@@ -18,7 +20,13 @@ impl BlockHeader {
     w.write_all(&self.nonce.to_le_bytes())
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut serialized = vec![];
+    self.write(&mut serialized).unwrap();
+    serialized
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
     Ok(BlockHeader {
       major_version: read_varint(r)?,
       minor_version: read_varint(r)?,

@@ -37,9 +45,9 @@ pub struct Block {
 }

 impl Block {
-  pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
-    self.header.serialize(w)?;
-    self.miner_tx.serialize(w)?;
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    self.header.write(w)?;
+    self.miner_tx.write(w)?;
     write_varint(&self.txs.len().try_into().unwrap(), w)?;
     for tx in &self.txs {
       w.write_all(tx)?;

@@ -47,10 +55,16 @@ impl Block {
     Ok(())
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut serialized = vec![];
+    self.write(&mut serialized).unwrap();
+    serialized
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
     Ok(Block {
-      header: BlockHeader::deserialize(r)?,
-      miner_tx: Transaction::deserialize(r)?,
+      header: BlockHeader::read(r)?,
+      miner_tx: Transaction::read(r)?,
       txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
     })
   }
 }
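The pattern introduced here, and repeated across the other monero-serai types below, is: write streams into any io::Write, serialize is a convenience wrapper that collects into a Vec<u8>, and read is the inverse. A hedged round-trip sketch, assuming a BlockHeader value obtained elsewhere and the usual monero_serai::block module path:

use monero_serai::block::BlockHeader;

// read(serialize(x)) should reproduce x under the renamed API.
fn round_trip(header: &BlockHeader) -> BlockHeader {
  let bytes = header.serialize();
  BlockHeader::read(&mut bytes.as_slice()).expect("serialized header failed to reparse")
}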
(protocol version definitions)

@@ -56,7 +56,6 @@ mod tests;
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
 #[allow(non_camel_case_types)]
 pub enum Protocol {
-  Unsupported(usize),
   v14,
   v16,
   Custom { ring_len: usize, bp_plus: bool },

@@ -66,7 +65,6 @@ impl Protocol {
   /// Amount of ring members under this protocol version.
   pub fn ring_len(&self) -> usize {
     match self {
-      Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
       Protocol::v14 => 11,
       Protocol::v16 => 16,
       Protocol::Custom { ring_len, .. } => *ring_len,

@@ -77,7 +75,6 @@ impl Protocol {
   /// This method will likely be reworked when versions not using Bulletproofs at all are added.
   pub fn bp_plus(&self) -> bool {
     match self {
-      Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
       Protocol::v14 => false,
       Protocol::v16 => true,
       Protocol::Custom { bp_plus, .. } => *bp_plus,
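With the Unsupported variant removed, every remaining Protocol variant resolves to concrete parameters, and protocol versions not covered by v14/v16 would now be expressed through Custom. A small sketch using the values visible in the hunks above; the crate-root import path is an assumption:

use monero_serai::Protocol;

fn main() {
  assert_eq!(Protocol::v14.ring_len(), 11);
  assert_eq!(Protocol::v16.ring_len(), 16);
  assert!(Protocol::v16.bp_plus());

  // Anything else has to be spelled out explicitly.
  let custom = Protocol::Custom { ring_len: 16, bp_plus: true };
  assert_eq!(custom.ring_len(), 16);
}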
@ -1,5 +1,7 @@
|
||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use std::io::{self, Read, Write};
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
use zeroize::Zeroize;
|
||||||
|
@ -35,6 +37,7 @@ impl Bulletproofs {
|
||||||
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
||||||
let fields = if plus { 6 } else { 9 };
|
let fields = if plus { 6 } else { 9 };
|
||||||
|
|
||||||
|
// TODO: Shouldn't this use u32/u64?
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
||||||
let padded_outputs = 1 << LR_len;
|
let padded_outputs = 1 << LR_len;
|
||||||
|
@ -93,11 +96,11 @@ impl Bulletproofs {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn serialize_core<W: std::io::Write, F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>>(
|
fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
|
||||||
&self,
|
&self,
|
||||||
w: &mut W,
|
w: &mut W,
|
||||||
specific_write_vec: F,
|
specific_write_vec: F,
|
||||||
) -> std::io::Result<()> {
|
) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Bulletproofs::Original(bp) => {
|
Bulletproofs::Original(bp) => {
|
||||||
write_point(&bp.A, w)?;
|
write_point(&bp.A, w)?;
|
||||||
|
@ -126,16 +129,22 @@ impl Bulletproofs {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
|
self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.serialize_core(w, |points, w| write_vec(write_point, points, w))
|
self.write_core(w, |points, w| write_vec(write_point, points, w))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Deserialize non-plus Bulletproofs.
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read Bulletproofs.
|
||||||
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
||||||
Ok(Bulletproofs::Original(OriginalStruct {
|
Ok(Bulletproofs::Original(OriginalStruct {
|
||||||
A: read_point(r)?,
|
A: read_point(r)?,
|
||||||
S: read_point(r)?,
|
S: read_point(r)?,
|
||||||
|
@ -151,8 +160,8 @@ impl Bulletproofs {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Deserialize Bulletproofs+.
|
/// Read Bulletproofs+.
|
||||||
pub fn deserialize_plus<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
|
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
||||||
Ok(Bulletproofs::Plus(PlusStruct {
|
Ok(Bulletproofs::Plus(PlusStruct {
|
||||||
A: read_point(r)?,
|
A: read_point(r)?,
|
||||||
A1: read_point(r)?,
|
A1: read_point(r)?,
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use core::ops::Deref;
|
use core::ops::Deref;
|
||||||
|
use std::io::{self, Read, Write};
|
||||||
|
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
@ -313,13 +314,13 @@ impl Clsag {
|
||||||
(ring_len * 32) + 32 + 32
|
(ring_len * 32) + 32 + 32
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
write_raw_vec(write_scalar, &self.s, w)?;
|
write_raw_vec(write_scalar, &self.s, w)?;
|
||||||
w.write_all(&self.c1.to_bytes())?;
|
w.write_all(&self.c1.to_bytes())?;
|
||||||
write_point(&self.D, w)
|
write_point(&self.D, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
|
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
|
||||||
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
|
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -41,18 +41,17 @@ impl ClsagInput
 // Doesn't domain separate as this is considered part of the larger CLSAG proof

 // Ring index
-transcript.append_message(b"ring_index", [self.decoys.i]);
+transcript.append_message(b"real_spend", [self.decoys.i]);

 // Ring
-let mut ring = vec![];
-for pair in &self.decoys.ring {
+for (i, pair) in self.decoys.ring.iter().enumerate() {
 // Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
 // They're just a unreliable reference to this data which will be included in the message
 // if in use
-ring.extend(pair[0].compress().to_bytes());
-ring.extend(pair[1].compress().to_bytes());
+transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
+transcript.append_message(b"key", pair[0].compress().to_bytes());
+transcript.append_message(b"commitment", pair[1].compress().to_bytes())
 }
-transcript.append_message(b"ring", ring);

 // Doesn't include the commitment's parts as the above ring + index includes the commitment
 // The only potential malleability would be if the G/H relationship is known breaking the
@@ -1,4 +1,5 @@
 use core::ops::Deref;
+use std::io::{self, Read, Write};

 use zeroize::Zeroizing;

@@ -35,7 +36,7 @@ impl RctBase {
 1 + 8 + (outputs * (8 + 32))
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W, rct_type: u8) -> std::io::Result<()> {
+pub fn write<W: Write>(&self, w: &mut W, rct_type: u8) -> io::Result<()> {
 w.write_all(&[rct_type])?;
 match rct_type {
 0 => Ok(()),

@@ -50,10 +51,7 @@ impl RctBase {
 }
 }

-pub fn deserialize<R: std::io::Read>(
-outputs: usize,
-r: &mut R,
-) -> std::io::Result<(RctBase, u8)> {
+pub fn read<R: Read>(outputs: usize, r: &mut R) -> io::Result<(RctBase, u8)> {
 let rct_type = read_byte(r)?;
 Ok((
 if rct_type == 0 {

@@ -96,46 +94,43 @@ impl RctPrunable {
 (inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 match self {
 RctPrunable::Null => Ok(()),
 RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs, .. } => {
-write_vec(Bulletproofs::serialize, bulletproofs, w)?;
-write_raw_vec(Clsag::serialize, clsags, w)?;
+write_vec(Bulletproofs::write, bulletproofs, w)?;
+write_raw_vec(Clsag::write, clsags, w)?;
 write_raw_vec(write_point, pseudo_outs, w)
 }
 }
 }

-pub fn deserialize<R: std::io::Read>(
-rct_type: u8,
-decoys: &[usize],
-r: &mut R,
-) -> std::io::Result<RctPrunable> {
+pub fn serialize(&self) -> Vec<u8> {
+let mut serialized = vec![];
+self.write(&mut serialized).unwrap();
+serialized
+}
+
+pub fn read<R: Read>(rct_type: u8, decoys: &[usize], r: &mut R) -> io::Result<RctPrunable> {
 Ok(match rct_type {
 0 => RctPrunable::Null,
 5 | 6 => RctPrunable::Clsag {
 bulletproofs: read_vec(
-if rct_type == 5 { Bulletproofs::deserialize } else { Bulletproofs::deserialize_plus },
+if rct_type == 5 { Bulletproofs::read } else { Bulletproofs::read_plus },
 r,
 )?,
-clsags: (0 .. decoys.len())
-.map(|o| Clsag::deserialize(decoys[o], r))
-.collect::<Result<_, _>>()?,
+clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
 pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
 },
-_ => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"Tried to deserialize unknown RCT type",
-))?,
+_ => Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?,
 })
 }

-pub(crate) fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 match self {
 RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
 RctPrunable::Clsag { bulletproofs, .. } => {
-bulletproofs.iter().try_for_each(|bp| bp.signature_serialize(w))
+bulletproofs.iter().try_for_each(|bp| bp.signature_write(w))
 }
 }
 }

@@ -152,17 +147,19 @@ impl RctSignatures {
 RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
-self.base.serialize(w, self.prunable.rct_type())?;
-self.prunable.serialize(w)
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+self.base.write(w, self.prunable.rct_type())?;
+self.prunable.write(w)
 }

-pub fn deserialize<R: std::io::Read>(
-decoys: Vec<usize>,
-outputs: usize,
-r: &mut R,
-) -> std::io::Result<RctSignatures> {
-let base = RctBase::deserialize(outputs, r)?;
-Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
+pub fn serialize(&self) -> Vec<u8> {
+let mut serialized = vec![];
+self.write(&mut serialized).unwrap();
+serialized
+}
+
+pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<RctSignatures> {
+let base = RctBase::read(outputs, r)?;
+Ok(RctSignatures { base: base.0, prunable: RctPrunable::read(base.1, &decoys, r)? })
 }
 }
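A short usage sketch of the renamed `read` entry point, under the assumption that the caller has already parsed the `TransactionPrefix` and therefore knows each input's ring size and the output count (the module path is an assumption for illustration):

```rust
use std::io::Cursor;
use monero_serai::ringct::RctSignatures;

// `decoys` holds the ring length of each input; `outputs` is the number of outputs.
// Both come from the already-parsed prefix, mirroring how Transaction::read calls this.
fn parse_rct(bytes: &[u8], decoys: Vec<usize>, outputs: usize) -> std::io::Result<RctSignatures> {
  RctSignatures::read(decoys, outputs, &mut Cursor::new(bytes))
}
```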
@@ -27,7 +27,6 @@ pub struct JsonRpcResponse<T>
 #[derive(Deserialize, Debug)]
 struct TransactionResponse {
 tx_hash: String,
-block_height: Option<usize>,
 as_hex: String,
 pruned_as_hex: String,
 }

@@ -46,6 +45,8 @@ pub enum RpcError {
 ConnectionError,
 #[error("invalid node")]
 InvalidNode,
+#[error("unsupported protocol version ({0})")]
+UnsupportedProtocol(usize),
 #[error("transactions not found")]
 TransactionsNotFound(Vec<[u8; 32]>),
 #[error("invalid point ({0})")]

@@ -212,7 +213,7 @@ impl Rpc {
 {
 13 | 14 => Protocol::v14,
 15 | 16 => Protocol::v16,
-version => Protocol::Unsupported(version),
+protocol => Err(RpcError::UnsupportedProtocol(protocol))?,
 },
 )
 }

@@ -248,10 +249,12 @@ impl Rpc {
 txs
 .txs
 .iter()
-.map(|res| {
-let tx = Transaction::deserialize(&mut std::io::Cursor::new(rpc_hex(
-if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex },
-)?))
+.enumerate()
+.map(|(i, res)| {
+let tx = Transaction::read::<&[u8]>(
+&mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
+.as_ref(),
+)
 .map_err(|_| match hash_hex(&res.tx_hash) {
 Ok(hash) => RpcError::InvalidTransaction(hash),
 Err(err) => err,

@@ -265,6 +268,12 @@ impl Rpc {
 }
 }

+// This does run a few keccak256 hashes, which is pointless if the node is trusted
+// In exchange, this provides resilience against invalid/malicious nodes
+if tx.hash() != hashes[i] {
+Err(RpcError::InvalidNode)?;
+}
+
 Ok(tx)
 })
 .collect()

@@ -274,40 +283,71 @@ impl Rpc {
 self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
 }

-pub async fn get_transaction_block_number(&self, tx: &[u8]) -> Result<Option<usize>, RpcError> {
-let txs: TransactionsResponse =
-self.rpc_call("get_transactions", Some(json!({ "txs_hashes": [hex::encode(tx)] }))).await?;
-
-if !txs.missed_tx.is_empty() {
-Err(RpcError::TransactionsNotFound(
-txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
-))?;
+/// Get the hash of a block from the node by the block's numbers.
+/// This function does not verify the returned block hash is actually for the number in question.
+pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
+#[derive(Deserialize, Debug)]
+struct BlockHeaderResponse {
+hash: String,
+}
+#[derive(Deserialize, Debug)]
+struct BlockHeaderByHeightResponse {
+block_header: BlockHeaderResponse,
 }

-Ok(txs.txs[0].block_height)
+let header: BlockHeaderByHeightResponse =
+self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
+rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
 }

-pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
+/// Get a block from the node by its hash.
+/// This function does not verify the returned block actually has the hash in question.
+pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
 #[derive(Deserialize, Debug)]
 struct BlockResponse {
 blob: String,
 }

-let block: BlockResponse =
-self.json_rpc_call("get_block", Some(json!({ "height": height }))).await?;
-Ok(
-Block::deserialize(&mut std::io::Cursor::new(rpc_hex(&block.blob)?))
-.expect("Monero returned a block we couldn't deserialize"),
-)
+let res: BlockResponse =
+self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
+
+// TODO: Verify the TXs included are actually committed to by the header
+Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)
 }

-pub async fn get_block_transactions(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
-let block = self.get_block(height).await?;
+pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
+match self.get_block(self.get_block_hash(number).await?).await {
+Ok(block) => {
+// Make sure this is actually the block for this number
+match block.miner_tx.prefix.inputs[0] {
+Input::Gen(actual) => {
+if usize::try_from(actual).unwrap() == number {
+Ok(block)
+} else {
+Err(RpcError::InvalidNode)
+}
+}
+_ => Err(RpcError::InvalidNode),
+}
+}
+e => e,
+}
+}
+
+pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
+let block = self.get_block(hash).await?;
 let mut res = vec![block.miner_tx];
 res.extend(self.get_transactions(&block.txs).await?);
 Ok(res)
 }

+pub async fn get_block_transactions_by_number(
+&self,
+number: usize,
+) -> Result<Vec<Transaction>, RpcError> {
+self.get_block_transactions(self.get_block_hash(number).await?).await
+}
+
 /// Get the output indexes of the specified transaction.
 pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
 #[derive(Serialize, Debug)]

@@ -370,8 +410,9 @@ impl Rpc {
 Ok(distributions.distributions.swap_remove(0).distribution)
 }

-/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if they're
-/// unlocked.
+/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
+/// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
+/// all Monero transactions.
 pub async fn get_unlocked_outputs(
 &self,
 indexes: &[u64],

@@ -407,13 +448,8 @@ impl Rpc {
 &outs
 .outs
 .iter()
-.map(|out| {
-rpc_hex(&out.txid)
-.expect("Monero returned an invalidly encoded hash")
-.try_into()
-.expect("Monero returned an invalid sized hash")
-})
-.collect::<Vec<_>>(),
+.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
+.collect::<Result<Vec<_>, _>>()?,
 )
 .await?;

@@ -466,7 +502,7 @@ impl Rpc {
 }

 let mut buf = Vec::with_capacity(2048);
-tx.serialize(&mut buf).unwrap();
+tx.write(&mut buf).unwrap();
 let res: SendRawResponse = self
 .rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(&buf) })))
 .await?;
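A hedged sketch of the new block-by-number flow: the hash is resolved first, the block fetched by hash, and the miner transaction's `Input::Gen` height ties the block back to the requested number. The surrounding function and its use of the miner transaction are illustrative assumptions, not part of the patch:

```rust
use monero_serai::rpc::{Rpc, RpcError};

// get_block_by_number internally calls get_block_hash, then get_block, and rejects the
// block if its miner transaction's Input::Gen height does not equal `number`.
async fn miner_output_count(rpc: &Rpc, number: usize) -> Result<usize, RpcError> {
  let block = rpc.get_block_by_number(number).await?;
  Ok(block.miner_tx.prefix.outputs.len())
}
```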
@@ -1,4 +1,4 @@
-use std::io;
+use std::io::{self, Read, Write};

 use curve25519_dalek::{
 scalar::Scalar,

@@ -11,11 +11,11 @@ pub(crate) fn varint_len(varint: usize) -> usize {
 ((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
 }

-pub(crate) fn write_byte<W: io::Write>(byte: &u8, w: &mut W) -> io::Result<()> {
+pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
 w.write_all(&[*byte])
 }

-pub(crate) fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
+pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
 let mut varint = *varint;
 while {
 let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();

@@ -29,15 +29,15 @@ pub(crate) fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<
 Ok(())
 }

-pub(crate) fn write_scalar<W: io::Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
+pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
 w.write_all(&scalar.to_bytes())
 }

-pub(crate) fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
+pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
 w.write_all(&point.compress().to_bytes())
 }

-pub(crate) fn write_raw_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
+pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
 f: F,
 values: &[T],
 w: &mut W,

@@ -48,7 +48,7 @@ pub(crate) fn write_raw_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>
 Ok(())
 }

-pub(crate) fn write_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
+pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
 f: F,
 values: &[T],
 w: &mut W,

@@ -57,25 +57,25 @@ pub(crate) fn write_vec<T, W: io::Write, F: Fn(&T, &mut W) -> io::Result<()>>(
 write_raw_vec(f, values, w)
 }

-pub(crate) fn read_bytes<R: io::Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
+pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
 let mut res = [0; N];
 r.read_exact(&mut res)?;
 Ok(res)
 }

-pub(crate) fn read_byte<R: io::Read>(r: &mut R) -> io::Result<u8> {
+pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
 Ok(read_bytes::<_, 1>(r)?[0])
 }

-pub(crate) fn read_u64<R: io::Read>(r: &mut R) -> io::Result<u64> {
+pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
 read_bytes(r).map(u64::from_le_bytes)
 }

-pub(crate) fn read_u32<R: io::Read>(r: &mut R) -> io::Result<u32> {
+pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
 read_bytes(r).map(u32::from_le_bytes)
 }

-pub(crate) fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
+pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
 let mut bits = 0;
 let mut res = 0;
 while {

@@ -100,12 +100,12 @@ pub(crate) fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
 // for now. There's also further edge cases as noted by
 // https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
 // reduction applied
-pub(crate) fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
+pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
 Scalar::from_canonical_bytes(read_bytes(r)?)
 .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
 }

-pub(crate) fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
+pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
 let bytes = read_bytes(r)?;
 CompressedEdwardsY(bytes)
 .decompress()

@@ -114,14 +114,14 @@ pub(crate) fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
 .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
 }

-pub(crate) fn read_torsion_free_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
+pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
 read_point(r)
 .ok()
 .filter(|point| point.is_torsion_free())
 .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
 }

-pub(crate) fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
+pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
 f: F,
 len: usize,
 r: &mut R,

@@ -133,7 +133,7 @@ pub(crate) fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
 Ok(res)
 }

-pub(crate) fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(
+pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
 f: F,
 r: &mut R,
 ) -> io::Result<Vec<T>> {
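The varint helpers above implement Monero's little-endian base-128 encoding: seven payload bits per byte, with the high bit as a continuation flag. A standalone sketch of the same format, written independently of the crate for illustration:

```rust
// Encode a u64 as a Monero-style varint: low 7 bits first, 0x80 marks continuation.
fn varint_encode(mut n: u64) -> Vec<u8> {
  let mut out = vec![];
  loop {
    let mut b = (n & 0x7f) as u8;
    n >>= 7;
    if n != 0 {
      b |= 0x80;
    }
    out.push(b);
    if n == 0 {
      break;
    }
  }
  out
}

// Decode by accumulating 7 bits per byte until a byte without the continuation flag.
fn varint_decode(bytes: &[u8]) -> u64 {
  let mut res = 0u64;
  for (i, b) in bytes.iter().enumerate() {
    res |= u64::from(b & 0x7f) << (7 * i);
    if b & 0x80 == 0 {
      break;
    }
  }
  res
}

fn main() {
  assert_eq!(varint_encode(0), [0]);
  assert_eq!(varint_encode(128), [0x80, 0x01]);
  assert_eq!(varint_decode(&varint_encode(300)), 300);
}
```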
@@ -33,9 +33,9 @@ fn standard_address()
 let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
 assert_eq!(addr.meta.network, Network::Mainnet);
 assert_eq!(addr.meta.kind, AddressType::Standard);
-assert!(!addr.meta.kind.subaddress());
+assert!(!addr.meta.kind.is_subaddress());
 assert_eq!(addr.meta.kind.payment_id(), None);
-assert!(!addr.meta.kind.guaranteed());
+assert!(!addr.meta.kind.is_guaranteed());
 assert_eq!(addr.spend.compress().to_bytes(), SPEND);
 assert_eq!(addr.view.compress().to_bytes(), VIEW);
 assert_eq!(addr.to_string(), STANDARD);

@@ -46,9 +46,9 @@ fn integrated_address()
 let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
 assert_eq!(addr.meta.network, Network::Mainnet);
 assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
-assert!(!addr.meta.kind.subaddress());
+assert!(!addr.meta.kind.is_subaddress());
 assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
-assert!(!addr.meta.kind.guaranteed());
+assert!(!addr.meta.kind.is_guaranteed());
 assert_eq!(addr.spend.compress().to_bytes(), SPEND);
 assert_eq!(addr.view.compress().to_bytes(), VIEW);
 assert_eq!(addr.to_string(), INTEGRATED);

@@ -59,9 +59,9 @@ fn subaddress()
 let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
 assert_eq!(addr.meta.network, Network::Mainnet);
 assert_eq!(addr.meta.kind, AddressType::Subaddress);
-assert!(addr.meta.kind.subaddress());
+assert!(addr.meta.kind.is_subaddress());
 assert_eq!(addr.meta.kind.payment_id(), None);
-assert!(!addr.meta.kind.guaranteed());
+assert!(!addr.meta.kind.is_guaranteed());
 assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
 assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
 assert_eq!(addr.to_string(), SUBADDRESS);

@@ -83,13 +83,14 @@ fn featured()

 let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;

-let mut id = [0; 8];
-OsRng.fill_bytes(&mut id);
-let id = Some(id).filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);
+let mut payment_id = [0; 8];
+OsRng.fill_bytes(&mut payment_id);
+let payment_id = Some(payment_id)
+.filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);

 let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;

-let kind = AddressType::Featured(subaddress, id, guaranteed);
+let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
 let meta = AddressMeta::new(network, kind);
 let addr = MoneroAddress::new(meta, spend, view);

@@ -99,9 +100,9 @@ fn featured()
 assert_eq!(addr.spend, spend);
 assert_eq!(addr.view, view);

-assert_eq!(addr.subaddress(), subaddress);
-assert_eq!(addr.payment_id(), id);
-assert_eq!(addr.guaranteed(), guaranteed);
+assert_eq!(addr.is_subaddress(), subaddress);
+assert_eq!(addr.payment_id(), payment_id);
+assert_eq!(addr.is_guaranteed(), guaranteed);
 }
 }
 }

@@ -150,16 +151,20 @@ fn featured_vectors()
 assert_eq!(addr.spend, spend);
 assert_eq!(addr.view, view);

-assert_eq!(addr.subaddress(), vector.subaddress);
+assert_eq!(addr.is_subaddress(), vector.subaddress);
 assert_eq!(vector.integrated, vector.payment_id.is_some());
 assert_eq!(addr.payment_id(), vector.payment_id);
-assert_eq!(addr.guaranteed(), vector.guaranteed);
+assert_eq!(addr.is_guaranteed(), vector.guaranteed);

 assert_eq!(
 MoneroAddress::new(
 AddressMeta::new(
 network,
-AddressType::Featured(vector.subaddress, vector.payment_id, vector.guaranteed)
+AddressType::Featured {
+subaddress: vector.subaddress,
+payment_id: vector.payment_id,
+guaranteed: vector.guaranteed
+}
 ),
 spend,
 view

@@ -66,7 +66,7 @@ fn clsag()
 Commitment::new(secrets.1, AMOUNT),
 Decoys {
 i: u8::try_from(real).unwrap(),
-offsets: (1 ..= RING_LEN).into_iter().collect(),
+offsets: (1 ..= RING_LEN).collect(),
 ring: ring.clone(),
 },
 )

@@ -110,11 +110,7 @@ fn clsag_multisig()
 Arc::new(RwLock::new(Some(ClsagDetails::new(
 ClsagInput::new(
 Commitment::new(randomness, AMOUNT),
-Decoys {
-i: RING_INDEX,
-offsets: (1 ..= RING_LEN).into_iter().collect(),
-ring: ring.clone(),
-},
+Decoys { i: RING_INDEX, offsets: (1 ..= RING_LEN).collect(), ring: ring.clone() },
 )
 .unwrap(),
 mask_sum,
@@ -1,3 +1,4 @@
 mod clsag;
 mod bulletproofs;
 mod address;
+mod seed;
|
|
177
coins/monero/src/tests/seed.rs
Normal file
177
coins/monero/src/tests/seed.rs
Normal file
|
@ -0,0 +1,177 @@
|
||||||
|
use zeroize::Zeroizing;
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
hash,
|
||||||
|
wallet::seed::{Seed, Language, classic::trim_by_lang},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_classic_seed() {
|
||||||
|
struct Vector {
|
||||||
|
language: Language,
|
||||||
|
seed: String,
|
||||||
|
spend: String,
|
||||||
|
view: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
let vectors = [
|
||||||
|
Vector {
|
||||||
|
language: Language::Chinese,
|
||||||
|
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
|
||||||
|
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
|
||||||
|
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::English,
|
||||||
|
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
|
||||||
|
abnormal memoir nylon mostly building shrugged online ember northern \
|
||||||
|
ruby woes dauntless boil family illness inroads northern"
|
||||||
|
.into(),
|
||||||
|
spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
|
||||||
|
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Dutch,
|
||||||
|
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
|
||||||
|
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
|
||||||
|
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
|
||||||
|
.into(),
|
||||||
|
spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
|
||||||
|
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::French,
|
||||||
|
seed: "poids vaseux tarte bazar poivre effet entier nuance \
|
||||||
|
sensuel ennui pacte osselet poudre battre alibi mouton \
|
||||||
|
stade paquet pliage gibier type question position projet pliage"
|
||||||
|
.into(),
|
||||||
|
spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
|
||||||
|
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Spanish,
|
||||||
|
seed: "minero ocupar mirar evadir octubre cal logro miope \
|
||||||
|
opaco disco ancla litio clase cuello nasal clase \
|
||||||
|
fiar avance deseo mente grumo negro cordón croqueta clase"
|
||||||
|
.into(),
|
||||||
|
spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
|
||||||
|
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::German,
|
||||||
|
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
|
||||||
|
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
|
||||||
|
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
|
||||||
|
.into(),
|
||||||
|
spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
|
||||||
|
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Italian,
|
||||||
|
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
|
||||||
|
forzare meritare litigare lezione segreto evasione votare buio \
|
||||||
|
licenza cliente dorso natale crescere vento tutelare vetta evasione"
|
||||||
|
.into(),
|
||||||
|
spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
|
||||||
|
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Portuguese,
|
||||||
|
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
|
||||||
|
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
|
||||||
|
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
|
||||||
|
.into(),
|
||||||
|
spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
|
||||||
|
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Japanese,
|
||||||
|
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
|
||||||
|
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
|
||||||
|
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
|
||||||
|
.into(),
|
||||||
|
spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
|
||||||
|
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Russian,
|
||||||
|
seed: "шатер икра нация ехать получать инерция доза реальный \
|
||||||
|
рыжий таможня лопата душа веселый клетка атлас лекция \
|
||||||
|
обгонять паек наивный лыжный дурак стать ежик задача паек"
|
||||||
|
.into(),
|
||||||
|
spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
|
||||||
|
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Esperanto,
|
||||||
|
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
|
||||||
|
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
|
||||||
|
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
|
||||||
|
.into(),
|
||||||
|
spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
|
||||||
|
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::Lojban,
|
||||||
|
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
|
||||||
|
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
|
||||||
|
blabi darno dembi janli blabi fenki bukpu burcu blabi"
|
||||||
|
.into(),
|
||||||
|
spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
|
||||||
|
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
|
||||||
|
},
|
||||||
|
Vector {
|
||||||
|
language: Language::EnglishOld,
|
||||||
|
seed: "glorious especially puff son moment add youth nowhere \
|
||||||
|
throw glide grip wrong rhythm consume very swear \
|
||||||
|
bitter heavy eventually begin reason flirt type unable"
|
||||||
|
.into(),
|
||||||
|
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
|
||||||
|
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for vector in vectors {
|
||||||
|
let trim_seed = |seed: &str| {
|
||||||
|
seed
|
||||||
|
.split_whitespace()
|
||||||
|
.map(|word| trim_by_lang(word, vector.language))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(" ")
|
||||||
|
};
|
||||||
|
|
||||||
|
// Test against Monero
|
||||||
|
{
|
||||||
|
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
|
||||||
|
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&vector.seed))).unwrap());
|
||||||
|
|
||||||
|
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
|
||||||
|
// For classical seeds, Monero directly uses the entropy as a spend key
|
||||||
|
assert_eq!(
|
||||||
|
Scalar::from_canonical_bytes(*seed.entropy()),
|
||||||
|
Scalar::from_canonical_bytes(spend)
|
||||||
|
);
|
||||||
|
|
||||||
|
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
|
||||||
|
// Monero then derives the view key as H(spend)
|
||||||
|
assert_eq!(
|
||||||
|
Scalar::from_bytes_mod_order(hash(&spend)),
|
||||||
|
Scalar::from_canonical_bytes(view).unwrap()
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test against ourself
|
||||||
|
{
|
||||||
|
let seed = Seed::new(&mut OsRng, vector.language);
|
||||||
|
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&seed.to_string()))).unwrap());
|
||||||
|
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
|
||||||
|
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
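A standalone sketch of the key derivation these classic-seed vectors check: the 25-word seed decodes to 32 bytes of entropy, the entropy is used directly as the (already reduced) spend scalar, and the view scalar is keccak256 of the spend key reduced mod l. The word decoding itself is out of scope here; curve25519-dalek and sha3 are used purely for illustration:

```rust
use curve25519_dalek::scalar::Scalar;
use sha3::{Digest, Keccak256};

// spend = entropy (must already be a canonical scalar), view = keccak256(spend) mod l
fn classic_seed_keys(entropy: [u8; 32]) -> Option<(Scalar, Scalar)> {
  let spend = Scalar::from_canonical_bytes(entropy)?;
  let view = Scalar::from_bytes_mod_order(Keccak256::digest(spend.to_bytes()).into());
  Some((spend, view))
}
```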
@@ -1,4 +1,5 @@
 use core::cmp::Ordering;
+use std::io::{self, Read, Write};

 use zeroize::Zeroize;

@@ -27,7 +28,7 @@ impl Input {
 1 + 1 + 1 + (8 * ring_len) + 32
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 match self {
 Input::Gen(height) => {
 w.write_all(&[255])?;

@@ -43,7 +44,7 @@ impl Input {
 }
 }

-pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
+pub fn read<R: Read>(r: &mut R) -> io::Result<Input> {
 Ok(match read_byte(r)? {
 255 => Input::Gen(read_varint(r)?),
 2 => Input::ToKey {

@@ -51,10 +52,9 @@ impl Input {
 key_offsets: read_vec(read_varint, r)?,
 key_image: read_torsion_free_point(r)?,
 },
-_ => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"Tried to deserialize unknown/unused input type",
-))?,
+_ => {
+Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
+}
 })
 }
 }

@@ -72,7 +72,7 @@ impl Output {
 1 + 1 + 32 + 1
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 write_varint(&self.amount, w)?;
 w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
 w.write_all(&self.key.to_bytes())?;

@@ -82,13 +82,13 @@ impl Output {
 Ok(())
 }

-pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Output> {
+pub fn read<R: Read>(r: &mut R) -> io::Result<Output> {
 let amount = read_varint(r)?;
 let view_tag = match read_byte(r)? {
 2 => false,
 3 => true,
-_ => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
+_ => Err(io::Error::new(
+io::ErrorKind::Other,
 "Tried to deserialize unknown/unused output type",
 ))?,
 };

@@ -119,7 +119,7 @@ impl Timelock {
 }
 }

-fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 write_varint(
 &match self {
 Timelock::None => 0,

@@ -163,21 +163,21 @@ impl TransactionPrefix {
 extra
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
 write_varint(&self.version, w)?;
-self.timelock.serialize(w)?;
-write_vec(Input::serialize, &self.inputs, w)?;
-write_vec(Output::serialize, &self.outputs, w)?;
+self.timelock.write(w)?;
+write_vec(Input::write, &self.inputs, w)?;
+write_vec(Output::write, &self.outputs, w)?;
 write_varint(&self.extra.len().try_into().unwrap(), w)?;
 w.write_all(&self.extra)
 }

-pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<TransactionPrefix> {
+pub fn read<R: Read>(r: &mut R) -> io::Result<TransactionPrefix> {
 let mut prefix = TransactionPrefix {
 version: read_varint(r)?,
 timelock: Timelock::from_raw(read_varint(r)?),
-inputs: read_vec(Input::deserialize, r)?,
-outputs: read_vec(Output::deserialize, r)?,
+inputs: read_vec(Input::read, r)?,
+outputs: read_vec(Output::read, r)?,
 extra: vec![],
 };
 prefix.extra = read_vec(read_byte, r)?;

@@ -204,8 +204,8 @@ impl Transaction {
 RctSignatures::fee_weight(protocol, inputs, outputs)
 }

-pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
-self.prefix.serialize(w)?;
+pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+self.prefix.write(w)?;
 if self.prefix.version == 1 {
 for sig in &self.signatures {
 write_scalar(&sig.0, w)?;

@@ -213,14 +213,14 @@ impl Transaction {
 }
 Ok(())
 } else if self.prefix.version == 2 {
-self.rct_signatures.serialize(w)
+self.rct_signatures.write(w)
 } else {
 panic!("Serializing a transaction with an unknown version");
 }
 }

-pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
-let prefix = TransactionPrefix::deserialize(r)?;
+pub fn read<R: Read>(r: &mut R) -> io::Result<Transaction> {
+let prefix = TransactionPrefix::read(r)?;
 let mut signatures = vec![];
 let mut rct_signatures = RctSignatures {
 base: RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] },

@@ -241,7 +241,7 @@ impl Transaction {
 .sum::<u64>()
 .saturating_sub(prefix.outputs.iter().map(|output| output.amount).sum());
 } else if prefix.version == 2 {
-rct_signatures = RctSignatures::deserialize(
+rct_signatures = RctSignatures::read(
 prefix
 .inputs
 .iter()

@@ -254,64 +254,56 @@ impl Transaction {
 r,
 )?;
 } else {
-Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
+Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
 }

 Ok(Transaction { prefix, signatures, rct_signatures })
 }

 pub fn hash(&self) -> [u8; 32] {
-let mut serialized = Vec::with_capacity(2048);
+let mut buf = Vec::with_capacity(2048);
 if self.prefix.version == 1 {
-self.serialize(&mut serialized).unwrap();
-hash(&serialized)
+self.write(&mut buf).unwrap();
+hash(&buf)
 } else {
-let mut sig_hash = Vec::with_capacity(96);
+let mut hashes = Vec::with_capacity(96);

-self.prefix.serialize(&mut serialized).unwrap();
-sig_hash.extend(hash(&serialized));
-serialized.clear();
+self.prefix.write(&mut buf).unwrap();
+hashes.extend(hash(&buf));
+buf.clear();

-self
-.rct_signatures
-.base
-.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
-.unwrap();
-sig_hash.extend(hash(&serialized));
-serialized.clear();
+self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
+hashes.extend(hash(&buf));
+buf.clear();

 match self.rct_signatures.prunable {
-RctPrunable::Null => serialized.resize(32, 0),
+RctPrunable::Null => buf.resize(32, 0),
 _ => {
-self.rct_signatures.prunable.serialize(&mut serialized).unwrap();
-serialized = hash(&serialized).to_vec();
+self.rct_signatures.prunable.write(&mut buf).unwrap();
+buf = hash(&buf).to_vec();
 }
 }
-sig_hash.extend(&serialized);
+hashes.extend(&buf);

-hash(&sig_hash)
+hash(&hashes)
 }
 }

 /// Calculate the hash of this transaction as needed for signing it.
 pub fn signature_hash(&self) -> [u8; 32] {
-let mut serialized = Vec::with_capacity(2048);
+let mut buf = Vec::with_capacity(2048);
 let mut sig_hash = Vec::with_capacity(96);

-self.prefix.serialize(&mut serialized).unwrap();
-sig_hash.extend(hash(&serialized));
-serialized.clear();
+self.prefix.write(&mut buf).unwrap();
+sig_hash.extend(hash(&buf));
+buf.clear();

-self
-.rct_signatures
-.base
-.serialize(&mut serialized, self.rct_signatures.prunable.rct_type())
-.unwrap();
-sig_hash.extend(hash(&serialized));
-serialized.clear();
+self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
+sig_hash.extend(hash(&buf));
+buf.clear();

-self.rct_signatures.prunable.signature_serialize(&mut serialized).unwrap();
-sig_hash.extend(hash(&serialized));
+self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
+sig_hash.extend(hash(&buf));

 hash(&sig_hash)
 }
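A standalone sketch of the hashing structure `hash` implements above for v2 transactions: three component hashes (serialized prefix, RingCT base, RingCT prunable) are concatenated and hashed again, with 32 zero bytes standing in for the prunable hash when it is `Null`. `keccak256` below is a stand-in for the crate's `hash` helper:

```rust
use sha3::{Digest, Keccak256};

fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

// `prunable` is the serialized RctPrunable, or None for RctPrunable::Null
fn v2_tx_hash(prefix: &[u8], rct_base: &[u8], prunable: Option<&[u8]>) -> [u8; 32] {
  let mut hashes = Vec::with_capacity(96);
  hashes.extend(keccak256(prefix));
  hashes.extend(keccak256(rct_base));
  hashes.extend(match prunable {
    Some(bytes) => keccak256(bytes),
    None => [0; 32],
  });
  keccak256(&hashes)
}
```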
@ -24,26 +24,59 @@ pub enum AddressType {
|
||||||
Standard,
|
Standard,
|
||||||
Integrated([u8; 8]),
|
Integrated([u8; 8]),
|
||||||
Subaddress,
|
Subaddress,
|
||||||
Featured(bool, Option<[u8; 8]>, bool),
|
Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||||
|
pub struct SubaddressIndex {
|
||||||
|
pub(crate) account: u32,
|
||||||
|
pub(crate) address: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SubaddressIndex {
|
||||||
|
pub const fn new(account: u32, address: u32) -> Option<SubaddressIndex> {
|
||||||
|
if (account == 0) && (address == 0) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
Some(SubaddressIndex { account, address })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn account(&self) -> u32 {
|
||||||
|
self.account
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn address(&self) -> u32 {
|
||||||
|
self.address
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Address specification. Used internally to create addresses.
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||||
|
pub enum AddressSpec {
|
||||||
|
Standard,
|
||||||
|
Integrated([u8; 8]),
|
||||||
|
Subaddress(SubaddressIndex),
|
||||||
|
Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AddressType {
|
impl AddressType {
|
||||||
pub fn subaddress(&self) -> bool {
|
pub fn is_subaddress(&self) -> bool {
|
||||||
matches!(self, AddressType::Subaddress) || matches!(self, AddressType::Featured(true, ..))
|
matches!(self, AddressType::Subaddress) ||
|
||||||
|
matches!(self, AddressType::Featured { subaddress: true, .. })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn payment_id(&self) -> Option<[u8; 8]> {
|
pub fn payment_id(&self) -> Option<[u8; 8]> {
|
||||||
if let AddressType::Integrated(id) = self {
|
if let AddressType::Integrated(id) = self {
|
||||||
Some(*id)
|
Some(*id)
|
||||||
} else if let AddressType::Featured(_, id, _) = self {
|
} else if let AddressType::Featured { payment_id, .. } = self {
|
||||||
*id
|
*payment_id
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn guaranteed(&self) -> bool {
|
pub fn is_guaranteed(&self) -> bool {
|
||||||
matches!(self, AddressType::Featured(_, _, true))
|
matches!(self, AddressType::Featured { guaranteed: true, .. })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -105,7 +138,7 @@ impl<B: AddressBytes> AddressMeta<B> {
|
||||||
AddressType::Standard => bytes.0,
|
AddressType::Standard => bytes.0,
|
||||||
AddressType::Integrated(_) => bytes.1,
|
AddressType::Integrated(_) => bytes.1,
|
||||||
AddressType::Subaddress => bytes.2,
|
AddressType::Subaddress => bytes.2,
|
||||||
AddressType::Featured(..) => bytes.3,
|
AddressType::Featured { .. } => bytes.3,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -114,7 +147,7 @@ impl<B: AddressBytes> AddressMeta<B> {
|
||||||
AddressMeta { _bytes: PhantomData, network, kind }
|
AddressMeta { _bytes: PhantomData, network, kind }
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns an incomplete type in the case of Integrated/Featured addresses
|
// Returns an incomplete instantiation in the case of Integrated/Featured addresses
|
||||||
fn from_byte(byte: u8) -> Result<Self, AddressError> {
|
fn from_byte(byte: u8) -> Result<Self, AddressError> {
|
||||||
let mut meta = None;
|
let mut meta = None;
|
||||||
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
|
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
|
||||||
|
@ -123,7 +156,9 @@ impl<B: AddressBytes> AddressMeta<B> {
|
||||||
_ if byte == standard => Some(AddressType::Standard),
|
_ if byte == standard => Some(AddressType::Standard),
|
||||||
_ if byte == integrated => Some(AddressType::Integrated([0; 8])),
|
_ if byte == integrated => Some(AddressType::Integrated([0; 8])),
|
||||||
_ if byte == subaddress => Some(AddressType::Subaddress),
|
_ if byte == subaddress => Some(AddressType::Subaddress),
|
||||||
_ if byte == featured => Some(AddressType::Featured(false, None, false)),
|
_ if byte == featured => {
|
||||||
|
Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
|
||||||
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
} {
|
} {
|
||||||
meta = Some(AddressMeta::new(network, kind));
|
meta = Some(AddressMeta::new(network, kind));
|
||||||
|
@ -134,16 +169,16 @@ impl<B: AddressBytes> AddressMeta<B> {
|
||||||
meta.ok_or(AddressError::InvalidByte)
|
meta.ok_or(AddressError::InvalidByte)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn subaddress(&self) -> bool {
|
pub fn is_subaddress(&self) -> bool {
|
||||||
self.kind.subaddress()
|
self.kind.is_subaddress()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn payment_id(&self) -> Option<[u8; 8]> {
|
pub fn payment_id(&self) -> Option<[u8; 8]> {
|
||||||
     self.kind.payment_id()
   }

-  pub fn guaranteed(&self) -> bool {
-    self.kind.guaranteed()
+  pub fn is_guaranteed(&self) -> bool {
+    self.kind.is_guaranteed()
   }
 }

@@ -168,7 +203,7 @@ impl<B: AddressBytes> ToString for Address<B> {
     let mut data = vec![self.meta.to_byte()];
     data.extend(self.spend.compress().to_bytes());
     data.extend(self.view.compress().to_bytes());
-    if let AddressType::Featured(subaddress, payment_id, guaranteed) = self.meta.kind {
+    if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
       // Technically should be a VarInt, yet we don't have enough features it's needed
       data.push(
         u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),

@@ -201,7 +236,7 @@ impl<B: AddressBytes> Address<B> {
       .ok_or(AddressError::InvalidKey)?;
     let mut read = 65;

-    if matches!(meta.kind, AddressType::Featured(..)) {
+    if matches!(meta.kind, AddressType::Featured { .. }) {
       if raw[read] >= (2 << 3) {
         Err(AddressError::UnknownFeatures)?;
       }

@@ -210,8 +245,11 @@ impl<B: AddressBytes> Address<B> {
       let integrated = ((raw[read] >> 1) & 1) == 1;
       let guaranteed = ((raw[read] >> 2) & 1) == 1;

-      meta.kind =
-        AddressType::Featured(subaddress, Some([0; 8]).filter(|_| integrated), guaranteed);
+      meta.kind = AddressType::Featured {
+        subaddress,
+        payment_id: Some([0; 8]).filter(|_| integrated),
+        guaranteed,
+      };
       read += 1;
     }

@@ -226,7 +264,7 @@ impl<B: AddressBytes> Address<B> {
     if let AddressType::Integrated(ref mut id) = meta.kind {
       id.copy_from_slice(&raw[(read - 8) .. read]);
     }
-    if let AddressType::Featured(_, Some(ref mut id), _) = meta.kind {
+    if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
       id.copy_from_slice(&raw[(read - 8) .. read]);
     }

@@ -247,16 +285,16 @@ impl<B: AddressBytes> Address<B> {
     self.meta.network
   }

-  pub fn subaddress(&self) -> bool {
-    self.meta.subaddress()
+  pub fn is_subaddress(&self) -> bool {
+    self.meta.is_subaddress()
   }

   pub fn payment_id(&self) -> Option<[u8; 8]> {
     self.meta.payment_id()
   }

-  pub fn guaranteed(&self) -> bool {
-    self.meta.guaranteed()
+  pub fn is_guaranteed(&self) -> bool {
+    self.meta.is_guaranteed()
   }
 }
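The Featured address variant above packs its three flags into a single byte. A minimal standalone sketch of that packing, assuming only the three features shown in this diff (the helper names are illustrative, not part of the crate's API):

  // Illustrative sketch of the Featured address features byte used above.
  fn pack_features(subaddress: bool, has_payment_id: bool, guaranteed: bool) -> u8 {
    u8::from(subaddress) + (u8::from(has_payment_id) << 1) + (u8::from(guaranteed) << 2)
  }

  fn unpack_features(byte: u8) -> Option<(bool, bool, bool)> {
    // Values of 2 << 3 (16) or above are rejected as unknown features, matching the check above
    if byte >= (2 << 3) {
      return None;
    }
    Some(((byte & 1) == 1, ((byte >> 1) & 1) == 1, ((byte >> 2) & 1) == 1))
  }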
@@ -1,4 +1,6 @@
-use std::{sync::Mutex, collections::HashSet};
+use std::collections::HashSet;
+
+use futures::lock::{Mutex, MutexGuard};

 use lazy_static::lazy_static;

@@ -23,13 +25,16 @@ const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;

 lazy_static! {
   static ref GAMMA: Gamma<f64> = Gamma::new(19.28, 1.0 / 1.61).unwrap();
+  // TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
+  // TODO: Update this when scanning a block, as possible
   static ref DISTRIBUTION: Mutex<Vec<u64>> = Mutex::new(Vec::with_capacity(3000000));
 }

 #[allow(clippy::too_many_arguments)]
-async fn select_n<R: RngCore + CryptoRng>(
+async fn select_n<'a, R: RngCore + CryptoRng>(
   rng: &mut R,
   rpc: &Rpc,
+  distribution: &MutexGuard<'a, Vec<u64>>,
   height: usize,
   high: u64,
   per_second: f64,

@@ -61,7 +66,6 @@ async fn select_n<R: RngCore + CryptoRng>(

     let o = (age * per_second) as u64;
     if o < high {
-      let distribution = DISTRIBUTION.lock().unwrap();
       let i = distribution.partition_point(|s| *s < (high - 1 - o));
       let prev = i.saturating_sub(1);
       let n = distribution[i] - distribution[prev];

@@ -136,6 +140,8 @@ impl Decoys {
     height: usize,
     inputs: &[SpendableOutput],
   ) -> Result<Vec<Decoys>, RpcError> {
+    let mut distribution = DISTRIBUTION.lock().await;
+
     let decoy_count = ring_len - 1;

     // Convert the inputs in question to the raw output data

@@ -146,29 +152,19 @@ impl Decoys {
       outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
     }

-    let distribution_len = {
-      let distribution = DISTRIBUTION.lock().unwrap();
-      distribution.len()
-    };
-    if distribution_len <= height {
-      let extension = rpc.get_output_distribution(distribution_len, height).await?;
-      DISTRIBUTION.lock().unwrap().extend(extension);
+    if distribution.len() <= height {
+      let extension = rpc.get_output_distribution(distribution.len(), height).await?;
+      distribution.extend(extension);
     }
-
-    let high;
-    let per_second;
-    {
-      let mut distribution = DISTRIBUTION.lock().unwrap();
-      // If asked to use an older height than previously asked, truncate to ensure accuracy
-      // Should never happen, yet risks desyncing if it did
-      distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height
-
-      high = distribution[distribution.len() - 1];
-      per_second = {
-        let blocks = distribution.len().min(BLOCKS_PER_YEAR);
-        let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
-        (outputs as f64) / ((blocks * BLOCK_TIME) as f64)
-      };
+    // If asked to use an older height than previously asked, truncate to ensure accuracy
+    // Should never happen, yet risks desyncing if it did
+    distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height
+
+    let high = distribution[distribution.len() - 1];
+    let per_second = {
+      let blocks = distribution.len().min(BLOCKS_PER_YEAR);
+      let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
+      (outputs as f64) / ((blocks * BLOCK_TIME) as f64)
     };

     let mut used = HashSet::<u64>::new();

@@ -184,9 +180,18 @@ impl Decoys {
     // Select all decoys for this transaction, assuming we generate a sane transaction
     // We should almost never naturally generate an insane transaction, hence why this doesn't
     // bother with an overage
-    let mut decoys =
-      select_n(rng, rpc, height, high, per_second, &real, &mut used, inputs.len() * decoy_count)
-        .await?;
+    let mut decoys = select_n(
+      rng,
+      rpc,
+      &distribution,
+      height,
+      high,
+      per_second,
+      &real,
+      &mut used,
+      inputs.len() * decoy_count,
+    )
+    .await?;
     real.zeroize();

     let mut res = Vec::with_capacity(inputs.len());

@@ -224,8 +229,18 @@ impl Decoys {

         // Select new outputs until we have a full sized ring again
         ring.extend(
-          select_n(rng, rpc, height, high, per_second, &[], &mut used, ring_len - ring.len())
-            .await?,
+          select_n(
+            rng,
+            rpc,
+            &distribution,
+            height,
+            high,
+            per_second,
+            &[],
+            &mut used,
+            ring_len - ring.len(),
+          )
+          .await?,
         );
         ring.sort_by(|a, b| a.0.cmp(&b.0));
       }
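select_n above maps a gamma-sampled age, converted to an output count, onto the cumulative output distribution with partition_point. A standalone sketch of that lookup under the same assumptions (the distribution is cumulative per block, high is the newest usable total); the choice within the block is simplified here, where the real code samples it uniformly:

  // Illustrative: resolve a sampled "o outputs ago" into a candidate global output index.
  fn candidate_index(distribution: &[u64], high: u64, o: u64) -> Option<u64> {
    if o >= high {
      return None;
    }
    // First block whose cumulative output count reaches the target
    let i = distribution.partition_point(|s| *s < (high - 1 - o));
    let prev = i.saturating_sub(1);
    let n = distribution[i] - distribution[prev];
    if n == 0 {
      return None;
    }
    // The real selection picks uniformly among the n outputs in this block
    Some(distribution[prev] + (o % n))
  }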
@@ -1,5 +1,5 @@
 use core::ops::BitXor;
-use std::io::{self, Read, Write, Cursor};
+use std::io::{self, Read, Write};

 use zeroize::Zeroize;

@@ -12,8 +12,16 @@ use crate::serialize::{

 pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;

+pub const PAYMENT_ID_MARKER: u8 = 0;
+pub const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1;
+// Used as it's the highest value not interpretable as a continued VarInt
+pub const ARBITRARY_DATA_MARKER: u8 = 127;
+
+// 1 byte is used for the marker
+pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1;
+
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
-pub(crate) enum PaymentId {
+pub enum PaymentId {
   Unencrypted([u8; 32]),
   Encrypted([u8; 8]),
 }

@@ -23,6 +31,7 @@ impl BitXor<[u8; 8]> for PaymentId {

   fn bitxor(self, bytes: [u8; 8]) -> PaymentId {
     match self {
+      // Don't perform the xor since this isn't intended to be encrypted with xor
       PaymentId::Unencrypted(_) => self,
       PaymentId::Encrypted(id) => {
         PaymentId::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())

@@ -32,21 +41,21 @@ impl BitXor<[u8; 8]> for PaymentId {
 }

 impl PaymentId {
-  pub(crate) fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
     match self {
       PaymentId::Unencrypted(id) => {
-        w.write_all(&[0])?;
+        w.write_all(&[PAYMENT_ID_MARKER])?;
         w.write_all(id)?;
       }
       PaymentId::Encrypted(id) => {
-        w.write_all(&[1])?;
+        w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?;
         w.write_all(id)?;
       }
     }
     Ok(())
   }

-  fn deserialize<R: Read>(r: &mut R) -> io::Result<PaymentId> {
+  pub fn read<R: Read>(r: &mut R) -> io::Result<PaymentId> {
     Ok(match read_byte(r)? {
       0 => PaymentId::Unencrypted(read_bytes(r)?),
       1 => PaymentId::Encrypted(read_bytes(r)?),

@@ -57,7 +66,7 @@ impl PaymentId {

 // Doesn't bother with padding nor MinerGate
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub(crate) enum ExtraField {
+pub enum ExtraField {
   PublicKey(EdwardsPoint),
   Nonce(Vec<u8>),
   MergeMining(usize, [u8; 32]),

@@ -65,7 +74,7 @@ pub(crate) enum ExtraField {
 }

 impl ExtraField {
-  fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
     match self {
       ExtraField::PublicKey(key) => {
         w.write_all(&[1])?;

@@ -88,7 +97,7 @@ impl ExtraField {
     Ok(())
   }

-  fn deserialize<R: Read>(r: &mut R) -> io::Result<ExtraField> {
+  pub fn read<R: Read>(r: &mut R) -> io::Result<ExtraField> {
     Ok(match read_byte(r)? {
       1 => ExtraField::PublicKey(read_point(r)?),
       2 => ExtraField::Nonce({

@@ -110,52 +119,50 @@ impl ExtraField {
 }

 #[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
-pub(crate) struct Extra(Vec<ExtraField>);
+pub struct Extra(Vec<ExtraField>);
 impl Extra {
-  pub(crate) fn keys(&self) -> Vec<EdwardsPoint> {
-    let mut keys = Vec::with_capacity(2);
+  pub fn keys(&self) -> Option<(EdwardsPoint, Option<Vec<EdwardsPoint>>)> {
+    let mut key = None;
+    let mut additional = None;
     for field in &self.0 {
       match field.clone() {
-        ExtraField::PublicKey(key) => keys.push(key),
-        ExtraField::PublicKeys(additional) => keys.extend(additional),
+        ExtraField::PublicKey(this_key) => key = key.or(Some(this_key)),
+        ExtraField::PublicKeys(these_additional) => {
+          additional = additional.or(Some(these_additional))
+        }
         _ => (),
       }
     }
-    keys
+    // Don't return any keys if this was non-standard and didn't include the primary key
+    key.map(|key| (key, additional))
   }

-  pub(crate) fn payment_id(&self) -> Option<PaymentId> {
+  pub fn payment_id(&self) -> Option<PaymentId> {
     for field in &self.0 {
       if let ExtraField::Nonce(data) = field {
-        return PaymentId::deserialize(&mut Cursor::new(data)).ok();
+        return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
       }
     }
     None
   }

-  pub(crate) fn data(&self) -> Vec<Vec<u8>> {
-    let mut first = true;
+  pub fn data(&self) -> Vec<Vec<u8>> {
     let mut res = vec![];
     for field in &self.0 {
       if let ExtraField::Nonce(data) = field {
-        // Skip the first Nonce, which should be the payment ID
-        if first {
-          first = false;
-          continue;
+        if data[0] == ARBITRARY_DATA_MARKER {
+          res.push(data[1 ..].to_vec());
         }
-        res.push(data.clone());
       }
     }
     res
   }

-  pub(crate) fn new(mut keys: Vec<EdwardsPoint>) -> Extra {
+  pub(crate) fn new(key: EdwardsPoint, additional: Vec<EdwardsPoint>) -> Extra {
     let mut res = Extra(Vec::with_capacity(3));
-    if !keys.is_empty() {
-      res.push(ExtraField::PublicKey(keys[0]));
-    }
-    if keys.len() > 1 {
-      res.push(ExtraField::PublicKeys(keys.drain(1 ..).collect()));
+    res.push(ExtraField::PublicKey(key));
+    if !additional.is_empty() {
+      res.push(ExtraField::PublicKeys(additional));
     }
     res
   }

@@ -165,29 +172,35 @@ impl Extra {
 }

   #[rustfmt::skip]
-  pub(crate) fn fee_weight(outputs: usize, data: &[Vec<u8>]) -> usize {
+  pub(crate) fn fee_weight(outputs: usize, payment_id: bool, data: &[Vec<u8>]) -> usize {
     // PublicKey, key
     (1 + 32) +
     // PublicKeys, length, additional keys
     (1 + 1 + (outputs.saturating_sub(1) * 32)) +
     // PaymentId (Nonce), length, encrypted, ID
-    (1 + 1 + 1 + 8) +
+    (if payment_id { 1 + 1 + 1 + 8 } else { 0 }) +
     // Nonce, length, data (if existent)
     data.iter().map(|v| 1 + varint_len(v.len()) + v.len()).sum::<usize>()
   }

-  pub(crate) fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
     for field in &self.0 {
-      field.serialize(w)?;
+      field.write(w)?;
     }
     Ok(())
   }

-  pub(crate) fn deserialize<R: Read>(r: &mut R) -> io::Result<Extra> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut buf = vec![];
+    self.write(&mut buf).unwrap();
+    buf
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<Extra> {
     let mut res = Extra(vec![]);
     let mut field;
     while {
-      field = ExtraField::deserialize(r);
+      field = ExtraField::read(r);
       field.is_ok()
     } {
       res.0.push(field.unwrap());
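Arbitrary data now travels in a Nonce field prefixed with ARBITRARY_DATA_MARKER (127), with one byte of the 255-byte nonce budget spent on the marker. A minimal sketch of that framing (standalone helpers, not the crate's API):

  // Illustrative framing of arbitrary data inside a tx_extra nonce.
  const ARBITRARY_DATA_MARKER: u8 = 127;
  const MAX_ARBITRARY_DATA_SIZE: usize = 255 - 1;

  fn encode_arbitrary_data(data: &[u8]) -> Option<Vec<u8>> {
    if data.len() > MAX_ARBITRARY_DATA_SIZE {
      return None;
    }
    let mut nonce = Vec::with_capacity(1 + data.len());
    nonce.push(ARBITRARY_DATA_MARKER);
    nonce.extend_from_slice(data);
    Some(nonce)
  }

  fn decode_arbitrary_data(nonce: &[u8]) -> Option<&[u8]> {
    if nonce.first() == Some(&ARBITRARY_DATA_MARKER) {
      Some(&nonce[1 ..])
    } else {
      None
    }
  }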
@@ -11,21 +11,26 @@ use curve25519_dalek::{

 use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};

-mod extra;
+pub mod extra;
 pub(crate) use extra::{PaymentId, ExtraField, Extra};

+/// Seed creation and parsing functionality.
+pub mod seed;
+
 /// Address encoding and decoding functionality.
 pub mod address;
-use address::{Network, AddressType, AddressMeta, MoneroAddress};
+use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};

 mod scan;
-pub use scan::{ReceivedOutput, SpendableOutput};
+pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};

 pub(crate) mod decoys;
 pub(crate) use decoys::Decoys;

 mod send;
-pub use send::{Fee, TransactionError, SignableTransaction, SignableTransactionBuilder};
+pub use send::{Fee, TransactionError, Change, SignableTransaction, SignableTransactionBuilder};
+#[cfg(feature = "multisig")]
+pub(crate) use send::InternalPayment;
 #[cfg(feature = "multisig")]
 pub use send::TransactionMachine;

@@ -54,19 +59,20 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
 #[allow(non_snake_case)]
 pub(crate) fn shared_key(
   uniqueness: Option<[u8; 32]>,
-  s: &Scalar,
-  P: &EdwardsPoint,
+  ecdh: EdwardsPoint,
   o: usize,
 ) -> (u8, Scalar, [u8; 8]) {
   // 8Ra
-  let mut output_derivation = (s * P).mul_by_cofactor().compress().to_bytes().to_vec();
+  let mut output_derivation = ecdh.mul_by_cofactor().compress().to_bytes().to_vec();
+
+  let mut payment_id_xor = [0; 8];
+  payment_id_xor
+    .copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);

   // || o
   write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();

   let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];
-  let mut payment_id_xor = [0; 8];
-  payment_id_xor
-    .copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);

   // uniqueness ||
   let shared_key = if let Some(uniqueness) = uniqueness {

@@ -106,21 +112,61 @@ impl ViewPair {
     ViewPair { spend, view }
   }

-  pub(crate) fn subaddress(&self, index: (u32, u32)) -> Scalar {
-    if index == (0, 0) {
-      return Scalar::zero();
-    }
+  pub fn spend(&self) -> EdwardsPoint {
+    self.spend
+  }
+
+  pub fn view(&self) -> EdwardsPoint {
+    self.view.deref() * &ED25519_BASEPOINT_TABLE
+  }

+  fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
     hash_to_scalar(&Zeroizing::new(
       [
         b"SubAddr\0".as_ref(),
         Zeroizing::new(self.view.to_bytes()).as_ref(),
-        &index.0.to_le_bytes(),
-        &index.1.to_le_bytes(),
+        &index.account().to_le_bytes(),
+        &index.address().to_le_bytes(),
       ]
       .concat(),
     ))
   }
+
+  fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
+    let scalar = self.subaddress_derivation(index);
+    let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
+    let view = self.view.deref() * spend;
+    (spend, view)
+  }
+
+  /// Returns an address with the provided specification.
+  pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
+    let mut spend = self.spend;
+    let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;
+
+    // construct the address meta
+    let meta = match spec {
+      AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
+      AddressSpec::Integrated(payment_id) => {
+        AddressMeta::new(network, AddressType::Integrated(payment_id))
+      }
+      AddressSpec::Subaddress(index) => {
+        (spend, view) = self.subaddress_keys(index);
+        AddressMeta::new(network, AddressType::Subaddress)
+      }
+      AddressSpec::Featured { subaddress, payment_id, guaranteed } => {
+        if let Some(index) = subaddress {
+          (spend, view) = self.subaddress_keys(index);
+        }
+        AddressMeta::new(
+          network,
+          AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed },
+        )
+      }
+    };
+
+    MoneroAddress::new(meta, spend, view)
+  }
 }

 /// Transaction scanner.
@@ -130,15 +176,14 @@ impl ViewPair {
 #[derive(Clone)]
 pub struct Scanner {
   pair: ViewPair,
-  network: Network,
-  pub(crate) subaddresses: HashMap<CompressedEdwardsY, (u32, u32)>,
+  // Also contains the spend key as None
+  pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
   pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
 }

 impl Zeroize for Scanner {
   fn zeroize(&mut self) {
     self.pair.zeroize();
-    self.network.zeroize();

     // These may not be effective, unfortunately
     for (mut key, mut value) in self.subaddresses.drain() {

@@ -163,59 +208,24 @@ impl ZeroizeOnDrop for Scanner {}

 impl Scanner {
   /// Create a Scanner from a ViewPair.
-  /// The network is used for generating subaddresses.
   /// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
   /// When an output is successfully scanned, the output key MUST be saved to disk.
   /// When a new scanner is created, ALL saved output keys must be passed in to be secure.
   /// If None is passed, a modified shared key derivation is used which is immune to the burning
   /// bug (specifically the Guaranteed feature from Featured Addresses).
-  // TODO: Should this take in a DB access handle to ensure output keys are saved?
-  pub fn from_view(
-    pair: ViewPair,
-    network: Network,
-    burning_bug: Option<HashSet<CompressedEdwardsY>>,
-  ) -> Scanner {
+  pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
     let mut subaddresses = HashMap::new();
-    subaddresses.insert(pair.spend.compress(), (0, 0));
-    Scanner { pair, network, subaddresses, burning_bug }
+    subaddresses.insert(pair.spend.compress(), None);
+    Scanner { pair, subaddresses, burning_bug }
   }

-  /// Return the main address for this view pair.
-  pub fn address(&self) -> MoneroAddress {
-    MoneroAddress::new(
-      AddressMeta::new(
-        self.network,
-        if self.burning_bug.is_none() {
-          AddressType::Featured(false, None, true)
-        } else {
-          AddressType::Standard
-        },
-      ),
-      self.pair.spend,
-      self.pair.view.deref() * &ED25519_BASEPOINT_TABLE,
-    )
-  }
-
-  /// Return the specified subaddress for this view pair.
-  pub fn subaddress(&mut self, index: (u32, u32)) -> MoneroAddress {
-    if index == (0, 0) {
-      return self.address();
-    }
-
-    let spend = self.pair.spend + (&self.pair.subaddress(index) * &ED25519_BASEPOINT_TABLE);
-    self.subaddresses.insert(spend.compress(), index);
-
-    MoneroAddress::new(
-      AddressMeta::new(
-        self.network,
-        if self.burning_bug.is_none() {
-          AddressType::Featured(true, None, true)
-        } else {
-          AddressType::Subaddress
-        },
-      ),
-      spend,
-      self.pair.view.deref() * spend,
-    )
-  }
+  /// Register a subaddress.
+  // There used to be an address function here, yet it wasn't safe. It could generate addresses
+  // incompatible with the Scanner. While we could return None for that, then we have the issue
+  // of runtime failures to generate an address.
+  // Removing that API was the simplest option.
+  pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
+    let (spend, _) = self.pair.subaddress_keys(subaddress);
+    self.subaddresses.insert(spend.compress(), Some(subaddress));
+  }
 }
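With the Scanner's address methods removed, addresses now come from the ViewPair via AddressSpec. A hedged usage sketch; Network::Mainnet and the placeholder `pair` are assumptions for illustration, not taken from this hunk:

  // Illustrative usage of ViewPair::address with the new AddressSpec.
  let standard = pair.address(Network::Mainnet, AddressSpec::Standard);
  let subaddress = SubaddressIndex::new(0, 1)
    .map(|index| pair.address(Network::Mainnet, AddressSpec::Subaddress(index)));
  let guaranteed = pair.address(
    Network::Mainnet,
    AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true },
  );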
@@ -1,4 +1,5 @@
-use std::io::Cursor;
+use core::ops::Deref;
+use std::io::{self, Read, Write};

 use zeroize::{Zeroize, ZeroizeOnDrop};

@@ -10,7 +11,10 @@ use crate::{
   transaction::{Input, Timelock, Transaction},
   block::Block,
   rpc::{Rpc, RpcError},
-  wallet::{PaymentId, Extra, Scanner, uniqueness, shared_key, amount_decryption, commitment_mask},
+  wallet::{
+    PaymentId, Extra, address::SubaddressIndex, Scanner, uniqueness, shared_key, amount_decryption,
+    commitment_mask,
+  },
 };

 /// An absolute output ID, defined as its transaction hash and output index.
@@ -21,14 +25,18 @@ pub struct AbsoluteId {
 }

 impl AbsoluteId {
-  pub fn serialize(&self) -> Vec<u8> {
-    let mut res = Vec::with_capacity(32 + 1);
-    res.extend(self.tx);
-    res.push(self.o);
-    res
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    w.write_all(&self.tx)?;
+    w.write_all(&[self.o])
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<AbsoluteId> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut serialized = Vec::with_capacity(32 + 1);
+    self.write(&mut serialized).unwrap();
+    serialized
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
     Ok(AbsoluteId { tx: read_bytes(r)?, o: read_byte(r)? })
   }
 }

@@ -43,16 +51,20 @@ pub struct OutputData {
 }

 impl OutputData {
-  pub fn serialize(&self) -> Vec<u8> {
-    let mut res = Vec::with_capacity(32 + 32 + 40);
-    res.extend(self.key.compress().to_bytes());
-    res.extend(self.key_offset.to_bytes());
-    res.extend(self.commitment.mask.to_bytes());
-    res.extend(self.commitment.amount.to_le_bytes());
-    res
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    w.write_all(&self.key.compress().to_bytes())?;
+    w.write_all(&self.key_offset.to_bytes())?;
+    w.write_all(&self.commitment.mask.to_bytes())?;
+    w.write_all(&self.commitment.amount.to_le_bytes())
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<OutputData> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut serialized = Vec::with_capacity(32 + 32 + 32 + 8);
+    self.write(&mut serialized).unwrap();
+    serialized
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
     Ok(OutputData {
       key: read_point(r)?,
       key_offset: read_scalar(r)?,

@@ -64,9 +76,8 @@ impl OutputData {
 /// The metadata for an output.
 #[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
 pub struct Metadata {
-  // Does not have to be an Option since the 0 subaddress is the main address
   /// The subaddress this output was sent to.
-  pub subaddress: (u32, u32),
+  pub subaddress: Option<SubaddressIndex>,
   /// The payment ID included with this output.
   /// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't included.
   // Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should

@@ -77,23 +88,42 @@ pub struct Metadata {
 }

 impl Metadata {
-  pub fn serialize(&self) -> Vec<u8> {
-    let mut res = Vec::with_capacity(4 + 4 + 8 + 1);
-    res.extend(self.subaddress.0.to_le_bytes());
-    res.extend(self.subaddress.1.to_le_bytes());
-    res.extend(self.payment_id);
-    res.extend(u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes());
-    for part in &self.arbitrary_data {
-      res.extend([u8::try_from(part.len()).unwrap()]);
-      res.extend(part);
-    }
-    res
+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    if let Some(subaddress) = self.subaddress {
+      w.write_all(&[1])?;
+      w.write_all(&subaddress.account().to_le_bytes())?;
+      w.write_all(&subaddress.address().to_le_bytes())?;
+    } else {
+      w.write_all(&[0])?;
+    }
+    w.write_all(&self.payment_id)?;
+
+    w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
+    for part in &self.arbitrary_data {
+      w.write_all(&[u8::try_from(part.len()).unwrap()])?;
+      w.write_all(part)?;
+    }
+    Ok(())
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Metadata> {
+  pub fn serialize(&self) -> Vec<u8> {
+    let mut serialized = Vec::with_capacity(1 + 8 + 1);
+    self.write(&mut serialized).unwrap();
+    serialized
+  }
+
+  pub fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
+    let subaddress = if read_byte(r)? == 1 {
+      Some(
+        SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
+          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
+      )
+    } else {
+      None
+    };
+
     Ok(Metadata {
-      subaddress: (read_u32(r)?, read_u32(r)?),
+      subaddress,
       payment_id: read_bytes(r)?,
       arbitrary_data: {
         let mut data = vec![];

@@ -132,18 +162,23 @@ impl ReceivedOutput {
     &self.metadata.arbitrary_data
   }

+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    self.absolute.write(w)?;
+    self.data.write(w)?;
+    self.metadata.write(w)
+  }
+
   pub fn serialize(&self) -> Vec<u8> {
-    let mut serialized = self.absolute.serialize();
-    serialized.extend(&self.data.serialize());
-    serialized.extend(&self.metadata.serialize());
+    let mut serialized = vec![];
+    self.write(&mut serialized).unwrap();
     serialized
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<ReceivedOutput> {
+  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
     Ok(ReceivedOutput {
-      absolute: AbsoluteId::deserialize(r)?,
-      data: OutputData::deserialize(r)?,
-      metadata: Metadata::deserialize(r)?,
+      absolute: AbsoluteId::read(r)?,
+      data: OutputData::read(r)?,
+      metadata: Metadata::read(r)?,
     })
   }
 }

@@ -184,14 +219,19 @@ impl SpendableOutput {
     self.output.commitment()
   }

+  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
+    self.output.write(w)?;
+    w.write_all(&self.global_index.to_le_bytes())
+  }
+
   pub fn serialize(&self) -> Vec<u8> {
-    let mut serialized = self.output.serialize();
-    serialized.extend(self.global_index.to_le_bytes());
+    let mut serialized = vec![];
+    self.write(&mut serialized).unwrap();
     serialized
   }

-  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<SpendableOutput> {
-    Ok(SpendableOutput { output: ReceivedOutput::deserialize(r)?, global_index: read_u64(r)? })
+  pub fn read<R: Read>(r: &mut R) -> io::Result<SpendableOutput> {
+    Ok(SpendableOutput { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
   }
 }

@@ -232,14 +272,19 @@ impl<O: Clone + Zeroize> Timelocked<O> {
 impl Scanner {
   /// Scan a transaction to discover the received outputs.
   pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
-    let extra = Extra::deserialize(&mut Cursor::new(&tx.prefix.extra));
-    let keys;
+    let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
     let extra = if let Ok(extra) = extra {
-      keys = extra.keys();
       extra
     } else {
       return Timelocked(tx.prefix.timelock, vec![]);
     };

+    let (tx_key, additional) = if let Some((tx_key, additional)) = extra.keys() {
+      (tx_key, additional)
+    } else {
+      return Timelocked(tx.prefix.timelock, vec![]);
+    };
+
     let payment_id = extra.payment_id();

     let mut res = vec![];

@@ -257,11 +302,22 @@ impl Scanner {
       }
       let output_key = output_key.unwrap();

-      for key in &keys {
+      for key in [Some(Some(&tx_key)), additional.as_ref().map(|additional| additional.get(o))] {
+        let key = if let Some(Some(key)) = key {
+          key
+        } else if let Some(None) = key {
+          // This is non-standard. There were additional keys, yet not one for this output
+          // https://github.com/monero-project/monero/
+          // blob/04a1e2875d6e35e27bb21497988a6c822d319c28/
+          // src/cryptonote_basic/cryptonote_format_utils.cpp#L1062
+          // TODO: Should this return? Where does Monero set the trap handler for this exception?
+          continue;
+        } else {
+          break;
+        };
         let (view_tag, shared_key, payment_id_xor) = shared_key(
           if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
-          &self.pair.view,
-          key,
+          self.pair.view.deref() * key,
           o,
         );

@@ -291,9 +347,12 @@ impl Scanner {
         // We will not have a torsioned key in our HashMap of keys, so we wouldn't identify it as
         // ours
         // If we did though, it'd enable bypassing the included burning bug protection
-        debug_assert!(output_key.is_torsion_free());
+        assert!(output_key.is_torsion_free());

-        let key_offset = shared_key + self.pair.subaddress(subaddress);
+        let mut key_offset = shared_key;
+        if let Some(subaddress) = subaddress {
+          key_offset += self.pair.subaddress_derivation(subaddress);
+        }
         // Since we've found an output to us, get its amount
         let mut commitment = Commitment::zero();
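The new write/read pairs replace the old serialize/deserialize methods and should round-trip. A small sketch, assuming `output` is a ReceivedOutput returned by scan_transaction:

  // Illustrative round-trip through the new serialization API.
  let bytes = output.serialize();
  let parsed = ReceivedOutput::read::<&[u8]>(&mut bytes.as_ref())
    .expect("serialized output should deserialize");
  assert_eq!(output, parsed);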
coins/monero/src/wallet/seed/classic.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
use core::ops::Deref;
use std::collections::HashMap;

use lazy_static::lazy_static;

use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use crc::{Crc, CRC_32_ISO_HDLC};

use curve25519_dalek::scalar::Scalar;

use crate::{
  random_scalar,
  wallet::seed::{SeedError, Language},
};

pub(crate) const CLASSIC_SEED_LENGTH: usize = 24;
pub(crate) const CLASSIC_SEED_LENGTH_WITH_CHECKSUM: usize = 25;

fn trim(word: &str, len: usize) -> Zeroizing<String> {
  Zeroizing::new(word.chars().take(len).collect())
}

struct WordList {
  word_list: Vec<String>,
  word_map: HashMap<String, usize>,
  trimmed_word_map: HashMap<String, usize>,
  unique_prefix_length: usize,
}

impl WordList {
  fn new(words: &'static str, prefix_length: usize) -> WordList {
    let mut lang = WordList {
      word_list: serde_json::from_str(words).unwrap(),
      word_map: HashMap::new(),
      trimmed_word_map: HashMap::new(),
      unique_prefix_length: prefix_length,
    };

    for (i, word) in lang.word_list.iter().enumerate() {
      lang.word_map.insert(word.clone(), i);
      lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
    }

    lang
  }
}

lazy_static! {
  static ref LANGUAGES: HashMap<Language, WordList> = HashMap::from([
    (Language::Chinese, WordList::new(include_str!("./classic/zh.json"), 1)),
    (Language::English, WordList::new(include_str!("./classic/en.json"), 3)),
    (Language::Dutch, WordList::new(include_str!("./classic/nl.json"), 4)),
    (Language::French, WordList::new(include_str!("./classic/fr.json"), 4)),
    (Language::Spanish, WordList::new(include_str!("./classic/es.json"), 4)),
    (Language::German, WordList::new(include_str!("./classic/de.json"), 4)),
    (Language::Italian, WordList::new(include_str!("./classic/it.json"), 4)),
    (Language::Portuguese, WordList::new(include_str!("./classic/pt.json"), 4)),
    (Language::Japanese, WordList::new(include_str!("./classic/ja.json"), 3)),
    (Language::Russian, WordList::new(include_str!("./classic/ru.json"), 4)),
    (Language::Esperanto, WordList::new(include_str!("./classic/eo.json"), 4)),
    (Language::Lojban, WordList::new(include_str!("./classic/jbo.json"), 4)),
    (Language::EnglishOld, WordList::new(include_str!("./classic/ang.json"), 4)),
  ]);
}

#[cfg(test)]
pub(crate) fn trim_by_lang(word: &str, lang: Language) -> String {
  if lang != Language::EnglishOld {
    word.chars().take(LANGUAGES[&lang].unique_prefix_length).collect()
  } else {
    word.to_string()
  }
}

fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
  let mut trimmed_words = Zeroizing::new(String::new());
  for w in words {
    *trimmed_words += &trim(w, lang.unique_prefix_length);
  }

  let crc = Crc::<u32>::new(&CRC_32_ISO_HDLC);
  let mut digest = crc.digest();
  digest.update(trimmed_words.as_bytes());

  usize::try_from(digest.finalize()).unwrap() % words.len()
}

// Convert a private key to a seed
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
  let bytes = Zeroizing::new(key.to_bytes());

  // get the language words
  let words = &LANGUAGES[&lang].word_list;
  let list_len = u64::try_from(words.len()).unwrap();

  // To store the found words & add the checksum word later.
  let mut seed = Vec::with_capacity(25);

  // convert to words
  // 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
  let mut segment = [0; 4];
  let mut indices = [0; 4];
  for i in 0 .. 8 {
    // convert first 4 byte to u32 & get the word indices
    let start = i * 4;
    // convert 4 byte to u32
    segment.copy_from_slice(&bytes[start .. (start + 4)]);
    // Actually convert to a u64 so we can add without overflowing
    indices[0] = u64::from(u32::from_le_bytes(segment));
    indices[1] = indices[0];
    indices[0] /= list_len;
    indices[2] = indices[0] + indices[1];
    indices[0] /= list_len;
    indices[3] = indices[0] + indices[2];

    // append words to seed
    for i in indices.iter().skip(1) {
      let word = usize::try_from(i % list_len).unwrap();
      seed.push(Zeroizing::new(words[word].clone()));
    }
  }
  segment.zeroize();
  indices.zeroize();

  // create a checksum word for all languages except old english
  if lang != Language::EnglishOld {
    let checksum = seed[checksum_index(&seed, &LANGUAGES[&lang])].clone();
    seed.push(checksum);
  }

  let mut res = Zeroizing::new(String::new());
  for (i, word) in seed.iter().enumerate() {
    if i != 0 {
      *res += " ";
    }
    *res += word;
  }
  ClassicSeed(res)
}

// Convert a seed to bytes
pub(crate) fn seed_to_bytes(words: &str) -> Result<(Language, Zeroizing<[u8; 32]>), SeedError> {
  // get seed words
  let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
  if (words.len() != CLASSIC_SEED_LENGTH) && (words.len() != CLASSIC_SEED_LENGTH_WITH_CHECKSUM) {
    panic!("invalid seed passed to seed_to_bytes");
  }

  // find the language
  let (matched_indices, lang_name, lang) = (|| {
    let has_checksum = words.len() == CLASSIC_SEED_LENGTH_WITH_CHECKSUM;
    let mut matched_indices = Zeroizing::new(vec![]);

    // Iterate through all the languages
    'language: for (lang_name, lang) in LANGUAGES.iter() {
      matched_indices.zeroize();
      matched_indices.clear();

      let map_in_use = if has_checksum { &lang.trimmed_word_map } else { &lang.word_map };

      // Iterate through all the words and see if they're all present
      for word in &words {
        let trimmed = trim(word, lang.unique_prefix_length);
        let word = if has_checksum { &trimmed } else { word };

        if let Some(index) = map_in_use.get(word.deref()) {
          matched_indices.push(*index);
        } else {
          continue 'language;
        }
      }

      if has_checksum {
        if lang_name == &Language::EnglishOld {
          Err(SeedError::EnglishOldWithChecksum)?;
        }

        // exclude the last word when calculating a checksum.
        let last_word = words.last().unwrap().clone();
        let checksum = words[checksum_index(&words[.. words.len() - 1], lang)].clone();

        // check the trimmed checksum and trimmed last word line up
        if trim(&checksum, lang.unique_prefix_length) != trim(&last_word, lang.unique_prefix_length)
        {
          Err(SeedError::InvalidChecksum)?;
        }
      }

      return Ok((matched_indices, lang_name, lang));
    }

    Err(SeedError::UnknownLanguage)?
  })()?;

  // convert to bytes
  let mut res = Zeroizing::new([0; 32]);
  let mut indices = Zeroizing::new([0; 4]);
  for i in 0 .. 8 {
    // read 3 indices at a time
    let i3 = i * 3;
    indices[1] = matched_indices[i3];
    indices[2] = matched_indices[i3 + 1];
    indices[3] = matched_indices[i3 + 2];

    let inner = |i| {
      let mut base = (lang.word_list.len() - indices[i] + indices[i + 1]) % lang.word_list.len();
      // Shift the index over
      for _ in 0 .. i {
        base *= lang.word_list.len();
      }
      base
    };
    // set the last index
    indices[0] = indices[1] + inner(1) + inner(2);
    if (indices[0] % lang.word_list.len()) != indices[1] {
      Err(SeedError::InvalidSeed)?;
    }

    let pos = i * 4;
    let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
    res[pos .. (pos + 4)].copy_from_slice(&bytes);
    bytes.zeroize();
  }

  Ok((*lang_name, res))
}

#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct ClassicSeed(Zeroizing<String>);
impl ClassicSeed {
  pub(crate) fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> ClassicSeed {
    key_to_seed(lang, Zeroizing::new(random_scalar(rng)))
  }

  pub fn from_string(words: Zeroizing<String>) -> Result<ClassicSeed, SeedError> {
    let (lang, entropy) = seed_to_bytes(&words)?;

    // Make sure this is a valid scalar
    let mut scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none() {
      Err(SeedError::InvalidSeed)?;
    }
    scalar.zeroize();

    // Call from_entropy so a trimmed seed becomes a full seed
    Ok(Self::from_entropy(lang, entropy).unwrap())
  }

  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<ClassicSeed> {
    Scalar::from_canonical_bytes(*entropy).map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
  }

  pub(crate) fn to_string(&self) -> Zeroizing<String> {
    self.0.clone()
  }

  pub(crate) fn entropy(&self) -> Zeroizing<[u8; 32]> {
    seed_to_bytes(&self.0).unwrap().1
  }
}
coins/monero/src/wallet/seed/classic/ang.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/de.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/en.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/eo.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/es.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/fr.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/it.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/ja.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/jbo.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/nl.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/pt.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/ru.json | 1628 (new file; diff suppressed because it is too large)
coins/monero/src/wallet/seed/classic/zh.json | 1628 (new file; diff suppressed because it is too large)

coins/monero/src/wallet/seed/mod.rs | 92 (new file)
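The classic seed code above maps each 4-byte chunk of the private key to three word indices in a 1626-word list. A minimal sketch of that arithmetic for one chunk (the helper name is illustrative):

  // Illustrative: one 4-byte segment becomes three word-list indices (n = word list length).
  fn segment_to_words(segment: [u8; 4], n: u64) -> [u64; 3] {
    let val = u64::from(u32::from_le_bytes(segment));
    let w1 = val % n;
    let w2 = ((val / n) + w1) % n;
    let w3 = (((val / n) / n) + w2) % n;
    [w1, w2, w3]
  }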
@@ -0,0 +1,92 @@
use core::fmt;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use thiserror::Error;

pub(crate) mod classic;
use classic::{CLASSIC_SEED_LENGTH, CLASSIC_SEED_LENGTH_WITH_CHECKSUM, ClassicSeed};

/// Error when decoding a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum SeedError {
  #[error("invalid number of words in seed")]
  InvalidSeedLength,
  #[error("unknown language")]
  UnknownLanguage,
  #[error("invalid checksum")]
  InvalidChecksum,
  #[error("english old seeds don't support checksums")]
  EnglishOldWithChecksum,
  #[error("invalid seed")]
  InvalidSeed,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Language {
  Chinese,
  English,
  Dutch,
  French,
  Spanish,
  German,
  Italian,
  Portuguese,
  Japanese,
  Russian,
  Esperanto,
  Lojban,
  EnglishOld,
}

/// A Monero seed.
// TODO: Add polyseed to enum
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub enum Seed {
  Classic(ClassicSeed),
}

impl fmt::Debug for Seed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      Seed::Classic(_) => f.debug_struct("Seed::Classic").finish_non_exhaustive(),
    }
  }
}

impl Seed {
  /// Create a new seed.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Seed {
    Seed::Classic(ClassicSeed::new(rng, lang))
  }

  /// Parse a seed from a String.
  pub fn from_string(words: Zeroizing<String>) -> Result<Seed, SeedError> {
    match words.split_whitespace().count() {
      CLASSIC_SEED_LENGTH | CLASSIC_SEED_LENGTH_WITH_CHECKSUM => {
        ClassicSeed::from_string(words).map(Seed::Classic)
      }
      _ => Err(SeedError::InvalidSeedLength)?,
    }
  }

  /// Create a Seed from entropy.
  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Seed> {
    ClassicSeed::from_entropy(lang, entropy).map(Seed::Classic)
  }

  /// Convert a seed to a String.
  pub fn to_string(&self) -> Zeroizing<String> {
    match self {
      Seed::Classic(seed) => seed.to_string(),
    }
  }

  /// Return the entropy for this seed.
  pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
    match self {
      Seed::Classic(seed) => seed.entropy(),
    }
  }
}
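A hedged usage sketch of the Seed API added above; OsRng is assumed to be available via rand_core's getrandom feature:

  // Illustrative: generate, round-trip, and compare a classic seed.
  use rand_core::OsRng;

  let seed = Seed::new(&mut OsRng, Language::English);
  let words = seed.to_string();
  let recovered = Seed::from_string(words).expect("freshly generated seed should parse");
  assert_eq!(*seed.entropy(), *recovered.entropy());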
@@ -5,8 +5,8 @@ use zeroize::{Zeroize, ZeroizeOnDrop};
 use crate::{
   Protocol,
   wallet::{
-    address::MoneroAddress, Fee, SpendableOutput, SignableTransaction, TransactionError,
-    extra::MAX_TX_EXTRA_NONCE_SIZE,
+    address::MoneroAddress, Fee, SpendableOutput, Change, SignableTransaction, TransactionError,
+    extra::MAX_ARBITRARY_DATA_SIZE,
   },
 };

@@ -17,14 +17,14 @@ struct SignableTransactionBuilderInternal {
   inputs: Vec<SpendableOutput>,
   payments: Vec<(MoneroAddress, u64)>,
-  change_address: Option<MoneroAddress>,
+  change_address: Option<Change>,
   data: Vec<Vec<u8>>,
 }

 impl SignableTransactionBuilderInternal {
   // Takes in the change address so users don't miss that they have to manually set one
   // If they don't, all leftover funds will become part of the fee
-  fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
+  fn new(protocol: Protocol, fee: Fee, change_address: Option<Change>) -> Self {
     Self { protocol, fee, inputs: vec![], payments: vec![], change_address, data: vec![] }
   }

@@ -77,7 +77,7 @@ impl SignableTransactionBuilder {
     Self(self.0.clone())
   }

-  pub fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
+  pub fn new(protocol: Protocol, fee: Fee, change_address: Option<Change>) -> Self {
     Self(Arc::new(RwLock::new(SignableTransactionBuilderInternal::new(
       protocol,
       fee,

@@ -104,7 +104,7 @@ impl SignableTransactionBuilder {
   }

   pub fn add_data(&mut self, data: Vec<u8>) -> Result<Self, TransactionError> {
-    if data.len() > MAX_TX_EXTRA_NONCE_SIZE {
+    if data.len() > MAX_ARBITRARY_DATA_SIZE {
       Err(TransactionError::TooMuchData)?;
     }
     self.0.write().unwrap().add_data(data);

@@ -117,7 +117,7 @@ impl SignableTransactionBuilder {
       read.protocol,
       read.inputs.clone(),
       read.payments.clone(),
-      read.change_address,
+      read.change_address.clone(),
       read.data.clone(),
       read.fee,
     )
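With this change the builder takes a Change specification instead of a bare change address. A minimal usage sketch, mirroring how the test macro later in this diff drives the builder; protocol, fee, view_pair, input, and addr are assumed to already be in scope:

  let mut builder =
    SignableTransactionBuilder::new(protocol, fee, Some(Change::new(&view_pair, false)));
  builder.add_input(input);
  builder.add_payment(addr, 5);
  let signable = builder.build().unwrap();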
@@ -1,4 +1,4 @@
-use core::ops::Deref;
+use core::{ops::Deref, fmt};

 use thiserror::Error;

@@ -7,7 +7,13 @@ use rand::seq::SliceRandom;

 use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

-use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
+use group::Group;
+use curve25519_dalek::{
+  constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
+  scalar::Scalar,
+  edwards::EdwardsPoint,
+};
+use dalek_ff_group as dfg;

 #[cfg(feature = "multisig")]
 use frost::FrostError;

@@ -23,8 +29,10 @@ use crate::{
   transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
   rpc::{Rpc, RpcError},
   wallet::{
-    address::MoneroAddress, SpendableOutput, Decoys, PaymentId, ExtraField, Extra, key_image_sort,
-    uniqueness, shared_key, commitment_mask, amount_encryption, extra::MAX_TX_EXTRA_NONCE_SIZE,
+    address::{Network, AddressSpec, MoneroAddress},
+    ViewPair, SpendableOutput, Decoys, PaymentId, ExtraField, Extra, key_image_sort, uniqueness,
+    shared_key, commitment_mask, amount_encryption,
+    extra::{ARBITRARY_DATA_MARKER, MAX_ARBITRARY_DATA_SIZE},
   },
 };

@@ -47,25 +55,22 @@ struct SendOutput {
 }

 impl SendOutput {
-  fn new<R: RngCore + CryptoRng>(
-    rng: &mut R,
+  #[allow(non_snake_case)]
+  fn internal(
     unique: [u8; 32],
     output: (usize, (MoneroAddress, u64)),
+    ecdh: EdwardsPoint,
+    R: EdwardsPoint,
   ) -> (SendOutput, Option<[u8; 8]>) {
     let o = output.0;
     let output = output.1;

-    let r = random_scalar(rng);
     let (view_tag, shared_key, payment_id_xor) =
-      shared_key(Some(unique).filter(|_| output.0.meta.kind.guaranteed()), &r, &output.0.view, o);
+      shared_key(Some(unique).filter(|_| output.0.is_guaranteed()), ecdh, o);

     (
       SendOutput {
-        R: if !output.0.meta.kind.subaddress() {
-          &r * &ED25519_BASEPOINT_TABLE
-        } else {
-          r * output.0.spend
-        },
+        R,
         view_tag,
         dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + output.0.spend),
         commitment: Commitment::new(commitment_mask(shared_key), output.1),

@@ -77,6 +82,32 @@ impl SendOutput {
         .map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()),
     )
   }
+
+  fn new(
+    r: &Zeroizing<Scalar>,
+    unique: [u8; 32],
+    output: (usize, (MoneroAddress, u64)),
+  ) -> (SendOutput, Option<[u8; 8]>) {
+    let address = output.1 .0;
+    SendOutput::internal(
+      unique,
+      output,
+      r.deref() * address.view,
+      if !address.is_subaddress() {
+        r.deref() * &ED25519_BASEPOINT_TABLE
+      } else {
+        r.deref() * address.spend
+      },
+    )
+  }
+
+  fn change(
+    ecdh: EdwardsPoint,
+    unique: [u8; 32],
+    output: (usize, (MoneroAddress, u64)),
+  ) -> (SendOutput, Option<[u8; 8]>) {
+    SendOutput::internal(unique, output, ecdh, ED25519_BASEPOINT_POINT)
+  }
 }

 #[derive(Clone, PartialEq, Eq, Debug, Error)]

@@ -93,6 +124,8 @@ pub enum TransactionError {
   TooManyOutputs,
   #[error("too much data")]
   TooMuchData,
+  #[error("too many inputs/too much arbitrary data")]
+  TooLargeTransaction,
   #[error("not enough funds (in {0}, out {1})")]
   NotEnoughFunds(u64, u64),
   #[error("wrong spend private key")]
@@ -176,26 +209,71 @@ impl Fee {
 pub struct SignableTransaction {
   protocol: Protocol,
   inputs: Vec<SpendableOutput>,
-  payments: Vec<(MoneroAddress, u64)>,
+  payments: Vec<InternalPayment>,
   data: Vec<Vec<u8>>,
   fee: u64,
 }

+/// Specification for a change output.
+#[derive(Clone, PartialEq, Eq, Zeroize)]
+pub struct Change {
+  address: MoneroAddress,
+  view: Option<Zeroizing<Scalar>>,
+}
+
+impl fmt::Debug for Change {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    f.debug_struct("Change").field("address", &self.address).finish_non_exhaustive()
+  }
+}
+
+impl Change {
+  /// Create a change output specification from a ViewPair, as needed to maintain privacy.
+  pub fn new(view: &ViewPair, guaranteed: bool) -> Change {
+    Change {
+      address: view.address(
+        Network::Mainnet,
+        if !guaranteed {
+          AddressSpec::Standard
+        } else {
+          AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true }
+        },
+      ),
+      view: Some(view.view.clone()),
+    }
+  }
+
+  /// Create a fingerprintable change output specification which will harm privacy. Only use this
+  /// if you know what you're doing.
+  pub fn fingerprintable(address: MoneroAddress) -> Change {
+    Change { address, view: None }
+  }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
+pub(crate) enum InternalPayment {
+  Payment((MoneroAddress, u64)),
+  Change(Change, u64),
+}
+
 impl SignableTransaction {
-  /// Create a signable transaction. If the change address is specified, leftover funds will be
-  /// sent to it. If the change address isn't specified, up to 16 outputs may be specified, using
-  /// any leftover funds as a bonus to the fee. The optional data field will be embedded in TX
-  /// extra.
+  /// Create a signable transaction.
+  ///
+  /// Up to 16 outputs may be present, including the change output.
+  ///
+  /// If the change address is specified, leftover funds will be sent to it.
+  ///
+  /// Each chunk of data must not exceed MAX_ARBITRARY_DATA_SIZE.
   pub fn new(
     protocol: Protocol,
     inputs: Vec<SpendableOutput>,
     mut payments: Vec<(MoneroAddress, u64)>,
-    change_address: Option<MoneroAddress>,
+    change_address: Option<Change>,
     data: Vec<Vec<u8>>,
     fee_rate: Fee,
   ) -> Result<SignableTransaction, TransactionError> {
     // Make sure there's only one payment ID
-    {
+    let mut has_payment_id = {
       let mut payment_ids = 0;
       let mut count = |addr: MoneroAddress| {
         if addr.payment_id().is_some() {

@@ -205,13 +283,14 @@ impl SignableTransaction {
       for payment in &payments {
         count(payment.0);
       }
-      if let Some(change) = change_address {
-        count(change);
+      if let Some(change) = change_address.as_ref() {
+        count(change.address);
       }
       if payment_ids > 1 {
         Err(TransactionError::MultiplePaymentIds)?;
       }
-    }
+      payment_ids == 1
+    };

     if inputs.is_empty() {
       Err(TransactionError::NoInputs)?;

@@ -221,55 +300,57 @@ impl SignableTransaction {
     }

     for part in &data {
-      if part.len() > MAX_TX_EXTRA_NONCE_SIZE {
+      if part.len() > MAX_ARBITRARY_DATA_SIZE {
         Err(TransactionError::TooMuchData)?;
       }
     }

-    // TODO TX MAX SIZE
-    // If we don't have two outputs, as required by Monero, add a second
-    let mut change = payments.len() == 1;
-    if change && change_address.is_none() {
+    // If we don't have two outputs, as required by Monero, error
+    if (payments.len() == 1) && change_address.is_none() {
       Err(TransactionError::NoChange)?;
     }
-    let outputs = payments.len() + usize::from(change);
+    let outputs = payments.len() + usize::from(change_address.is_some());
+    // Add a dummy payment ID if there's only 2 payments
+    has_payment_id |= outputs == 2;

     // Calculate the extra length
-    let extra = Extra::fee_weight(outputs, data.as_ref());
+    let extra = Extra::fee_weight(outputs, has_payment_id, data.as_ref());
+
+    // This is a extremely heavy fee weight estimation which can only be trusted for two things
+    // 1) Ensuring we have enough for whatever fee we end up using
+    // 2) Ensuring we aren't over the max size
+    let estimated_tx_size = Transaction::fee_weight(protocol, inputs.len(), outputs, extra);
+
+    // The actual limit is half the block size, and for the minimum block size of 300k, that'd be
+    // 150k
+    // wallet2 will only create transactions up to 100k bytes however
+    const MAX_TX_SIZE: usize = 100_000;
+
+    // This uses the weight (estimated_tx_size) despite the BP clawback
+    // The clawback *increases* the weight, so this will over-estimate, yet it's still safe
+    if estimated_tx_size >= MAX_TX_SIZE {
+      Err(TransactionError::TooLargeTransaction)?;
+    }

     // Calculate the fee.
-    let mut fee =
-      fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs, extra));
+    let fee = fee_rate.calculate(estimated_tx_size);

     // Make sure we have enough funds
     let in_amount = inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
-    let mut out_amount = payments.iter().map(|payment| payment.1).sum::<u64>() + fee;
+    let out_amount = payments.iter().map(|payment| payment.1).sum::<u64>() + fee;
     if in_amount < out_amount {
       Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
     }

-    // If we have yet to add a change output, do so if it's economically viable
-    if (!change) && change_address.is_some() && (in_amount != out_amount) {
-      // Check even with the new fee, there's remaining funds
-      let change_fee =
-        fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs + 1, extra)) -
-          fee;
-      if (out_amount + change_fee) < in_amount {
-        change = true;
-        out_amount += change_fee;
-        fee += change_fee;
-      }
-    }
-
-    if change {
-      payments.push((change_address.unwrap(), in_amount - out_amount));
-    }
-
-    if payments.len() > MAX_OUTPUTS {
+    if outputs > MAX_OUTPUTS {
       Err(TransactionError::TooManyOutputs)?;
     }

+    let mut payments = payments.drain(..).map(InternalPayment::Payment).collect::<Vec<_>>();
+    if let Some(change) = change_address {
+      payments.push(InternalPayment::Change(change, in_amount - out_amount));
+    }
+
     Ok(SignableTransaction { protocol, inputs, payments, data, fee })
   }

@@ -281,24 +362,109 @@ impl SignableTransaction {
     // Shuffle the payments
     self.payments.shuffle(rng);

+    // Used for all non-subaddress outputs, or if there's only one subaddress output and a change
+    let tx_key = Zeroizing::new(random_scalar(rng));
+    let mut tx_public_key = tx_key.deref() * &ED25519_BASEPOINT_TABLE;
+
+    // If any of these outputs are to a subaddress, we need keys distinct to them
+    // The only time this *does not* force having additional keys is when the only other output
+    // is a change output we have the view key for, enabling rewriting rA to aR
+    let mut has_change_view = false;
+    let subaddresses = self
+      .payments
+      .iter()
+      .filter(|payment| match *payment {
+        InternalPayment::Payment(payment) => payment.0.is_subaddress(),
+        InternalPayment::Change(change, _) => {
+          if change.view.is_some() {
+            has_change_view = true;
+            // It should not be possible to construct a change specification to a subaddress with a
+            // view key
+            debug_assert!(!change.address.is_subaddress());
+          }
+          change.address.is_subaddress()
+        }
+      })
+      .count() !=
+      0;
+
+    // We need additional keys if we have any subaddresses
+    let mut additional = subaddresses;
+    // Unless the above change view key path is taken
+    if (self.payments.len() == 2) && has_change_view {
+      additional = false;
+    }
+    let modified_change_ecdh = subaddresses && (!additional);
+
+    // If we're using the aR rewrite, update tx_public_key from rG to rB
+    if modified_change_ecdh {
+      for payment in &self.payments {
+        match payment {
+          InternalPayment::Payment(payment) => {
+            // This should be the only payment and it should be a subaddress
+            debug_assert!(payment.0.is_subaddress());
+            tx_public_key = tx_key.deref() * payment.0.spend;
+          }
+          InternalPayment::Change(_, _) => {}
+        }
+      }
+      debug_assert!(tx_public_key != (tx_key.deref() * &ED25519_BASEPOINT_TABLE));
+    }
+
     // Actually create the outputs
     let mut outputs = Vec::with_capacity(self.payments.len());
     let mut id = None;
-    for payment in self.payments.drain(..).enumerate() {
-      let (output, payment_id) = SendOutput::new(rng, uniqueness, payment);
+    for (o, mut payment) in self.payments.drain(..).enumerate() {
+      // Downcast the change output to a payment output if it doesn't require special handling
+      // regarding it's view key
+      payment = if !modified_change_ecdh {
+        if let InternalPayment::Change(change, amount) = &payment {
+          InternalPayment::Payment((change.address, *amount))
+        } else {
+          payment
+        }
+      } else {
+        payment
+      };
+
+      let (output, payment_id) = match payment {
+        InternalPayment::Payment(payment) => {
+          // If this is a subaddress, generate a dedicated r. Else, reuse the TX key
+          let dedicated = Zeroizing::new(random_scalar(&mut *rng));
+          let use_dedicated = additional && payment.0.is_subaddress();
+          let r = if use_dedicated { &dedicated } else { &tx_key };
+
+          let (mut output, payment_id) = SendOutput::new(r, uniqueness, (o, payment));
+          if modified_change_ecdh {
+            debug_assert_eq!(tx_public_key, output.R);
+          }
+          // If this used tx_key, randomize its R
+          if !use_dedicated {
+            output.R = dfg::EdwardsPoint::random(&mut *rng).0;
+          }
+          (output, payment_id)
+        }
+        InternalPayment::Change(change, amount) => {
+          // Instead of rA, use Ra, where R is r * subaddress_spend_key
+          // change.view must be Some as if it's None, this payment would've been downcast
+          let ecdh = tx_public_key * change.view.unwrap().deref();
+          SendOutput::change(ecdh, uniqueness, (o, (change.address, amount)))
+        }
+      };
+
       outputs.push(output);
       id = id.or(payment_id);
     }

     // Include a random payment ID if we don't actually have one
     // It prevents transactions from leaking if they're sending to integrated addresses or not
-    let id = if let Some(id) = id {
-      id
-    } else {
-      let mut id = [0; 8];
-      rng.fill_bytes(&mut id);
-      id
-    };
+    // Only do this if we only have two outputs though, as Monero won't add a dummy if there's
+    // more than two outputs
+    if outputs.len() <= 2 {
+      let mut rand = [0; 8];
+      rng.fill_bytes(&mut rand);
+      id = id.or(Some(rand));
+    }

     let commitments = outputs.iter().map(|output| output.commitment.clone()).collect::<Vec<_>>();
     let sum = commitments.iter().map(|commitment| commitment.mask).sum();

@@ -308,19 +474,27 @@ impl SignableTransaction {

     // Create the TX extra
     let extra = {
-      let mut extra = Extra::new(outputs.iter().map(|output| output.R).collect());
+      let mut extra = Extra::new(
+        tx_public_key,
+        if additional { outputs.iter().map(|output| output.R).collect() } else { vec![] },
+      );

-      let mut id_vec = Vec::with_capacity(1 + 8);
-      PaymentId::Encrypted(id).serialize(&mut id_vec).unwrap();
-      extra.push(ExtraField::Nonce(id_vec));
+      if let Some(id) = id {
+        let mut id_vec = Vec::with_capacity(1 + 8);
+        PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
+        extra.push(ExtraField::Nonce(id_vec));
+      }

       // Include data if present
       for part in self.data.drain(..) {
-        extra.push(ExtraField::Nonce(part));
+        let mut arb = vec![ARBITRARY_DATA_MARKER];
+        arb.extend(part);
+        extra.push(ExtraField::Nonce(arb));
       }

-      let mut serialized = Vec::with_capacity(Extra::fee_weight(outputs.len(), self.data.as_ref()));
-      extra.serialize(&mut serialized).unwrap();
+      let mut serialized =
+        Vec::with_capacity(Extra::fee_weight(outputs.len(), id.is_some(), self.data.as_ref()));
+      extra.write(&mut serialized).unwrap();
       serialized
     };
||||||
|
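A rough restatement of the single-R optimization implemented above (the real shared_key derivation also hashes the ECDH result, so this only shows the group arithmetic):

  normal output, sender:     ecdh = r·A = r·(a·G)
  normal output, recipient:  ecdh = a·R = a·(r·G)    — equal, both are r·a·G
  single subaddress payment: the published key becomes R = r·S (S = subaddress spend key), so
  r·A_change no longer matches a_change·R; since the wallet knows its own view key a_change,
  it derives the change ECDH directly as a_change·R — the same value its scanner will compute
  later — and no additional transaction key is needed.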
|
||||||
|
|
|
@@ -4,6 +4,8 @@ use std::{
   collections::HashMap,
 };

+use zeroize::Zeroizing;
+
 use rand_core::{RngCore, CryptoRng, SeedableRng};
 use rand_chacha::ChaCha20Rng;

@@ -29,7 +31,9 @@ use crate::{
   },
   transaction::{Input, Transaction},
   rpc::Rpc,
-  wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness},
+  wallet::{
+    TransactionError, InternalPayment, SignableTransaction, Decoys, key_image_sort, uniqueness,
+  },
 };

 /// FROST signing machine to produce a signed transaction.

@@ -108,8 +112,19 @@ impl SignableTransaction {
       transcript.append_message(b"input_shared_key", input.key_offset().to_bytes());
     }
     for payment in &self.payments {
-      transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
-      transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
+      match payment {
+        InternalPayment::Payment(payment) => {
+          transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
+          transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
+        }
+        InternalPayment::Change(change, amount) => {
+          transcript.append_message(b"change_address", change.address.to_string().as_bytes());
+          if let Some(view) = change.view.as_ref() {
+            transcript.append_message(b"change_view_key", Zeroizing::new(view.to_bytes()));
+          }
+          transcript.append_message(b"change_amount", amount.to_le_bytes());
+        }
+      }
     }

     let mut key_images = vec![];

@@ -123,7 +138,7 @@ impl SignableTransaction {
       let clsag = ClsagMultisig::new(transcript.clone(), input.key(), inputs[i].clone());
       key_images.push((
         clsag.H,
-        keys.current_offset().unwrap_or(dfg::Scalar::zero()).0 + self.inputs[i].key_offset(),
+        keys.current_offset().unwrap_or_else(dfg::Scalar::zero).0 + self.inputs[i].key_offset(),
       ));
       clsags.push(AlgorithmMachine::new(clsag, offset).map_err(TransactionError::FrostError)?);
     }

@@ -248,7 +263,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
     // Find out who's included
     // This may not be a valid set of signers yet the algorithm machine will error if it's not
    commitments.remove(&self.i); // Remove, if it was included for some reason
-    let mut included = commitments.keys().into_iter().cloned().collect::<Vec<_>>();
+    let mut included = commitments.keys().cloned().collect::<Vec<_>>();
     included.push(self.i);
     included.sort_unstable();
||||||
|
|
|
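The data tests below are renamed from the old 255-byte phrasing to the new MAX_ARBITRARY_DATA_SIZE bound, which appears to sit one byte under the extra nonce limit to leave room for the ARBITRARY_DATA_MARKER each chunk is prefixed with. A condensed sketch of what they assert, assuming a scanner, a mined transaction tx, and the original chunk are in scope:

  let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
  // add_data(chunk) round trips, marker stripped, via arbitrary_data()
  assert_eq!(output.arbitrary_data()[0], chunk);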
@@ -1,12 +1,15 @@
|
||||||
use monero_serai::{rpc::Rpc, wallet::TransactionError, transaction::Transaction};
|
use monero_serai::{
|
||||||
|
wallet::{TransactionError, extra::MAX_ARBITRARY_DATA_SIZE},
|
||||||
|
transaction::Transaction,
|
||||||
|
};
|
||||||
|
|
||||||
mod runner;
|
mod runner;
|
||||||
|
|
||||||
test!(
|
test!(
|
||||||
add_single_data_less_than_255,
|
add_single_data_less_than_max,
|
||||||
(
|
(
|
||||||
|_, mut builder: Builder, addr| async move {
|
|_, mut builder: Builder, addr| async move {
|
||||||
let arbitrary_data = vec![b'\0', 254];
|
let arbitrary_data = vec![b'\0'; MAX_ARBITRARY_DATA_SIZE - 1];
|
||||||
|
|
||||||
// make sure we can add to tx
|
// make sure we can add to tx
|
||||||
let result = builder.add_data(arbitrary_data.clone());
|
let result = builder.add_data(arbitrary_data.clone());
|
||||||
|
@ -15,8 +18,7 @@ test!(
|
||||||
builder.add_payment(addr, 5);
|
builder.add_payment(addr, 5);
|
||||||
(builder.build().unwrap(), (arbitrary_data,))
|
(builder.build().unwrap(), (arbitrary_data,))
|
||||||
},
|
},
|
||||||
|rpc: Rpc, signed: Transaction, mut scanner: Scanner, data: (Vec<u8>,)| async move {
|
|_, tx: Transaction, mut scanner: Scanner, data: (Vec<u8>,)| async move {
|
||||||
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
|
|
||||||
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
assert_eq!(output.commitment().amount, 5);
|
assert_eq!(output.commitment().amount, 5);
|
||||||
assert_eq!(output.arbitrary_data()[0], data.0);
|
assert_eq!(output.arbitrary_data()[0], data.0);
|
||||||
|
@ -25,10 +27,10 @@ test!(
|
||||||
);
|
);
|
||||||
|
|
||||||
test!(
|
test!(
|
||||||
add_multiple_data_less_than_255,
|
add_multiple_data_less_than_max,
|
||||||
(
|
(
|
||||||
|_, mut builder: Builder, addr| async move {
|
|_, mut builder: Builder, addr| async move {
|
||||||
let data = vec![b'\0', 254];
|
let data = vec![b'\0'; MAX_ARBITRARY_DATA_SIZE - 1];
|
||||||
|
|
||||||
// Add tx multiple times
|
// Add tx multiple times
|
||||||
for _ in 0 .. 5 {
|
for _ in 0 .. 5 {
|
||||||
|
@ -39,8 +41,7 @@ test!(
|
||||||
builder.add_payment(addr, 5);
|
builder.add_payment(addr, 5);
|
||||||
(builder.build().unwrap(), data)
|
(builder.build().unwrap(), data)
|
||||||
},
|
},
|
||||||
|rpc: Rpc, signed: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
|
|_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
|
||||||
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
|
|
||||||
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
assert_eq!(output.commitment().amount, 5);
|
assert_eq!(output.commitment().amount, 5);
|
||||||
assert_eq!(output.arbitrary_data(), vec![data; 5]);
|
assert_eq!(output.arbitrary_data(), vec![data; 5]);
|
||||||
|
@ -49,24 +50,24 @@ test!(
|
||||||
);
|
);
|
||||||
|
|
||||||
test!(
|
test!(
|
||||||
add_single_data_more_than_255,
|
add_single_data_more_than_max,
|
||||||
(
|
(
|
||||||
|_, mut builder: Builder, addr| async move {
|
|_, mut builder: Builder, addr| async move {
|
||||||
// Make a data that is bigger than 255 bytes
|
// Make a data that is bigger than the maximum
|
||||||
let mut data = vec![b'a'; 256];
|
let mut data = vec![b'a'; MAX_ARBITRARY_DATA_SIZE + 1];
|
||||||
|
|
||||||
// Make sure we get an error if we try to add it to the TX
|
// Make sure we get an error if we try to add it to the TX
|
||||||
assert_eq!(builder.add_data(data.clone()), Err(TransactionError::TooMuchData));
|
assert_eq!(builder.add_data(data.clone()), Err(TransactionError::TooMuchData));
|
||||||
|
|
||||||
// Reduce data size and retry. The data will now be 255 bytes long, exactly
|
// Reduce data size and retry. The data will now be 255 bytes long (including the added
|
||||||
|
// marker), exactly
|
||||||
data.pop();
|
data.pop();
|
||||||
assert!(builder.add_data(data.clone()).is_ok());
|
assert!(builder.add_data(data.clone()).is_ok());
|
||||||
|
|
||||||
builder.add_payment(addr, 5);
|
builder.add_payment(addr, 5);
|
||||||
(builder.build().unwrap(), data)
|
(builder.build().unwrap(), data)
|
||||||
},
|
},
|
||||||
|rpc: Rpc, signed: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
|
|_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
|
||||||
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
|
|
||||||
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
assert_eq!(output.commitment().amount, 5);
|
assert_eq!(output.commitment().amount, 5);
|
||||||
assert_eq!(output.arbitrary_data(), vec![data]);
|
assert_eq!(output.arbitrary_data(), vec![data]);
|
||||||
|
|
|
@@ -1,4 +1,5 @@
|
||||||
use core::ops::Deref;
|
use core::ops::Deref;
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
|
@ -10,10 +11,11 @@ use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
use monero_serai::{
|
use monero_serai::{
|
||||||
Protocol, random_scalar,
|
random_scalar,
|
||||||
wallet::{
|
wallet::{
|
||||||
ViewPair,
|
ViewPair, Scanner,
|
||||||
address::{Network, AddressType, AddressMeta, MoneroAddress},
|
address::{Network, AddressType, AddressSpec, AddressMeta, MoneroAddress},
|
||||||
|
SpendableOutput,
|
||||||
},
|
},
|
||||||
rpc::Rpc,
|
rpc::Rpc,
|
||||||
};
|
};
|
||||||
|
@ -41,7 +43,7 @@ pub async fn mine_until_unlocked(rpc: &Rpc, addr: &str, tx_hash: [u8; 32]) {
|
||||||
let mut height = rpc.get_height().await.unwrap();
|
let mut height = rpc.get_height().await.unwrap();
|
||||||
let mut found = false;
|
let mut found = false;
|
||||||
while !found {
|
while !found {
|
||||||
let block = rpc.get_block(height - 1).await.unwrap();
|
let block = rpc.get_block_by_number(height - 1).await.unwrap();
|
||||||
found = match block.txs.iter().find(|&&x| x == tx_hash) {
|
found = match block.txs.iter().find(|&&x| x == tx_hash) {
|
||||||
Some(_) => true,
|
Some(_) => true,
|
||||||
None => {
|
None => {
|
||||||
|
@ -56,6 +58,22 @@ pub async fn mine_until_unlocked(rpc: &Rpc, addr: &str, tx_hash: [u8; 32]) {
|
||||||
rpc.generate_blocks(addr, 9).await.unwrap();
|
rpc.generate_blocks(addr, 9).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Mines 60 blocks and returns an unlocked miner TX output.
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub async fn get_miner_tx_output(rpc: &Rpc, view: &ViewPair) -> SpendableOutput {
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
|
||||||
|
// Mine 60 blocks to unlock a miner TX
|
||||||
|
let start = rpc.get_height().await.unwrap();
|
||||||
|
rpc
|
||||||
|
.generate_blocks(&view.address(Network::Mainnet, AddressSpec::Standard).to_string(), 60)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let block = rpc.get_block_by_number(start).await.unwrap();
|
||||||
|
scanner.scan(rpc, &block).await.unwrap().swap_remove(0).ignore_timelock().swap_remove(0)
|
||||||
|
}
|
||||||
|
|
||||||
pub async fn rpc() -> Rpc {
|
pub async fn rpc() -> Rpc {
|
||||||
let rpc = Rpc::new("http://127.0.0.1:18081".to_string()).unwrap();
|
let rpc = Rpc::new("http://127.0.0.1:18081".to_string()).unwrap();
|
||||||
|
|
||||||
|
@ -73,7 +91,9 @@ pub async fn rpc() -> Rpc {
|
||||||
|
|
||||||
// Mine 40 blocks to ensure decoy availability
|
// Mine 40 blocks to ensure decoy availability
|
||||||
rpc.generate_blocks(&addr, 40).await.unwrap();
|
rpc.generate_blocks(&addr, 40).await.unwrap();
|
||||||
assert!(!matches!(rpc.get_protocol().await.unwrap(), Protocol::Unsupported(_)));
|
|
||||||
|
// Make sure we recognize the protocol
|
||||||
|
rpc.get_protocol().await.unwrap();
|
||||||
|
|
||||||
rpc
|
rpc
|
||||||
}
|
}
|
||||||
|
@@ -138,12 +158,12 @@ macro_rules! test {
|
||||||
use monero_serai::{
|
use monero_serai::{
|
||||||
random_scalar,
|
random_scalar,
|
||||||
wallet::{
|
wallet::{
|
||||||
address::Network, ViewPair, Scanner, SignableTransaction,
|
address::{Network, AddressSpec}, ViewPair, Scanner, Change, SignableTransaction,
|
||||||
SignableTransactionBuilder,
|
SignableTransactionBuilder,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
use runner::{random_address, rpc, mine_until_unlocked};
|
use runner::{random_address, rpc, mine_until_unlocked, get_miner_tx_output};
|
||||||
|
|
||||||
type Builder = SignableTransactionBuilder;
|
type Builder = SignableTransactionBuilder;
|
||||||
|
|
||||||
|
@ -169,33 +189,23 @@ macro_rules! test {
|
||||||
keys[&Participant::new(1).unwrap()].group_key().0
|
keys[&Participant::new(1).unwrap()].group_key().0
|
||||||
};
|
};
|
||||||
|
|
||||||
let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng)));
|
|
||||||
|
|
||||||
let rpc = rpc().await;
|
let rpc = rpc().await;
|
||||||
|
|
||||||
let (addr, miner_tx) = {
|
let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng)));
|
||||||
let mut scanner =
|
let addr = view.address(Network::Mainnet, AddressSpec::Standard);
|
||||||
Scanner::from_view(view.clone(), Network::Mainnet, Some(HashSet::new()));
|
|
||||||
let addr = scanner.address();
|
|
||||||
|
|
||||||
// mine 60 blocks to unlock a miner tx
|
let miner_tx = get_miner_tx_output(&rpc, &view).await;
|
||||||
let start = rpc.get_height().await.unwrap();
|
|
||||||
rpc.generate_blocks(&addr.to_string(), 60).await.unwrap();
|
|
||||||
|
|
||||||
let block = rpc.get_block(start).await.unwrap();
|
|
||||||
(
|
|
||||||
addr,
|
|
||||||
scanner.scan(
|
|
||||||
&rpc,
|
|
||||||
&block
|
|
||||||
).await.unwrap().swap_remove(0).ignore_timelock().swap_remove(0)
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
let builder = SignableTransactionBuilder::new(
|
let builder = SignableTransactionBuilder::new(
|
||||||
rpc.get_protocol().await.unwrap(),
|
rpc.get_protocol().await.unwrap(),
|
||||||
rpc.get_fee().await.unwrap(),
|
rpc.get_fee().await.unwrap(),
|
||||||
Some(random_address().2),
|
Some(Change::new(
|
||||||
|
&ViewPair::new(
|
||||||
|
&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
|
||||||
|
Zeroizing::new(random_scalar(&mut OsRng))
|
||||||
|
),
|
||||||
|
false
|
||||||
|
)),
|
||||||
);
|
);
|
||||||
|
|
||||||
let sign = |tx: SignableTransaction| {
|
let sign = |tx: SignableTransaction| {
|
||||||
|
@ -247,7 +257,7 @@ macro_rules! test {
|
||||||
mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
|
mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
|
||||||
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
|
let tx = rpc.get_transaction(signed.hash()).await.unwrap();
|
||||||
let scanner =
|
let scanner =
|
||||||
Scanner::from_view(view.clone(), Network::Mainnet, Some(HashSet::new()));
|
Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
($first_checks)(rpc.clone(), tx, scanner, state).await
|
($first_checks)(rpc.clone(), tx, scanner, state).await
|
||||||
});
|
});
|
||||||
#[allow(unused_variables, unused_mut, unused_assignments)]
|
#[allow(unused_variables, unused_mut, unused_assignments)]
|
||||||
|
@ -268,7 +278,7 @@ macro_rules! test {
|
||||||
#[allow(unused_assignments)]
|
#[allow(unused_assignments)]
|
||||||
{
|
{
|
||||||
let scanner =
|
let scanner =
|
||||||
Scanner::from_view(view.clone(), Network::Mainnet, Some(HashSet::new()));
|
Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
carried_state =
|
carried_state =
|
||||||
Box::new(($checks)(rpc.clone(), tx, scanner, state).await);
|
Box::new(($checks)(rpc.clone(), tx, scanner, state).await);
|
||||||
}
|
}
|
||||||
|
|
300
coins/monero/tests/scan.rs
Normal file
|
@@ -0,0 +1,300 @@
|
||||||
|
use rand::RngCore;
|
||||||
|
|
||||||
|
use monero_serai::{transaction::Transaction, wallet::address::SubaddressIndex};
|
||||||
|
|
||||||
|
mod runner;
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_standard_address,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Standard), 5);
|
||||||
|
(builder.build().unwrap(), scanner)
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: Scanner| async move {
|
||||||
|
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_subaddress,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let subaddress = SubaddressIndex::new(0, 1).unwrap();
|
||||||
|
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
scanner.register_subaddress(subaddress);
|
||||||
|
|
||||||
|
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Subaddress(subaddress)), 5);
|
||||||
|
(builder.build().unwrap(), (scanner, subaddress))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.subaddress, Some(state.1));
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_integrated_address,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
|
||||||
|
let mut payment_id = [0u8; 8];
|
||||||
|
OsRng.fill_bytes(&mut payment_id);
|
||||||
|
|
||||||
|
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Integrated(payment_id)), 5);
|
||||||
|
(builder.build().unwrap(), (scanner, payment_id))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.payment_id, state.1);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_featured_standard,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: false },
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), scanner)
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: Scanner| async move {
|
||||||
|
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_featured_subaddress,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let subaddress = SubaddressIndex::new(0, 2).unwrap();
|
||||||
|
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
scanner.register_subaddress(subaddress);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: Some(subaddress),
|
||||||
|
payment_id: None,
|
||||||
|
guaranteed: false,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, subaddress))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.subaddress, Some(state.1));
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_featured_integrated,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
let mut payment_id = [0u8; 8];
|
||||||
|
OsRng.fill_bytes(&mut payment_id);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: None,
|
||||||
|
payment_id: Some(payment_id),
|
||||||
|
guaranteed: false,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, payment_id))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.payment_id, state.1);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_featured_integrated_subaddress,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let subaddress = SubaddressIndex::new(0, 3).unwrap();
|
||||||
|
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
||||||
|
scanner.register_subaddress(subaddress);
|
||||||
|
|
||||||
|
let mut payment_id = [0u8; 8];
|
||||||
|
OsRng.fill_bytes(&mut payment_id);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: Some(subaddress),
|
||||||
|
payment_id: Some(payment_id),
|
||||||
|
guaranteed: false,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, payment_id, subaddress))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.payment_id, state.1);
|
||||||
|
assert_eq!(output.metadata.subaddress, Some(state.2));
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_guaranteed_standard,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), None);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true },
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), scanner)
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: Scanner| async move {
|
||||||
|
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_guaranteed_subaddress,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let subaddress = SubaddressIndex::new(1, 0).unwrap();
|
||||||
|
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), None);
|
||||||
|
scanner.register_subaddress(subaddress);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: Some(subaddress),
|
||||||
|
payment_id: None,
|
||||||
|
guaranteed: true,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, subaddress))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.subaddress, Some(state.1));
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_guaranteed_integrated,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let scanner = Scanner::from_view(view.clone(), None);
|
||||||
|
let mut payment_id = [0u8; 8];
|
||||||
|
OsRng.fill_bytes(&mut payment_id);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: None,
|
||||||
|
payment_id: Some(payment_id),
|
||||||
|
guaranteed: true,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, payment_id))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.payment_id, state.1);
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
scan_guaranteed_integrated_subaddress,
|
||||||
|
(
|
||||||
|
|_, mut builder: Builder, _| async move {
|
||||||
|
let subaddress = SubaddressIndex::new(1, 1).unwrap();
|
||||||
|
|
||||||
|
let view = runner::random_address().1;
|
||||||
|
let mut scanner = Scanner::from_view(view.clone(), None);
|
||||||
|
scanner.register_subaddress(subaddress);
|
||||||
|
|
||||||
|
let mut payment_id = [0u8; 8];
|
||||||
|
OsRng.fill_bytes(&mut payment_id);
|
||||||
|
|
||||||
|
builder.add_payment(
|
||||||
|
view.address(
|
||||||
|
Network::Mainnet,
|
||||||
|
AddressSpec::Featured {
|
||||||
|
subaddress: Some(subaddress),
|
||||||
|
payment_id: Some(payment_id),
|
||||||
|
guaranteed: true,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
5,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (scanner, payment_id, subaddress))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
|
||||||
|
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
||||||
|
assert_eq!(output.commitment().amount, 5);
|
||||||
|
assert_eq!(output.metadata.payment_id, state.1);
|
||||||
|
assert_eq!(output.metadata.subaddress, Some(state.2));
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
|
@@ -1,6 +1,7 @@
|
||||||
use monero_serai::{
|
use monero_serai::{
|
||||||
wallet::{ReceivedOutput, SpendableOutput},
|
wallet::{extra::Extra, address::SubaddressIndex, ReceivedOutput, SpendableOutput},
|
||||||
transaction::Transaction,
|
transaction::Transaction,
|
||||||
|
rpc::Rpc,
|
||||||
};
|
};
|
||||||
|
|
||||||
mod runner;
|
mod runner;
|
||||||
|
@@ -49,3 +50,69 @@
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
test!(
|
||||||
|
// Ideally, this would be single_R, yet it isn't feasible to apply allow(non_snake_case) here
|
||||||
|
single_r_subaddress_send,
|
||||||
|
(
|
||||||
|
// Consume this builder for an output we can use in the future
|
||||||
|
// This is needed because we can't get the input from the passed in builder
|
||||||
|
|_, mut builder: Builder, addr| async move {
|
||||||
|
builder.add_payment(addr, 1000000000000);
|
||||||
|
(builder.build().unwrap(), ())
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, mut scanner: Scanner, _| async move {
|
||||||
|
let mut outputs = scanner.scan_transaction(&tx).not_locked();
|
||||||
|
outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
|
||||||
|
assert_eq!(outputs[0].commitment().amount, 1000000000000);
|
||||||
|
outputs
|
||||||
|
},
|
||||||
|
),
|
||||||
|
(
|
||||||
|
|rpc: Rpc, _, _, mut outputs: Vec<ReceivedOutput>| async move {
|
||||||
|
let change_view = ViewPair::new(
|
||||||
|
&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
|
||||||
|
Zeroizing::new(random_scalar(&mut OsRng)),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut builder = SignableTransactionBuilder::new(
|
||||||
|
rpc.get_protocol().await.unwrap(),
|
||||||
|
rpc.get_fee().await.unwrap(),
|
||||||
|
Some(Change::new(&change_view, false)),
|
||||||
|
);
|
||||||
|
builder.add_input(SpendableOutput::from(&rpc, outputs.swap_remove(0)).await.unwrap());
|
||||||
|
|
||||||
|
// Send to a subaddress
|
||||||
|
let sub_view = ViewPair::new(
|
||||||
|
&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
|
||||||
|
Zeroizing::new(random_scalar(&mut OsRng)),
|
||||||
|
);
|
||||||
|
builder.add_payment(
|
||||||
|
sub_view
|
||||||
|
.address(Network::Mainnet, AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())),
|
||||||
|
1,
|
||||||
|
);
|
||||||
|
(builder.build().unwrap(), (change_view, sub_view))
|
||||||
|
},
|
||||||
|
|_, tx: Transaction, _, views: (ViewPair, ViewPair)| async move {
|
||||||
|
// Make sure the change can pick up its output
|
||||||
|
let mut change_scanner = Scanner::from_view(views.0, Some(HashSet::new()));
|
||||||
|
assert!(change_scanner.scan_transaction(&tx).not_locked().len() == 1);
|
||||||
|
|
||||||
|
// Make sure the subaddress can pick up its output
|
||||||
|
let mut sub_scanner = Scanner::from_view(views.1, Some(HashSet::new()));
|
||||||
|
sub_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap());
|
||||||
|
let sub_outputs = sub_scanner.scan_transaction(&tx).not_locked();
|
||||||
|
assert!(sub_outputs.len() == 1);
|
||||||
|
assert_eq!(sub_outputs[0].commitment().amount, 1);
|
||||||
|
|
||||||
|
// Make sure only one R was included in TX extra
|
||||||
|
assert!(Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref())
|
||||||
|
.unwrap()
|
||||||
|
.keys()
|
||||||
|
.unwrap()
|
||||||
|
.1
|
||||||
|
.is_none());
|
||||||
|
},
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
245
coins/monero/tests/wallet2_compatibility.rs
Normal file
|
@@ -0,0 +1,245 @@
|
||||||
|
use std::{
|
||||||
|
collections::{HashSet, HashMap},
|
||||||
|
str::FromStr,
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand_core::{OsRng, RngCore};
|
||||||
|
|
||||||
|
use serde::Deserialize;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
use monero_rpc::{
|
||||||
|
monero::{
|
||||||
|
Amount, Address,
|
||||||
|
cryptonote::{hash::Hash, subaddress::Index},
|
||||||
|
util::address::PaymentId,
|
||||||
|
},
|
||||||
|
TransferOptions, WalletClient,
|
||||||
|
};
|
||||||
|
|
||||||
|
use monero_serai::{
|
||||||
|
transaction::Transaction,
|
||||||
|
wallet::{
|
||||||
|
address::{Network, AddressSpec, SubaddressIndex, MoneroAddress},
|
||||||
|
extra::{MAX_TX_EXTRA_NONCE_SIZE, Extra},
|
||||||
|
Scanner,
|
||||||
|
},
|
||||||
|
rpc::Rpc,
|
||||||
|
};
|
||||||
|
|
||||||
|
mod runner;
|
||||||
|
|
||||||
|
async fn make_integrated_address(payment_id: [u8; 8]) -> String {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct IntegratedAddressResponse {
|
||||||
|
integrated_address: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
let rpc = Rpc::new("http://127.0.0.1:6061".to_string()).unwrap();
|
||||||
|
let res = rpc
|
||||||
|
.json_rpc_call::<IntegratedAddressResponse>(
|
||||||
|
"make_integrated_address",
|
||||||
|
Some(json!({ "payment_id": hex::encode(payment_id) })),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
res.integrated_address
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn initialize_rpcs() -> (WalletClient, Rpc, monero_rpc::monero::Address) {
|
||||||
|
let wallet_rpc =
|
||||||
|
monero_rpc::RpcClientBuilder::new().build("http://127.0.0.1:6061").unwrap().wallet();
|
||||||
|
let daemon_rpc = runner::rpc().await;
|
||||||
|
|
||||||
|
  let address_resp = wallet_rpc.get_address(0, None).await;
  let wallet_rpc_addr = if address_resp.is_ok() {
    address_resp.unwrap().address
  } else {
    wallet_rpc.create_wallet("wallet".to_string(), None, "English".to_string()).await.unwrap();
    let addr = wallet_rpc.get_address(0, None).await.unwrap().address;
    daemon_rpc.generate_blocks(&addr.to_string(), 70).await.unwrap();
    addr
  };

  (wallet_rpc, daemon_rpc, wallet_rpc_addr)
}

async fn from_wallet_rpc_to_self(spec: AddressSpec) {
  // initialize rpc
  let (wallet_rpc, daemon_rpc, wallet_rpc_addr) = initialize_rpcs().await;

  // make an addr
  let (_, view_pair, _) = runner::random_address();
  let addr = Address::from_str(&view_pair.address(Network::Mainnet, spec).to_string()[..]).unwrap();

  // refresh & make a tx
  wallet_rpc.refresh(None).await.unwrap();
  let tx = wallet_rpc
    .transfer(
      HashMap::from([(addr, Amount::ONE_XMR)]),
      monero_rpc::TransferPriority::Default,
      TransferOptions::default(),
    )
    .await
    .unwrap();
  let tx_hash: [u8; 32] = tx.tx_hash.0.try_into().unwrap();

  // unlock it
  runner::mine_until_unlocked(&daemon_rpc, &wallet_rpc_addr.to_string(), tx_hash).await;

  // create the scanner
  let mut scanner = Scanner::from_view(view_pair, Some(HashSet::new()));
  if let AddressSpec::Subaddress(index) = spec {
    scanner.register_subaddress(index);
  }

  // retrieve it and confirm
  let tx = daemon_rpc.get_transaction(tx_hash).await.unwrap();
  let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);

  match spec {
    AddressSpec::Subaddress(index) => assert_eq!(output.metadata.subaddress, Some(index)),
    AddressSpec::Integrated(payment_id) => {
      assert_eq!(output.metadata.payment_id, payment_id);
      assert_eq!(output.metadata.subaddress, None);
    }
    _ => assert_eq!(output.metadata.subaddress, None),
  }
  assert_eq!(output.commitment().amount, 1000000000000);
}

async_sequential!(
  async fn receipt_of_wallet_rpc_tx_standard() {
    from_wallet_rpc_to_self(AddressSpec::Standard).await;
  }

  async fn receipt_of_wallet_rpc_tx_subaddress() {
    from_wallet_rpc_to_self(AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())).await;
  }

  async fn receipt_of_wallet_rpc_tx_integrated() {
    let mut payment_id = [0u8; 8];
    OsRng.fill_bytes(&mut payment_id);
    from_wallet_rpc_to_self(AddressSpec::Integrated(payment_id)).await;
  }
);

test!(
  send_to_wallet_rpc_standard,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;

      // add destination
      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &wallet_rpc_addr.to_string()).unwrap(),
        1000000,
      );
      (builder.build().unwrap(), (wallet_rpc,))
    },
    |_, tx: Transaction, _, data: (WalletClient,)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
    },
  ),
);

test!(
  send_to_wallet_rpc_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, _) = initialize_rpcs().await;

      // make the addr
      let (subaddress, index) = wallet_rpc.create_address(0, None).await.unwrap();

      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &subaddress.to_string()).unwrap(),
        1000000,
      );
      (builder.build().unwrap(), (wallet_rpc, index))
    },
    |_, tx: Transaction, _, data: (WalletClient, u32)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: data.1 });

      // Make sure only one R was included in TX extra
      assert!(Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref())
        .unwrap()
        .keys()
        .unwrap()
        .1
        .is_none());
    },
  ),
);

test!(
  send_to_wallet_rpc_integrated,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, _) = initialize_rpcs().await;

      // make the addr
      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);
      let addr = make_integrated_address(payment_id).await;

      builder.add_payment(MoneroAddress::from_str(Network::Mainnet, &addr).unwrap(), 1000000);
      (builder.build().unwrap(), (wallet_rpc, payment_id))
    },
    |_, tx: Transaction, _, data: (WalletClient, [u8; 8])| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
      assert_eq!(transfer.payment_id.0, PaymentId::from_slice(&data.1));
    },
  ),
);

test!(
  send_to_wallet_rpc_with_arb_data,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;

      // add destination
      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &wallet_rpc_addr.to_string()).unwrap(),
        1000000,
      );

      // Make 2 data that is the full 255 bytes
      for _ in 0 .. 2 {
        // Subtract 1 since we prefix data with 127
        let data = vec![b'a'; MAX_TX_EXTRA_NONCE_SIZE - 1];
        assert!(builder.add_data(data).is_ok());
      }

      (builder.build().unwrap(), (wallet_rpc,))
    },
    |_, tx: Transaction, _, data: (WalletClient,)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
    },
  ),
);
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -1,24 +0,0 @@
[package]
name = "serai-extension"
version = "0.1.0"
description = "An ink! extension for exposing Serai to ink"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/contracts/extension"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
publish = false

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }

ink_env = { version = "3", default-features = false }
ink_lang = { version = "3", default-features = false }

[features]
default = ["std"]
std = ["ink_env/std"]
ink-as-dependency = []
|
@ -1,122 +0,0 @@
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
|
|
||||||
use ink_lang as ink;
|
|
||||||
use ink_env::{Environment, DefaultEnvironment, AccountId};
|
|
||||||
|
|
||||||
pub type Curve = u16;
|
|
||||||
pub type Coin = u32;
|
|
||||||
pub type GlobalValidatorSetId = u32;
|
|
||||||
pub type ValidatorSetIndex = u8;
|
|
||||||
pub type Key = Vec<u8>;
|
|
||||||
|
|
||||||
#[ink::chain_extension]
|
|
||||||
pub trait SeraiExtension {
|
|
||||||
type ErrorCode = ();
|
|
||||||
|
|
||||||
/// Returns the ID for the current global validator set.
|
|
||||||
#[ink(extension = 0, handle_status = false, returns_result = false)]
|
|
||||||
fn global_validator_set_id() -> GlobalValidatorSetId;
|
|
||||||
|
|
||||||
/// Returns the amount of active validator sets within the global validator set.
|
|
||||||
#[ink(extension = 1, handle_status = false, returns_result = false)]
|
|
||||||
fn validator_sets() -> u8;
|
|
||||||
|
|
||||||
/// Returns the amount of key shares used within the specified validator set.
|
|
||||||
#[ink(extension = 2, handle_status = false, returns_result = false)]
|
|
||||||
fn validator_set_shares(set: ValidatorSetIndex) -> u16;
|
|
||||||
|
|
||||||
/// Returns the validator set the specified account is in, along with their amount of shares in
|
|
||||||
/// that validator set, if they are in a current validator
|
|
||||||
#[ink(extension = 3, handle_status = false, returns_result = false)]
|
|
||||||
fn active_validator(account: &AccountId) -> Option<(ValidatorSetIndex, u16)>;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct SeraiEnvironment;
|
|
||||||
impl Environment for SeraiEnvironment {
|
|
||||||
const MAX_EVENT_TOPICS: usize = <DefaultEnvironment as Environment>::MAX_EVENT_TOPICS;
|
|
||||||
|
|
||||||
type AccountId = <DefaultEnvironment as Environment>::AccountId;
|
|
||||||
type Balance = <DefaultEnvironment as Environment>::Balance;
|
|
||||||
type Hash = <DefaultEnvironment as Environment>::Hash;
|
|
||||||
type BlockNumber = <DefaultEnvironment as Environment>::BlockNumber;
|
|
||||||
type Timestamp = <DefaultEnvironment as Environment>::Timestamp;
|
|
||||||
|
|
||||||
type ChainExtension = SeraiExtension;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn test_validators() -> Vec<AccountId> {
|
|
||||||
vec![
|
|
||||||
AccountId::from([1; 32]),
|
|
||||||
AccountId::from([2; 32]),
|
|
||||||
AccountId::from([3; 32]),
|
|
||||||
AccountId::from([4; 32]),
|
|
||||||
AccountId::from([5; 32]),
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn test_register() {
|
|
||||||
struct ExtensionId;
|
|
||||||
impl ink_env::test::ChainExtension for ExtensionId {
|
|
||||||
fn func_id(&self) -> u32 {
|
|
||||||
0
|
|
||||||
}
|
|
||||||
|
|
||||||
fn call(&mut self, _: &[u8], output: &mut Vec<u8>) -> u32 {
|
|
||||||
// Non-0 global validator set ID
|
|
||||||
scale::Encode::encode_to(&1u32, output);
|
|
||||||
0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ink_env::test::register_chain_extension(ExtensionId);
|
|
||||||
|
|
||||||
struct ExtensionSets;
|
|
||||||
impl ink_env::test::ChainExtension for ExtensionSets {
|
|
||||||
fn func_id(&self) -> u32 {
|
|
||||||
1
|
|
||||||
}
|
|
||||||
|
|
||||||
fn call(&mut self, _: &[u8], output: &mut Vec<u8>) -> u32 {
|
|
||||||
// 1 validator set
|
|
||||||
scale::Encode::encode_to(&1u8, output);
|
|
||||||
0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ink_env::test::register_chain_extension(ExtensionSets);
|
|
||||||
|
|
||||||
struct ExtensionShares;
|
|
||||||
impl ink_env::test::ChainExtension for ExtensionShares {
|
|
||||||
fn func_id(&self) -> u32 {
|
|
||||||
2
|
|
||||||
}
|
|
||||||
|
|
||||||
fn call(&mut self, _: &[u8], output: &mut Vec<u8>) -> u32 {
|
|
||||||
// 1 key share per validator
|
|
||||||
scale::Encode::encode_to(&u16::try_from(test_validators().len()).unwrap(), output);
|
|
||||||
0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ink_env::test::register_chain_extension(ExtensionShares);
|
|
||||||
|
|
||||||
struct ExtensionActive;
|
|
||||||
impl ink_env::test::ChainExtension for ExtensionActive {
|
|
||||||
fn func_id(&self) -> u32 {
|
|
||||||
3
|
|
||||||
}
|
|
||||||
|
|
||||||
fn call(&mut self, input: &[u8], output: &mut Vec<u8>) -> u32 {
|
|
||||||
use scale::Decode;
|
|
||||||
let potential = AccountId::decode(&mut &input[1 ..]).unwrap(); // TODO: Why is this [1 ..]?
|
|
||||||
|
|
||||||
let mut presence = false;
|
|
||||||
for validator in test_validators() {
|
|
||||||
if potential == validator {
|
|
||||||
presence = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Validator set 0, 1 key share
|
|
||||||
scale::Encode::encode_to(&Some((0u8, 1u16)).filter(|_| presence), output);
|
|
||||||
0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ink_env::test::register_chain_extension(ExtensionActive);
|
|
||||||
}
|
|
|
@ -1,48 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "serai-multisig"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = "An ink! tracker for Serai's current multisig"
|
|
||||||
license = "AGPL-3.0-only"
|
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/contracts/multisig"
|
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
|
||||||
edition = "2021"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
|
||||||
all-features = true
|
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
name = "serai_multisig"
|
|
||||||
path = "lib.rs"
|
|
||||||
crate-type = ["cdylib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] }
|
|
||||||
scale-info = { version = "2", default-features = false, features = ["derive"], optional = true }
|
|
||||||
|
|
||||||
ink_primitives = { version = "3", default-features = false }
|
|
||||||
ink_metadata = { version = "3", default-features = false, features = ["derive"], optional = true }
|
|
||||||
ink_env = { version = "3", default-features = false }
|
|
||||||
ink_storage = { version = "3", default-features = false }
|
|
||||||
ink_lang = { version = "3", default-features = false }
|
|
||||||
|
|
||||||
serai-extension = { path = "../extension", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
lazy_static = "1"
|
|
||||||
|
|
||||||
[features]
|
|
||||||
default = ["std"]
|
|
||||||
std = [
|
|
||||||
"scale/std",
|
|
||||||
"scale-info/std",
|
|
||||||
|
|
||||||
"ink_primitives/std",
|
|
||||||
"ink_metadata/std",
|
|
||||||
"ink_env/std",
|
|
||||||
"ink_storage/std",
|
|
||||||
|
|
||||||
"serai-extension/std",
|
|
||||||
]
|
|
||||||
ink-as-dependency = []
|
|
|
@ -1,356 +0,0 @@
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
|
|
||||||
use ink_lang as ink;
|
|
||||||
|
|
||||||
use serai_extension::{Curve, GlobalValidatorSetId, ValidatorSetIndex, Key};
|
|
||||||
|
|
||||||
type KeysHash = [u8; 32];
|
|
||||||
|
|
||||||
#[allow(clippy::all)]
|
|
||||||
#[ink::contract(env = serai_extension::SeraiEnvironment)]
|
|
||||||
mod multisig {
|
|
||||||
use scale::Encode;
|
|
||||||
|
|
||||||
use ink_storage::{traits::SpreadAllocate, Mapping};
|
|
||||||
use ink_env::{hash::Blake2x256, hash_encoded};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
/// A contract which tracks the current multisig keys.
|
|
||||||
/// Mapping of each validator set to their multisigs.
|
|
||||||
#[ink(storage)]
|
|
||||||
#[derive(SpreadAllocate)]
|
|
||||||
pub struct Multisig {
|
|
||||||
/// Global validator set ID under which this multisig was updated.
|
|
||||||
/// Used to track if the multisig has been updated to the latest instantiation of a validator
|
|
||||||
/// set or not.
|
|
||||||
/// May be behind, and still healthy, if a validator set didn't change despite the global
|
|
||||||
/// validator set doing so.
|
|
||||||
updated_at: Mapping<ValidatorSetIndex, GlobalValidatorSetId>,
|
|
||||||
/// Mapping from a curve's index to the multisig's current public key for it, if it has one.
|
|
||||||
// This is a mapping due to ink's eager loading. Considering we're right now only considering
|
|
||||||
// Secp256k1 and Ed25519, it may be notably more efficient to use a Vec here.
|
|
||||||
// In practice, we're likely discussing up to 7 curves in total, so it may always be better to
|
|
||||||
// simply use a Vec here, especially since it'd be Vec<Option<Key>>.
|
|
||||||
keys: Mapping<(ValidatorSetIndex, Curve), Key>,
|
|
||||||
/// Validator + Keys -> Voted already or not.
|
|
||||||
/// Prevents voting multiple times on the same set of keys.
|
|
||||||
voted: Mapping<(AccountId, KeysHash), ()>,
|
|
||||||
/// Global Validator Set ID + Validator + Keys -> Vote Count.
|
|
||||||
/// Including the GVSID locks it to a specific time period, preventing a validator from joining
|
|
||||||
/// a set, voting on old keys, and then moving their bond to a new account to vote again.
|
|
||||||
votes: Mapping<(GlobalValidatorSetId, ValidatorSetIndex, KeysHash), u16>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Event emitted when a new set of multisig keys is voted on.
|
|
||||||
#[ink(event)]
|
|
||||||
pub struct Vote {
|
|
||||||
/// Validator who issued the vote.
|
|
||||||
#[ink(topic)]
|
|
||||||
validator: AccountId,
|
|
||||||
/// Global validator set ID under which keys are being generated.
|
|
||||||
#[ink(topic)]
|
|
||||||
global_validator_set: GlobalValidatorSetId,
|
|
||||||
/// Validator set for which keys are being generated.
|
|
||||||
#[ink(topic)]
|
|
||||||
validator_set: ValidatorSetIndex,
|
|
||||||
/// Hash of the keys voted on.
|
|
||||||
#[ink(topic)]
|
|
||||||
hash: KeysHash,
|
|
||||||
/// Keys voted on. Only present in the first event for a given set of keys.
|
|
||||||
keys: Option<Vec<Option<Key>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Event emitted when the new keys are fully generated for a validator set, having been fully
|
|
||||||
/// voted on.
|
|
||||||
#[ink(event)]
|
|
||||||
pub struct KeyGen {
|
|
||||||
#[ink(topic)]
|
|
||||||
global_validator_set: GlobalValidatorSetId,
|
|
||||||
#[ink(topic)]
|
|
||||||
validator_set: ValidatorSetIndex,
|
|
||||||
#[ink(topic)]
|
|
||||||
hash: KeysHash,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The Multisig error types.
|
|
||||||
#[derive(Debug, PartialEq, Eq, scale::Encode, scale::Decode)]
|
|
||||||
#[cfg_attr(feature = "std", derive(scale_info::TypeInfo))]
|
|
||||||
pub enum Error {
|
|
||||||
/// Returned if a validator set hasn't had keys registered for it yet.
|
|
||||||
NonExistentValidatorSet,
|
|
||||||
/// Returned if a validator set and curve index doesn't have a key registered for it.
|
|
||||||
NonExistentKey,
|
|
||||||
/// Returned if a curve index doesn't exist.
|
|
||||||
NonExistentCurve,
|
|
||||||
/// Returned if a non-validator is voting.
|
|
||||||
NotValidator,
|
|
||||||
/// Returned if this validator set already generated keys.
|
|
||||||
AlreadyGeneratedKeys,
|
|
||||||
/// Returned if this validator has already voted for these keys.
|
|
||||||
AlreadyVoted,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The Multisig result type.
|
|
||||||
pub type Result<T> = core::result::Result<T, Error>;
|
|
||||||
|
|
||||||
impl Multisig {
|
|
||||||
/// Deploys the Multisig contract.
|
|
||||||
#[ink(constructor)]
|
|
||||||
pub fn new() -> Self {
|
|
||||||
ink_lang::utils::initialize_contract(|_| {})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Global validator set ID under which a validator set updated their multisig.
|
|
||||||
#[ink(message)]
|
|
||||||
pub fn updated_at(&self, validator_set: ValidatorSetIndex) -> Result<GlobalValidatorSetId> {
|
|
||||||
self.updated_at.get(validator_set).ok_or(Error::NonExistentValidatorSet)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the key currently in-use for a given validator set and curve.
|
|
||||||
/// This is then bound to a given chain by applying a network-specific additive offset, as done
|
|
||||||
/// by the processor. Each chain then has its own way of receiving funds to these keys, leaving
|
|
||||||
/// this not for usage by wallets, nor the processor which is expected to track events for this
|
|
||||||
/// information. This is really solely for debugging purposes.
|
|
||||||
#[ink(message)]
|
|
||||||
pub fn key(&self, validator_set: ValidatorSetIndex, curve: Curve) -> Result<Key> {
|
|
||||||
self.keys.get((validator_set, curve)).ok_or(Error::NonExistentKey)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: voted
|
|
||||||
// TODO: votes
|
|
||||||
|
|
||||||
fn hash<T: Encode>(value: &T) -> KeysHash {
|
|
||||||
let mut output = KeysHash::default();
|
|
||||||
hash_encoded::<Blake2x256, _>(value, &mut output);
|
|
||||||
output
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Vote for a given set of keys.
|
|
||||||
#[ink(message)]
|
|
||||||
pub fn vote(&mut self, keys: Vec<Option<Key>>) -> Result<()> {
|
|
||||||
if keys.len() > 256 {
|
|
||||||
Err(Error::NonExistentCurve)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure they're a valid validator.
|
|
||||||
let validator = self.env().caller();
|
|
||||||
let active_validator = self.env().extension().active_validator(&validator);
|
|
||||||
if active_validator.is_none() {
|
|
||||||
Err(Error::NotValidator)?;
|
|
||||||
}
|
|
||||||
let (validator_set, shares) = active_validator.unwrap();
|
|
||||||
|
|
||||||
// Prevent a validator set from generating keys multiple times. Only the first-voted-in keys
|
|
||||||
// should be acknowledged.
|
|
||||||
let global_validator_set = self.env().extension().global_validator_set_id();
|
|
||||||
if self.updated_at.get(validator_set) == Some(global_validator_set) {
|
|
||||||
Err(Error::AlreadyGeneratedKeys)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prevent a validator from voting on keys multiple times.
|
|
||||||
let keys_hash = Self::hash(&keys);
|
|
||||||
if self.voted.get((validator, keys_hash)).is_some() {
|
|
||||||
Err(Error::AlreadyVoted)?;
|
|
||||||
}
|
|
||||||
self.voted.insert((validator, keys_hash), &());
|
|
||||||
|
|
||||||
let votes =
|
|
||||||
if let Some(votes) = self.votes.get((global_validator_set, validator_set, keys_hash)) {
|
|
||||||
self.env().emit_event(Vote {
|
|
||||||
validator,
|
|
||||||
global_validator_set,
|
|
||||||
validator_set,
|
|
||||||
hash: keys_hash,
|
|
||||||
keys: None,
|
|
||||||
});
|
|
||||||
votes + shares
|
|
||||||
} else {
|
|
||||||
self.env().emit_event(Vote {
|
|
||||||
validator,
|
|
||||||
global_validator_set,
|
|
||||||
validator_set,
|
|
||||||
hash: keys_hash,
|
|
||||||
keys: Some(keys.clone()),
|
|
||||||
});
|
|
||||||
shares
|
|
||||||
};
|
|
||||||
// We could skip writing this if we've reached consensus, yet best to keep our ducks in a row
|
|
||||||
self.votes.insert((global_validator_set, validator_set, keys_hash), &votes);
|
|
||||||
|
|
||||||
// If we've reached consensus, action this.
|
|
||||||
if votes == self.env().extension().validator_set_shares(validator_set) {
|
|
||||||
self.updated_at.insert(validator_set, &global_validator_set);
|
|
||||||
for (k, key) in keys.iter().enumerate() {
|
|
||||||
if let Some(key) = key {
|
|
||||||
self.keys.insert((validator_set, Curve::try_from(k).unwrap()), key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.env().emit_event(KeyGen { global_validator_set, validator_set, hash: keys_hash });
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
|
|
||||||
use ink_env::{
|
|
||||||
hash::{CryptoHash, Blake2x256},
|
|
||||||
AccountId,
|
|
||||||
topics::PrefixedValue,
|
|
||||||
};
|
|
||||||
use ink_lang as ink;
|
|
||||||
|
|
||||||
use serai_extension::{test_validators, test_register};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
type Event = <Multisig as ::ink_lang::reflect::ContractEventBase>::Type;
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref EXPECTED_GLOBAL_VALIDATOR_SET: GlobalValidatorSetId = 1;
|
|
||||||
static ref EXPECTED_VALIDATOR_SET: ValidatorSetIndex = 0;
|
|
||||||
static ref KEYS: Vec<Option<Key>> = vec![Some(vec![0, 1]), Some(vec![2, 3])];
|
|
||||||
static ref EXPECTED_HASH: KeysHash = {
|
|
||||||
let mut hash = KeysHash::default();
|
|
||||||
ink_env::hash_encoded::<Blake2x256, _>(&*KEYS, &mut hash);
|
|
||||||
hash
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hash_prefixed<T: scale::Encode>(prefixed: PrefixedValue<T>) -> [u8; 32] {
|
|
||||||
let encoded = prefixed.encode();
|
|
||||||
let mut hash = KeysHash::default();
|
|
||||||
if encoded.len() < 32 {
|
|
||||||
hash[.. encoded.len()].copy_from_slice(&encoded);
|
|
||||||
} else {
|
|
||||||
Blake2x256::hash(&encoded, &mut hash);
|
|
||||||
}
|
|
||||||
hash
|
|
||||||
}
|
|
||||||
|
|
||||||
fn assert_vote(
|
|
||||||
event: &ink_env::test::EmittedEvent,
|
|
||||||
expected_validator: AccountId,
|
|
||||||
expected_keys: Option<()>,
|
|
||||||
) {
|
|
||||||
let decoded_event = <Event as scale::Decode>::decode(&mut &event.data[..])
|
|
||||||
.expect("encountered invalid contract event data buffer");
|
|
||||||
|
|
||||||
if let Event::Vote(Vote {
|
|
||||||
validator,
|
|
||||||
global_validator_set,
|
|
||||||
validator_set,
|
|
||||||
hash,
|
|
||||||
keys: actual_keys,
|
|
||||||
}) = decoded_event
|
|
||||||
{
|
|
||||||
assert_eq!(validator, expected_validator);
|
|
||||||
assert_eq!(global_validator_set, *EXPECTED_GLOBAL_VALIDATOR_SET);
|
|
||||||
assert_eq!(validator_set, *EXPECTED_VALIDATOR_SET);
|
|
||||||
assert_eq!(hash, *EXPECTED_HASH);
|
|
||||||
assert_eq!(actual_keys.as_ref(), expected_keys.map(|_| &*KEYS));
|
|
||||||
} else {
|
|
||||||
panic!("invalid Vote event")
|
|
||||||
}
|
|
||||||
|
|
||||||
let expected_topics = vec![
|
|
||||||
hash_prefixed(PrefixedValue { prefix: b"", value: b"Multisig::Vote" }),
|
|
||||||
hash_prefixed(PrefixedValue {
|
|
||||||
prefix: b"Multisig::Vote::validator",
|
|
||||||
value: &expected_validator,
|
|
||||||
}),
|
|
||||||
hash_prefixed(PrefixedValue {
|
|
||||||
prefix: b"Multisig::Vote::global_validator_set",
|
|
||||||
value: &*EXPECTED_GLOBAL_VALIDATOR_SET,
|
|
||||||
}),
|
|
||||||
hash_prefixed(PrefixedValue {
|
|
||||||
prefix: b"Multisig::Vote::validator_set",
|
|
||||||
value: &*EXPECTED_VALIDATOR_SET,
|
|
||||||
}),
|
|
||||||
hash_prefixed(PrefixedValue { prefix: b"Multisig::Vote::hash", value: &*EXPECTED_HASH }),
|
|
||||||
];
|
|
||||||
|
|
||||||
for (n, (actual_topic, expected_topic)) in
|
|
||||||
event.topics.iter().zip(expected_topics).enumerate()
|
|
||||||
{
|
|
||||||
assert_eq!(actual_topic, &expected_topic, "encountered invalid topic at {}", n);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn assert_key_gen(event: &ink_env::test::EmittedEvent) {
|
|
||||||
let decoded_event = <Event as scale::Decode>::decode(&mut &event.data[..])
|
|
||||||
.expect("encountered invalid contract event data buffer");
|
|
||||||
|
|
||||||
if let Event::KeyGen(KeyGen { global_validator_set, validator_set, hash }) = decoded_event {
|
|
||||||
assert_eq!(global_validator_set, *EXPECTED_GLOBAL_VALIDATOR_SET);
|
|
||||||
assert_eq!(validator_set, *EXPECTED_VALIDATOR_SET);
|
|
||||||
assert_eq!(hash, *EXPECTED_HASH);
|
|
||||||
} else {
|
|
||||||
panic!("invalid KeyGen event")
|
|
||||||
}
|
|
||||||
|
|
||||||
let expected_topics = vec![
|
|
||||||
hash_prefixed(PrefixedValue { prefix: b"", value: b"Multisig::KeyGen" }),
|
|
||||||
hash_prefixed(PrefixedValue {
|
|
||||||
prefix: b"Multisig::KeyGen::global_validator_set",
|
|
||||||
value: &*EXPECTED_GLOBAL_VALIDATOR_SET,
|
|
||||||
}),
|
|
||||||
hash_prefixed(PrefixedValue {
|
|
||||||
prefix: b"Multisig::KeyGen::validator_set",
|
|
||||||
value: &*EXPECTED_VALIDATOR_SET,
|
|
||||||
}),
|
|
||||||
hash_prefixed(PrefixedValue { prefix: b"Multisig::KeyGen::hash", value: &*EXPECTED_HASH }),
|
|
||||||
];
|
|
||||||
|
|
||||||
for (n, (actual_topic, expected_topic)) in
|
|
||||||
event.topics.iter().zip(expected_topics).enumerate()
|
|
||||||
{
|
|
||||||
assert_eq!(actual_topic, &expected_topic, "encountered invalid topic at {}", n);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The default constructor does its job.
|
|
||||||
#[ink::test]
|
|
||||||
fn new() {
|
|
||||||
let multisig = Multisig::new();
|
|
||||||
assert_eq!(multisig.updated_at(0), Err(Error::NonExistentValidatorSet));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Non-existent keys error accordingly.
|
|
||||||
#[ink::test]
|
|
||||||
fn non_existent_key() {
|
|
||||||
assert_eq!(Multisig::new().key(0, 0), Err(Error::NonExistentKey));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[ink::test]
|
|
||||||
fn success() {
|
|
||||||
test_register();
|
|
||||||
let mut multisig = Multisig::new();
|
|
||||||
|
|
||||||
// Test voting on keys works without issue, emitting the keys for the first vote
|
|
||||||
let mut emitted_events = vec![];
|
|
||||||
for (i, validator) in test_validators().iter().enumerate() {
|
|
||||||
ink_env::test::set_caller::<ink_env::DefaultEnvironment>(*validator);
|
|
||||||
multisig.vote(KEYS.clone()).unwrap();
|
|
||||||
|
|
||||||
emitted_events = ink_env::test::recorded_events().collect::<Vec<_>>();
|
|
||||||
// If this is the last validator, it should also trigger a keygen event, hence the + 1
|
|
||||||
assert_eq!(emitted_events.len(), (i + 1) + (i / (test_validators().len() - 1)));
|
|
||||||
assert_vote(
|
|
||||||
&emitted_events[i],
|
|
||||||
*validator,
|
|
||||||
// Only the first event for this hash should have the keys
|
|
||||||
Some(()).filter(|_| i == 0),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Since this should have key gen'd, verify that
|
|
||||||
assert_eq!(multisig.updated_at(0).unwrap(), *EXPECTED_GLOBAL_VALIDATOR_SET);
|
|
||||||
assert_key_gen(&emitted_events[test_validators().len()]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2021-2022 Luke Parker
+Copyright (c) 2021-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2021-2022 Luke Parker
+Copyright (c) 2021-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2020-2022 Luke Parker, Lee Bousfield
+Copyright (c) 2020-2023 Luke Parker, Lee Bousfield

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2021-2022 Luke Parker
+Copyright (c) 2021-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2021-2022 Luke Parker
+Copyright (c) 2021-2023 Luke Parker

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker
18 deny.toml
@@ -9,6 +9,11 @@ unmaintained = "warn"
 ignore = [
   "RUSTSEC-2020-0071", # https://github.com/chronotope/chrono/issues/602
+  "RUSTSEC-2021-0139", # https://github.com/serai-dex/serai/228
+  "RUSTSEC-2021-0145", # https://github.com/serai-dex/serai/225
+  "RUSTSEC-2022-0061", # https://github.com/serai-dex/serai/227
+  "RUSTSEC-2022-0075", # https://github.com/serai-dex/serai/226
+  "RUSTSEC-2022-0076", # https://github.com/serai-dex/serai/226
 ]

 [licenses]
@@ -39,11 +44,17 @@ allow-osi-fsf-free = "neither"
 default = "deny"

 exceptions = [
+  { allow = ["AGPL-3.0"], name = "bitcoin-serai" },
   { allow = ["AGPL-3.0"], name = "ethereum-serai" },

   { allow = ["AGPL-3.0"], name = "serai-processor" },

-  { allow = ["AGPL-3.0"], name = "serai-extension" },
-  { allow = ["AGPL-3.0"], name = "serai-multisig" },
+  { allow = ["AGPL-3.0"], name = "tokens-pallet" },
+
+  { allow = ["AGPL-3.0"], name = "in-instructions-pallet" },
+  { allow = ["AGPL-3.0"], name = "in-instructions-client" },
+
+  { allow = ["AGPL-3.0"], name = "validator-sets-pallet" },

   { allow = ["AGPL-3.0"], name = "sp-tendermint" },
   { allow = ["AGPL-3.0"], name = "pallet-tendermint" },
@@ -51,6 +62,8 @@ exceptions = [
   { allow = ["AGPL-3.0"], name = "serai-runtime" },
   { allow = ["AGPL-3.0"], name = "serai-node" },
+
+  { allow = ["AGPL-3.0"], name = "serai-client" },
 ]

 [[licenses.clarify]]
@@ -72,5 +85,4 @@ unknown-git = "deny"
 allow-registry = ["https://github.com/rust-lang/crates.io-index"]
 allow-git = [
   "https://github.com/serai-dex/substrate",
-  "https://github.com/hack-ink/array-bytes"
 ]
@@ -8,7 +8,7 @@ ENV BITCOIN_DATA=/home/bitcoin/.bitcoin
 WORKDIR /home/bitcoin

 RUN apk update \
   && apk --no-cache add ca-certificates gnupg bash su-exec

 # Get Binary
 # TODO: When bitcoin.org publishes 23.0, retrieve checksums from there.
@@ -22,7 +22,7 @@ RUN wget https://bitcoincore.org/bin/bitcoin-core-${BITCOIN_VERSION}/bitcoin-${B
 # Serai recognizes the builder keys for 16/17 signatures
 # from the 23.0 release
 ENV KEYS 152812300785C96444D3334D17565732E08E5E41 0AD83877C1F0CD1EE9BD660AD7CC770B81FD22A8 590B7292695AFFA5B672CBB2E13FC145CD3F4304 948444FCE03B05BA5AB0591EC37B1C1D44C786EE 9EDAFF80E080659604F4A76B2EBB056FD847F8A7 E777299FC265DD04793070EB944D35F9AC3DB76A F4FC70F07310028424EFC20A8E4256593F177720 D1DBF2C4B96F2DEBF4C16654410108112E7EA81F
-ENV KEYS2 4DAF18FE948E7A965B30F9457E296D555E7F63A7 28E72909F1717FE9607754F8A7BEB2621678D37D 74E2DEF5D77260B98BC19438099BAD163C70FBFA 71A3B16735405025D447E8F274810B012346C9A6 E463A93F5F3117EEDE6C7316BD02942421F4889F 9D3CC86A72F8494342EA5FD10A41BDC3F4FAFF1C 287AE4CA1187C68C08B49CB2D11BD4F33F1DB499 F9A8737BF4FF5C89C903DF31DD78544CF91B1514
+ENV KEYS2 4DAF18FE948E7A965B30F9457E296D555E7F63A7 28E72909F1717FE9607754F8A7BEB2621678D37D 74E2DEF5D77260B98BC19438099BAD163C70FBFA 71A3B16735405025D447E8F274810B012346C9A6 9D3CC86A72F8494342EA5FD10A41BDC3F4FAFF1C 287AE4CA1187C68C08B49CB2D11BD4F33F1DB499 F9A8737BF4FF5C89C903DF31DD78544CF91B1514

 # Use hardcoded prints to get keys from servers. 2 Different servers used.
 RUN gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys ${KEYS} \
@@ -49,6 +49,3 @@ COPY ./scripts /scripts

 EXPOSE 8332 8333 18332 18333 18443 18444
 VOLUME ["/home/bitcoin/.bitcoin"]
-
-# Run
-CMD ["bitcoind"]
@@ -1,29 +1,6 @@
 #!/bin/sh

 RPC_USER="${RPC_USER:=serai}"
 RPC_PASS="${RPC_PASS:=seraidex}"

-# address: bcrt1q7kc7tm3a4qljpw4gg5w73cgya6g9nfydtessgs
-# private key: cV9X6E3J9jq7R1XR8uPED2JqFxqcd6KrC8XWPy1GchZj7MA7G9Wx
-MINER="${MINER:=bcrt1q7kc7tm3a4qljpw4gg5w73cgya6g9nfydtessgs}"
-PRIV_KEY="${PRIV_KEY:=cV9X6E3J9jq7R1XR8uPED2JqFxqcd6KrC8XWPy1GchZj7MA7G9Wx}"
-BLOCK_TIME=${BLOCK_TIME:=5}
-
-bitcoind -regtest -txindex -fallbackfee=0.000001 -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS -rpcallowip=0.0.0.0/0 -rpcbind=127.0.0.1 -rpcbind=$(hostname) &
-
-# give time to bitcoind to start
-while true
-do
-  bitcoin-cli -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS generatetoaddress 100 $MINER && break
-  sleep 5
-done
-
-bitcoin-cli -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS createwallet "miner" false false $RPC_PASS false false true &&
-bitcoin-cli -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS walletpassphrase $RPC_PASS 60 &&
-bitcoin-cli -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS importprivkey $PRIV_KEY
-
-# mine a new block every BLOCK_TIME
-while true
-do
-  bitcoin-cli -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS generatetoaddress 1 $MINER
-  sleep $BLOCK_TIME
-done
+bitcoind -regtest -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS -rpcallowip=0.0.0.0/0 -rpcbind=127.0.0.1 -rpcbind=$(hostname)
@@ -1,5 +1,5 @@
-# Prepare Environment
 FROM alpine:latest as builder

 # https://downloads.getmonero.org/cli/monero-linux-x64-v0.18.1.0.tar.bz2
 # Verification will fail if MONERO_VERSION doesn't match the latest
 # due to the way monero publishes releases. They overwrite a single hashes.txt file
@@ -38,5 +38,3 @@ COPY ./scripts /scripts

 EXPOSE 18080 18081
 VOLUME /home/monero/.bitmonero
-
-CMD ["monerod"]
@@ -9,10 +9,3 @@ BLOCK_TIME=${BLOCK_TIME:=5}
 monerod --regtest --rpc-access-control-origins * --confirm-external-bind \
   --rpc-bind-ip=0.0.0.0 --offline --fixed-difficulty=1 \
   --non-interactive --mining-threads 1 --detach
-
-# give time to monerod to start
-while true; do
-  sleep 5
-done
-
-# Create wallet from PRIV_KEY in monero wallet
@@ -152,6 +152,8 @@ services:
     volumes:
       - "./coins/bitcoin/scripts:/scripts"
     entrypoint: /scripts/entry-dev.sh
+    ports:
+      - "18443:18443"

   ethereum:
     profiles:
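With the regtest daemon's RPC published on 18443 as above, something along these lines can reach it from the host. This is only a sketch: it assumes `reqwest` (with its `json` feature) and `serde_json` as dependencies, and uses the default `serai`/`seraidex` credentials from the entry script.

```
use serde_json::json;

// Query the regtest bitcoind for its current block count over JSON-RPC.
async fn block_count() -> Result<u64, reqwest::Error> {
  let res: serde_json::Value = reqwest::Client::new()
    .post("http://127.0.0.1:18443")
    // Credentials match the RPC_USER/RPC_PASS defaults in the entry script
    .basic_auth("serai", Some("seraidex"))
    .json(&json!({ "jsonrpc": "1.0", "id": "serai", "method": "getblockcount", "params": [] }))
    .send()
    .await?
    .json()
    .await?;
  // Bitcoin Core wraps the answer in a { "result": ..., "error": ... } object
  Ok(res["result"].as_u64().unwrap_or(0))
}
```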
@@ -1,33 +1,14 @@
 # Serai

 Serai is a decentralized execution layer whose validators form multisig wallets
-for various connected networks, offering secure decentralized custody of foreign
-assets to applications built on it.
+for various connected networks, offering secure decentralized control of foreign
+coins to applications built on it.

 Serai is exemplified by Serai DEX, an automated-market-maker (AMM) decentralized
-exchange, allowing swapping BTC, ETH, USDC, DAI, and XMR. It is the premier
+exchange, allowing swapping Bitcoin, Ether, DAI, and Monero. It is the premier
 application of Serai.

 ### Substrate

 Serai is based on [Substrate](https://docs.substrate.io), a blockchain framework
 offering a robust infrastructure.
-
-### Smart Contracts
-
-Serai offers WASM-based smart contracts. All applications are built over these
-contracts, enabling composable interactions within a mutual layer. These
-contracts are primarily written in [ink!](https://ink.substrate.io/), a
-framework for building contracts in Rust.
-
-Initially, smart contract deployment will not be enabled. Solely Serai DEX will
-be available, due to the variety of economic considerations around securing the
-multisig. Serai may expand in the future with more explicitly added
-applications, each with tailored economic models, or may enable arbitrary
-contract deployment. At this time, we solely plan for Serai DEX's availabiliy.
-
-### Application Calls
-
-Applications, such as Serai DEX, may be called via calling their relevant smart
-contracts. At a low level, this is done via specifying the address of the
-contract being interacted with, along with SCALE-encoded calldata.
@@ -9,11 +9,11 @@ Ethereum addresses are 20-byte hashes.
 Ethereum In Instructions are present via being appended to the calldata
 transferring funds to Serai. `origin` is automatically set to the party from
 which funds are being transferred. For an ERC20, this is `from`. For ETH, this
-is the caller. `data` is limited to 255 bytes.
+is the caller.

 ### Out Instructions

-`data` is limited to 255 bytes.
+`data` is limited to 512 bytes.

 If `data` is provided, the Ethereum Router will call a contract-calling child
 contract in order to sandbox it. The first byte of `data` designates which child
@@ -3,90 +3,93 @@
 Instructions are used to communicate with networks connected to Serai, and they
 come in two forms:

-- In Instructions are [Application Calls](../Serai.md#application-calls),
-paired with incoming funds. Encoded in transactions on connected networks,
-Serai will parse out instructions when it receives funds, executing the included
-calls.
+- In Instructions are programmable specifications paired with incoming coins,
+encoded into transactions on connected networks. Serai will parse included
+instructions when it receives coins, executing the included specs.

-- Out Instructions detail how to transfer assets, either to a Serai address or
-an address native to the asset in question.
+- Out Instructions detail how to transfer coins, either to a Serai address or
+an address native to the coin in question.

 A transaction containing an In Instruction and an Out Instruction (to a native
-address) will receive funds to Serai and send funds from Serai, without
+address) will receive coins to Serai and send coins from Serai, without
 requiring directly performing any transactions on Serai itself.

 All instructions are encoded under [Shorthand](#shorthand). Shorthand provides
 frequent use cases to create minimal data representations on connected networks.

 Instructions are interpreted according to their non-Serai network. Addresses
-have no validation performed, beyond being a valid enum entry (when applicable)
-of the correct length, unless otherwise noted. If the processor is instructed to
-act on invalid data, or send to itself, it will drop the entire instruction.
+have no validation performed unless otherwise noted. If the processor is
+instructed to act on invalid data, it will drop the entire instruction.

 ### Serialization

-- Numbers are exclusively unsigned and encoded as compact integers under
-SCALE.
-- Enums are prefixed by an ordinal byte of their type, followed by their
-actual values.
-- Vectors are prefixed by their length.
-- In Instruction fields are numbered and sequentially encoded, allowing
-omission, each prefixed by an ordinal byte. This is due to its fields being more
-frequently omitted than not, making their presence what's notable.
-- All other types have their fields sequentially encoded with no markers.
-
-Certain fields may be omitted depending on the network in question.
-
-### In Instructions
-
-- `origin` (Address): Address from the network of origin which sent funds in.
-- `target` (Address): The ink! contract to transfer the incoming funds to.
-- `data` (Vec\<u8>): The data to call `target` with.
-
-Networks may automatically provide `origin`. If they do, the instruction may
-still provide `origin`, overriding the automatically provided value. If no
-`origin` is provided, the instruction is dropped.
-
-Upon receiving funds, the respective Serai Asset contract is called, minting the
-appropriate amount of coins, and transferring them to `target`, calling it with
-the attached data.
-
-If the instruction fails, funds are scheduled to be returned to `origin`.
-
-### Out Instructions
-
-- `destination` (Enum { Native(Address), Serai(Address) }): Address to receive
-funds to.
-- `data` (Option\<Vec\<u8>>): The data to call
-the target with.
-
-Transfer the funds included with this instruction to the specified address with
-the specified data. Asset contracts perform no validation on native
-addresses/data.
+Instructions are SCALE encoded.
+
+### Application Call
+
+- `application` (u16): The application of Serai to call. Currently, only 0,
+Serai DEX is valid.
+- `data` (Data): The data to call the application with.
+
+### In Instruction
+
+InInstruction is an enum of SeraiAddress and ApplicationCall.
+
+The specified target will be minted an appropriate amount of the respective
+Serai token. If an Application Call, the encoded call will be executed.
+
+### Refundable In Instruction
+
+- `origin` (Option\<ExternalAddress>): Address, from the network of
+origin, which sent coins in.
+- `instruction` (InInstruction): The action to perform with the
+incoming coins.
+
+Networks may automatically provide `origin`. If they do, the instruction may
+still provide `origin`, overriding the automatically provided value.
+
+If the instruction fails, coins are scheduled to be returned to `origin`,
+if provided.
+
+### Out Instruction
+
+- `address` (ExternalAddress): Address to transfer the coins included with
+this instruction to.
+- `data` (Option<Data>): Data to include when transferring coins.
+
+No validation of external addresses/data is performed on-chain. If data is
+specified for a chain not supporting data, it is silently dropped.
+
+### Destination
+
+Destination is an enum of SeraiAddress and OutInstruction.
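To make the relationships above concrete, here is a minimal sketch of these definitions as SCALE-encodable Rust types. It is illustrative only: variant names and simplified widths merely mirror this document, and the actual definitions in Serai's primitives may differ.

```
use scale::{Encode, Decode};

// Simplified stand-ins for the bounded types described in Types.md.
type SeraiAddress = [u8; 32];
type ExternalAddress = Vec<u8>; // bounded to 74 bytes in practice
type Data = Vec<u8>;            // bounded to 512 bytes in practice

#[derive(Encode, Decode)]
enum InInstruction {
  // Mint the incoming coins' Serai token to this address.
  Address(SeraiAddress),
  // Execute an application call with the incoming coins.
  ApplicationCall { application: u16, data: Data },
}

#[derive(Encode, Decode)]
struct RefundableInInstruction {
  // Origin on the external network, used for refunds if the instruction fails.
  origin: Option<ExternalAddress>,
  instruction: InInstruction,
}

#[derive(Encode, Decode)]
struct OutInstruction {
  // External address receiving the coins; no on-chain validation is performed.
  address: ExternalAddress,
  // Optional data, silently dropped on chains which don't support data.
  data: Option<Data>,
}

#[derive(Encode, Decode)]
enum Destination {
  Serai(SeraiAddress),
  Native(OutInstruction),
}
```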
 ### Shorthand

-Shorthand is an enum which expands to an In Instruction.
+Shorthand is an enum which expands to a Refundable In Instruction.

 ##### Raw

-Raw Shorthand encodes a raw In Instruction with no further processing. This is
-a verbose fallback option for infrequent use cases not covered by Shorthand.
+Raw Shorthand encodes a raw Refundable In Instruction in a Data, with no further
+processing. This is a verbose fallback option for infrequent use cases not
+covered by Shorthand.

 ##### Swap

-- `origin` (Option\<Address>): In Instruction's `origin`.
+- `origin` (Option\<ExternalAddress>): Refundable In Instruction's `origin`.
 - `coin` (Coin): Coin to swap funds for.
 - `minimum` (Amount): Minimum amount of `coin` to receive.
-- `out` (Out Instruction): Final destination for funds.
+- `out` (Destination): Final destination for funds.

 which expands to:

 ```
-In Instruction {
+RefundableInInstruction {
   origin,
-  target: Router,
-  data: swap(Incoming Asset, out, minimum)
+  instruction: ApplicationCall {
+    application: DEX,
+    data: swap(Incoming Asset, coin, minimum, out)
+  }
 }
 ```

@@ -99,19 +102,23 @@ where `swap` is a function which:

 ##### Add Liquidity

-- `origin` (Option\<Address>): In Instruction's `origin`.
-- `minimum` (Amount): Minimum amount of SRI to receive.
-- `gas` (Amount): Amount of SRI to send to `address` to cover
-gas in the future.
-- `address` (Address): Account to send the created liquidity tokens.
+- `origin` (Option\<ExternalAddress>): Refundable In Instruction's `origin`.
+- `minimum` (Amount): Minimum amount of SRI tokens to swap
+half for.
+- `gas` (Amount): Amount of SRI to send to `address` to
+cover gas in the future.
+- `address` (Address): Account to send the created liquidity
+tokens.

 which expands to:

 ```
-In Instruction {
+RefundableInInstruction {
   origin,
-  target: Router,
-  data: swap_and_add_liquidity(Incoming Asset, address, minimum, gas)
+  instruction: ApplicationCall {
+    application: DEX,
+    data: swap_and_add_liquidity(Incoming Asset, minimum, gas, address)
+  }
 }
 ```

@@ -120,5 +127,5 @@ where `swap_and_add_liquidity` is a function which:
 1) Swaps half of the incoming funds for SRI.
 2) Checks the amount of SRI received is greater than `minimum`.
 3) Calls `swap_and_add_liquidity` with the amount of SRI received - `gas`, and
-a matching amount of the incoming asset.
+a matching amount of the incoming coin.
 4) Transfers any leftover funds to `address`.
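Again purely as an illustration, and reusing the sketch types from the In/Out Instruction section above (with `Coin` and `Amount` as in Types.md), Shorthand could be modeled along these lines; the real encoding is whatever Serai's in-instructions primitives define.

```
use scale::{Encode, Decode};

type Coin = u32;
type Amount = u64;

#[derive(Encode, Decode)]
enum Shorthand {
  // Verbose fallback: a full Refundable In Instruction, pre-encoded as Data.
  Raw(Data),
  // Swap the incoming coins for `coin`, requiring at least `minimum` out.
  Swap { origin: Option<ExternalAddress>, coin: Coin, minimum: Amount, out: Destination },
  // Swap half the incoming coins for SRI, add symmetric liquidity, and send
  // the liquidity tokens (plus `gas` SRI) to `address`.
  AddLiquidity { origin: Option<ExternalAddress>, minimum: Amount, gas: Amount, address: SeraiAddress },
}
```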
@ -1,98 +0,0 @@
-# Scenarios
-
-### Pong
-
-Pong has Serai receive funds, just to return them. It's a demonstration of the
-in/out flow.
-
-```
-Shorthand::Raw(
-  In Instruction {
-    target: Incoming Asset Contract,
-    data: native_transfer(Incoming Asset Sender)
-  }
-)
-```
-
-### Wrap
-
-Wrap wraps an asset from a connected chain into a Serai Asset, making it usable
-with applications on Serai, such as Serai DEX.
-
-```
-Shorthand::Raw(
-  In Instruction {
-    target: Serai Address
-  }
-)
-```
-
-### Swap SRI to Bitcoin
-
-For a SRI to Bitcoin swap, a SRI holder would perform an
-[Application Call](../Serai.md#application-calls) to Serai DEX, purchasing
-seraiBTC. Once they have seraiBTC, they are able to call `native_transfer`,
-transferring the BTC underlying the seraiBTC to a specified Bitcoin address.
-
-### Swap Bitcoin to Monero
-
-For a Bitcoin to Monero swap, the following Shorthand would be used.
-
-```
-Shorthand::Swap {
-  coin: Monero,
-  minimum: Minimum Monero from Swap,
-  out: Monero Address
-}
-```
-
-This Shorthand is expected to generally take:
-
-- 1 byte to identify as Swap.
-- 1 byte to not override `origin`.
-- 1 byte for `coin`.
-- 4 bytes for `minimum`.
-- 1 byte for `out`'s `destination`'s ordinal byte.
-- 65 bytes for `out`'s `destination`'s address.
-- 1 byte to not include `data` in `out`.
-
-Or 74 bytes.
-
-### Add Liquidity (Fresh)
-
-For a user who has never used Serai before, they have three requirements to add
-liquidity:
-
-- Minting the Serai asset they wish to add liquidity for
-- Acquiring Serai, as liquidity is symmetric
-- Acquiring Serai for gas fees
-
-The Add Liquidity Shorthand enables all three of these actions, and actually
-adding the liquidity, in just one transaction from a connected network.
-
-```
-Shorthand::AddLiquidity {
-  minimum: Minimum SRI from Swap,
-  gas: Amount of SRI to keep for gas
-  address: Serai address for the liquidity tokens and gas
-}
-```
-
-For adding liquidity from Bitcoin, this Shorthand is expected to generally take:
-
-- 1 byte to identify as Add Liquidity.
-- 1 byte to not override `origin`.
-- 5 bytes for `minimum`.
-- 4 bytes for `gas`.
-- 32 bytes for `address`.
-
-Or 43 bytes.
-
-### Add Liquidity (SRI Holder)
-
-For a user who already has SRI, they solely need to have the asset they wish to
-add liquidity for via their SRI. They can either purchase it from Serai DEX, or
-wrap it as detailed above.
-
-Once they have both their SRI and the asset they wish to provide liquidity for,
-they would use a Serai transaction to call the DEX, adding the liquidity.
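The byte estimates in the removed Scenarios document above are easy to sanity-check. The following sketch simply tallies the per-field sizes quoted in the two bullet lists; the constant names are ours for illustration, not Serai's encoding.

```rust
// Tally of the Shorthand::Swap size estimate (74 bytes).
const SWAP_KIND: usize = 1; // 1 byte to identify as Swap
const SWAP_NO_ORIGIN: usize = 1; // 1 byte to not override `origin`
const SWAP_COIN: usize = 1; // 1 byte for `coin`
const SWAP_MINIMUM: usize = 4; // 4 bytes for `minimum`
const OUT_ORDINAL: usize = 1; // 1 byte for `out`'s `destination`'s ordinal byte
const OUT_ADDRESS: usize = 65; // 65 bytes for `out`'s `destination`'s address
const OUT_NO_DATA: usize = 1; // 1 byte to not include `data` in `out`

const SWAP_SIZE: usize =
  SWAP_KIND + SWAP_NO_ORIGIN + SWAP_COIN + SWAP_MINIMUM + OUT_ORDINAL + OUT_ADDRESS + OUT_NO_DATA;

// Tally of the Shorthand::AddLiquidity size estimate (43 bytes).
const ADD_LIQ_KIND: usize = 1; // 1 byte to identify as Add Liquidity
const ADD_LIQ_NO_ORIGIN: usize = 1; // 1 byte to not override `origin`
const ADD_LIQ_MINIMUM: usize = 5; // 5 bytes for `minimum`
const ADD_LIQ_GAS: usize = 4; // 4 bytes for `gas`
const ADD_LIQ_ADDRESS: usize = 32; // 32 bytes for `address`

const ADD_LIQ_SIZE: usize =
  ADD_LIQ_KIND + ADD_LIQ_NO_ORIGIN + ADD_LIQ_MINIMUM + ADD_LIQ_GAS + ADD_LIQ_ADDRESS;

fn main() {
  assert_eq!(SWAP_SIZE, 74);
  assert_eq!(ADD_LIQ_SIZE, 43);
}
```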
@ -5,41 +5,33 @@
 These are the list of types used to represent various properties within the
 protocol.

-| Alias                    | Shorthand | Type     |
-|--------------------------|-----------|----------|
-| Amount                   | Amount    | u64      |
-| Curve                    | Curve     | u16      |
-| Coin                     | Coin      | u32      |
-| Global Validator Set ID  | GVSID     | u32      |
-| Validator Set Index      | VS        | u8       |
-| Key                      | Key       | Vec\<u8> |
-
-### Curves
-
-Integer IDs for various curves. It should be noted some curves may be the same,
-yet have distinct IDs due to having different basepoints, and accordingly
-different keys. For such cases, the processor is expected to create one secret
-per curve, and then use DLEq proofs to port keys to other basepoints as needed.
-
-| Curve     | ID |
-|-----------|----|
-| Secp256k1 | 0  |
-| Ed25519   | 1  |
+| Alias                   | Type                                         |
+|-------------------------|----------------------------------------------|
+| SeraiAddress            | sr25519::Public (unchecked [u8; 32] wrapper) |
+| Amount                  | u64                                          |
+| Coin                    | u32                                          |
+| Session                 | u32                                          |
+| Validator Set Index     | u16                                          |
+| Validator Set Instance  | (Session, Validator Set Index)               |
+| Key                     | BoundedVec\<u8, 96>                          |
+| ExternalAddress         | BoundedVec\<u8, 74>                          |
+| Data                    | BoundedVec\<u8, 512>                         |

 ### Networks

-Every network connected to Serai operates over a specific curve. While the
-processor generates keys for curves, these keys are bound to specific networks
-via an additive offset created by hashing the network's name (among other
-things). The network's key is used for all coins on that network.
+Every network connected to Serai operates over a specific curve. The processor
+generates a distinct set of keys per network. Beyond the key-generation itself
+being isolated, the generated keys are further bound to their respective
+networks via an additive offset created by hashing the network's name (among
+other properties). The network's key is used for all coins on that network.

 Networks are not acknowledged by the Serai network, solely by the processor.

 | Network  | Curve     |
-|----------|-------|
-| Bitcoin  | 0     |
-| Ethereum | 0     |
-| Monero   | 1     |
+|----------|-----------|
+| Bitcoin  | Secp256k1 |
+| Ethereum | Secp256k1 |
+| Monero   | Ed25519   |

 ### Coins

@ -47,8 +39,8 @@ Coins exist over a network and have a distinct integer ID.

 | Coin     | Network  | ID |
 |----------|----------|----|
-| Bitcoin  | Bitcoin  | 0  |
-| Ethereum | Ethereum | 1  |
-| USDC     | Ethereum | 2  |
+| Serai    | Serai    | 0  |
+| Bitcoin  | Bitcoin  | 1  |
+| Ether    | Ethereum | 2  |
 | DAI      | Ethereum | 3  |
 | Monero   | Monero   | 4  |
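For readers unfamiliar with the alias table above, the following is a rough Rust rendering of the new aliases, plus the coin IDs from the table immediately above. These declarations are illustrative only: the real definitions wrap Substrate types (e.g. `BoundedVec` and `sr25519::Public`) and live in Serai's own crates, and `BoundedBytes` here is merely a stand-in.

```rust
// Illustrative Rust equivalents of the aliases in the table above; not the
// actual definitions from Serai's primitives crates.
type Amount = u64;
type Coin = u32;
type Session = u32;
type ValidatorSetIndex = u16;
type ValidatorSetInstance = (Session, ValidatorSetIndex);

// An unchecked 32-byte wrapper, per the table's description of SeraiAddress.
struct SeraiAddress(pub [u8; 32]);

// Stand-in for Substrate's BoundedVec<u8, N>: a byte vector with a length cap.
struct BoundedBytes<const LIMIT: usize>(Vec<u8>);

type Key = BoundedBytes<96>;
type ExternalAddress = BoundedBytes<74>;
type Data = BoundedBytes<512>;

// Coin IDs from the Coins table above.
const SERAI: Coin = 0;
const BITCOIN: Coin = 1;
const ETHER: Coin = 2;
const DAI: Coin = 3;
const MONERO: Coin = 4;
```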
@ -1,35 +0,0 @@
-# Multisig
-
-Multisigs are confirmed on-chain by the `Multisig` contract. While the processor
-does create the multisig, and sign for it, making it irrelevant to the chain,
-confirming it on-chain solves the question of if the multisig was successfully
-created or not. If each processor simply asked all other processors for
-confirmation, votes lost to the network would create an inconsistent view. This
-is a form of the Byzantine Generals Problem, which can be resolved by placing
-votes within a BFT system.
-
-Confirmation requires all participants confirm the new set of keys. While this
-isn't BFT, despite the voting process being BFT, it avoids the scenario where
-only t (where t is the BFT threshold, as used in the t-of-n multisig)
-successfully generated shares, actually creating a t-of-t multisig in practice,
-which is not BFT. This does mean a single node can delay a churn, which is
-expected to be handled via a combination of slashing, and if necessary, removal.
-
-Validators are allowed to vote multiple times across sets of keys, with the
-first set to be confirmed becoming the set of keys for that validator set. These
-keys remain valid for the validator set until it is changed. If a validator set
-remains consistent despite the global validator set updating, their keys carry.
-If a validator set adds a new member, and then loses them, their historical keys
-are not reused.
-
-Once new keys are confirmed for a given validator set, they become tracked and
-the recommended set of keys for incoming funds. The old keys are still eligible
-to receive funds for a provided grace period, requiring the current validator
-set to track both sets of keys. The old keys are also still used to handle all
-outgoing payments as well, until the end of the grace period, at which point
-they're no longer eligible to receive funds and they forward all of their funds
-to the new set of keys.
-
-### `vote(keys: Vec<Option<Key>>)`
-
-Lets a validator vote on a set of keys for their validator set.
19 docs/protocol/Staking.md Normal file
@ -0,0 +1,19 @@
+# Staking
+
+Serai's staking pallet offers a DPoS system. All stake which enters the system
+is delegated somewhere. Delegates can then bond their stake to different
+validator sets, justifying their inclusion and providing financial security.
+
+Delegators may transfer stake whenever, so long as that stake isn't actively
+bonded. Delegators may also unstake whenever, so long as the prior condition
+is still met.
+
+### Stake (message)
+
+- `delegate` (Address): Address to delegate the newly added stake to.
+- `amount` (Amount): Amount to stake and delegate.
+
+### Unstake (message)
+
+- `delegate` (Address): Address the stake is currently delegated to.
+- `amount` (Amount): Amount to unstake.
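A hypothetical Rust rendering of the two staking messages, for illustration only. The actual pallet defines these as dispatchable calls; the struct and field names here simply mirror the bullet lists in the new document above, with `u64` and a 32-byte array standing in for `Amount` and `Address`.

```rust
// Illustrative shapes of the Stake and Unstake messages described above.
type Amount = u64;
type Address = [u8; 32];

/// Stake: add `amount` to the system and delegate it to `delegate`.
struct Stake {
  delegate: Address,
  amount: Amount,
}

/// Unstake: remove `amount` from the stake currently delegated to `delegate`.
/// Per the text above, this is only valid while that stake isn't actively
/// bonded to a validator set.
struct Unstake {
  delegate: Address,
  amount: Amount,
}
```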
@ -2,29 +2,80 @@

 Validator Sets are defined at the protocol level, with the following parameters:

-- `index` (VS): Validator set index, a global key atomically increasing
-  from 0.
-- `bond` (Amount): Amount of bond per key-share of this validator set.
-- `coins` (Vec\<Coin>): Coins managed by this validator set.
+- `bond` (Amount): Amount of bond per key-share.
+- `coins` (Vec\<Coin>): List of coins within this set.
+- `participants` (Vec\<Coin>): List of participants within this set.

-At launch, there will solely be validator set 0, managing Bitcoin, Ethereum,
-USDC, DAI, and Monero.
+Validator Sets are referred to by `ValidatorSetIndex` yet have their data
+accessible via `ValidatorSetInstance`.

-### Multisig Management
-
-Every validator set is expected to form a t-of-n multisig, where n is the amount
-of key shares in the validator set and t is `n / 3 * 2 + 1`, per curve required
-by its coins. This multisig is secure to hold funds up to 67% of the validator
-set's bond value. If funds exceed that threshold, there's more value in the
-multisig than in the supermajority of bond that must be put forth to control it.
+At launch, there will solely be Validator Set 0, managing Bitcoin, Ether, DAI,
+and Monero.

 ### Participation in the BFT process

-All validator sets participate in the BFT process. Specifically, a block
-containing `Oraclization`s for a coin must be approved by the BFT majority of
-the validator set responsible for it, along with the BFT majority of the network
-by bond.
+All Validator Sets participate in the BFT process described under
+[Consensus](./Consensus.md). Specifically, a block containing In Instructions
+for a coin must be approved by the BFT majority of the Validator Set responsible
+for it, along with the BFT majority of the network by bond.

-At this time, `Oraclization`s for a coin are only expected to be included when a
-validator from the validator set managing the coin is the producer of the block
+At this time, In Instructions for a coin are only expected to be included when a
+validator from the Validator Set managing the coin is the producer of the block
 in question.
+
+Since there is currently only one Validator Set, the aforementioned BFT
+conditions collapse to simply the BFT majority by bond. Ensuring BFT majority
+per responsible Validator Set is accordingly unimplemented for now.
+
+### Multisig
+
+Every Validator Set is expected to form a `t`-of-`n` multisig, where `n` is the
+amount of key shares in the Validator Set and `t` is `n * 2 / 3 + 1`, for each
+of its networks. This multisig is secure to hold coins up to 67% of the
+Validator Set's bonded value. If the coins exceed that threshold, there's more
+value in the multisig than in the supermajority of bond that must be put forth
+to control it. Accordingly, it'd be no longer financially secure, and it MUST
+reject newly added coins which would cross that threshold.
+
+### Multisig Creation
+
+Multisigs are created by processors, communicating via their Coordinators.
+They're then confirmed on chain via the `validator-sets` pallet. This is done by
+having 100% of participants agree on the resulting group key. While this isn't
+fault tolerant, a malicious actor who forces a `t`-of-`n` multisig to be
+`t`-of-`n-1` reduces the fault tolerance of the multisig which is a greater
+issue. If a node does prevent multisig creation, other validators should issue
+slashes for it/remove it from the Validator Set entirely.
+
+Due to the fact multiple key generations may occur to account for
+faulty/malicious nodes, voting on multiple keys for a single coin is allowed,
+with the first key to be confirmed becoming the key for that coin.
+
+Placing it on chain also solves the question of if the multisig was successfully
+created or not. Processors cannot simply ask each other if they succeeded
+without creating an instance of the Byzantine Generals Problem. Placing results
+within a Byzantine Fault Tolerant system resolves this.
+
+### Multisig Lifetime
+
+The keys for a Validator Set remain valid until its participants change. If a
+Validator Set adds a new member, and then they leave, the set's historical keys
+are not reused.
+
+### Multisig Handoffs
+
+Once new keys are confirmed for a given Validator Set, they become tracked and
+the recommended set of keys for incoming coins. The old keys are still eligible
+to receive coins for a provided grace period, requiring the current Validator
+Set to track both sets of keys. The old keys are also prioritized for handling
+outbound transfers, until the end of the grace period, at which point they're
+no longer eligible to receive coins and they forward all of their coins to the
+new set of keys. It is only then that validators in the previous instance of the
+set, yet not the current instance, may unbond their stake.
+
+### Vote (message)
+
+- `coin` (Coin): Coin whose key is being voted for.
+- `key` (Key): Key being voted on.
+
+Once a key is voted on by every member, it's adopted as detailed above.
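The threshold and security bound quoted in the new Multisig section above are simple to express in code. This is a sketch under stated assumptions: the function names are ours, and plain `u64` values stand in for `Amount`; it is not the `validator-sets` pallet's API.

```rust
// Sketch of the `t`-of-`n` rule from the Multisig section above
// (`t = n * 2 / 3 + 1`, with integer division), plus the 67% bound check.
fn multisig_threshold(key_shares: u64) -> u64 {
  (key_shares * 2 / 3) + 1
}

// A set may keep accepting coins only while their value stays at or below 67%
// of the set's bonded value, per the Multisig section above.
fn may_accept(coin_value: u64, bonded_value: u64) -> bool {
  // Written multiplicatively to avoid fractional arithmetic:
  // coin_value <= 0.67 * bonded_value.
  coin_value * 100 <= bonded_value * 67
}

fn main() {
  // e.g. a 4-share set needs 3 signers: 4 * 2 / 3 + 1 = 2 + 1 = 3.
  assert_eq!(multisig_threshold(4), 3);
  // and a 100-share set needs 67.
  assert_eq!(multisig_threshold(100), 67);
  assert!(may_accept(67, 100));
  assert!(!may_accept(68, 100));
}
```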
@ -1,44 +0,0 @@
-# Validators
-
-### Register (message)
-
-- `validator` (signer): Address which will be the validator on Substrate.
-- `manager` (signer): Address which will manage this validator.
-- `set` (VS): Validator set being joined.
-
-Marks `validator` as a validator candidate for the specified validator set,
-enabling delegation.
-
-### Delegate (message)
-
-- `delegator` (signer): Address delegating funds to `validator`.
-- `validator` (address): Registered validator being delegated to.
-- `amount` (Amount): Amount of funds being delegated to `validator`.
-
-Delegated funds will be removed from `delegator`'s wallet and moved to
-`validator`'s bond. `amount` must be a multiple of the validator set's bond, and
-`delegator` must be `validator`'s manager.
-
-### Undelegate (message)
-
-- `delegator` (signer): Address removing delegated funds from `validator`.
-- `validator` (address): Registered validator no longer being delegated to.
-- `amount` (Amount): Amount of funds no longer being delegated to
-  `validator`.
-
-`delegator` must be `validator`'s manager, and `amount` must be a multiple of
-the validator set's bond. `validator` is scheduled to lose an according amount
-of key shares at the next churn, and once they do, the specified amount will be
-moved from `validator`'s bond to `delegator`'s wallet.
-
-`validator`'s bond must be at least the validator set's bond after the
-undelegation.
-
-### Resign (message)
-
-- `manager` (signer): Manager of `validator`.
-- `validator` (address): Validator being removed from the pool/candidacy.
-
-If `validator` is active, they will be removed at the next churn. If they are
-solely a candidate, they will no longer be eligible for delegations. All bond is
-refunded after their removal.
@ -14,29 +14,41 @@ all-features = true
 rustdoc-args = ["--cfg", "docsrs"]

 [dependencies]
+# Macros
 async-trait = "0.1"
 zeroize = "^1.5"
 thiserror = "1"
 rand_core = "0.6"

-group = "0.12"
-
-curve25519-dalek = { version = "3", features = ["std"] }
-
+# Cryptography
 transcript = { package = "flexible-transcript", path = "../crypto/transcript", features = ["recommended"] }
-dalek-ff-group = { path = "../crypto/dalek-ff-group", features = ["black_box"] }
-frost = { package = "modular-frost", path = "../crypto/frost", features = ["ed25519"] }
+group = "0.12"
+frost = { package = "modular-frost", path = "../crypto/frost", features = ["secp256k1", "ed25519"] }

+# Monero
+curve25519-dalek = { version = "3", features = ["std"] }
+dalek-ff-group = { path = "../crypto/dalek-ff-group", features = ["black_box"] }
 monero-serai = { path = "../coins/monero", features = ["multisig"] }
+
+# Bitcoin
+bitcoin-serai = { path = "../coins/bitcoin" }
+
+k256 = { version = "0.12", features = ["arithmetic"] }
+bitcoin = "0.29"
+hex = "0.4"
+secp256k1 = { version = "0.24", features = ["global-context", "rand-std"] }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"

 [dev-dependencies]
 rand_core = "0.6"

 hex = "0.4"
-serde = { version = "1.0", features = ["derive"] }
+serde = { version = "1", features = ["derive"] }
 serde_json = "1.0"

 futures = "0.3"
 tokio = { version = "1", features = ["full"] }

-frost = { package = "modular-frost", path = "../crypto/frost", features = ["ed25519", "tests"] }
+frost = { package = "modular-frost", path = "../crypto/frost", features = ["tests"] }
@ -1,6 +1,6 @@
 AGPL-3.0-only license

-Copyright (c) 2022 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License Version 3 as
Some files were not shown because too many files have changed in this diff.