stack_wallet/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart

import 'dart:convert';
import 'package:bitcoindart/bitcoindart.dart' as btc;
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';
import 'package:stackwallet/models/balance.dart';
import 'package:stackwallet/models/isar/models/blockchain_data/address.dart';
import 'package:stackwallet/utilities/amount/amount.dart';
import 'package:stackwallet/utilities/extensions/extensions.dart';
import 'package:stackwallet/utilities/logger.dart';
import 'package:stackwallet/wallets/crypto_currency/crypto_currency.dart';
import 'package:stackwallet/wallets/isar/models/spark_coin.dart';
import 'package:stackwallet/wallets/models/tx_data.dart';
import 'package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart';
import 'package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart';
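/// Default Spark key index passed to flutter_libsparkmobile for address
/// derivation, coin identification, and spend creation. Only the diversifier
/// changes between receiving addresses.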
const kDefaultSparkIndex = 1;
mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
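  /// Returns true if [address] parses as a valid Spark address for the given
  /// network.
  ///
  /// Minimal usage sketch (`someAddressString` is a placeholder):
  ///
  /// ```dart
  /// final isValid = SparkInterface.validateSparkAddress(
  ///   address: someAddressString,
  ///   isTestNet: false,
  /// );
  /// ```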
static bool validateSparkAddress({
required String address,
required bool isTestNet,
}) =>
LibSpark.validateAddress(address: address, isTestNet: isTestNet);
@override
Future<void> init() async {
Address? address = await getCurrentReceivingSparkAddress();
if (address == null) {
address = await generateNextSparkAddress();
await mainDB.putAddress(address);
} // TODO add other address types to wallet info?
// await info.updateReceivingAddress(
// newAddress: address.value,
// isar: mainDB.isar,
// );
await super.init();
}
@override
Future<List<Address>> fetchAddressesForElectrumXScan() async {
final allAddresses = await mainDB
.getAddresses(walletId)
.filter()
.not()
.group(
(q) => q
.typeEqualTo(AddressType.spark)
.or()
.typeEqualTo(AddressType.nonWallet)
.or()
.subTypeEqualTo(AddressSubType.nonWallet),
)
.findAll();
return allAddresses;
}
Future<Address?> getCurrentReceivingSparkAddress() async {
return await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
.typeEqualTo(AddressType.spark)
.sortByDerivationIndexDesc()
.findFirst();
}
Future<Address> generateNextSparkAddress() async {
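    // Spark reuses one set of derived keys; each receiving address simply uses
    // the next diversifier value, so "next address" means "next diversifier".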
final highestStoredDiversifier =
(await getCurrentReceivingSparkAddress())?.derivationIndex;
// default to starting at 1 if none found
final int diversifier = (highestStoredDiversifier ?? 0) + 1;
final root = await getRootHDNode();
final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else {
derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
}
final keys = root.derivePath(derivationPath);
final String addressString = await LibSpark.getAddress(
privateKey: keys.privateKey.data,
index: kDefaultSparkIndex,
diversifier: diversifier,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
return Address(
walletId: walletId,
value: addressString,
publicKey: keys.publicKey.data,
derivationIndex: diversifier,
derivationPath: DerivationPath()..value = derivationPath,
type: AddressType.spark,
subType: AddressSubType.receiving,
);
}
Future<Amount> estimateFeeForSpark(Amount amount) async {
// int spendAmount = amount.raw.toInt();
// if (spendAmount == 0) {
return Amount(
rawValue: BigInt.from(0),
fractionDigits: cryptoCurrency.fractionDigits,
);
// }
// TODO actual fee estimation
}
/// Spark to Spark/Transparent (spend) creation
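  ///
  /// Rough usage sketch (`txData` is assumed to already contain the desired
  /// recipients and/or sparkRecipients):
  ///
  /// ```dart
  /// var data = await prepareSendSpark(txData: txData);
  /// data = await confirmSendSpark(txData: data); // broadcasts via electrumx
  /// ```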
Future<TxData> prepareSendSpark({
required TxData txData,
}) async {
// fetch spendable spark coins
final coins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.and()
.heightIsNotNull()
.findAll();
// prepare coin data for ffi
final serializedCoins = coins
.map((e) => (
serializedCoin: e.serializedCoinB64!,
serializedCoinContext: e.contextB64!,
groupId: e.groupId,
height: e.height!,
))
.toList();
final currentId = await electrumXClient.getSparkLatestCoinId();
final List<Map<String, dynamic>> setMaps = [];
final List<({int groupId, String blockHash})> idAndBlockHashes = [];
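    // Fetch the anonymity set (and its latest block hash) for every coin group
    // up to the current one; the spend proof needs the complete sets.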
for (int i = 1; i <= currentId; i++) {
final set = await electrumXCachedClient.getSparkAnonymitySet(
groupId: i.toString(),
coin: info.coin,
);
set["coinGroupID"] = i;
setMaps.add(set);
idAndBlockHashes.add(
(
groupId: i,
blockHash: set["blockHash"] as String,
),
);
}
final allAnonymitySets = setMaps
.map((e) => (
setId: e["coinGroupID"] as int,
setHash: e["setHash"] as String,
set: (e["coins"] as List)
.map((e) => (
serializedCoin: e[0] as String,
txHash: e[1] as String,
))
.toList(),
))
.toList();
final root = await getRootHDNode();
final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else {
derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
}
final privateKey = root.derivePath(derivationPath).privateKey.data;
final txb = btc.TransactionBuilder(
network: btc.NetworkType(
messagePrefix: cryptoCurrency.networkParams.messagePrefix,
bech32: cryptoCurrency.networkParams.bech32Hrp,
bip32: btc.Bip32Type(
public: cryptoCurrency.networkParams.pubHDPrefix,
private: cryptoCurrency.networkParams.privHDPrefix,
),
pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
scriptHash: cryptoCurrency.networkParams.p2shPrefix,
wif: cryptoCurrency.networkParams.wifPrefix,
),
);
txb.setLockTime(await chainHeight);
txb.setVersion(3 | (9 << 16));
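    // Firo packs the extra transaction type into the upper 16 bits of
    // nVersion: 3 is the base version and 9 is (presumably) the Spark spend
    // type.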
for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
if (txData.recipients![i].amount.raw == BigInt.zero) {
continue;
}
if (txData.recipients![i].amount < cryptoCurrency.dustLimit) {
throw Exception("Output below dust limit");
}
//
// transparentOut += txData.recipients![i].amount.raw.toInt();
txb.addOutput(
txData.recipients![i].address,
txData.recipients![i].amount.raw.toInt(),
);
}
final extractedTx = txb.buildIncomplete();
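    // A spark spend uses a single placeholder input: null prevout hash, max
    // index and sequence, and a script containing only the OP_SPARKSPEND
    // opcode (0xd3). The real spend proof is attached as the payload below.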
extractedTx.addInput(
'0000000000000000000000000000000000000000000000000000000000000000'
.toUint8ListFromHex,
0xffffffff,
0xffffffff,
"d3".toUint8ListFromHex, // OP_SPARKSPEND
);
extractedTx.setPayload(Uint8List(0));
final spend = LibSpark.createSparkSendTransaction(
privateKeyHex: privateKey.toHex,
index: kDefaultSparkIndex,
recipients: [],
privateRecipients: txData.sparkRecipients
?.map((e) => (
sparkAddress: e.address,
amount: e.amount.raw.toInt(),
subtractFeeFromAmount: e.subtractFeeFromAmount,
memo: e.memo,
))
.toList() ??
[],
serializedCoins: serializedCoins,
allAnonymitySets: allAnonymitySets,
idAndBlockHashes: idAndBlockHashes
.map((e) => (setId: e.groupId, blockHash: base64Decode(e.blockHash)))
.toList(),
txHash: extractedTx.getHash(),
);
for (final outputScript in spend.outputScripts) {
extractedTx.addOutput(outputScript, 0);
}
extractedTx.setPayload(spend.serializedSpendPayload);
final rawTxHex = extractedTx.toHex();
return txData.copyWith(
raw: rawTxHex,
vSize: extractedTx.virtualSize(),
fee: Amount(
rawValue: BigInt.from(spend.fee),
fractionDigits: cryptoCurrency.fractionDigits,
),
// TODO used coins
);
}
  // This may not be needed for mints, for spends, or for either.
Future<TxData> confirmSendSpark({
required TxData txData,
}) async {
try {
Logging.instance.log("confirmSend txData: $txData", level: LogLevel.Info);
final txHash = await electrumXClient.broadcastTransaction(
rawTx: txData.raw!,
);
Logging.instance.log("Sent txHash: $txHash", level: LogLevel.Info);
txData = txData.copyWith(
// TODO mark spark coins as spent locally and update balance before waiting to check via electrumx?
// usedUTXOs:
// txData.usedUTXOs!.map((e) => e.copyWith(used: true)).toList(),
// TODO revisit setting these both
txHash: txHash,
txid: txHash,
);
      // mark any transparent utxos as used (may be null for pure spark spends)
      if (txData.usedUTXOs != null) {
        await mainDB.putUTXOs(txData.usedUTXOs!);
      }
return txData;
} catch (e, s) {
Logging.instance.log("Exception rethrown from confirmSend(): $e\n$s",
level: LogLevel.Error);
rethrow;
}
}
// TODO lots of room for performance improvements here. Should be similar to
// recoverSparkWallet but only fetch and check anonymity set data that we
// have not yet parsed.
Future<void> refreshSparkData() async {
final sparkAddresses = await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
.typeEqualTo(AddressType.spark)
.findAll();
final Set<String> paths =
sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try {
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final blockHash = await _getCachedSparkBlockHash();
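      // If a block hash was cached from a previous sync, only request the set
      // delta since that block; otherwise fetch the full latest anonymity set.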
final anonymitySetFuture = blockHash == null
? electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
)
: electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
startBlockHash: blockHash,
);
final spentCoinTagsFuture =
electrumXClient.getSparkUsedCoinsTags(startNumber: 0);
// electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);
final futureResults = await Future.wait([
anonymitySetFuture,
spentCoinTagsFuture,
]);
final anonymitySet = futureResults[0] as Map<String, dynamic>;
final spentCoinTags = futureResults[1] as Set<String>;
final List<SparkCoin> myCoins = [];
if (anonymitySet["coins"] is List &&
(anonymitySet["coins"] as List).isNotEmpty) {
final root = await getRootHDNode();
final privateKeyHexSet = paths
.map(
(e) => root.derivePath(e).privateKey.data.toHex,
)
.toSet();
final identifiedCoins = await compute(
_identifyCoins,
(
anonymitySetCoins: anonymitySet["coins"] as List,
groupId: latestSparkCoinId,
spentCoinTags: spentCoinTags,
privateKeyHexSet: privateKeyHexSet,
walletId: walletId,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
),
);
myCoins.addAll(identifiedCoins);
// update blockHash in cache
final String newBlockHash =
base64ToReverseHex(anonymitySet["blockHash"] as String);
await _setCachedSparkBlockHash(newBlockHash);
}
// check current coins
final currentCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
for (final coin in currentCoins) {
if (spentCoinTags.contains(coin.lTagHash)) {
myCoins.add(coin.copyWith(isUsed: true));
}
}
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) {
      Logging.instance.log(
        "Exception rethrown from refreshSparkData(): $e\n$s",
        level: LogLevel.Error,
      );
rethrow;
}
}
Future<void> refreshSparkBalance() async {
final currentHeight = await chainHeight;
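    // Total counts every unused coin; spendable counts only coins with at
    // least minConfirms confirmations. The difference is reported as pending.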
final unusedCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final total = Amount(
rawValue: unusedCoins
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final spendable = Amount(
rawValue: unusedCoins
.where((e) =>
e.height != null &&
e.height! + cryptoCurrency.minConfirms <= currentHeight)
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final sparkBalance = Balance(
total: total,
spendable: spendable,
blockedTotal: Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
),
pendingSpendable: total - spendable,
);
await info.updateBalanceTertiary(
newBalance: sparkBalance,
isar: mainDB.isar,
);
}
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
Future<void> recoverSparkWallet({
required Map<dynamic, dynamic> anonymitySet,
required Set<String> spentCoinTags,
}) async {
// generate spark addresses if non existing
if (await getCurrentReceivingSparkAddress() == null) {
final address = await generateNextSparkAddress();
await mainDB.putAddress(address);
}
final sparkAddresses = await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
.typeEqualTo(AddressType.spark)
.findAll();
final Set<String> paths =
sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try {
final root = await getRootHDNode();
final privateKeyHexSet =
paths.map((e) => root.derivePath(e).privateKey.data.toHex).toSet();
final myCoins = await compute(
_identifyCoins,
(
anonymitySetCoins: anonymitySet["coins"] as List,
groupId: anonymitySet["coinGroupID"] as int,
spentCoinTags: spentCoinTags,
privateKeyHexSet: privateKeyHexSet,
walletId: walletId,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
),
);
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
// update blockHash in cache
final String newBlockHash = anonymitySet["blockHash"] as String;
await _setCachedSparkBlockHash(newBlockHash);
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) {
      Logging.instance.log(
        "Exception rethrown from recoverSparkWallet(): $e\n$s",
        level: LogLevel.Error,
      );
rethrow;
}
}
/// Transparent to Spark (mint) transaction creation.
///
/// See https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o
Future<TxData> prepareSparkMintTransaction({required TxData txData}) async {
// "this kind of transaction is generated like a regular transaction, but in
// place of [regular] outputs we put spark outputs... we construct the input
// part of the transaction first then we generate spark related data [and]
// we sign like regular transactions at the end."
// Validate inputs.
// There should be at least one input.
if (txData.utxos == null || txData.utxos!.isEmpty) {
throw Exception("No inputs provided.");
}
// Validate individual inputs.
for (final utxo in txData.utxos!) {
// Input amount must be greater than zero.
if (utxo.value == 0) {
throw Exception("Input value cannot be zero.");
}
// Input value must be greater than dust limit.
if (BigInt.from(utxo.value) < cryptoCurrency.dustLimit.raw) {
throw Exception("Input value below dust limit.");
}
}
// Validate outputs.
// There should be at least one output.
if (txData.recipients == null || txData.recipients!.isEmpty) {
throw Exception("No recipients provided.");
}
// For now let's limit to one output.
if (txData.recipients!.length > 1) {
throw Exception("Only one recipient supported.");
// TODO remove and test with multiple recipients.
}
// Limit outputs per tx to 16.
//
// See SPARK_OUT_LIMIT_PER_TX at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L16
if (txData.recipients!.length > 16) {
throw Exception("Too many recipients.");
}
// Limit spend value per tx to 1000000000000 satoshis.
//
// See SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L17
// and COIN https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L17
// Note that as MAX_MONEY is greater than this limit, we can ignore it. See https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L31
//
// This will be added to and checked as we validate outputs.
Amount totalAmount = Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
);
// Validate individual outputs.
for (final recipient in txData.recipients!) {
// Output amount must be greater than zero.
if (recipient.amount.raw == BigInt.zero) {
throw Exception("Output amount cannot be zero.");
// Could refactor this for loop to use an index and remove this output.
}
// Output amount must be greater than dust limit.
if (recipient.amount < cryptoCurrency.dustLimit) {
throw Exception("Output below dust limit.");
}
// Do not add outputs that would exceed the spend limit.
totalAmount += recipient.amount;
if (totalAmount.raw > BigInt.from(1000000000000)) {
throw Exception(
"Spend limit exceeded (10,000 FIRO per tx).",
);
}
}
// Create a transaction builder and set locktime and version.
final txb = btc.TransactionBuilder(
network: btc.NetworkType(
messagePrefix: cryptoCurrency.networkParams.messagePrefix,
bech32: cryptoCurrency.networkParams.bech32Hrp,
bip32: btc.Bip32Type(
public: cryptoCurrency.networkParams.pubHDPrefix,
private: cryptoCurrency.networkParams.privHDPrefix,
),
pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
scriptHash: cryptoCurrency.networkParams.p2shPrefix,
wif: cryptoCurrency.networkParams.wifPrefix,
),
);
txb.setLockTime(await chainHeight);
txb.setVersion(1);
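    // A spark mint is an ordinary version 1 transaction; only its outputs
    // carry spark mint scripts, so no nVersion/nType packing is needed here.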
final signingData = await fetchBuildTxData(txData.utxos!.toList());
// Create the serial context.
//
// "...serial_context is a byte array, which should be unique for each
// transaction, and for that we serialize and put all inputs into
// serial_context vector."
final serialContext = LibSpark.serializeMintContext(
inputs: signingData
.map((e) => (
e.utxo.txid,
e.utxo.vout,
))
.toList(),
);
// Add inputs.
for (final sd in signingData) {
txb.addInput(
sd.utxo.txid,
sd.utxo.vout,
0xffffffff -
1, // minus 1 is important. 0xffffffff on its own will burn funds
sd.output,
);
}
// Create mint recipients.
final mintRecipients = LibSpark.createSparkMintRecipients(
outputs: txData.recipients!
.map((e) => (
sparkAddress: e.address,
value: e.amount.raw.toInt(),
memo: "",
))
.toList(),
serialContext: Uint8List.fromList(serialContext),
generate: true,
);
// Add mint output(s).
for (final mint in mintRecipients) {
txb.addOutput(
mint.scriptPubKey,
mint.amount,
);
}
try {
// Sign the transaction accordingly
for (var i = 0; i < signingData.length; i++) {
txb.sign(
vin: i,
keyPair: signingData[i].keyPair!,
witnessValue: signingData[i].utxo.value,
redeemScript: signingData[i].redeemScript,
);
}
} catch (e, s) {
Logging.instance.log(
"Caught exception while signing spark mint transaction: $e\n$s",
level: LogLevel.Error,
);
rethrow;
}
final builtTx = txb.build();
// TODO any changes to this txData object required?
return txData.copyWith(
// recipients: [
// (
// amount: Amount(
// rawValue: BigInt.from(incomplete.outs[0].value!),
// fractionDigits: cryptoCurrency.fractionDigits,
// ),
// address: "no address for lelantus mints",
// )
// ],
vSize: builtTx.virtualSize(),
txid: builtTx.getId(),
raw: builtTx.toHex(),
);
}
  /// Broadcast a spark mint tx. TODO: update Spark balance afterwards.
Future<TxData> confirmSparkMintTransaction({required TxData txData}) async {
// Broadcast tx.
final txid = await electrumXClient.broadcastTransaction(
rawTx: txData.raw!,
);
// Check txid.
    if (txid == txData.txid!) {
      Logging.instance.log(
        "Spark mint txid matches the locally generated txid",
        level: LogLevel.Info,
      );
    } else {
      Logging.instance.log(
        "Submitted spark mint txid does not match the locally generated txid",
        level: LogLevel.Warning,
      );
    }
// TODO update spark balance.
return txData.copyWith(
txid: txid,
);
}
@override
Future<void> updateBalance() async {
    // call to super to update transparent balance (and lelantus balance if
    // whatever class this mixin is used on uses LelantusInterface as well)
final normalBalanceFuture = super.updateBalance();
// todo: spark balance aka update info.tertiaryBalance
// wait for normalBalanceFuture to complete before returning
await normalBalanceFuture;
}
// ====================== Private ============================================
final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";
Future<String?> _getCachedSparkBlockHash() async {
return info.otherData[_kSparkAnonSetCachedBlockHashKey] as String?;
}
Future<void> _setCachedSparkBlockHash(String blockHash) async {
await info.updateOtherData(
newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
isar: mainDB.isar,
);
}
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
if (coins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(coins);
});
}
// update wallet spark coin height
final coinsToCheck = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
coin: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
}
}
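
/// Decodes a base64 encoded hash (ignoring any embedded newlines) and returns
/// its byte-reversed hex representation, as used for block hashes returned by
/// the spark electrumx endpoints.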
String base64ToReverseHex(String source) =>
base64Decode(LineSplitter.split(source).join())
.reversed
.map((e) => e.toRadixString(16).padLeft(2, '0'))
.join();
/// Top level function which should be called wrapped in [compute]
Future<List<SparkCoin>> _identifyCoins(
({
List<dynamic> anonymitySetCoins,
int groupId,
Set<String> spentCoinTags,
Set<String> privateKeyHexSet,
String walletId,
bool isTestNet,
}) args) async {
final List<SparkCoin> myCoins = [];
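  // Try every wallet key against every coin in the set; identifyAndRecoverCoin
  // returns non-null only for coins that belong to the given key.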
for (final privateKeyHex in args.privateKeyHexSet) {
for (final dynData in args.anonymitySetCoins) {
final data = List<String>.from(dynData as List);
if (data.length != 3) {
throw Exception("Unexpected serialized coin info found");
}
final serializedCoinB64 = data[0];
final txHash = base64ToReverseHex(data[1]);
final contextB64 = data[2];
final coin = LibSpark.identifyAndRecoverCoin(
serializedCoinB64,
privateKeyHex: privateKeyHex,
index: kDefaultSparkIndex,
context: base64Decode(contextB64),
isTestNet: args.isTestNet,
);
      // it's ours
if (coin != null) {
final SparkCoinType coinType;
switch (coin.type.value) {
case 0:
coinType = SparkCoinType.mint;
case 1:
coinType = SparkCoinType.spend;
default:
throw Exception("Unknown spark coin type detected");
}
myCoins.add(
SparkCoin(
walletId: args.walletId,
type: coinType,
isUsed: args.spentCoinTags.contains(coin.lTagHash!),
groupId: args.groupId,
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,
txHash: txHash,
valueIntString: coin.value!.toString(),
memo: coin.memo,
serialContext: coin.serialContext,
diversifierIntString: coin.diversifier!.toString(),
encryptedDiversifier: coin.encryptedDiversifier,
serial: coin.serial,
tag: coin.tag,
lTagHash: coin.lTagHash!,
height: coin.height,
serializedCoinB64: serializedCoinB64,
contextB64: contextB64,
),
);
}
}
}
return myCoins;
}