stack_wallet/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart

1852 lines
56 KiB
Dart
Raw Normal View History

2023-12-05 20:44:50 +00:00
import 'dart:convert';
2023-12-23 00:15:44 +00:00
import 'dart:math';
2023-11-27 20:57:33 +00:00
2023-12-13 17:26:30 +00:00
import 'package:bitcoindart/bitcoindart.dart' as btc;
import 'package:decimal/decimal.dart';
import 'package:flutter/foundation.dart';
2023-11-28 16:13:10 +00:00
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
2023-11-27 20:57:33 +00:00
import 'package:isar/isar.dart';
2024-05-27 23:56:22 +00:00
2024-05-29 19:29:45 +00:00
import '../../../db/sqlite/firo_cache.dart';
import '../../../models/balance.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';
import '../../../models/isar/models/isar_models.dart';
import '../../../models/signing_data.dart';
import '../../../utilities/amount/amount.dart';
import '../../../utilities/enums/derive_path_type_enum.dart';
import '../../../utilities/extensions/extensions.dart';
import '../../../utilities/logger.dart';
import '../../crypto_currency/crypto_currency.dart';
import '../../crypto_currency/interfaces/electrumx_currency_interface.dart';
import '../../isar/models/spark_coin.dart';
import '../../models/tx_data.dart';
import '../intermediate/bip39_hd_wallet.dart';
import 'electrumx_interface.dart';
2023-11-16 21:30:01 +00:00
// Account index passed to LibSpark key derivation calls in this file.
const kDefaultSparkIndex = 1;

// TODO dart style constants. Maybe move to spark lib?

// Upper bound checked against the dummy mint transaction's weight().
const MAX_STANDARD_TX_WEIGHT = 400000;

// Maximum number of spark outputs allowed in a single transaction.
//https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L16
const SPARK_OUT_LIMIT_PER_TX = 16;

// Firo spark script op codes. OP_SPARKMINT marks mint outputs,
// OP_SPARKSMINT shielded mint outputs, and OP_SPARKSPEND is used as the
// scriptSig marker ("d3") on spark spend inputs.
const OP_SPARKMINT = 0xd1;
const OP_SPARKSMINT = 0xd2;
const OP_SPARKSPEND = 0xd3;
2024-05-15 21:20:45 +00:00
mixin SparkInterface<T extends ElectrumXCurrencyInterface>
on Bip39HDWallet<T>, ElectrumXInterface<T> {
2023-12-29 15:26:32 +00:00
// Lazily populated in [init]; null until then.
String? _sparkChangeAddressCached;

/// Spark change address. Should generally not be exposed to end users.
///
/// Throws if accessed before [init] has populated the cache.
String get sparkChangeAddress {
  final cached = _sparkChangeAddressCached;
  if (cached == null) {
    throw Exception("_sparkChangeAddressCached was not initialized");
  }
  return cached;
}
2023-12-19 15:20:50 +00:00
/// Whether [address] is a valid spark address on the given network.
///
/// Thin wrapper around [LibSpark.validateAddress].
static bool validateSparkAddress({
  required String address,
  required bool isTestNet,
}) {
  return LibSpark.validateAddress(address: address, isTestNet: isTestNet);
}
/// Wallet initialization.
///
/// Ensures at least one spark receiving address exists in the db and
/// populates [_sparkChangeAddressCached]. Any failure is logged and
/// swallowed so the user can still enter the wallet; [super.init] always
/// runs.
@override
Future<void> init() async {
  try {
    // Create and persist the first spark receiving address if none exists.
    Address? address = await getCurrentReceivingSparkAddress();
    if (address == null) {
      address = await generateNextSparkAddress();
      await mainDB.putAddress(address);
    } // TODO add other address types to wallet info?

    if (_sparkChangeAddressCached == null) {
      final root = await getRootHDNode();
      // Derivation path depends on network (testnet vs mainnet).
      final String derivationPath;
      if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
        derivationPath =
            "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
      } else {
        derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
      }
      final keys = root.derivePath(derivationPath);

      // The change address uses the reserved kSparkChange diversifier
      // (skipped by generateNextSparkAddress for receiving addresses).
      _sparkChangeAddressCached = await LibSpark.getAddress(
        privateKey: keys.privateKey.data,
        index: kDefaultSparkIndex,
        diversifier: kSparkChange,
        isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
      );
    }
  } catch (e, s) {
    // do nothing, still allow user into wallet
    Logging.instance.log(
      "$runtimeType init() failed: $e\n$s",
      level: LogLevel.Error,
    );
  }

  // await info.updateReceivingAddress(
  //   newAddress: address.value,
  //   isar: mainDB.isar,
  // );

  await super.init();
}
/// Returns all wallet addresses to scan via electrumx.
///
/// Excludes spark addresses (not visible to electrumx scans) as well as
/// non-wallet address types/subtypes.
@override
Future<List<Address>> fetchAddressesForElectrumXScan() async {
  return await mainDB
      .getAddresses(walletId)
      .filter()
      .not()
      .group(
        (q) => q
            .typeEqualTo(AddressType.spark)
            .or()
            .typeEqualTo(AddressType.nonWallet)
            .or()
            .subTypeEqualTo(AddressSubType.nonWallet),
      )
      .findAll();
}
2023-11-27 20:57:33 +00:00
/// Returns the stored spark address with the highest derivation index, or
/// null if the wallet has no spark addresses yet.
Future<Address?> getCurrentReceivingSparkAddress() => mainDB.isar.addresses
    .where()
    .walletIdEqualTo(walletId)
    .filter()
    .typeEqualTo(AddressType.spark)
    .sortByDerivationIndexDesc()
    .findFirst();
/// Derives and returns the next spark receiving [Address].
///
/// Uses the next diversifier after the highest one currently stored,
/// starting at 1 and skipping the reserved change diversifier
/// [kSparkChange]. The returned address is NOT persisted here.
Future<Address> generateNextSparkAddress() async {
  final highestStoredDiversifier =
      (await getCurrentReceivingSparkAddress())?.derivationIndex;

  // Default to starting at 1 if none found.
  int diversifier = (highestStoredDiversifier ?? 0) + 1;

  // Never hand out the reserved change diversifier as a receiving address.
  if (diversifier == kSparkChange) {
    diversifier++;
  }

  final root = await getRootHDNode();

  // Network-dependent derivation path.
  final derivationPath = cryptoCurrency.network == CryptoCurrencyNetwork.test
      ? "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex"
      : "$kSparkBaseDerivationPath$kDefaultSparkIndex";

  final keys = root.derivePath(derivationPath);

  final addressString = await LibSpark.getAddress(
    privateKey: keys.privateKey.data,
    index: kDefaultSparkIndex,
    diversifier: diversifier,
    isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
  );

  return Address(
    walletId: walletId,
    value: addressString,
    publicKey: keys.publicKey.data,
    derivationIndex: diversifier,
    derivationPath: DerivationPath()..value = derivationPath,
    type: AddressType.spark,
    subType: AddressSubType.receiving,
  );
}
/// Estimates the fee for a spark send of [amount].
///
/// TODO actual fee estimation — currently always returns a zero amount
/// regardless of [amount].
Future<Amount> estimateFeeForSpark(Amount amount) async {
  return Amount(
    rawValue: BigInt.zero,
    fractionDigits: cryptoCurrency.fractionDigits,
  );
}
2023-11-27 21:18:20 +00:00
/// Spark to Spark/Transparent (spend) creation.
///
/// Builds (but does not broadcast) a spark spend transaction from this
/// wallet's unused spark coins, returning an updated [TxData] containing
/// the raw tx hex, fee, temp display tx and the coins consumed.
///
/// Throws if no recipients are given, the spark output limit or the
/// 10,000 FIRO transparent spend limit is exceeded, or the spendable spark
/// balance is insufficient.
Future<TxData> prepareSendSpark({
  required TxData txData,
}) async {
  // There should be at least one output.
  if (!(txData.recipients?.isNotEmpty == true ||
      txData.sparkRecipients?.isNotEmpty == true)) {
    throw Exception("No recipients provided.");
  }

  // NOTE(review): limit uses >= SPARK_OUT_LIMIT_PER_TX - 1, presumably
  // reserving one output slot (change?) — confirm against spark lib.
  if (txData.sparkRecipients?.isNotEmpty == true &&
      txData.sparkRecipients!.length >= SPARK_OUT_LIMIT_PER_TX - 1) {
    throw Exception("Spark shielded output limit exceeded.");
  }

  // Sum of all transparent (non-spark) recipient amounts.
  final transparentSumOut =
      (txData.recipients ?? []).map((e) => e.amount).fold(
            Amount(
              rawValue: BigInt.zero,
              fractionDigits: cryptoCurrency.fractionDigits,
            ),
            (p, e) => p + e,
          );

  // See SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L17
  // and COIN https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L17
  // Note that as MAX_MONEY is greater than this limit, we can ignore it. See https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L31
  if (transparentSumOut >
      Amount.fromDecimal(
        Decimal.parse("10000"),
        fractionDigits: cryptoCurrency.fractionDigits,
      )) {
    throw Exception(
      "Spend to transparent address limit exceeded (10,000 Firo per transaction).",
    );
  }

  // Sum of all shielded (spark) recipient amounts.
  final sparkSumOut =
      (txData.sparkRecipients ?? []).map((e) => e.amount).fold(
            Amount(
              rawValue: BigInt.zero,
              fractionDigits: cryptoCurrency.fractionDigits,
            ),
            (p, e) => p + e,
          );

  final txAmount = transparentSumOut + sparkSumOut;

  // fetch spendable spark coins
  final coins = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .and()
      .heightIsNotNull()
      .and()
      .not()
      .valueIntStringEqualTo("0")
      .findAll();

  final available = info.cachedBalanceTertiary.spendable;

  if (txAmount > available) {
    throw Exception("Insufficient Spark balance");
  }

  // Send-all implies the fee must be subtracted from the outputs.
  final bool isSendAll = available == txAmount;

  // prepare coin data for ffi
  final serializedCoins = coins
      .map(
        (e) => (
          serializedCoin: e.serializedCoinB64!,
          serializedCoinContext: e.contextB64!,
          groupId: e.groupId,
          height: e.height!,
        ),
      )
      .toList();

  // Gather the cached anonymity set for every coin group, which the spark
  // lib needs to construct the spend proof.
  final currentId = await electrumXClient.getSparkLatestCoinId();
  final List<Map<String, dynamic>> setMaps = [];
  final List<({int groupId, String blockHash})> idAndBlockHashes = [];
  for (int i = 1; i <= currentId; i++) {
    final resultSet = await FiroCacheCoordinator.getSetCoinsForGroupId(i);
    if (resultSet.isEmpty) {
      continue;
    }

    final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
      i,
    );
    if (info == null) {
      throw Exception("The `info` should never be null here");
    }

    final Map<String, dynamic> setData = {
      "blockHash": info.blockHash,
      "setHash": info.setHash,
      "coinGroupID": i,
      "coins": resultSet
          .map(
            (row) => [
              row["serialized"] as String,
              row["txHash"] as String,
              row["context"] as String,
            ],
          )
          .toList(),
    };

    // NOTE(review): redundant — "coinGroupID" was already set to i above.
    setData["coinGroupID"] = i;
    setMaps.add(setData);
    idAndBlockHashes.add(
      (
        groupId: i,
        blockHash: setData["blockHash"] as String,
      ),
    );
  }

  // Convert the raw set maps into the record shape the ffi layer expects.
  final allAnonymitySets = setMaps
      .map(
        (e) => (
          setId: e["coinGroupID"] as int,
          setHash: e["setHash"] as String,
          set: (e["coins"] as List)
              .map(
                (e) => (
                  serializedCoin: e[0] as String,
                  txHash: e[1] as String,
                ),
              )
              .toList(),
        ),
      )
      .toList();

  // Derive the spark spend key for this network.
  final root = await getRootHDNode();
  final String derivationPath;
  if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
    derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
  } else {
    derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
  }
  final privateKey = root.derivePath(derivationPath).privateKey.data;

  final txb = btc.TransactionBuilder(
    network: _bitcoinDartNetwork,
  );
  txb.setLockTime(await chainHeight);
  // NOTE(review): version 3 with 9 in the upper 16 bits — presumably the
  // Firo tx-type field for spark spends; confirm against firod source.
  txb.setVersion(3 | (9 << 16));

  List<
      ({
        String address,
        Amount amount,
        bool isChange,
      })>? recipientsWithFeeSubtracted;

  List<
      ({
        String address,
        Amount amount,
        String memo,
        bool isChange,
      })>? sparkRecipientsWithFeeSubtracted;

  // Count only non-zero transparent recipients.
  final recipientCount = (txData.recipients
          ?.where(
            (e) => e.amount.raw > BigInt.zero,
          )
          .length ??
      0);
  final totalRecipientCount =
      recipientCount + (txData.sparkRecipients?.length ?? 0);

  // Only estimate a fee up-front for send-all; otherwise the fee comes
  // back from the spark lib in `spend.fee` and estimatedFee stays zero
  // (so the per-recipient subtraction below is a no-op).
  final BigInt estimatedFee;
  if (isSendAll) {
    final estFee = LibSpark.estimateSparkFee(
      privateKeyHex: privateKey.toHex,
      index: kDefaultSparkIndex,
      sendAmount: txAmount.raw.toInt(),
      subtractFeeFromAmount: true,
      serializedCoins: serializedCoins,
      privateRecipientsCount: (txData.sparkRecipients?.length ?? 0),
    );
    estimatedFee = BigInt.from(estFee);
  } else {
    estimatedFee = BigInt.zero;
  }

  if ((txData.sparkRecipients?.length ?? 0) > 0) {
    sparkRecipientsWithFeeSubtracted = [];
  }
  if (recipientCount > 0) {
    recipientsWithFeeSubtracted = [];
  }

  // Spread the estimated fee evenly across all recipients (spark first).
  for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
    sparkRecipientsWithFeeSubtracted!.add(
      (
        address: txData.sparkRecipients![i].address,
        amount: Amount(
          rawValue: txData.sparkRecipients![i].amount.raw -
              (estimatedFee ~/ BigInt.from(totalRecipientCount)),
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
        memo: txData.sparkRecipients![i].memo,
        isChange: sparkChangeAddress == txData.sparkRecipients![i].address,
      ),
    );
  }

  // temp tx data to show in gui while waiting for real data from server
  final List<InputV2> tempInputs = [];
  final List<OutputV2> tempOutputs = [];

  for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
    if (txData.recipients![i].amount.raw == BigInt.zero) {
      continue;
    }

    recipientsWithFeeSubtracted!.add(
      (
        address: txData.recipients![i].address,
        amount: Amount(
          rawValue: txData.recipients![i].amount.raw -
              (estimatedFee ~/ BigInt.from(totalRecipientCount)),
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
        isChange: txData.recipients![i].isChange,
      ),
    );

    final scriptPubKey = btc.Address.addressToOutputScript(
      txData.recipients![i].address,
      _bitcoinDartNetwork,
    );

    // NOTE(review): `i` indexes txData.recipients, but zero-amount
    // recipients were skipped above, so recipientsWithFeeSubtracted can be
    // shorter than txData.recipients. If a zero-amount recipient precedes
    // a non-zero one this index misaligns (or throws RangeError) — verify.
    txb.addOutput(
      scriptPubKey,
      recipientsWithFeeSubtracted[i].amount.raw.toInt(),
    );

    tempOutputs.add(
      OutputV2.isarCantDoRequiredInDefaultConstructor(
        scriptPubKeyHex: scriptPubKey.toHex,
        valueStringSats: recipientsWithFeeSubtracted[i].amount.raw.toString(),
        addresses: [
          recipientsWithFeeSubtracted[i].address.toString(),
        ],
        // walletOwns: true iff the recipient address exists in our db.
        walletOwns: (await mainDB.isar.addresses
                .where()
                .walletIdEqualTo(walletId)
                .filter()
                .valueEqualTo(recipientsWithFeeSubtracted[i].address)
                .valueProperty()
                .findFirst()) !=
            null,
      ),
    );
  }

  // Temp display outputs for the shielded recipients.
  if (sparkRecipientsWithFeeSubtracted != null) {
    for (final recip in sparkRecipientsWithFeeSubtracted) {
      tempOutputs.add(
        OutputV2.isarCantDoRequiredInDefaultConstructor(
          scriptPubKeyHex: Uint8List.fromList([OP_SPARKSMINT]).toHex,
          valueStringSats: recip.amount.raw.toString(),
          addresses: [
            recip.address.toString(),
          ],
          walletOwns: (await mainDB.isar.addresses
                  .where()
                  .walletIdEqualTo(walletId)
                  .filter()
                  .valueEqualTo(recip.address)
                  .valueProperty()
                  .findFirst()) !=
              null,
        ),
      );
    }
  }

  // Build the incomplete tx and add the marker spark spend input (null
  // outpoint, max sequence, OP_SPARKSPEND scriptSig).
  final extractedTx = txb.buildIncomplete();
  extractedTx.addInput(
    '0000000000000000000000000000000000000000000000000000000000000000'
        .toUint8ListFromHex,
    0xffffffff,
    0xffffffff,
    "d3".toUint8ListFromHex, // OP_SPARKSPEND
  );
  // Empty payload first so getHash() below covers the tx sans spend proof.
  extractedTx.setPayload(Uint8List(0));

  // Heavy proof generation is offloaded to another isolate via compute.
  final spend = await compute(
    _createSparkSend,
    (
      privateKeyHex: privateKey.toHex,
      index: kDefaultSparkIndex,
      recipients: txData.recipients
              ?.map(
                (e) => (
                  address: e.address,
                  amount: e.amount.raw.toInt(),
                  subtractFeeFromAmount: isSendAll,
                ),
              )
              .toList() ??
          [],
      privateRecipients: txData.sparkRecipients
              ?.map(
                (e) => (
                  sparkAddress: e.address,
                  amount: e.amount.raw.toInt(),
                  subtractFeeFromAmount: isSendAll,
                  memo: e.memo,
                ),
              )
              .toList() ??
          [],
      serializedCoins: serializedCoins,
      allAnonymitySets: allAnonymitySets,
      idAndBlockHashes: idAndBlockHashes
          .map(
            (e) => (setId: e.groupId, blockHash: base64Decode(e.blockHash)),
          )
          .toList(),
      txHash: extractedTx.getHash(),
    ),
  );

  // Append the spark output scripts produced by the lib (zero value on
  // the transparent side) and attach the real spend payload.
  for (final outputScript in spend.outputScripts) {
    extractedTx.addOutput(outputScript, 0);
  }

  extractedTx.setPayload(spend.serializedSpendPayload);

  final rawTxHex = extractedTx.toHex();

  // For send-all, replace the recipients with the fee-subtracted versions
  // so the caller/UI sees the actual amounts sent.
  if (isSendAll) {
    txData = txData.copyWith(
      recipients: recipientsWithFeeSubtracted,
      sparkRecipients: sparkRecipientsWithFeeSubtracted,
    );
  }

  final fee = Amount(
    rawValue: BigInt.from(spend.fee),
    fractionDigits: cryptoCurrency.fractionDigits,
  );

  // Temp display input: total of all outputs plus the fee.
  tempInputs.add(
    InputV2.isarCantDoRequiredInDefaultConstructor(
      scriptSigHex: "d3",
      scriptSigAsm: null,
      sequence: 0xffffffff,
      outpoint: null,
      addresses: [],
      valueStringSats: tempOutputs
          .map((e) => e.value)
          .fold(fee.raw, (p, e) => p + e)
          .toString(),
      witness: null,
      innerRedeemScriptAsm: null,
      coinbase: null,
      walletOwns: true,
    ),
  );

  // Map the coins the lib reports as used back to our db coins and flag
  // them isUsed (persisted later in confirmSendSpark).
  final List<SparkCoin> usedSparkCoins = [];
  for (final usedCoin in spend.usedCoins) {
    try {
      usedSparkCoins.add(
        coins
            .firstWhere(
              (e) =>
                  usedCoin.height == e.height &&
                  usedCoin.groupId == e.groupId &&
                  // Prefix match since the lib may return a truncated
                  // serialization — TODO confirm.
                  base64Decode(e.serializedCoinB64!).toHex.startsWith(
                        base64Decode(usedCoin.serializedCoin).toHex,
                      ),
            )
            .copyWith(
              isUsed: true,
            ),
      );
    } catch (_) {
      throw Exception(
        "Unexpectedly did not find used spark coin. This should never happen.",
      );
    }
  }

  return txData.copyWith(
    raw: rawTxHex,
    vSize: extractedTx.virtualSize(),
    fee: fee,
    tempTx: TransactionV2(
      walletId: walletId,
      blockHash: null,
      hash: extractedTx.getId(),
      txid: extractedTx.getId(),
      timestamp: DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
      inputs: List.unmodifiable(tempInputs),
      outputs: List.unmodifiable(tempOutputs),
      // sentToSelf only when every temp output belongs to this wallet.
      type: tempOutputs.map((e) => e.walletOwns).fold(true, (p, e) => p &= e)
          ? TransactionType.sentToSelf
          : TransactionType.outgoing,
      subType: TransactionSubType.sparkSpend,
      otherData: jsonEncode(
        {
          "overrideFee": fee.toJsonString(),
        },
      ),
      height: null,
      version: 3,
    ),
    usedSparkCoins: usedSparkCoins,
  );
}
2023-11-27 20:57:33 +00:00
2023-11-27 21:18:20 +00:00
// this may not be needed for either mints or spends or both
/// Broadcasts a prepared spark spend and persists its used coins.
///
/// Logs and rethrows any failure.
Future<TxData> confirmSendSpark({
  required TxData txData,
}) async {
  try {
    Logging.instance.log("confirmSend txData: $txData", level: LogLevel.Info);

    final broadcastedId = await electrumXClient.broadcastTransaction(
      rawTx: txData.raw!,
    );
    Logging.instance.log("Sent txHash: $broadcastedId", level: LogLevel.Info);

    // TODO revisit setting these both
    txData = txData.copyWith(
      txHash: broadcastedId,
      txid: broadcastedId,
    );

    // Update used spark coins as used in database. They should already have
    // been marked as isUsed.
    // TODO: [prio=med] Could (probably should) throw an exception here if txData.usedSparkCoins is null or empty
    final usedCoins = txData.usedSparkCoins;
    if (usedCoins != null && usedCoins.isNotEmpty) {
      await _addOrUpdateSparkCoins(usedCoins);
    }

    return await updateSentCachedTxData(txData: txData);
  } catch (e, s) {
    Logging.instance.log(
      "Exception rethrown from confirmSend(): $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
/// Refreshes all spark-related wallet data.
///
/// Updates the cached anonymity sets and used-coin tags, then re-identifies
/// wallet coins and recomputes the spark balance. Logs and rethrows errors.
Future<void> refreshSparkData() async {
  try {
    // Start by checking if any previous sets are missing from the db and
    // queue those groupIds for fetch/update along with the latest one.
    final latestGroupId = await electrumXClient.getSparkLatestCoinId();
    final List<int> groupIds = [];
    for (int id = 1; id < latestGroupId; id++) {
      final setExists =
          await FiroCacheCoordinator.checkSetInfoForGroupIdExists(id);
      if (!setExists) {
        groupIds.add(id);
      }
    }
    groupIds.add(latestGroupId);

    // Run every set update and the used-coin-tags update concurrently.
    await Future.wait([
      for (final id in groupIds)
        FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
          id,
          electrumXClient,
        ),
      FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
        electrumXClient,
      ),
    ]);

    await _checkAndUpdateCoins();

    // refresh spark balance
    await refreshSparkBalance();
  } catch (e, s) {
    Logging.instance.log(
      "$runtimeType $walletId ${info.name}: $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
/// Recomputes the wallet's tertiary (spark) balance from unused coins.
///
/// A coin counts as spendable once it has a height and enough
/// confirmations; the rest is reported as pendingSpendable.
Future<void> refreshSparkBalance() async {
  final currentHeight = await chainHeight;
  final unusedCoins = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .findAll();

  // Accumulate total and spendable sums in a single pass.
  BigInt totalRaw = BigInt.zero;
  BigInt spendableRaw = BigInt.zero;
  for (final coin in unusedCoins) {
    totalRaw += coin.value;
    if (coin.height != null &&
        coin.height! + cryptoCurrency.minConfirms <= currentHeight) {
      spendableRaw += coin.value;
    }
  }

  final total = Amount(
    rawValue: totalRaw,
    fractionDigits: cryptoCurrency.fractionDigits,
  );
  final spendable = Amount(
    rawValue: spendableRaw,
    fractionDigits: cryptoCurrency.fractionDigits,
  );

  final sparkBalance = Balance(
    total: total,
    spendable: spendable,
    blockedTotal: Amount(
      rawValue: BigInt.zero,
      fractionDigits: cryptoCurrency.fractionDigits,
    ),
    pendingSpendable: total - spendable,
  );

  await info.updateBalanceTertiary(
    newBalance: sparkBalance,
    isar: mainDB.isar,
  );
}
2023-11-27 20:57:33 +00:00
2024-05-29 19:29:45 +00:00
// TODO: look into persistence for this?
//
// In-memory checkpoint of groupId -> last-seen set timestamp (UTC),
// used by _checkAndUpdateCoins to fetch only set coins newer than the
// previous check. Resets on restart since it is not persisted.
Map<int, int> groupIdTimestampUTCMap = {};
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
Future<void> recoverSparkWallet({
  required int latestSparkCoinId,
}) async {
  // Ensure at least one spark receiving address exists.
  if (await getCurrentReceivingSparkAddress() == null) {
    await mainDB.putAddress(await generateNextSparkAddress());
  }

  try {
    await _checkAndUpdateCoins();

    // refresh spark balance
    await refreshSparkBalance();
  } catch (e, s) {
    Logging.instance.log(
      "$runtimeType $walletId ${info.name}: $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
/// Scans cached anonymity sets for coins belonging to this wallet and
/// persists newly identified coins (marking any already-spent ones used).
Future<void> _checkAndUpdateCoins() async {
  final sparkAddresses = await mainDB.isar.addresses
      .where()
      .walletIdEqualTo(walletId)
      .filter()
      .typeEqualTo(AddressType.spark)
      .findAll();

  final root = await getRootHDNode();

  // Private keys for every stored spark address; used by _identifyCoins
  // to test coin ownership.
  final Set<String> privateKeyHexSet = sparkAddresses
      .map(
        (e) => root.derivePath(e.derivationPath!.value).privateKey.data.toHex,
      )
      .toSet();

  final Map<int, List<List<String>>> rawCoinsBySetId = {};

  final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
  for (int i = 1; i <= latestSparkCoinId; i++) {
    // Only fetch coins added since the last time this group was checked.
    final lastCheckedTimeStampUTC = groupIdTimestampUTCMap[i] ?? 0;
    final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
      i,
    );

    final anonymitySetResult =
        await FiroCacheCoordinator.getSetCoinsForGroupId(
      i,
      newerThanTimeStamp: lastCheckedTimeStampUTC,
    );
    final coinsRaw = anonymitySetResult
        .map(
          (row) => [
            row["serialized"] as String,
            row["txHash"] as String,
            row["context"] as String,
          ],
        )
        .toList();

    if (coinsRaw.isNotEmpty) {
      rawCoinsBySetId[i] = coinsRaw;
    }

    // Advance the checkpoint to the newest known set timestamp, never
    // moving it backwards.
    groupIdTimestampUTCMap[i] = max(
      lastCheckedTimeStampUTC,
      info?.timestampUTC ?? lastCheckedTimeStampUTC,
    );
  }

  // Ownership identification runs in a separate isolate via compute.
  final List<SparkCoin> newlyIdCoins = [];
  for (final groupId in rawCoinsBySetId.keys) {
    final myCoins = await compute(
      _identifyCoins,
      (
        anonymitySetCoins: rawCoinsBySetId[groupId]!,
        groupId: groupId,
        privateKeyHexSet: privateKeyHexSet,
        walletId: walletId,
        isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
      ),
    );
    newlyIdCoins.addAll(myCoins);
  }

  await _checkAndMarkCoinsUsedInDB(coinsNotInDbYet: newlyIdCoins);
}
/// Flags wallet spark coins whose lTagHash appears in the used-coin tag
/// cache as used, and persists them along with [coinsNotInDbYet].
Future<void> _checkAndMarkCoinsUsedInDB({
  List<SparkCoin> coinsNotInDbYet = const [],
}) async {
  final unusedDbCoins = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .findAll();

  final spentCoinTags = await FiroCacheCoordinator.getUsedCoinTags(0);

  final coinsToWrite = <SparkCoin>[
    // Existing unused coins whose tag now appears in the spent set.
    for (final coin in unusedDbCoins)
      if (spentCoinTags.contains(coin.lTagHash)) coin.copyWith(isUsed: true),
    // New coins, flagged as used up-front if already spent.
    for (final coin in coinsNotInDbYet)
      spentCoinTags.contains(coin.lTagHash)
          ? coin.copyWith(isUsed: true)
          : coin,
  ];

  // update wallet spark coins in isar
  await _addOrUpdateSparkCoins(coinsToWrite);
}
// modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
Future<List<TxData>> _createSparkMintTransactions({
2023-12-23 00:15:44 +00:00
required List<UTXO> availableUtxos,
required List<MutableSparkRecipient> outputs,
required bool subtractFeeFromAmount,
required bool autoMintAll,
}) async {
// pre checks
if (outputs.isEmpty) {
throw Exception("Cannot mint without some recipients");
}
// TODO remove when multiple recipients gui is added. Will need to handle
// addresses when confirming the transactions later as well
assert(outputs.length == 1);
2023-12-23 00:15:44 +00:00
BigInt valueToMint =
outputs.map((e) => e.value).reduce((value, element) => value + element);
if (valueToMint <= BigInt.zero) {
throw Exception("Cannot mint amount=$valueToMint");
}
final totalUtxosValue = _sum(availableUtxos);
if (valueToMint > totalUtxosValue) {
throw Exception("Insufficient balance to create spark mint(s)");
}
// organise utxos
2024-05-09 17:56:42 +00:00
final Map<String, List<UTXO>> utxosByAddress = {};
2023-12-23 00:15:44 +00:00
for (final utxo in availableUtxos) {
utxosByAddress[utxo.address!] ??= [];
utxosByAddress[utxo.address!]!.add(utxo);
}
final valueAndUTXOs = utxosByAddress.values.toList();
// setup some vars
int nChangePosInOut = -1;
2024-05-09 17:56:42 +00:00
final int nChangePosRequest = nChangePosInOut;
List<MutableSparkRecipient> outputs_ = outputs
.map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
.toList(); // deep copy
2023-12-24 16:51:08 +00:00
final feesObject = await fees;
2023-12-23 00:15:44 +00:00
final currentHeight = await chainHeight;
final random = Random.secure();
final List<TxData> results = [];
valueAndUTXOs.shuffle(random);
while (valueAndUTXOs.isNotEmpty) {
final lockTime = random.nextInt(10) == 0
? max(0, currentHeight - random.nextInt(100))
: currentHeight;
const txVersion = 1;
final List<SigningData> vin = [];
final List<(dynamic, int, String?)> vout = [];
2023-12-23 00:15:44 +00:00
BigInt nFeeRet = BigInt.zero;
final itr = valueAndUTXOs.first;
BigInt valueToMintInTx = _sum(itr);
if (!autoMintAll) {
valueToMintInTx = _min(valueToMintInTx, valueToMint);
}
BigInt nValueToSelect, mintedValue;
final List<SigningData> setCoins = [];
bool skipCoin = false;
// Start with no fee and loop until there is enough fee
while (true) {
mintedValue = valueToMintInTx;
if (subtractFeeFromAmount) {
nValueToSelect = mintedValue;
} else {
nValueToSelect = mintedValue + nFeeRet;
}
// if not enough coins in this group then subtract fee from mint
if (nValueToSelect > _sum(itr) && !subtractFeeFromAmount) {
nValueToSelect = mintedValue;
mintedValue -= nFeeRet;
}
// if (!MoneyRange(mintedValue) || mintedValue == 0) {
if (mintedValue == BigInt.zero) {
valueAndUTXOs.remove(itr);
skipCoin = true;
break;
}
nChangePosInOut = nChangePosRequest;
vin.clear();
vout.clear();
setCoins.clear();
// deep copy
final remainingOutputs = outputs_
.map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
.toList();
2023-12-23 00:15:44 +00:00
final List<MutableSparkRecipient> singleTxOutputs = [];
2023-12-23 00:15:44 +00:00
if (autoMintAll) {
singleTxOutputs.add(
MutableSparkRecipient(
(await getCurrentReceivingSparkAddress())!.value,
mintedValue,
"",
),
);
} else {
BigInt remainingMintValue = BigInt.parse(mintedValue.toString());
2023-12-23 00:15:44 +00:00
while (remainingMintValue > BigInt.zero) {
final singleMintValue =
_min(remainingMintValue, remainingOutputs.first.value);
singleTxOutputs.add(
MutableSparkRecipient(
remainingOutputs.first.address,
singleMintValue,
remainingOutputs.first.memo,
),
);
// subtract minted amount from remaining value
remainingMintValue -= singleMintValue;
remainingOutputs.first.value -= singleMintValue;
if (remainingOutputs.first.value == BigInt.zero) {
remainingOutputs.remove(remainingOutputs.first);
}
}
}
if (subtractFeeFromAmount) {
final BigInt singleFee =
nFeeRet ~/ BigInt.from(singleTxOutputs.length);
BigInt remainder = nFeeRet % BigInt.from(singleTxOutputs.length);
for (int i = 0; i < singleTxOutputs.length; ++i) {
if (singleTxOutputs[i].value <= singleFee) {
singleTxOutputs.removeAt(i);
remainder += singleTxOutputs[i].value - singleFee;
--i;
}
singleTxOutputs[i].value -= singleFee;
if (remainder > BigInt.zero &&
singleTxOutputs[i].value >
nFeeRet % BigInt.from(singleTxOutputs.length)) {
// first receiver pays the remainder not divisible by output count
singleTxOutputs[i].value -= remainder;
remainder = BigInt.zero;
}
}
}
// Generate dummy mint coins to save time
final dummyRecipients = LibSpark.createSparkMintRecipients(
outputs: singleTxOutputs
2024-05-09 17:56:42 +00:00
.map(
(e) => (
sparkAddress: e.address,
value: e.value.toInt(),
memo: "",
),
)
2023-12-23 00:15:44 +00:00
.toList(),
serialContext: Uint8List(0),
generate: false,
);
final dummyTxb = btc.TransactionBuilder(network: _bitcoinDartNetwork);
dummyTxb.setVersion(txVersion);
dummyTxb.setLockTime(lockTime);
for (int i = 0; i < dummyRecipients.length; i++) {
final recipient = dummyRecipients[i];
2023-12-23 00:15:44 +00:00
if (recipient.amount < cryptoCurrency.dustLimit.raw.toInt()) {
throw Exception("Output amount too small");
}
2024-05-09 17:56:42 +00:00
vout.add(
(
recipient.scriptPubKey,
recipient.amount,
singleTxOutputs[i].address,
),
);
2023-12-23 00:15:44 +00:00
}
// Choose coins to use
BigInt nValueIn = BigInt.zero;
for (final utxo in itr) {
if (nValueToSelect > nValueIn) {
setCoins.add((await fetchBuildTxData([utxo])).first);
nValueIn += BigInt.from(utxo.value);
}
}
if (nValueIn < nValueToSelect) {
throw Exception("Insufficient funds");
}
// priority stuff???
2024-05-09 17:56:42 +00:00
final BigInt nChange = nValueIn - nValueToSelect;
2023-12-23 00:15:44 +00:00
if (nChange > BigInt.zero) {
if (nChange < cryptoCurrency.dustLimit.raw) {
nChangePosInOut = -1;
nFeeRet += nChange;
} else {
if (nChangePosInOut == -1) {
nChangePosInOut = random.nextInt(vout.length + 1);
} else if (nChangePosInOut > vout.length) {
throw Exception("Change index out of range");
}
final changeAddress = await getCurrentChangeAddress();
vout.insert(
nChangePosInOut,
(changeAddress!.value, nChange.toInt(), null),
2023-12-23 00:15:44 +00:00
);
}
}
// add outputs for dummy tx to check fees
for (final out in vout) {
dummyTxb.addOutput(out.$1, out.$2);
}
// fill vin
for (final sd in setCoins) {
vin.add(sd);
2024-04-18 17:33:23 +00:00
final pubKey = sd.keyPair!.publicKey.data;
final btc.PaymentData? data;
switch (sd.derivePathType) {
case DerivePathType.bip44:
data = btc
.P2PKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip49:
final p2wpkh = btc
.P2WPKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
data = btc
.P2SH(
data: btc.PaymentData(redeem: p2wpkh),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip84:
data = btc
.P2WPKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip86:
data = null;
break;
default:
throw Exception("DerivePathType unsupported");
}
2023-12-23 00:15:44 +00:00
// add to dummy tx
dummyTxb.addInput(
sd.utxo.txid,
sd.utxo.vout,
0xffffffff -
1, // minus 1 is important. 0xffffffff on its own will burn funds
2024-04-18 17:33:23 +00:00
data!.output!,
2023-12-23 00:15:44 +00:00
);
}
// sign dummy tx
for (var i = 0; i < setCoins.length; i++) {
dummyTxb.sign(
vin: i,
2024-04-18 17:33:23 +00:00
keyPair: btc.ECPair.fromPrivateKey(
setCoins[i].keyPair!.privateKey.data,
network: _bitcoinDartNetwork,
compressed: setCoins[i].keyPair!.privateKey.compressed,
),
2023-12-23 00:15:44 +00:00
witnessValue: setCoins[i].utxo.value,
2024-04-18 17:33:23 +00:00
// maybe not needed here as this was originally copied from btc? We'll find out...
// redeemScript: setCoins[i].redeemScript,
2023-12-23 00:15:44 +00:00
);
}
final dummyTx = dummyTxb.build();
final nBytes = dummyTx.virtualSize();
if (dummyTx.weight() > MAX_STANDARD_TX_WEIGHT) {
throw Exception("Transaction too large");
}
2023-12-24 16:51:08 +00:00
final nFeeNeeded = BigInt.from(
estimateTxFee(
vSize: nBytes,
feeRatePerKB: feesObject.medium,
),
); // One day we'll do this properly
2023-12-23 00:15:44 +00:00
if (nFeeRet >= nFeeNeeded) {
for (final usedCoin in setCoins) {
itr.removeWhere((e) => e == usedCoin.utxo);
}
if (itr.isEmpty) {
final preLength = valueAndUTXOs.length;
valueAndUTXOs.remove(itr);
assert(preLength - 1 == valueAndUTXOs.length);
}
// Generate real mint coins
final serialContext = LibSpark.serializeMintContext(
inputs: setCoins
2024-05-09 17:56:42 +00:00
.map(
(e) => (
e.utxo.txid,
e.utxo.vout,
),
)
2023-12-23 00:15:44 +00:00
.toList(),
);
final recipients = LibSpark.createSparkMintRecipients(
outputs: singleTxOutputs
.map(
(e) => (
sparkAddress: e.address,
memo: e.memo,
value: e.value.toInt(),
),
)
.toList(),
serialContext: serialContext,
generate: true,
);
for (int i = 0; i < recipients.length; i++) {
final recipient = recipients[i];
final out = (
recipient.scriptPubKey,
recipient.amount,
singleTxOutputs[i].address,
);
2023-12-23 00:15:44 +00:00
while (i < vout.length) {
if (vout[i].$1 is Uint8List &&
(vout[i].$1 as Uint8List).isNotEmpty &&
(vout[i].$1 as Uint8List)[0] == OP_SPARKMINT) {
vout[i] = out;
break;
}
++i;
}
++i;
}
// deep copy
outputs_ = remainingOutputs
.map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
.toList();
2023-12-23 00:15:44 +00:00
break; // Done, enough fee included.
}
// Include more fee and try again.
nFeeRet = nFeeNeeded;
continue;
}
if (skipCoin) {
continue;
}
// temp tx data to show in gui while waiting for real data from server
final List<InputV2> tempInputs = [];
final List<OutputV2> tempOutputs = [];
2023-12-23 00:15:44 +00:00
// sign
final txb = btc.TransactionBuilder(network: _bitcoinDartNetwork);
txb.setVersion(txVersion);
txb.setLockTime(lockTime);
for (final input in vin) {
2024-04-18 17:33:23 +00:00
final pubKey = input.keyPair!.publicKey.data;
final btc.PaymentData? data;
switch (input.derivePathType) {
case DerivePathType.bip44:
data = btc
.P2PKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip49:
final p2wpkh = btc
.P2WPKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
data = btc
.P2SH(
data: btc.PaymentData(redeem: p2wpkh),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip84:
data = btc
.P2WPKH(
data: btc.PaymentData(
pubkey: pubKey,
),
network: _bitcoinDartNetwork,
)
.data;
break;
case DerivePathType.bip86:
data = null;
break;
default:
throw Exception("DerivePathType unsupported");
}
2023-12-23 00:15:44 +00:00
txb.addInput(
input.utxo.txid,
input.utxo.vout,
0xffffffff -
1, // minus 1 is important. 0xffffffff on its own will burn funds
2024-04-18 17:33:23 +00:00
data!.output!,
2023-12-23 00:15:44 +00:00
);
tempInputs.add(
InputV2.isarCantDoRequiredInDefaultConstructor(
scriptSigHex: txb.inputs.first.script?.toHex,
scriptSigAsm: null,
sequence: 0xffffffff - 1,
outpoint: OutpointV2.isarCantDoRequiredInDefaultConstructor(
txid: input.utxo.txid,
vout: input.utxo.vout,
),
addresses: input.utxo.address == null ? [] : [input.utxo.address!],
valueStringSats: input.utxo.value.toString(),
witness: null,
innerRedeemScriptAsm: null,
coinbase: null,
walletOwns: true,
),
);
2023-12-23 00:15:44 +00:00
}
for (final output in vout) {
final addressOrScript = output.$1;
final value = output.$2;
txb.addOutput(addressOrScript, value);
tempOutputs.add(
OutputV2.isarCantDoRequiredInDefaultConstructor(
scriptPubKeyHex:
addressOrScript is Uint8List ? addressOrScript.toHex : "000000",
valueStringSats: value.toString(),
addresses: [
if (addressOrScript is String) addressOrScript.toString(),
],
walletOwns: (await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
2024-05-09 17:56:42 +00:00
.valueEqualTo(
addressOrScript is Uint8List
? output.$3!
: addressOrScript as String,
)
.valueProperty()
.findFirst()) !=
null,
),
);
2023-12-23 00:15:44 +00:00
}
try {
for (var i = 0; i < vin.length; i++) {
txb.sign(
vin: i,
2024-04-18 17:33:23 +00:00
keyPair: btc.ECPair.fromPrivateKey(
vin[i].keyPair!.privateKey.data,
network: _bitcoinDartNetwork,
compressed: vin[i].keyPair!.privateKey.compressed,
),
2023-12-23 00:15:44 +00:00
witnessValue: vin[i].utxo.value,
2024-04-18 17:33:23 +00:00
// maybe not needed here as this was originally copied from btc? We'll find out...
// redeemScript: setCoins[i].redeemScript,
2023-12-23 00:15:44 +00:00
);
}
} catch (e, s) {
Logging.instance.log(
"Caught exception while signing spark mint transaction: $e\n$s",
level: LogLevel.Error,
);
rethrow;
}
final builtTx = txb.build();
// TODO: see todo at top of this function
assert(outputs.length == 1);
2023-12-23 00:15:44 +00:00
final data = TxData(
sparkRecipients: vout
.where((e) => e.$1 is Uint8List) // ignore change
2023-12-23 00:15:44 +00:00
.map(
(e) => (
address: outputs.first
.address, // for display purposes on confirm tx screen. See todos above
2023-12-23 00:15:44 +00:00
memo: "",
amount: Amount(
rawValue: BigInt.from(e.$2),
fractionDigits: cryptoCurrency.fractionDigits,
),
2024-01-05 22:39:05 +00:00
isChange: false, // ok?
2023-12-23 00:15:44 +00:00
),
)
.toList(),
vSize: builtTx.virtualSize(),
txid: builtTx.getId(),
raw: builtTx.toHex(),
fee: Amount(
rawValue: nFeeRet,
fractionDigits: cryptoCurrency.fractionDigits,
),
usedUTXOs: vin.map((e) => e.utxo).toList(),
tempTx: TransactionV2(
walletId: walletId,
blockHash: null,
hash: builtTx.getId(),
txid: builtTx.getId(),
timestamp: DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
inputs: List.unmodifiable(tempInputs),
outputs: List.unmodifiable(tempOutputs),
type:
tempOutputs.map((e) => e.walletOwns).fold(true, (p, e) => p &= e)
? TransactionType.sentToSelf
: TransactionType.outgoing,
subType: TransactionSubType.sparkMint,
otherData: null,
height: null,
version: 3,
),
2023-12-23 00:15:44 +00:00
);
2023-12-24 16:51:08 +00:00
if (nFeeRet.toInt() < data.vSize!) {
throw Exception("fee is less than vSize");
}
2023-12-23 00:15:44 +00:00
results.add(data);
if (nChangePosInOut >= 0) {
final vOut = vout[nChangePosInOut];
assert(vOut.$1 is String); // check to make sure is change address
final out = UTXO(
walletId: walletId,
txid: data.txid!,
vout: nChangePosInOut,
value: vOut.$2,
address: vOut.$1 as String,
name: "Spark mint change",
isBlocked: false,
blockedReason: null,
isCoinbase: false,
blockHash: null,
blockHeight: null,
blockTime: null,
);
bool added = false;
for (final utxos in valueAndUTXOs) {
if (utxos.first.address == out.address) {
utxos.add(out);
added = true;
}
}
if (!added) {
valueAndUTXOs.add([out]);
}
}
if (!autoMintAll) {
valueToMint -= mintedValue;
if (valueToMint == BigInt.zero) {
break;
}
}
}
if (!autoMintAll && valueToMint > BigInt.zero) {
// TODO: Is this a valid error message?
throw Exception("Failed to mint expected amounts");
}
return results;
}
Future<void> anonymizeAllSpark() async {
2023-12-24 16:51:08 +00:00
try {
const subtractFeeFromAmount = true; // must be true for mint all
final currentHeight = await chainHeight;
2023-12-23 00:15:44 +00:00
2023-12-24 16:51:08 +00:00
final spendableUtxos = await mainDB.isar.utxos
.where()
.walletIdEqualTo(walletId)
.filter()
.isBlockedEqualTo(false)
.and()
.group((q) => q.usedEqualTo(false).or().usedIsNull())
.and()
.valueGreaterThan(0)
.findAll();
2023-12-23 00:15:44 +00:00
2023-12-24 16:51:08 +00:00
spendableUtxos.removeWhere(
(e) => !e.isConfirmed(
currentHeight,
cryptoCurrency.minConfirms,
2023-12-23 00:15:44 +00:00
),
2023-12-24 16:51:08 +00:00
);
if (spendableUtxos.isEmpty) {
throw Exception("No available UTXOs found to anonymize");
}
2023-12-23 00:15:44 +00:00
final mints = await _createSparkMintTransactions(
2023-12-24 16:51:08 +00:00
subtractFeeFromAmount: subtractFeeFromAmount,
autoMintAll: true,
availableUtxos: spendableUtxos,
outputs: [
MutableSparkRecipient(
(await getCurrentReceivingSparkAddress())!.value,
spendableUtxos
.map((e) => BigInt.from(e.value))
.fold(BigInt.zero, (p, e) => p + e),
"",
),
],
);
await confirmSparkMintTransactions(txData: TxData(sparkMints: mints));
2023-12-24 16:51:08 +00:00
} catch (e, s) {
Logging.instance.log(
"Exception caught in anonymizeAllSpark(): $e\n$s",
level: LogLevel.Warning,
);
rethrow;
2023-12-23 00:15:44 +00:00
}
}
/// Transparent to Spark (mint) transaction creation.
///
/// See https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o
2023-11-27 21:18:20 +00:00
Future<TxData> prepareSparkMintTransaction({required TxData txData}) async {
try {
if (txData.sparkRecipients?.isNotEmpty != true) {
throw Exception("Missing spark recipients.");
}
final recipients = txData.sparkRecipients!
.map(
(e) => MutableSparkRecipient(
e.address,
e.amount.raw,
e.memo,
),
)
.toList();
final total = recipients
.map((e) => e.value)
.reduce((value, element) => value += element);
if (total < BigInt.zero) {
throw Exception("Attempted send of negative amount");
} else if (total == BigInt.zero) {
throw Exception("Attempted send of zero amount");
}
final currentHeight = await chainHeight;
// coin control not enabled for firo currently so we can ignore this
// final utxosToUse = txData.utxos?.toList() ?? await mainDB.isar.utxos
// .where()
// .walletIdEqualTo(walletId)
// .filter()
// .isBlockedEqualTo(false)
// .and()
// .group((q) => q.usedEqualTo(false).or().usedIsNull())
// .and()
// .valueGreaterThan(0)
// .findAll();
final spendableUtxos = await mainDB.isar.utxos
.where()
.walletIdEqualTo(walletId)
.filter()
.isBlockedEqualTo(false)
.and()
.group((q) => q.usedEqualTo(false).or().usedIsNull())
.and()
.valueGreaterThan(0)
.findAll();
spendableUtxos.removeWhere(
(e) => !e.isConfirmed(
currentHeight,
cryptoCurrency.minConfirms,
),
);
if (spendableUtxos.isEmpty) {
throw Exception("No available UTXOs found to anonymize");
}
final available = spendableUtxos
.map((e) => BigInt.from(e.value))
.reduce((value, element) => value += element);
final bool subtractFeeFromAmount;
if (available < total) {
throw Exception("Insufficient balance");
} else if (available == total) {
subtractFeeFromAmount = true;
} else {
subtractFeeFromAmount = false;
}
final mints = await _createSparkMintTransactions(
subtractFeeFromAmount: subtractFeeFromAmount,
autoMintAll: false,
availableUtxos: spendableUtxos,
outputs: recipients,
2023-12-14 02:25:13 +00:00
);
return txData.copyWith(sparkMints: mints);
2023-12-16 20:28:04 +00:00
} catch (e, s) {
Logging.instance.log(
"Exception caught in prepareSparkMintTransaction(): $e\n$s",
level: LogLevel.Warning,
2023-12-16 20:28:04 +00:00
);
rethrow;
}
2023-11-27 21:18:20 +00:00
}
Future<TxData> confirmSparkMintTransactions({required TxData txData}) async {
final futures = txData.sparkMints!.map((e) => confirmSend(txData: e));
return txData.copyWith(sparkMints: await Future.wait(futures));
2023-12-07 21:05:27 +00:00
}
2023-11-27 20:57:33 +00:00
@override
Future<void> updateBalance() async {
// call to super to update transparent balance (and lelantus balance if
// what ever class this mixin is used on uses LelantusInterface as well)
final normalBalanceFuture = super.updateBalance();
// todo: spark balance aka update info.tertiaryBalance here?
// currently happens on spark coins update/refresh
2023-11-27 20:57:33 +00:00
// wait for normalBalanceFuture to complete before returning
await normalBalanceFuture;
}
// ====================== Private ============================================
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
if (coins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(coins);
});
}
// update wallet spark coin height
final coinsToCheck = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
2024-05-15 21:20:45 +00:00
cryptoCurrency: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
}
2023-12-23 00:15:44 +00:00
btc.NetworkType get _bitcoinDartNetwork => btc.NetworkType(
messagePrefix: cryptoCurrency.networkParams.messagePrefix,
bech32: cryptoCurrency.networkParams.bech32Hrp,
bip32: btc.Bip32Type(
public: cryptoCurrency.networkParams.pubHDPrefix,
private: cryptoCurrency.networkParams.privHDPrefix,
),
pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
scriptHash: cryptoCurrency.networkParams.p2shPrefix,
wif: cryptoCurrency.networkParams.wifPrefix,
);
2023-11-16 21:30:01 +00:00
}
2023-12-05 20:44:50 +00:00
/// Decodes a (possibly multi-line) base64 [source] and returns the decoded
/// bytes in reverse order as a lowercase hex string.
///
/// Used to convert electrumx's base64 little/big-endian encoded hashes.
String base64ToReverseHex(String source) {
  final bytes = base64Decode(LineSplitter.split(source).join());
  final buffer = StringBuffer();
  for (var i = bytes.length - 1; i >= 0; i--) {
    buffer.write(bytes[i].toRadixString(16).padLeft(2, '0'));
  }
  return buffer.toString();
}
2023-12-21 22:18:12 +00:00
/// Top level function which should be called wrapped in [compute]
///
/// Pure pass-through to the spark library's spend-transaction builder,
/// isolated here so the heavy native work can run on a background isolate.
Future<
    ({
      Uint8List serializedSpendPayload,
      List<Uint8List> outputScripts,
      int fee,
      List<
          ({
            int groupId,
            int height,
            String serializedCoin,
            String serializedCoinContext
          })> usedCoins,
    })> _createSparkSend(
  ({
    String privateKeyHex,
    int index,
    List<({String address, int amount, bool subtractFeeFromAmount})> recipients,
    List<
        ({
          String sparkAddress,
          int amount,
          bool subtractFeeFromAmount,
          String memo
        })> privateRecipients,
    List<
        ({
          String serializedCoin,
          String serializedCoinContext,
          int groupId,
          int height,
        })> serializedCoins,
    List<
        ({
          int setId,
          String setHash,
          List<({String serializedCoin, String txHash})> set
        })> allAnonymitySets,
    List<
        ({
          int setId,
          Uint8List blockHash,
        })> idAndBlockHashes,
    Uint8List txHash,
  }) args,
) async {
  return LibSpark.createSparkSendTransaction(
    privateKeyHex: args.privateKeyHex,
    index: args.index,
    recipients: args.recipients,
    privateRecipients: args.privateRecipients,
    serializedCoins: args.serializedCoins,
    allAnonymitySets: args.allAnonymitySets,
    idAndBlockHashes: args.idAndBlockHashes,
    txHash: args.txHash,
  );
}
/// Top level function which should be called wrapped in [compute]
///
/// Scans [args.anonymitySetCoins] against every private key in
/// [args.privateKeyHexSet] and returns the coins belonging to this wallet.
Future<List<SparkCoin>> _identifyCoins(
  ({
    List<dynamic> anonymitySetCoins,
    int groupId,
    Set<String> privateKeyHexSet,
    String walletId,
    bool isTestNet,
  }) args,
) async {
  final List<SparkCoin> identified = [];

  for (final privateKeyHex in args.privateKeyHexSet) {
    for (final raw in args.anonymitySetCoins) {
      // Each set entry is expected to be a 3 element list:
      // [serialized coin (b64), tx hash (b64), serial context (b64)].
      final entry = List<String>.from(raw as List);
      if (entry.length != 3) {
        throw Exception("Unexpected serialized coin info found");
      }

      final serializedCoinB64 = entry[0];
      final txHash = base64ToReverseHex(entry[1]);
      final contextB64 = entry[2];

      final coin = LibSpark.identifyAndRecoverCoin(
        serializedCoinB64,
        privateKeyHex: privateKeyHex,
        index: kDefaultSparkIndex,
        context: base64Decode(contextB64),
        isTestNet: args.isTestNet,
      );

      // A null result means the coin does not belong to this key.
      if (coin == null) {
        continue;
      }

      final coinType = switch (coin.type.value) {
        0 => SparkCoinType.mint,
        1 => SparkCoinType.spend,
        _ => throw Exception("Unknown spark coin type detected"),
      };

      identified.add(
        SparkCoin(
          walletId: args.walletId,
          type: coinType,
          isUsed: false,
          groupId: args.groupId,
          nonce: coin.nonceHex?.toUint8ListFromHex,
          address: coin.address!,
          txHash: txHash,
          valueIntString: coin.value!.toString(),
          memo: coin.memo,
          serialContext: coin.serialContext,
          diversifierIntString: coin.diversifier!.toString(),
          encryptedDiversifier: coin.encryptedDiversifier,
          serial: coin.serial,
          tag: coin.tag,
          lTagHash: coin.lTagHash!,
          height: coin.height,
          serializedCoinB64: serializedCoinB64,
          contextB64: contextB64,
        ),
      );
    }
  }

  return identified;
}
2023-12-23 00:15:44 +00:00
/// Returns the smaller of [a] and [b].
BigInt _min(BigInt a, BigInt b) => a <= b ? a : b;
/// Returns the total value (in sats) of all [utxos] as a [BigInt].
BigInt _sum(List<UTXO> utxos) {
  var total = BigInt.zero;
  for (final utxo in utxos) {
    total += BigInt.from(utxo.value);
  }
  return total;
}
/// A mutable spark recipient used while building mint/spend transactions,
/// since output values may be adjusted (e.g. fee subtraction) during
/// transaction creation.
class MutableSparkRecipient {
  // Destination spark address.
  String address;

  // Amount in sats.
  BigInt value;

  String memo;

  MutableSparkRecipient(this.address, this.value, this.memo);

  @override
  String toString() =>
      'MutableSparkRecipient{ address: $address, value: $value, memo: $memo }';
}