stack_wallet/lib/wallets/wallet/impl/firo_wallet.dart

import 'dart:async';
import 'dart:convert';
import 'dart:math';
import 'package:decimal/decimal.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';
import '../../../models/isar/models/isar_models.dart';
import '../../../utilities/amount/amount.dart';
import '../../../utilities/extensions/extensions.dart';
import '../../../utilities/logger.dart';
import '../../../utilities/util.dart';
import '../../crypto_currency/crypto_currency.dart';
import '../../crypto_currency/interfaces/electrumx_currency_interface.dart';
import '../../isar/models/spark_coin.dart';
import '../../isar/models/wallet_info.dart';
import '../../models/tx_data.dart';
import '../intermediate/bip39_hd_wallet.dart';
import '../wallet_mixin_interfaces/electrumx_interface.dart';
import '../wallet_mixin_interfaces/lelantus_interface.dart';
import '../wallet_mixin_interfaces/spark_interface.dart';
const sparkStartBlock = 819300; // (approx 18 Jan 2024)

class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
    with ElectrumXInterface<T>, LelantusInterface<T>, SparkInterface<T> {
  // IMPORTANT: The order of the above mixins matters.
  // SparkInterface MUST come after LelantusInterface.

  FiroWallet(CryptoCurrencyNetwork network) : super(Firo(network) as T);
@override
int get isarTransactionVersion => 2;
@override
FilterOperation? get changeAddressFilterOperation =>
FilterGroup.and(standardChangeAddressFilters);
@override
FilterOperation? get receivingAddressFilterOperation =>
FilterGroup.and(standardReceivingAddressFilters);
final Set<String> _unconfirmedTxids = {};
// ===========================================================================
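  // Stores the locally built "temp" transaction right after broadcast so it
  // shows in the wallet history immediately, and remembers its txid in
  // _unconfirmedTxids until it is seen confirmed on chain.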
@override
Future<TxData> updateSentCachedTxData({required TxData txData}) async {
if (txData.tempTx != null) {
await mainDB.updateOrPutTransactionV2s([txData.tempTx!]);
_unconfirmedTxids.add(txData.tempTx!.txid);
Logging.instance.log(
"Added firo unconfirmed: ${txData.tempTx!.txid}",
level: LogLevel.Info,
);
}
return txData;
}
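
  // Rebuilds the wallet's TransactionV2 history: gathers tx hashes from the
  // electrumx address history, merges in spark-coin and pending lelantus
  // txids, then parses each verbose transaction to classify type, subtype,
  // ownership, and anonymity fees before writing the results to Isar.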
  @override
  Future<void> updateTransactions() async {
    final List<Address> allAddressesOld =
        await fetchAddressesForElectrumXScan();

    final Set<String> receivingAddresses = allAddressesOld
        .where((e) => e.subType == AddressSubType.receiving)
        .map((e) => convertAddressString(e.value))
        .toSet();

    final Set<String> changeAddresses = allAddressesOld
.where((e) => e.subType == AddressSubType.change)
.map((e) => convertAddressString(e.value))
.toSet();
final allAddressesSet = {...receivingAddresses, ...changeAddresses};
final List<Map<String, dynamic>> allTxHashes =
await fetchHistory(allAddressesSet);
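
    // Merge in the tx hashes of this wallet's known spark coins so their
    // transactions get parsed even if they don't appear in the electrumx
    // address history gathered above.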
    final sparkCoins = await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .findAll();

    final Set<String> sparkTxids = {};
    for (final coin in sparkCoins) {
      sparkTxids.add(coin.txHash);
      // check for duplicates before adding to list
      if (allTxHashes.indexWhere((e) => e["tx_hash"] == coin.txHash) == -1) {
        final info = {
          "tx_hash": coin.txHash,
          "height": coin.height,
        };
        allTxHashes.add(info);
      }
    }

    final List<Map<String, dynamic>> allTransactions = [];
    // Some lelantus transactions aren't tied to wallet addresses, so without
    // this extra check they would never show as confirmed in the GUI.
final unconfirmedTransactions = await mainDB
.getTransactions(walletId)
.filter()
.heightIsNull()
.findAll();
for (final tx in unconfirmedTransactions) {
final txn = await electrumXCachedClient.getTransaction(
txHash: tx.txid,
verbose: true,
cryptoCurrency: info.coin,
);
final height = txn["height"] as int?;
if (height != null) {
// tx was mined
// add to allTxHashes
final info = {
"tx_hash": tx.txid,
"height": height,
"address": tx.address.value?.value,
};
allTxHashes.add(info);
}
}
for (final txHash in allTxHashes) {
// final storedTx = await db
// .getTransactions(walletId)
// .filter()
// .txidEqualTo(txHash["tx_hash"] as String)
// .findFirst();
// if (storedTx == null ||
// !storedTx.isConfirmed(currentHeight, MINIMUM_CONFIRMATIONS)) {
// firod/electrumx seem to take forever to process spark txns so we'll
// just ignore null errors and check again on next refresh.
// This could also be a bug in the custom electrumx rpc code
final Map<String, dynamic> tx;
try {
tx = await electrumXCachedClient.getTransaction(
txHash: txHash["tx_hash"] as String,
verbose: true,
          cryptoCurrency: info.coin,
        );
      } catch (_) {
        continue;
      }

      // check for duplicates before adding to list
      if (allTransactions
              .indexWhere((e) => e["txid"] == tx["txid"] as String) ==
          -1) {
        tx["height"] ??= txHash["height"];

        allTransactions.add(tx);
      }
      // }
    }
final List<TransactionV2> txns = [];
for (final txData in allTransactions) {
// set to true if any inputs were detected as owned by this wallet
bool wasSentFromThisWallet = false;
// set to true if any outputs were detected as owned by this wallet
bool wasReceivedInThisWallet = false;
BigInt amountReceivedInThisWallet = BigInt.zero;
BigInt changeAmountReceivedInThisWallet = BigInt.zero;
Amount? anonFees;
bool isMint = false;
bool isJMint = false;
bool isSparkMint = false;
      final bool isMasterNodePayment = false;

      final bool isSparkSpend = txData["type"] == 9 && txData["version"] == 3;
final bool isMySpark = sparkTxids.contains(txData["txid"] as String);
final sparkCoinsInvolved =
sparkCoins.where((e) => e.txHash == txData["txid"]);
if (isMySpark && sparkCoinsInvolved.isEmpty) {
Logging.instance.log(
"sparkCoinsInvolved is empty and should not be! (ignoring tx parsing)",
level: LogLevel.Error,
);
continue;
}
// parse outputs
final List<OutputV2> outputs = [];
for (final outputJson in txData["vout"] as List) {
final outMap = Map<String, dynamic>.from(outputJson as Map);
if (outMap["scriptPubKey"]?["type"] == "lelantusmint") {
final asm = outMap["scriptPubKey"]?["asm"] as String?;
if (asm != null) {
if (asm.startsWith("OP_LELANTUSJMINT")) {
isJMint = true;
} else if (asm.startsWith("OP_LELANTUSMINT")) {
isMint = true;
} else {
Logging.instance.log(
"Unknown mint op code found for lelantusmint tx: ${txData["txid"]}",
level: LogLevel.Error,
);
}
} else {
Logging.instance.log(
"ASM for lelantusmint tx: ${txData["txid"]} is null!",
level: LogLevel.Error,
);
}
}
        if (outMap["scriptPubKey"]?["type"] == "sparkmint" ||
            outMap["scriptPubKey"]?["type"] == "sparksmint") {
          final asm = outMap["scriptPubKey"]?["asm"] as String?;
          if (asm != null) {
            if (asm.startsWith("OP_SPARKMINT") ||
                asm.startsWith("OP_SPARKSMINT")) {
isSparkMint = true;
} else {
Logging.instance.log(
"Unknown mint op code found for sparkmint tx: ${txData["txid"]}",
level: LogLevel.Error,
);
}
} else {
Logging.instance.log(
"ASM for sparkmint tx: ${txData["txid"]} is null!",
level: LogLevel.Error,
);
}
}
OutputV2 output = OutputV2.fromElectrumXJson(
outMap,
decimalPlaces: cryptoCurrency.fractionDigits,
isFullAmountNotSats: true,
// don't know yet if wallet owns. Need addresses first
walletOwns: false,
);
// if (isSparkSpend) {
// // TODO?
// } else
if (isSparkMint) {
if (isMySpark) {
if (output.addresses.isEmpty &&
output.scriptPubKeyHex.length >= 488) {
// likely spark related
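                // The first byte of the scriptPubKey is the spark op code; the
                // following 243 bytes (hex chars 2..488) are base64 encoded
                // below and matched as a prefix against the serialized coins
                // stored in the local DB.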
final opByte = output.scriptPubKeyHex
.substring(0, 2)
.toUint8ListFromHex
.first;
if (opByte == OP_SPARKMINT || opByte == OP_SPARKSMINT) {
final serCoin = base64Encode(
output.scriptPubKeyHex.substring(2, 488).toUint8ListFromHex,
);
final coin = sparkCoinsInvolved
.where((e) => e.serializedCoinB64!.startsWith(serCoin))
.firstOrNull;
if (coin == null) {
// not ours
} else {
output = output.copyWith(
walletOwns: true,
valueStringSats: coin.value.toString(),
addresses: [
coin.address,
],
);
}
}
}
}
} else if (isMint || isJMint) {
// do nothing extra ?
} else {
// TODO?
}
// if output was to my wallet, add value to amount received
if (receivingAddresses
.intersection(output.addresses.toSet())
.isNotEmpty) {
wasReceivedInThisWallet = true;
amountReceivedInThisWallet += output.value;
output = output.copyWith(walletOwns: true);
} else if (changeAddresses
.intersection(output.addresses.toSet())
.isNotEmpty) {
wasReceivedInThisWallet = true;
changeAmountReceivedInThisWallet += output.value;
output = output.copyWith(walletOwns: true);
} else if (isSparkMint && isMySpark) {
wasReceivedInThisWallet = true;
if (output.addresses.contains(sparkChangeAddress)) {
changeAmountReceivedInThisWallet += output.value;
} else {
amountReceivedInThisWallet += output.value;
}
}
outputs.add(output);
}
if (isJMint || isSparkSpend) {
anonFees = Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
);
}
// parse inputs
final List<InputV2> inputs = [];
for (final jsonInput in txData["vin"] as List) {
final map = Map<String, dynamic>.from(jsonInput as Map);
final List<String> addresses = [];
String valueStringSats = "0";
OutpointV2? outpoint;
final coinbase = map["coinbase"] as String?;
final txid = map["txid"] as String?;
final vout = map["vout"] as int?;
if (txid != null && vout != null) {
outpoint = OutpointV2.isarCantDoRequiredInDefaultConstructor(
txid: txid,
vout: vout,
);
}
if (isSparkSpend) {
// anon fees
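          // Each spark spend input may carry an "nFees" value in the verbose
          // tx data; those values are summed into anonFees for this tx.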
final nFee = Decimal.tryParse(map["nFees"].toString());
if (nFee != null) {
final fees = Amount.fromDecimal(
nFee,
fractionDigits: cryptoCurrency.fractionDigits,
);
anonFees = anonFees! + fees;
}
} else if (isSparkMint) {
final address = map["address"] as String?;
final value = map["valueSat"] as int?;
if (address != null && value != null) {
valueStringSats = value.toString();
addresses.add(address);
}
} else if (isMint) {
// We should be able to assume this belongs to this wallet
final address = map["address"] as String?;
final value = map["valueSat"] as int?;
if (address != null && value != null) {
valueStringSats = value.toString();
addresses.add(address);
}
} else if (isJMint) {
// anon fees
final nFee = Decimal.tryParse(map["nFees"].toString());
if (nFee != null) {
final fees = Amount.fromDecimal(
nFee,
fractionDigits: cryptoCurrency.fractionDigits,
);
anonFees = anonFees! + fees;
}
} else if (coinbase == null && txid != null && vout != null) {
final inputTx = await electrumXCachedClient.getTransaction(
txHash: txid,
            cryptoCurrency: cryptoCurrency,
          );

          final prevOutJson = Map<String, dynamic>.from(
            (inputTx["vout"] as List).firstWhere((e) => e["n"] == vout) as Map,
          );

          final prevOut = OutputV2.fromElectrumXJson(
            prevOutJson,
            decimalPlaces: cryptoCurrency.fractionDigits,
            isFullAmountNotSats: true,
walletOwns: false, // doesn't matter here as this is not saved
);
valueStringSats = prevOut.valueStringSats;
addresses.addAll(prevOut.addresses);
} else if (coinbase == null) {
Util.printJson(map, "NON TXID INPUT");
}
InputV2 input = InputV2.isarCantDoRequiredInDefaultConstructor(
scriptSigHex: map["scriptSig"]?["hex"] as String?,
scriptSigAsm: map["scriptSig"]?["asm"] as String?,
sequence: map["sequence"] as int?,
outpoint: outpoint,
valueStringSats: valueStringSats,
addresses: addresses,
witness: map["witness"] as String?,
coinbase: coinbase,
innerRedeemScriptAsm: map["innerRedeemscriptAsm"] as String?,
// don't know yet if wallet owns. Need addresses first
walletOwns: false,
);
if (allAddressesSet.intersection(input.addresses.toSet()).isNotEmpty) {
wasSentFromThisWallet = true;
input = input.copyWith(walletOwns: true);
} else if (isMySpark) {
final lTags = map["lTags"] as List?;
if (lTags?.isNotEmpty == true) {
final List<SparkCoin> usedCoins = [];
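            // Each lTag is serialized as "(x,y)"; strip the parentheses, hash
            // the two components, and match the hash against this wallet's
            // spark coins to find which owned coins were spent.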
for (final tag in lTags!) {
final components = (tag as String).split(",");
final x = components[0].substring(1);
final y = components[1].substring(0, components[1].length - 1);
final hash = LibSpark.hashTag(x, y);
usedCoins.addAll(sparkCoins.where((e) => e.lTagHash == hash));
}
if (usedCoins.isNotEmpty) {
input = input.copyWith(
addresses: usedCoins.map((e) => e.address).toList(),
valueStringSats: usedCoins
.map((e) => e.value)
.reduce((value, element) => value += element)
.toString(),
walletOwns: true,
);
wasSentFromThisWallet = true;
}
}
}
inputs.add(input);
}
final totalSpentFromWallet = inputs
.where((e) => e.walletOwns)
.map((e) => e.value)
.fold(BigInt.zero, (value, element) => value + element);
final totalReceivedInWallet = outputs
.where((e) => e.walletOwns)
.map((e) => e.value)
.fold(BigInt.zero, (value, element) => value + element);
final totalOut = outputs
.map((e) => e.value)
.fold(BigInt.zero, (value, element) => value + element);
TransactionType type;
TransactionSubType subType = TransactionSubType.none;
// TODO integrate the following with the next bit (maybe)
if (isSparkSpend) {
subType = TransactionSubType.sparkSpend;
} else if (isSparkMint) {
subType = TransactionSubType.sparkMint;
} else if (isMint) {
subType = TransactionSubType.mint;
} else if (isJMint) {
subType = TransactionSubType.join;
}
// at least one input was owned by this wallet
if (wasSentFromThisWallet) {
type = TransactionType.outgoing;
if (wasReceivedInThisWallet) {
if (isSparkSpend) {
if (totalSpentFromWallet -
(totalReceivedInWallet + anonFees!.raw) ==
BigInt.zero) {
// definitely sent all to self
type = TransactionType.sentToSelf;
}
} else if (changeAmountReceivedInThisWallet +
amountReceivedInThisWallet ==
totalOut) {
// definitely sent all to self
type = TransactionType.sentToSelf;
} else if (amountReceivedInThisWallet == BigInt.zero) {
// most likely just a typical send
// do nothing here yet
}
}
} else if (wasReceivedInThisWallet) {
// only found outputs owned by this wallet
type = TransactionType.incoming;
} else {
Logging.instance.log(
"Unexpected tx found (ignoring it): $txData",
level: LogLevel.Error,
);
continue;
}
String? otherData;
if (anonFees != null) {
otherData = jsonEncode(
{
"overrideFee": anonFees.toJsonString(),
},
);
}
final tx = TransactionV2(
walletId: walletId,
blockHash: txData["blockhash"] as String?,
hash: txData["hash"] as String,
txid: txData["txid"] as String,
height: txData["height"] as int?,
version: txData["version"] as int,
timestamp: txData["blocktime"] as int? ??
DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
inputs: List.unmodifiable(inputs),
outputs: List.unmodifiable(outputs),
type: type,
subType: subType,
otherData: otherData,
);
if (_unconfirmedTxids.contains(tx.txid)) {
if (tx.isConfirmed(await chainHeight, cryptoCurrency.minConfirms)) {
txns.add(tx);
_unconfirmedTxids.removeWhere((e) => e == tx.txid);
} else {
// don't update in db until confirmed
}
} else {
txns.add(tx);
}
}
await mainDB.updateOrPutTransactionV2s(txns);
}
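
  // Flags (blocks) any UTXO worth exactly 1000 FIRO by default, since an
  // output of that exact amount may be masternode collateral.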
@override
Future<
({
String? blockedReason,
bool blocked,
String? utxoLabel,
})> checkBlockUTXO(
    Map<String, dynamic> jsonUTXO,
    String? scriptPubKeyHex,
    Map<String, dynamic>? jsonTX,
    String? utxoOwnerAddress,
  ) async {
    bool blocked = false;
String? blockedReason;
String? label;
if (jsonUTXO["value"] is int) {
// TODO: [prio=med] use special electrumx call to verify the 1000 Firo output is masternode
blocked = Amount.fromDecimal(
Decimal.fromInt(
1000, // 1000 firo output is a possible master node
),
fractionDigits: cryptoCurrency.fractionDigits,
).raw ==
BigInt.from(jsonUTXO["value"] as int);
if (blocked) {
blockedReason = "Possible masternode output. "
"Unlock and spend at your own risk.";
label = "Possible masternode";
}
}
return (blockedReason: blockedReason, blocked: blocked, utxoLabel: label);
}
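
  // Full restore / rescan. Re-derives receive and change addresses (batched
  // when the server supports it), rebuilds transactions and UTXOs, then
  // recovers Spark balances and, if enabled, Lelantus balances.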
@override
Future<void> recover({required bool isRescan}) async {
final root = await getRootHDNode();
final List<Future<({int index, List<Address> addresses})>> receiveFutures =
[];
final List<Future<({int index, List<Address> addresses})>> changeFutures =
[];
const receiveChain = 0;
const changeChain = 1;
const txCountBatchSize = 12;
try {
await refreshMutex.protect(() async {
if (isRescan) {
// clear cache
await electrumXCachedClient.clearSharedTransactionCache(
cryptoCurrency: info.coin,
);
// clear blockchain info
await mainDB.deleteWalletBlockchainData(walletId);
}
// Parse otherDataJsonString to get the enableLelantusScanning value.
bool? enableLelantusScanning = false;
if (info.otherDataJsonString != null) {
final otherDataJson = json.decode(info.otherDataJsonString!);
enableLelantusScanning =
otherDataJson[WalletInfoKeys.enableLelantusScanning] as bool? ??
false;
}
// lelantus
int? latestSetId;
Future<Map<int, dynamic>>? setDataMapFuture;
Future<List<String>>? usedSerialNumbersFuture;
if (enableLelantusScanning) {
latestSetId = await electrumXClient.getLelantusLatestCoinId();
setDataMapFuture = getSetDataMap(latestSetId);
usedSerialNumbersFuture = electrumXCachedClient.getUsedCoinSerials(
cryptoCurrency: info.coin,
);
}
        // spark
        final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
        final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
          groupId: latestSparkCoinId.toString(),
          cryptoCurrency: info.coin,
          useOnlyCacheIfNotEmpty: false,
        );
        final sparkUsedCoinTagsFuture =
            electrumXCachedClient.getSparkUsedCoinsTags(
          cryptoCurrency: info.coin,
        );
// receiving addresses
Logging.instance.log(
"checking receiving addresses...",
level: LogLevel.Info,
);
        final canBatch = await serverCanBatch;

        for (final type in cryptoCurrency.supportedDerivationPathTypes) {
          receiveFutures.add(
            canBatch
? checkGapsBatched(
txCountBatchSize,
root,
type,
receiveChain,
)
: checkGapsLinearly(
root,
type,
receiveChain,
),
);
}
// change addresses
Logging.instance.log(
"checking change addresses...",
level: LogLevel.Info,
);
for (final type in cryptoCurrency.supportedDerivationPathTypes) {
changeFutures.add(
            canBatch
? checkGapsBatched(
txCountBatchSize,
root,
type,
changeChain,
)
: checkGapsLinearly(
root,
type,
changeChain,
),
);
}
// io limitations may require running these linearly instead
final futuresResult = await Future.wait([
Future.wait(receiveFutures),
Future.wait(changeFutures),
]);
final receiveResults = futuresResult[0];
final changeResults = futuresResult[1];
final List<Address> addressesToStore = [];
int highestReceivingIndexWithHistory = 0;
for (final tuple in receiveResults) {
if (tuple.addresses.isEmpty) {
await checkReceivingAddressForTransactions();
} else {
highestReceivingIndexWithHistory = max(
tuple.index,
highestReceivingIndexWithHistory,
);
addressesToStore.addAll(tuple.addresses);
}
}
int highestChangeIndexWithHistory = 0;
        // If restoring a wallet that never used change addresses, generate one
        // manually here; otherwise no change address would be stored at all.
for (final tuple in changeResults) {
if (tuple.addresses.isEmpty) {
await checkChangeAddressForTransactions();
} else {
highestChangeIndexWithHistory = max(
tuple.index,
highestChangeIndexWithHistory,
);
addressesToStore.addAll(tuple.addresses);
}
}
// remove extra addresses to help minimize risk of creating a large gap
addressesToStore.removeWhere(
(e) =>
e.subType == AddressSubType.change &&
e.derivationIndex > highestChangeIndexWithHistory,
);
addressesToStore.removeWhere(
(e) =>
e.subType == AddressSubType.receiving &&
e.derivationIndex > highestReceivingIndexWithHistory,
);
await mainDB.updateOrPutAddresses(addressesToStore);
await Future.wait([
updateTransactions(),
updateUTXOs(),
]);
List<Future<dynamic>> futures = [];
futures.add(sparkAnonSetFuture);
futures.add(sparkUsedCoinTagsFuture);
if (enableLelantusScanning) {
futures.add(usedSerialNumbersFuture!);
futures.add(setDataMapFuture!);
}
final futureResults = await Future.wait(futures);
// lelantus
Set<String>? usedSerialsSet;
Map<dynamic, dynamic>? setDataMap;
if (enableLelantusScanning) {
usedSerialsSet = (futureResults[2] as List<String>).toSet();
setDataMap = futureResults[3] as Map<dynamic, dynamic>;
}
// spark
final sparkAnonymitySet = futureResults[0] as Map<String, dynamic>;
final sparkSpentCoinTags = futureResults[1] as Set<String>;
if (Util.isDesktop) {
List<Future<dynamic>> futures = [];
if (enableLelantusScanning) {
futures.add(recoverLelantusWallet(
latestSetId: latestSetId!,
usedSerialNumbers: usedSerialsSet!,
setDataMap: setDataMap!,
));
}
futures.add(recoverSparkWallet(
anonymitySet: sparkAnonymitySet,
spentCoinTags: sparkSpentCoinTags,
));
await Future.wait(futures);
} else {
if (enableLelantusScanning) {
await recoverLelantusWallet(
latestSetId: latestSetId!,
usedSerialNumbers: usedSerialsSet!,
setDataMap: setDataMap!,
);
}
          await recoverSparkWallet(
            anonymitySet: sparkAnonymitySet,
            spentCoinTags: sparkSpentCoinTags,
          );
        }
      });

      unawaited(refresh());
    } catch (e, s) {
      Logging.instance.log(
        "Exception rethrown from FiroWallet recover(): $e\n$s",
        level: LogLevel.Info,
      );

      rethrow;
    }
}
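
  // Rough size-based fee estimate using the common P2PKH heuristic of roughly
  // 181 bytes per input + 34 bytes per output + 10 bytes of overhead,
  // multiplied by the fee rate converted from per-KB to per-byte.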
@override
Amount roughFeeEstimate(int inputCount, int outputCount, int feeRatePerKB) {
return Amount(
rawValue: BigInt.from(
((181 * inputCount) + (34 * outputCount) + 10) *
(feeRatePerKB / 1000).ceil(),
),
fractionDigits: cryptoCurrency.fractionDigits,
);
}
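
  // Same per-byte fee conversion as above, applied to a known virtual size.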
@override
int estimateTxFee({required int vSize, required int feeRatePerKB}) {
return vSize * (feeRatePerKB / 1000).ceil();
}
// ===========================================================================
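
  // Defaults to true when no value has been stored in the wallet info;
  // firoRescanRecovery() clears it after a successful full rescan.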
  bool get lelantusCoinIsarRescanRequired =>
      info.otherData[WalletInfoKeys.lelantusCoinIsarRescanRequired] as bool? ??
      true;
Future<bool> firoRescanRecovery() async {
try {
await recover(isRescan: true);
await info.updateOtherData(
newEntries: {WalletInfoKeys.lelantusCoinIsarRescanRequired: false},
isar: mainDB.isar,
);
return true;
} catch (_) {
return false;
}
}
}