import 'dart:async';
import 'dart:convert';
import 'dart:math';

import 'package:decimal/decimal.dart';
import 'package:isar/isar.dart';

import '../../../db/sqlite/firo_cache.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';
import '../../../models/isar/models/isar_models.dart';
import '../../../utilities/amount/amount.dart';
import '../../../utilities/extensions/extensions.dart';
import '../../../utilities/logger.dart';
import '../../../utilities/util.dart';
import '../../crypto_currency/crypto_currency.dart';
import '../../crypto_currency/interfaces/electrumx_currency_interface.dart';
import '../../isar/models/spark_coin.dart';
import '../../isar/models/wallet_info.dart';
import '../../models/tx_data.dart';
import '../intermediate/bip39_hd_wallet.dart';
import '../wallet_mixin_interfaces/coin_control_interface.dart';
import '../wallet_mixin_interfaces/electrumx_interface.dart';
import '../wallet_mixin_interfaces/lelantus_interface.dart';
import '../wallet_mixin_interfaces/spark_interface.dart';

const sparkStartBlock = 819300; // (approx 18 Jan 2024)

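/// Firo wallet implementation combining the standard ElectrumX-backed BIP39 HD
/// wallet behavior with Lelantus and Spark privacy support and coin control.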
class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
    with
        ElectrumXInterface<T>,
        LelantusInterface<T>,
        SparkInterface<T>,
        CoinControlInterface<T> {
  // IMPORTANT: The order of the above mixins matters.
  // SparkInterface MUST come after LelantusInterface.

  FiroWallet(CryptoCurrencyNetwork network) : super(Firo(network) as T);

  @override
  int get isarTransactionVersion => 2;

  @override
  FilterOperation? get changeAddressFilterOperation =>
      FilterGroup.and(standardChangeAddressFilters);

  @override
  FilterOperation? get receivingAddressFilterOperation =>
      FilterGroup.and(standardReceivingAddressFilters);

  final Set<String> _unconfirmedTxids = {};

  // ===========================================================================

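  /// Caches a freshly broadcast transaction and remembers its txid so history
  /// refreshes don't overwrite it before it confirms.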
  @override
  Future<TxData> updateSentCachedTxData({required TxData txData}) async {
    if (txData.tempTx != null) {
      await mainDB.updateOrPutTransactionV2s([txData.tempTx!]);
      _unconfirmedTxids.add(txData.tempTx!.txid);
      Logging.instance.log(
        "Added firo unconfirmed: ${txData.tempTx!.txid}",
        level: LogLevel.Info,
      );
    }
    return txData;
  }

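  /// Rebuilds transaction history: gathers candidate txids from transparent
  /// addresses, owned Spark coins, and Spark spends, then parses each raw
  /// transaction into a [TransactionV2] and stores the results.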
  @override
  Future<void> updateTransactions() async {
    final List<Address> allAddressesOld =
        await fetchAddressesForElectrumXScan();

    final Set<String> receivingAddresses = allAddressesOld
        .where((e) => e.subType == AddressSubType.receiving)
        .map((e) => convertAddressString(e.value))
        .toSet();

    final Set<String> changeAddresses = allAddressesOld
        .where((e) => e.subType == AddressSubType.change)
        .map((e) => convertAddressString(e.value))
        .toSet();

    final allAddressesSet = {...receivingAddresses, ...changeAddresses};

    final List<Map<String, dynamic>> allTxHashes =
        await fetchHistory(allAddressesSet);

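    // Owned Spark coins can be minted in transactions that never touch a
    // transparent wallet address, so include their txids explicitly.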
    final sparkCoins = await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .findAll();

    final Set<String> sparkTxids = {};

    for (final coin in sparkCoins) {
      sparkTxids.add(coin.txHash);
      // check for duplicates before adding to list
      if (allTxHashes.indexWhere((e) => e["tx_hash"] == coin.txHash) == -1) {
        final info = {
          "tx_hash": coin.txHash,
          "height": coin.height,
        };
        allTxHashes.add(info);
      }
    }

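    // Likewise include Spark spend txids that aren't yet recorded in the db.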
    final missing = await getMissingSparkSpendTransactionIds();
    for (final txid in missing.map((e) => e.txid).toSet()) {
      allTxHashes.add({
        "tx_hash": txid,
      });
    }

    final List<Map<String, dynamic>> allTransactions = [];

    // some lelantus transactions aren't fetched via wallet addresses so they
    // will never show as confirmed in the gui.
    final unconfirmedTransactions = await mainDB
        .getTransactions(walletId)
        .filter()
        .heightIsNull()
        .findAll();
    for (final tx in unconfirmedTransactions) {
      final txn = await electrumXCachedClient.getTransaction(
        txHash: tx.txid,
        verbose: true,
        cryptoCurrency: info.coin,
      );
      final height = txn["height"] as int?;

      if (height != null) {
        // tx was mined
        // add to allTxHashes
        final info = {
          "tx_hash": tx.txid,
          "height": height,
          "address": tx.address.value?.value,
        };
        allTxHashes.add(info);
      }
    }

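    // Fetch verbose transaction data for every candidate txid.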
    for (final txHash in allTxHashes) {
      // final storedTx = await db
      //     .getTransactions(walletId)
      //     .filter()
      //     .txidEqualTo(txHash["tx_hash"] as String)
      //     .findFirst();

      // if (storedTx == null ||
      //     !storedTx.isConfirmed(currentHeight, MINIMUM_CONFIRMATIONS)) {

      // firod/electrumx seem to take forever to process spark txns so we'll
      // just ignore null errors and check again on next refresh.
      // This could also be a bug in the custom electrumx rpc code
      final Map<String, dynamic> tx;
      try {
        tx = await electrumXCachedClient.getTransaction(
          txHash: txHash["tx_hash"] as String,
          verbose: true,
          cryptoCurrency: info.coin,
        );
      } catch (_) {
        continue;
      }

      // check for duplicates before adding to list
      if (allTransactions
              .indexWhere((e) => e["txid"] == tx["txid"] as String) ==
          -1) {
        tx["height"] ??= txHash["height"];
        allTransactions.add(tx);
      }
      // }
    }

    final List<TransactionV2> txns = [];

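    // Parse each raw transaction: classify it (mint, jmint, spark mint/spend)
    // and total wallet-owned input/output values to determine its type.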
    for (final txData in allTransactions) {
      // set to true if any inputs were detected as owned by this wallet
      bool wasSentFromThisWallet = false;

      // set to true if any outputs were detected as owned by this wallet
      bool wasReceivedInThisWallet = false;
      BigInt amountReceivedInThisWallet = BigInt.zero;
      BigInt changeAmountReceivedInThisWallet = BigInt.zero;

      Amount? anonFees;

      bool isMint = false;
      bool isJMint = false;
      bool isSparkMint = false;
      final bool isMasterNodePayment = false;
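      // Spark spends are identified by the raw transaction's type (9) and
      // version (3) fields.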
      final bool isSparkSpend = txData["type"] == 9 && txData["version"] == 3;
      final bool isMySpark = sparkTxids.contains(txData["txid"] as String);
      final bool isMySpentSpark =
          missing.where((e) => e.txid == txData["txid"]).isNotEmpty;

      final sparkCoinsInvolvedReceived = sparkCoins.where(
        (e) =>
            e.txHash == txData["txid"] ||
            missing.where((f) => e.lTagHash == f.tag).isNotEmpty,
      );

      final sparkCoinsInvolvedSpent = sparkCoins.where(
        (e) => missing.where((f) => e.lTagHash == f.tag).isNotEmpty,
      );

      if (isMySpark && sparkCoinsInvolvedReceived.isEmpty && !isMySpentSpark) {
        Logging.instance.log(
          "sparkCoinsInvolvedReceived is empty and should not be! (ignoring tx parsing)",
          level: LogLevel.Error,
        );
        continue;
      }

      if (isMySpentSpark && sparkCoinsInvolvedSpent.isEmpty && !isMySpark) {
        Logging.instance.log(
          "sparkCoinsInvolvedSpent is empty and should not be! (ignoring tx parsing)",
          level: LogLevel.Error,
        );
        continue;
      }

      // parse outputs
      final List<OutputV2> outputs = [];
      for (final outputJson in txData["vout"] as List) {
        final outMap = Map<String, dynamic>.from(outputJson as Map);
        if (outMap["scriptPubKey"]?["type"] == "lelantusmint") {
          final asm = outMap["scriptPubKey"]?["asm"] as String?;
          if (asm != null) {
            if (asm.startsWith("OP_LELANTUSJMINT")) {
              isJMint = true;
            } else if (asm.startsWith("OP_LELANTUSMINT")) {
              isMint = true;
            } else {
              Logging.instance.log(
                "Unknown mint op code found for lelantusmint tx: ${txData["txid"]}",
                level: LogLevel.Error,
              );
            }
          } else {
            Logging.instance.log(
              "ASM for lelantusmint tx: ${txData["txid"]} is null!",
              level: LogLevel.Error,
            );
          }
        }
        if (outMap["scriptPubKey"]?["type"] == "sparkmint" ||
            outMap["scriptPubKey"]?["type"] == "sparksmint") {
          final asm = outMap["scriptPubKey"]?["asm"] as String?;
          if (asm != null) {
            if (asm.startsWith("OP_SPARKMINT") ||
                asm.startsWith("OP_SPARKSMINT")) {
              isSparkMint = true;
            } else {
              Logging.instance.log(
                "Unknown mint op code found for sparkmint tx: ${txData["txid"]}",
                level: LogLevel.Error,
              );
            }
          } else {
            Logging.instance.log(
              "ASM for sparkmint tx: ${txData["txid"]} is null!",
              level: LogLevel.Error,
            );
          }
        }

        OutputV2 output = OutputV2.fromElectrumXJson(
          outMap,
          decimalPlaces: cryptoCurrency.fractionDigits,
          isFullAmountNotSats: true,
          // don't know yet if wallet owns. Need addresses first
          walletOwns: false,
        );

        // if (isSparkSpend) {
        //   // TODO?
        // } else
        if (isSparkMint) {
          if (isMySpark) {
            if (output.addresses.isEmpty &&
                output.scriptPubKeyHex.length >= 488) {
              // likely spark related
              final opByte = output.scriptPubKeyHex
                  .substring(0, 2)
                  .toUint8ListFromHex
                  .first;
              if (opByte == OP_SPARKMINT || opByte == OP_SPARKSMINT) {
                final serCoin = base64Encode(
                  output.scriptPubKeyHex.substring(2, 488).toUint8ListFromHex,
                );
                final coin = sparkCoinsInvolvedReceived
                    .where((e) => e.serializedCoinB64!.startsWith(serCoin))
                    .firstOrNull;

                if (coin == null) {
                  // not ours
                } else {
                  output = output.copyWith(
                    walletOwns: true,
                    valueStringSats: coin.value.toString(),
                    addresses: [
                      coin.address,
                    ],
                  );
                }
              }
            }
          }
        } else if (isMint || isJMint) {
          // do nothing extra ?
        } else {
          // TODO?
        }

        // if output was to my wallet, add value to amount received
        if (receivingAddresses
            .intersection(output.addresses.toSet())
            .isNotEmpty) {
          wasReceivedInThisWallet = true;
          amountReceivedInThisWallet += output.value;
          output = output.copyWith(walletOwns: true);
        } else if (changeAddresses
            .intersection(output.addresses.toSet())
            .isNotEmpty) {
          wasReceivedInThisWallet = true;
          changeAmountReceivedInThisWallet += output.value;
          output = output.copyWith(walletOwns: true);
        } else if (isSparkMint && isMySpark) {
          wasReceivedInThisWallet = true;
          if (output.addresses.contains(sparkChangeAddress)) {
            changeAmountReceivedInThisWallet += output.value;
          } else {
            amountReceivedInThisWallet += output.value;
          }
        }

        outputs.add(output);
      }

      if (isJMint || isSparkSpend) {
        anonFees = Amount(
          rawValue: BigInt.zero,
          fractionDigits: cryptoCurrency.fractionDigits,
        );
      }

      // parse inputs
      final List<InputV2> inputs = [];
      for (final jsonInput in txData["vin"] as List) {
        final map = Map<String, dynamic>.from(jsonInput as Map);

        final List<String> addresses = [];
        String valueStringSats = "0";
        OutpointV2? outpoint;

        final coinbase = map["coinbase"] as String?;

        final txid = map["txid"] as String?;
        final vout = map["vout"] as int?;
        if (txid != null && vout != null) {
          outpoint = OutpointV2.isarCantDoRequiredInDefaultConstructor(
            txid: txid,
            vout: vout,
          );
        }

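        // Spend-type inputs carry an "nFees" field; accumulate it into the
        // transaction's anonymity fee total.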
        void parseAnonFees() {
          // anon fees
          final nFee = Decimal.tryParse(map["nFees"].toString());
          if (nFee != null) {
            final fees = Amount.fromDecimal(
              nFee,
              fractionDigits: cryptoCurrency.fractionDigits,
            );

            anonFees = anonFees! + fees;
          }
        }

        List<SparkCoin>? spentSparkCoins;

        if (isMySpentSpark) {
          parseAnonFees();
          final tags = await FiroCacheCoordinator.getUsedCoinTagsFor(
            txid: txData["txid"] as String,
          );
          spentSparkCoins = sparkCoinsInvolvedSpent
              .where(
                (e) => tags.contains(e.lTagHash),
              )
              .toList();
        } else if (isSparkSpend) {
          parseAnonFees();
        } else if (isSparkMint) {
          final address = map["address"] as String?;
          final value = map["valueSat"] as int?;

          if (address != null && value != null) {
            valueStringSats = value.toString();
            addresses.add(address);
          }
        } else if (isMint) {
          // We should be able to assume this belongs to this wallet
          final address = map["address"] as String?;
          final value = map["valueSat"] as int?;

          if (address != null && value != null) {
            valueStringSats = value.toString();
            addresses.add(address);
          }
        } else if (isJMint) {
          // anon fees
          final nFee = Decimal.tryParse(map["nFees"].toString());
          if (nFee != null) {
            final fees = Amount.fromDecimal(
              nFee,
              fractionDigits: cryptoCurrency.fractionDigits,
            );

            anonFees = anonFees! + fees;
          }
        } else if (coinbase == null && txid != null && vout != null) {
          final inputTx = await electrumXCachedClient.getTransaction(
            txHash: txid,
            cryptoCurrency: cryptoCurrency,
          );

          final prevOutJson = Map<String, dynamic>.from(
            (inputTx["vout"] as List).firstWhere((e) => e["n"] == vout) as Map,
          );

          final prevOut = OutputV2.fromElectrumXJson(
            prevOutJson,
            decimalPlaces: cryptoCurrency.fractionDigits,
            isFullAmountNotSats: true,
            walletOwns: false, // doesn't matter here as this is not saved
          );

          valueStringSats = prevOut.valueStringSats;
          addresses.addAll(prevOut.addresses);
        } else if (coinbase == null) {
          Util.printJson(map, "NON TXID INPUT");
        }

        InputV2 input = InputV2.isarCantDoRequiredInDefaultConstructor(
          scriptSigHex: map["scriptSig"]?["hex"] as String?,
          scriptSigAsm: map["scriptSig"]?["asm"] as String?,
          sequence: map["sequence"] as int?,
          outpoint: outpoint,
          valueStringSats: valueStringSats,
          addresses: addresses,
          witness: map["witness"] as String?,
          coinbase: coinbase,
          innerRedeemScriptAsm: map["innerRedeemscriptAsm"] as String?,
          // don't know yet if wallet owns. Need addresses first
          walletOwns: false,
        );

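        // Mark the input as wallet-owned if it matches a transparent address,
        // one of this wallet's Spark lTags, or a known spent Spark coin.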
        if (allAddressesSet.intersection(input.addresses.toSet()).isNotEmpty) {
          wasSentFromThisWallet = true;
          input = input.copyWith(walletOwns: true);
        } else if (isMySpark) {
          final lTags = map["lTags"] as List?;

          if (lTags?.isNotEmpty == true) {
            final List<SparkCoin> usedCoins = [];
            for (final tag in lTags!) {
              final hash = await hashTag(tag as String);
              usedCoins.addAll(sparkCoins.where((e) => e.lTagHash == hash));
            }

            if (usedCoins.isNotEmpty) {
              input = input.copyWith(
                addresses: usedCoins.map((e) => e.address).toList(),
                valueStringSats: usedCoins
                    .map((e) => e.value)
                    .reduce((value, element) => value += element)
                    .toString(),
                walletOwns: true,
              );
              wasSentFromThisWallet = true;
            }
          }
        } else if (isMySpentSpark &&
            spentSparkCoins != null &&
            spentSparkCoins.isNotEmpty) {
          input = input.copyWith(
            addresses: spentSparkCoins.map((e) => e.address).toList(),
            valueStringSats: spentSparkCoins
                .map((e) => e.value)
                .fold(BigInt.zero, (p, e) => p + e)
                .toString(),
            walletOwns: true,
          );
          wasSentFromThisWallet = true;
        }

        inputs.add(input);
      }

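      // Wallet-owned totals used below to distinguish self-sends from normal
      // outgoing transactions.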
      final totalSpentFromWallet = inputs
          .where((e) => e.walletOwns)
          .map((e) => e.value)
          .fold(BigInt.zero, (value, element) => value + element);

      final totalReceivedInWallet = outputs
          .where((e) => e.walletOwns)
          .map((e) => e.value)
          .fold(BigInt.zero, (value, element) => value + element);

      final totalOut = outputs
          .map((e) => e.value)
          .fold(BigInt.zero, (value, element) => value + element);

      TransactionType type;
      TransactionSubType subType = TransactionSubType.none;

      // TODO integrate the following with the next bit (maybe)
      if (isSparkSpend) {
        subType = TransactionSubType.sparkSpend;
      } else if (isSparkMint) {
        subType = TransactionSubType.sparkMint;
      } else if (isMint) {
        subType = TransactionSubType.mint;
      } else if (isJMint) {
        subType = TransactionSubType.join;
      }

      // at least one input was owned by this wallet
      if (wasSentFromThisWallet) {
        type = TransactionType.outgoing;

        if (wasReceivedInThisWallet) {
          if (isSparkSpend) {
            if (totalSpentFromWallet -
                    (totalReceivedInWallet + anonFees!.raw) ==
                BigInt.zero) {
              // definitely sent all to self
              type = TransactionType.sentToSelf;
            }
          } else if (changeAmountReceivedInThisWallet +
                  amountReceivedInThisWallet ==
              totalOut) {
            // definitely sent all to self
            type = TransactionType.sentToSelf;
          } else if (amountReceivedInThisWallet == BigInt.zero) {
            // most likely just a typical send
            // do nothing here yet
          }
        }
      } else if (wasReceivedInThisWallet) {
        // only found outputs owned by this wallet
        type = TransactionType.incoming;
      } else {
        Logging.instance.log(
          "Unexpected tx found (ignoring it): $txData",
          level: LogLevel.Error,
        );
        continue;
      }

      String? otherData;
      if (anonFees != null) {
        otherData = jsonEncode(
          {
            "overrideFee": anonFees!.toJsonString(),
          },
        );
      }

      final tx = TransactionV2(
        walletId: walletId,
        blockHash: txData["blockhash"] as String?,
        hash: txData["hash"] as String,
        txid: txData["txid"] as String,
        height: txData["height"] as int?,
        version: txData["version"] as int,
        timestamp: txData["blocktime"] as int? ??
            DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
        inputs: List.unmodifiable(inputs),
        outputs: List.unmodifiable(outputs),
        type: type,
        subType: subType,
        otherData: otherData,
      );

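      // Transactions this wallet broadcast are only (re)written to the db once
      // confirmed so the cached temp transaction isn't overwritten early.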
      if (_unconfirmedTxids.contains(tx.txid)) {
        if (tx.isConfirmed(await chainHeight, cryptoCurrency.minConfirms)) {
          txns.add(tx);
          _unconfirmedTxids.removeWhere((e) => e == tx.txid);
        } else {
          // don't update in db until confirmed
        }
      } else {
        txns.add(tx);
      }
    }

    await mainDB.updateOrPutTransactionV2s(txns);
  }

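  /// Marks a UTXO as blocked for coin control if it appears to be masternode
  /// collateral: an exact 1000 FIRO output confirmed via the ElectrumX
  /// isMasterNodeCollateral call.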
  @override
  Future<
      ({
        String? blockedReason,
        bool blocked,
        String? utxoLabel,
      })> checkBlockUTXO(
    Map<String, dynamic> jsonUTXO,
    String? scriptPubKeyHex,
    Map<String, dynamic>? jsonTX,
    String? utxoOwnerAddress,
  ) async {
    bool blocked = false;
    String? blockedReason;
    String? label;

    if (jsonUTXO["value"] is int) {
      // TODO: [prio=high] use special electrumx call to verify the 1000 Firo output is masternode
      // electrumx call should exist now. Unsure if it works though
      blocked = Amount.fromDecimal(
            Decimal.fromInt(
              1000, // 1000 firo output is a possible master node
            ),
            fractionDigits: cryptoCurrency.fractionDigits,
          ).raw ==
          BigInt.from(jsonUTXO["value"] as int);

      if (blocked) {
        blocked = await electrumXClient.isMasterNodeCollateral(
          txid: jsonTX!["txid"] as String,
          index: jsonUTXO["tx_pos"] as int,
        );
      }

      if (blocked) {
        blockedReason = "Masternode collateral. "
            "Unlock and spend at your own risk.";
        label = "Masternode collateral";
      }
    }

    return (blockedReason: blockedReason, blocked: blocked, utxoLabel: label);
  }

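  /// Restores (or rescans, if [isRescan] is true) the wallet: scans address
  /// gaps, rebuilds transparent history and UTXOs, then recovers Lelantus
  /// (when enabled) and Spark balances.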
  @override
  Future<void> recover({required bool isRescan}) async {
    // reset last checked values
    await info.updateOtherData(
      newEntries: {
        WalletInfoKeys.firoSparkCacheSetTimestampCache: <String, int>{},
      },
      isar: mainDB.isar,
    );

    final start = DateTime.now();
    final root = await getRootHDNode();

    final List<Future<({int index, List<Address> addresses})>> receiveFutures =
        [];
    final List<Future<({int index, List<Address> addresses})>> changeFutures =
        [];

    const receiveChain = 0;
    const changeChain = 1;

    const txCountBatchSize = 12;

    try {
      await refreshMutex.protect(() async {
        if (isRescan) {
          // clear cache
          await electrumXCachedClient.clearSharedTransactionCache(
            cryptoCurrency: info.coin,
          );
          // clear blockchain info
          await mainDB.deleteWalletBlockchainData(walletId);
        }

        // lelantus
        int? latestSetId;
        final List<Future<dynamic>> lelantusFutures = [];
        final enableLelantusScanning =
            info.otherData[WalletInfoKeys.enableLelantusScanning] as bool? ??
                false;
        if (enableLelantusScanning) {
          latestSetId = await electrumXClient.getLelantusLatestCoinId();
          lelantusFutures.add(
            electrumXCachedClient.getUsedCoinSerials(
              cryptoCurrency: info.coin,
            ),
          );
          lelantusFutures.add(getSetDataMap(latestSetId));
        }

        // spark
        final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
        final List<Future<void>> sparkAnonSetFutures = [];
        for (int i = 1; i <= latestSparkCoinId; i++) {
          sparkAnonSetFutures.add(
            FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
              i,
              electrumXClient,
            ),
          );
        }
        final sparkUsedCoinTagsFuture =
            FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
          electrumXClient,
        );

        // receiving addresses
        Logging.instance.log(
          "checking receiving addresses...",
          level: LogLevel.Info,
        );

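        // Gap scanning can be batched when the ElectrumX server supports
        // batch requests; otherwise fall back to linear checks.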
        final canBatch = await serverCanBatch;

        for (final type in cryptoCurrency.supportedDerivationPathTypes) {
          receiveFutures.add(
            canBatch
                ? checkGapsBatched(
                    txCountBatchSize,
                    root,
                    type,
                    receiveChain,
                  )
                : checkGapsLinearly(
                    root,
                    type,
                    receiveChain,
                  ),
          );
        }

        // change addresses
        Logging.instance.log(
          "checking change addresses...",
          level: LogLevel.Info,
        );
        for (final type in cryptoCurrency.supportedDerivationPathTypes) {
          changeFutures.add(
            canBatch
                ? checkGapsBatched(
                    txCountBatchSize,
                    root,
                    type,
                    changeChain,
                  )
                : checkGapsLinearly(
                    root,
                    type,
                    changeChain,
                  ),
          );
        }

        // io limitations may require running these linearly instead
        final futuresResult = await Future.wait([
          Future.wait(receiveFutures),
          Future.wait(changeFutures),
        ]);

        final receiveResults = futuresResult[0];
        final changeResults = futuresResult[1];

        final List<Address> addressesToStore = [];

        int highestReceivingIndexWithHistory = 0;

        for (final tuple in receiveResults) {
          if (tuple.addresses.isEmpty) {
            await checkReceivingAddressForTransactions();
          } else {
            highestReceivingIndexWithHistory = max(
              tuple.index,
              highestReceivingIndexWithHistory,
            );
            addressesToStore.addAll(tuple.addresses);
          }
        }

        int highestChangeIndexWithHistory = 0;
        // If restoring a wallet that never sent any funds with change, then set changeArray
        // manually. If we didn't do this, it'd store an empty array.
        for (final tuple in changeResults) {
          if (tuple.addresses.isEmpty) {
            await checkChangeAddressForTransactions();
          } else {
            highestChangeIndexWithHistory = max(
              tuple.index,
              highestChangeIndexWithHistory,
            );
            addressesToStore.addAll(tuple.addresses);
          }
        }

        // remove extra addresses to help minimize risk of creating a large gap
        addressesToStore.removeWhere(
          (e) =>
              e.subType == AddressSubType.change &&
              e.derivationIndex > highestChangeIndexWithHistory,
        );
        addressesToStore.removeWhere(
          (e) =>
              e.subType == AddressSubType.receiving &&
              e.derivationIndex > highestReceivingIndexWithHistory,
        );

        await mainDB.updateOrPutAddresses(addressesToStore);

        await Future.wait([
          updateTransactions(),
          updateUTXOs(),
        ]);

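        // Await the Lelantus serials/set data and Spark cache updates started
        // above before running the actual Lelantus/Spark recovery.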
        final List<Future<dynamic>> futures = [];
        if (enableLelantusScanning) {
          futures.add(lelantusFutures[0]);
          futures.add(lelantusFutures[1]);
        }
        futures.add(sparkUsedCoinTagsFuture);
        futures.addAll(sparkAnonSetFutures);

        final futureResults = await Future.wait(futures);

        // lelantus
        Set<String>? usedSerialsSet;
        Map<dynamic, dynamic>? setDataMap;
        if (enableLelantusScanning) {
          usedSerialsSet = (futureResults[0] as List<String>).toSet();
          setDataMap = futureResults[1] as Map<dynamic, dynamic>;
        }

        if (Util.isDesktop) {
          await Future.wait([
            if (enableLelantusScanning)
              recoverLelantusWallet(
                latestSetId: latestSetId!,
                usedSerialNumbers: usedSerialsSet!,
                setDataMap: setDataMap!,
              ),
            recoverSparkWallet(
              latestSparkCoinId: latestSparkCoinId,
            ),
          ]);
        } else {
          if (enableLelantusScanning) {
            await recoverLelantusWallet(
              latestSetId: latestSetId!,
              usedSerialNumbers: usedSerialsSet!,
              setDataMap: setDataMap!,
            );
          }
          await recoverSparkWallet(
            latestSparkCoinId: latestSparkCoinId,
          );
        }
      });

      unawaited(refresh());
      Logging.instance.log(
        "Firo recover for "
        "${info.name}: ${DateTime.now().difference(start)}",
        level: LogLevel.Info,
      );
    } catch (e, s) {
      Logging.instance.log(
        "Exception rethrown from FiroWallet recover(): $e\n$s",
        level: LogLevel.Info,
      );

      rethrow;
    }
  }

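  /// Rough legacy-transaction fee estimate: assumes ~181 bytes per input,
  /// ~34 bytes per output plus 10 bytes of overhead, multiplied by the fee
  /// rate rounded up to whole satoshis per byte.
  ///
  /// Hypothetical usage sketch (names and values are illustrative only):
  /// ```dart
  /// // 2 inputs, 2 outputs at 1000 sat/kB -> (2*181 + 2*34 + 10) * 1 sats
  /// final Amount fee = wallet.roughFeeEstimate(2, 2, 1000);
  /// ```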
  @override
  Amount roughFeeEstimate(int inputCount, int outputCount, int feeRatePerKB) {
    return Amount(
      rawValue: BigInt.from(
        ((181 * inputCount) + (34 * outputCount) + 10) *
            (feeRatePerKB / 1000).ceil(),
      ),
      fractionDigits: cryptoCurrency.fractionDigits,
    );
  }

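  /// Fee in satoshis for a transaction of [vSize] virtual bytes at
  /// [feeRatePerKB] sats per kilobyte (rate rounded up to whole sats/vByte).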
  @override
  int estimateTxFee({required int vSize, required int feeRatePerKB}) {
    return vSize * (feeRatePerKB / 1000).ceil();
  }

  // ===========================================================================

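  /// Whether a Lelantus coin Isar rescan is still required for this wallet.
  /// Defaults to true when no value has been stored.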
  bool get lelantusCoinIsarRescanRequired =>
      info.otherData[WalletInfoKeys.lelantusCoinIsarRescanRequired] as bool? ??
      true;

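  /// Runs [recover] as a rescan and, on success, clears
  /// [WalletInfoKeys.lelantusCoinIsarRescanRequired]. Returns false if the
  /// rescan throws.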
  Future<bool> firoRescanRecovery() async {
    try {
      await recover(isRescan: true);
      await info.updateOtherData(
        newEntries: {WalletInfoKeys.lelantusCoinIsarRescanRequired: false},
        isar: mainDB.isar,
      );
      return true;
    } catch (_) {
      return false;
    }
  }
}