Mirror of https://github.com/cypherstack/stack_wallet.git, synced 2025-01-03 17:29:23 +00:00
implement usage of scanning for zero change output spark spends
This commit is contained in:
  parent aa195cb98e
  commit 929e334063
4 changed files with 151 additions and 7 deletions
@@ -1,5 +1,7 @@
 part of 'firo_cache.dart';
 
+typedef LTagPair = ({String tag, String txid});
+
 /// Wrapper class for [_FiroCache] as [_FiroCache] should eventually be handled in a
 /// background isolate and [FiroCacheCoordinator] should manage that isolate
 abstract class FiroCacheCoordinator {
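For readers unfamiliar with Dart 3 record typedefs, a minimal standalone sketch (not part of the commit) of how the new LTagPair type behaves; the tag/txid values are placeholders:

typedef LTagPair = ({String tag, String txid});

void main() {
  // A record literal with named fields; fields are read by name.
  final LTagPair pair = (tag: "a1b2c3", txid: "deadbeef");
  print("tag=${pair.tag} txid=${pair.txid}");
}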
@@ -107,6 +109,40 @@ abstract class FiroCacheCoordinator {
     return result.first["count"] as int? ?? 0;
   }
 
+  static Future<List<LTagPair>> getUsedCoinTxidsFor({
+    required List<String> tags,
+  }) async {
+    if (tags.isEmpty) {
+      return [];
+    }
+    final result = await _Reader._getUsedCoinTxidsFor(
+      tags,
+      db: _FiroCache.usedTagsCacheDB,
+    );
+
+    if (result.isEmpty) {
+      return [];
+    }
+    return result.rows
+        .map(
+          (e) => (
+            tag: e[0] as String,
+            txid: e[1] as String,
+          ),
+        )
+        .toList();
+  }
+
+  static Future<Set<String>> getUsedCoinTagsFor({
+    required String txid,
+  }) async {
+    final result = await _Reader._getUsedCoinTagsFor(
+      txid,
+      db: _FiroCache.usedTagsCacheDB,
+    );
+    return result.map((e) => e["tag"] as String).toSet();
+  }
+
   static Future<bool> checkTagIsUsed(
     String tag,
   ) async {
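A minimal usage sketch (not part of the commit) of the two new coordinator methods, assuming it runs where FiroCacheCoordinator and LTagPair are visible; the tag and txid values are hypothetical placeholders:

Future<void> demoUsedCoinLookups() async {
  // Map wallet coin lTag hashes to the txids that spent them, per the cache.
  final List<LTagPair> pairs = await FiroCacheCoordinator.getUsedCoinTxidsFor(
    tags: ["lTagHashA", "lTagHashB"],
  );
  for (final pair in pairs) {
    print("tag ${pair.tag} was spent in tx ${pair.txid}");
  }

  // Reverse direction: all used-coin tags recorded for a single transaction.
  final Set<String> tags = await FiroCacheCoordinator.getUsedCoinTagsFor(
    txid: "someSparkSpendTxid",
  );
  print("tx spends ${tags.length} spark coins");
}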
@@ -86,6 +86,35 @@ abstract class _Reader {
     return db.select("$query;");
   }
 
+  static Future<ResultSet> _getUsedCoinTxidsFor(
+    List<String> tags, {
+    required Database db,
+  }) async {
+    final tagsConcat = tags.join("', '");
+
+    final query = """
+      SELECT tag, GROUP_CONCAT(txid) AS txids
+      FROM SparkUsedCoinTags
+      WHERE tag IN ('$tagsConcat')
+      GROUP BY tag;
+    """;
+
+    return db.select("$query;");
+  }
+
+  static Future<ResultSet> _getUsedCoinTagsFor(
+    String txid, {
+    required Database db,
+  }) async {
+    final query = """
+      SELECT tag
+      FROM SparkUsedCoinTags
+      WHERE txid = '$txid';
+    """;
+
+    return db.select("$query;");
+  }
+
   static Future<bool> _checkTagIsUsed(
     String tag, {
     required Database db,
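The two new _Reader queries interpolate the tags and txid directly into the SQL text. The values are hashes produced by the wallet itself, so this is workable here, but package:sqlite3 also accepts bound parameters. The sketch below is an illustration of that variant under that assumption, not the committed code; the function names are hypothetical:

import 'package:sqlite3/sqlite3.dart';

// Hypothetical bound-parameter equivalents (illustration only).
ResultSet getUsedCoinTagsForTxid(Database db, String txid) {
  return db.select(
    "SELECT tag FROM SparkUsedCoinTags WHERE txid = ?;",
    [txid],
  );
}

ResultSet getUsedCoinTxidsForTags(Database db, List<String> tags) {
  // Build one "?" placeholder per tag for the IN (...) clause.
  final placeholders = List.filled(tags.length, "?").join(", ");
  return db.select(
    "SELECT tag, GROUP_CONCAT(txid) AS txids "
    "FROM SparkUsedCoinTags "
    "WHERE tag IN ($placeholders) "
    "GROUP BY tag;",
    tags,
  );
}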
@@ -105,6 +105,13 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
       }
     }
 
+    final missing = await getMissingSparkSpendTransactionIds();
+    for (final txid in missing.map((e) => e.txid).toSet()) {
+      allTxHashes.add({
+        "tx_hash": txid,
+      });
+    }
+
     final List<Map<String, dynamic>> allTransactions = [];
 
     // some lelantus transactions aren't fetched via wallet addresses so they
@@ -187,12 +194,30 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
       final bool isMasterNodePayment = false;
       final bool isSparkSpend = txData["type"] == 9 && txData["version"] == 3;
       final bool isMySpark = sparkTxids.contains(txData["txid"] as String);
+      final bool isMySpentSpark =
+          missing.where((e) => e.txid == txData["txid"]).isNotEmpty;
 
-      final sparkCoinsInvolved =
-          sparkCoins.where((e) => e.txHash == txData["txid"]);
-      if (isMySpark && sparkCoinsInvolved.isEmpty) {
+      final sparkCoinsInvolvedReceived = sparkCoins.where(
+        (e) =>
+            e.txHash == txData["txid"] ||
+            missing.where((f) => e.lTagHash == f.tag).isNotEmpty,
+      );
+
+      final sparkCoinsInvolvedSpent = sparkCoins.where(
+        (e) => missing.where((f) => e.lTagHash == f.tag).isNotEmpty,
+      );
+
+      if (isMySpark && sparkCoinsInvolvedReceived.isEmpty && !isMySpentSpark) {
         Logging.instance.log(
-          "sparkCoinsInvolved is empty and should not be! (ignoring tx parsing)",
+          "sparkCoinsInvolvedReceived is empty and should not be! (ignoring tx parsing)",
           level: LogLevel.Error,
         );
         continue;
       }
+
+      if (isMySpentSpark && sparkCoinsInvolvedSpent.isEmpty && !isMySpark) {
+        Logging.instance.log(
+          "sparkCoinsInvolvedSpent is empty and should not be! (ignoring tx parsing)",
+          level: LogLevel.Error,
+        );
+        continue;
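Reading the new flags together: isMySpark marks transactions that created coins for this wallet, while isMySpentSpark marks transactions discovered only through the used-tag cache. This appears to be how spends with zero change output (the case named in the commit title) are picked up, since such a spend leaves no wallet-owned output to find via address-based fetching. A simplified, standalone restatement of the two membership checks above (not committed code; the record type mirrors LTagPair):

typedef LTagPair = ({String tag, String txid});

/// True when [txid] is one of the spark spends known only from the used-tag cache.
bool isMySpentSpark(String txid, Set<LTagPair> missing) =>
    missing.any((e) => e.txid == txid);

/// True when a wallet coin with [lTagHash] was consumed by one of those spends.
bool coinSpentInMissingSpend(String lTagHash, Set<LTagPair> missing) =>
    missing.any((e) => e.tag == lTagHash);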
@@ -267,7 +292,7 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
             final serCoin = base64Encode(
               output.scriptPubKeyHex.substring(2, 488).toUint8ListFromHex,
             );
-            final coin = sparkCoinsInvolved
+            final coin = sparkCoinsInvolvedReceived
                 .where((e) => e.serializedCoinB64!.startsWith(serCoin))
                 .firstOrNull;
@@ -343,7 +368,7 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
         );
       }
 
-      if (isSparkSpend) {
+      void parseAnonFees() {
         // anon fees
         final nFee = Decimal.tryParse(map["nFees"].toString());
         if (nFee != null) {
@@ -354,6 +379,22 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
           anonFees = anonFees! + fees;
         }
       }
 
+      List<SparkCoin>? spentSparkCoins;
+
+      if (isMySpentSpark) {
+        parseAnonFees();
+        final tags = await FiroCacheCoordinator.getUsedCoinTagsFor(
+          txid: txData["txid"] as String,
+        );
+        spentSparkCoins = sparkCoinsInvolvedSpent
+            .where(
+              (e) => tags.contains(e.lTagHash),
+            )
+            .toList();
+      } else if (isSparkSpend) {
+        parseAnonFees();
+      } else if (isSparkMint) {
         final address = map["address"] as String?;
         final value = map["valueSat"] as int?;
@@ -444,6 +485,18 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
             wasSentFromThisWallet = true;
           }
         }
+      } else if (isMySpentSpark &&
+          spentSparkCoins != null &&
+          spentSparkCoins.isNotEmpty) {
+        input = input.copyWith(
+          addresses: spentSparkCoins.map((e) => e.address).toList(),
+          valueStringSats: spentSparkCoins
+              .map((e) => e.value)
+              .fold(BigInt.zero, (p, e) => p + e)
+              .toString(),
+          walletOwns: true,
+        );
+        wasSentFromThisWallet = true;
       }
 
       inputs.add(input);
@@ -514,7 +567,7 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
       if (anonFees != null) {
         otherData = jsonEncode(
           {
-            "overrideFee": anonFees.toJsonString(),
+            "overrideFee": anonFees!.toJsonString(),
           },
         );
       }
@@ -696,6 +696,32 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
     }
   }
 
+  Future<Set<LTagPair>> getMissingSparkSpendTransactionIds() async {
+    final tags = await mainDB.isar.sparkCoins
+        .where()
+        .walletIdEqualToAnyLTagHash(walletId)
+        .filter()
+        .isUsedEqualTo(true)
+        .lTagHashProperty()
+        .findAll();
+
+    final usedCoinTxidsFoundLocally = await mainDB.isar.transactionV2s
+        .where()
+        .walletIdEqualTo(walletId)
+        .filter()
+        .subTypeEqualTo(TransactionSubType.sparkSpend)
+        .txidProperty()
+        .findAll();
+
+    final pairs = await FiroCacheCoordinator.getUsedCoinTxidsFor(
+      tags: tags,
+    );
+
+    pairs.removeWhere((e) => usedCoinTxidsFoundLocally.contains(e.txid));
+
+    return pairs.toSet();
+  }
+
   Future<void> refreshSparkBalance() async {
     final currentHeight = await chainHeight;
     final unusedCoins = await mainDB.isar.sparkCoins
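A standalone illustration (not committed code) of the filtering step in getMissingSparkSpendTransactionIds: txids of spends known used on-chain via the cache, minus spends the wallet has already recorded locally. The tag and txid values below are placeholders:

typedef LTagPair = ({String tag, String txid});

void main() {
  final pairs = <LTagPair>[
    (tag: "tagA", txid: "txid1"),
    (tag: "tagB", txid: "txid2"),
  ];
  final locallyKnownTxids = {"txid1"};

  // Drop spends the wallet already has a TransactionV2 record for.
  pairs.removeWhere((e) => locallyKnownTxids.contains(e.txid));

  // Only the spend the wallet has not seen yet remains: txid2.
  print(pairs.map((e) => e.txid).toSet());
}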