cache used spark tags in sqlite as well

julian 2024-05-30 15:10:56 -06:00
parent d99231c973
commit 08f01d3141
8 changed files with 357 additions and 159 deletions


@@ -717,11 +717,13 @@ class DbVersionMigrator with WalletDB {
}
Future<void> _v12(SecureStorageInterface secureStore) async {
for (final identifier in ["firo", "firoTestNet"]) {
await DB.instance.deleteBoxFromDisk(
boxName: "firo_anonymitySetSparkCache",
boxName: "${identifier}_anonymitySetSparkCache",
);
await DB.instance.deleteBoxFromDisk(
boxName: "firoTestNet_anonymitySetSparkCache",
boxName: "${identifier}_sparkUsedCoinsTagsCache",
);
}
}
}
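For reference, a minimal sketch of the Hive box names this migration loop now deletes, simply expanding the interpolation shown above:

void main() {
  for (final identifier in ["firo", "firoTestNet"]) {
    print("${identifier}_anonymitySetSparkCache");
    print("${identifier}_sparkUsedCoinsTagsCache");
  }
  // firo_anonymitySetSparkCache
  // firo_sparkUsedCoinsTagsCache
  // firoTestNet_anonymitySetSparkCache
  // firoTestNet_sparkUsedCoinsTagsCache
}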


@@ -58,8 +58,6 @@ class DB {
"${currency.identifier}_anonymitySetCache";
String _boxNameUsedSerialsCache({required CryptoCurrency currency}) =>
"${currency.identifier}_usedSerialsCache";
String _boxNameSparkUsedCoinsTagsCache({required CryptoCurrency currency}) =>
"${currency.identifier}_sparkUsedCoinsTagsCache";
Box<NodeModel>? _boxNodeModels;
Box<NodeModel>? _boxPrimaryNodes;
@@ -229,18 +227,6 @@ class DB {
);
}
Future<Box<dynamic>> getSparkUsedCoinsTagsCacheBox({
required CryptoCurrency currency,
}) async {
if (_getSparkUsedCoinsTagsCacheBoxes[currency.identifier]?.isOpen != true) {
_getSparkUsedCoinsTagsCacheBoxes.remove(currency.identifier);
}
return _getSparkUsedCoinsTagsCacheBoxes[currency.identifier] ??=
await Hive.openBox<dynamic>(
_boxNameSparkUsedCoinsTagsCache(currency: currency),
);
}
Future<void> closeUsedSerialsCacheBox({
required CryptoCurrency currency,
}) async {
@@ -257,9 +243,6 @@ class DB {
await deleteAll<dynamic>(
boxName: _boxNameUsedSerialsCache(currency: currency),
);
await deleteAll<dynamic>(
boxName: _boxNameSparkUsedCoinsTagsCache(currency: currency),
);
}
}


@@ -2,9 +2,11 @@ import 'dart:async';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:sqlite3/sqlite3.dart';
import '../../electrumx_rpc/electrumx_client.dart';
import '../../utilities/extensions/extensions.dart';
import '../../utilities/logger.dart';
import '../../utilities/stack_file_system.dart';
@@ -18,11 +20,31 @@ void _debugLog(Object? object) {
}
}
List<String> _ffiHashTagsComputeWrapper(List<String> base64Tags) {
return LibSpark.hashTags(base64Tags: base64Tags);
}
/// Wrapper class for [FiroCache] as [FiroCache] should eventually be handled in a
/// Wrapper class for [_FiroCache] as [_FiroCache] should eventually be handled in a
/// background isolate and [FiroCacheCoordinator] should manage that isolate
abstract class FiroCacheCoordinator {
static Future<void> init() => _FiroCache.init();
static Future<void> runFetchAndUpdateSparkUsedCoinTags(
ElectrumXClient client,
) async {
final count = await FiroCacheCoordinator.getUsedCoinTagsLastAddedRowId();
final unhashedTags = await client.getSparkUnhashedUsedCoinsTags(
startNumber: count,
);
if (unhashedTags.isNotEmpty) {
final hashedTags = await compute(
_ffiHashTagsComputeWrapper,
unhashedTags,
);
await _FiroCache._updateSparkUsedTagsWith(hashedTags);
}
}
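Note that the hashing is pushed through Flutter's compute, which runs the callback on a separate isolate; the callback must be safely sendable across isolates, which is presumably why the FFI call is wrapped in the top-level _ffiHashTagsComputeWrapper above rather than passed as an inline closure. A minimal sketch of the same pattern, with a stand-in for the LibSpark call:

import 'package:flutter/foundation.dart';

// A top-level function can always be handed to compute().
List<String> _fakeHashTags(List<String> tags) =>
    tags.map((t) => t.toUpperCase()).toList(); // stand-in for LibSpark.hashTags

Future<List<String>> hashOffMainIsolate(List<String> tags) =>
    compute(_fakeHashTags, tags);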
static Future<void> runFetchAndUpdateSparkAnonSetCacheForGroupId(
int groupId,
ElectrumXClient client,
@@ -38,7 +60,36 @@ abstract class FiroCacheCoordinator {
startBlockHash: blockHash.toHexReversedFromBase64,
);
await _FiroCache._updateWith(json, groupId);
await _FiroCache._updateSparkAnonSetCoinsWith(json, groupId);
}
// ===========================================================================
static Future<Set<String>> getUsedCoinTags(int startNumber) async {
final result = await _FiroCache._getSparkUsedCoinTags(
startNumber,
);
return result.map((e) => e["tag"] as String).toSet();
}
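The mapping above relies on the sqlite3 package's Row supporting map-style access by column name; a minimal self-contained sketch of the same read path:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  db.execute("CREATE TABLE SparkUsedCoinTags (tag TEXT NOT NULL UNIQUE);");
  db.execute("INSERT INTO SparkUsedCoinTags (tag) VALUES ('aa'), ('bb');");
  final tags = db
      .select("SELECT tag FROM SparkUsedCoinTags;")
      .map((row) => row["tag"] as String)
      .toSet();
  print(tags); // {aa, bb}
  db.dispose();
}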
/// This should be equivalent to counting the number of tags in the db,
/// assuming the integrity of the data. It is faster than actually calling
/// count on a table where no records have been deleted. None should be
/// deleted from this table in practice.
static Future<int> getUsedCoinTagsLastAddedRowId() async {
final result = await _FiroCache._getUsedCoinTagsLastAddedRowId();
if (result.isEmpty) {
return 0;
}
return result.first["highestId"] as int? ?? 0;
}
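Given the stated assumptions (AUTOINCREMENT ids starting at 1, no deletions), MAX(id) and COUNT(*) agree, and SQLite resolves MAX on the rowid-aliased INTEGER PRIMARY KEY with a b-tree seek rather than a scan. A quick in-memory check of the equivalence:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  db.execute(
    "CREATE TABLE SparkUsedCoinTags ("
    "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, "
    "tag TEXT NOT NULL UNIQUE);",
  );
  for (final tag in ["a", "b", "c"]) {
    db.execute("INSERT INTO SparkUsedCoinTags (tag) VALUES (?);", [tag]);
  }
  // while nothing has ever been deleted, both print 3
  print(db
      .select("SELECT MAX(id) AS highestId FROM SparkUsedCoinTags;")
      .first["highestId"]);
  print(db.select("SELECT COUNT(*) AS n FROM SparkUsedCoinTags;").first["n"]);
  db.dispose();
}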
static Future<bool> checkTagIsUsed(
String tag,
) async {
return await _FiroCache._checkTagIsUsed(
tag,
);
}
static Future<ResultSet> getSetCoinsForGroupId(
@@ -71,6 +122,14 @@ abstract class FiroCacheCoordinator {
timestampUTC: result.first["timestampUTC"] as int,
);
}
static Future<bool> checkSetInfoForGroupIdExists(
int groupId,
) async {
return await _FiroCache._checkSetInfoForGroupIdExists(
groupId,
);
}
}
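Taken together, the coordinator's public surface is init, fetch-and-update, and read-back. A hedged sketch of a typical call sequence (syncSparkTags is a hypothetical helper; client construction is elided):

Future<void> syncSparkTags(ElectrumXClient client) async {
  await FiroCacheCoordinator.init();
  // pull only tags added after the highest cached row id
  await FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(client);
  // read the full cached set back out (startNumber 0 = everything)
  final tags = await FiroCacheCoordinator.getUsedCoinTags(0);
  print("cached ${tags.length} used coin tags");
}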
abstract class _FiroCache {
@@ -137,6 +196,11 @@ abstract class _FiroCache {
FOREIGN KEY (setId) REFERENCES SparkSet(id),
FOREIGN KEY (coinId) REFERENCES SparkCoin(id)
);
CREATE TABLE SparkUsedCoinTags (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
tag TEXT NOT NULL UNIQUE
);
""",
);
@@ -181,10 +245,64 @@ abstract class _FiroCache {
return db.select("$query;");
}
// ===========================================================================
// ===========================================================================
static Future<bool> _checkSetInfoForGroupIdExists(
int groupId,
) async {
final query = """
SELECT EXISTS (
SELECT 1
FROM SparkSet
WHERE groupId = $groupId
) AS setExists;
""";
return db.select("$query;").first["setExists"] == 1;
}
// ===========================================================================
// =============== Spark used coin tags queries ==============================
static Future<ResultSet> _getSparkUsedCoinTags(
int startNumber,
) async {
String query = """
SELECT tag
FROM SparkUsedCoinTags
""";
if (startNumber > 0) {
query += " WHERE id >= $startNumber";
}
return db.select("$query;");
}
static Future<ResultSet> _getUsedCoinTagsLastAddedRowId() async {
const query = """
SELECT MAX(id) AS highestId
FROM SparkUsedCoinTags;
""";
return db.select("$query;");
}
static Future<bool> _checkTagIsUsed(String tag) async {
// bind the tag as a statement parameter instead of interpolating it into
// the SQL string directly
final query = """
SELECT EXISTS (
SELECT 1
FROM SparkUsedCoinTags
WHERE tag = ?
) AS tagExists;
""";
return db.select("$query;", [tag]).first["tagExists"] == 1;
}
// ===========================================================================
// ================== write to spark used tags cache =========================
// debug log counter var
static int _updateTagsCount = 0;
/// update the sqlite cache
/// Expected json format:
@@ -201,20 +319,123 @@ abstract class _FiroCache {
/// }
///
/// returns true if successful, otherwise false
static Future<bool> _updateWith(
static Future<bool> _updateSparkUsedTagsWith(
List<String> tags,
) async {
final start = DateTime.now();
_updateTagsCount++;
if (tags.isEmpty) {
_debugLog(
"$_updateTagsCount _updateSparkUsedTagsWith(tags) called "
"where tags is empty",
);
_debugLog(
"$_updateTagsCount _updateSparkUsedTagsWith() "
"duration = ${DateTime.now().difference(start)}",
);
// nothing to add, return early
return true;
} else if (tags.length <= 10) {
_debugLog("$_updateTagsCount _updateSparkUsedTagsWith() called where "
"tags.length=${tags.length}, tags: $tags,");
} else {
_debugLog(
"$_updateTagsCount _updateSparkUsedTagsWith() called where"
" tags.length=${tags.length},"
" first 5 tags: ${tags.sublist(0, 5)},"
" last 5 tags: ${tags.sublist(tags.length - 5, tags.length)}",
);
}
db.execute("BEGIN;");
try {
for (final tag in tags) {
db.execute(
"""
INSERT OR IGNORE INTO SparkUsedCoinTags (tag)
VALUES (?);
""",
[tag],
);
}
db.execute("COMMIT;");
_debugLog("$_updateTagsCount _updateSparkUsedTagsWith() COMMITTED");
_debugLog(
"$_updateTagsCount _updateSparkUsedTagsWith() "
"duration = ${DateTime.now().difference(start)}",
);
return true;
} catch (e, s) {
db.execute("ROLLBACK;");
_debugLog("$_updateTagsCount _updateSparkUsedTagsWith() ROLLBACK");
_debugLog(
"$_updateTagsCount _updateSparkUsedTagsWith() "
"duration = ${DateTime.now().difference(start)}",
);
// NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
Logging.instance.log(
"$e\n$s",
level: LogLevel.Error,
);
}
return false;
}
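Because tag is declared UNIQUE in the schema above, INSERT OR IGNORE inside the single transaction makes re-syncing idempotent: overlapping fetches simply skip rows that already exist instead of raising errors. A minimal in-memory demonstration:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  db.execute(
    "CREATE TABLE SparkUsedCoinTags ("
    "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, "
    "tag TEXT NOT NULL UNIQUE);",
  );
  db.execute("BEGIN;");
  for (final tag in ["t1", "t2", "t2", "t1"]) {
    db.execute(
      "INSERT OR IGNORE INTO SparkUsedCoinTags (tag) VALUES (?);",
      [tag],
    );
  }
  db.execute("COMMIT;");
  // prints 2: the duplicate inserts were ignored, not treated as errors
  print(db.select("SELECT COUNT(*) AS n FROM SparkUsedCoinTags;").first["n"]);
  db.dispose();
}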
// ===========================================================================
// ================== write to spark anon set cache ==========================
// debug log counter var
static int _updateAnonSetCount = 0;
/// update the sqlite cache
/// Expected json format:
/// {
/// "blockHash": "someBlockHash",
/// "setHash": "someSetHash",
/// "coins": [
/// ["serliazed1", "hash1", "context1"],
/// ["serliazed2", "hash2", "context2"],
/// ...
/// ["serliazed3", "hash3", "context3"],
/// ["serliazed4", "hash4", "context4"],
/// ],
/// }
///
/// returns true if successful, otherwise false
static Future<bool> _updateSparkAnonSetCoinsWith(
Map<String, dynamic> json,
int groupId,
) async {
final start = DateTime.now();
_upCount++;
_updateAnonSetCount++;
final blockHash = json["blockHash"] as String;
final setHash = json["setHash"] as String;
final coinsRaw = json["coins"] as List;
_debugLog(
"$_upCount _updateWith() called where groupId=$groupId,"
" blockHash=$blockHash, setHash=$setHash",
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith() "
"called where groupId=$groupId, "
"blockHash=$blockHash (${blockHash.toHexReversedFromBase64}), "
"setHash=$setHash, "
"coins.length: ${coinsRaw.isEmpty ? 0 : coinsRaw.length}",
);
if ((json["coins"] as List).isEmpty) {
_debugLog(
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
" called where json[coins] is Empty",
);
_debugLog(
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
" duration = ${DateTime.now().difference(start)}",
);
// no coins to actually insert
return true;
}
final checkResult = db.select(
"""
SELECT *
@@ -228,26 +449,21 @@ abstract class _FiroCache {
],
);
_debugLog("$_upCount _updateWith() called where checkResult=$checkResult");
_debugLog(
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
" called where checkResult=$checkResult",
);
if (checkResult.isNotEmpty) {
_debugLog(
"$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
" duration = ${DateTime.now().difference(start)}",
);
// already up to date
return true;
}
if ((json["coins"] as List).isEmpty) {
_debugLog("$_upCount _updateWith() called where json[coins] is Empty");
_debugLog(
"$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
);
// no coins to actually insert
return true;
}
final coins = (json["coins"] as List)
final coins = coinsRaw
.map(
(e) => [
e[0] as String,
@@ -307,16 +523,20 @@ abstract class _FiroCache {
}
db.execute("COMMIT;");
_debugLog("$_upCount _updateWith() COMMITTED");
_debugLog(
"$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith() COMMITTED",
);
_debugLog(
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith() duration"
" = ${DateTime.now().difference(start)}",
);
return true;
} catch (e, s) {
db.execute("ROLLBACK;");
_debugLog("$_upCount _updateWith() ROLLBACK");
_debugLog("$_updateAnonSetCount _updateSparkAnonSetCoinsWith() ROLLBACK");
_debugLog(
"$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
"$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
" duration = ${DateTime.now().difference(start)}",
);
// NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
Logging.instance.log(


@@ -220,53 +220,6 @@ class CachedElectrumXClient {
}
}
Future<Set<String>> getSparkUsedCoinsTags({
required CryptoCurrency cryptoCurrency,
}) async {
try {
final box = await DB.instance.getSparkUsedCoinsTagsCacheBox(
currency: cryptoCurrency,
);
final _list = box.get("tags") as List?;
final Set<String> cachedTags =
_list == null ? {} : List<String>.from(_list).toSet();
final startNumber = max(
0,
cachedTags.length - 100, // 100 being some arbitrary buffer
);
final newTags = await electrumXClient.getSparkUsedCoinsTags(
startNumber: startNumber,
);
// ensure we are getting some overlap so we know we are not missing any
if (cachedTags.isNotEmpty && newTags.isNotEmpty) {
assert(cachedTags.intersection(newTags).isNotEmpty);
}
// Make newTags an Iterable<String>.
final Iterable<String> iterableTags = newTags.map((e) => e.toString());
cachedTags.addAll(iterableTags);
await box.put(
"tags",
cachedTags.toList(),
);
return cachedTags;
} catch (e, s) {
Logging.instance.log(
"Failed to process CachedElectrumX.getSparkUsedCoinsTags(): $e\n$s",
level: LogLevel.Error,
);
rethrow;
}
}
/// Clear all cached transactions for the specified coin
Future<void> clearSharedTransactionCache({
required CryptoCurrency cryptoCurrency,


@@ -17,8 +17,6 @@ import 'package:electrum_adapter/electrum_adapter.dart' as electrum_adapter;
import 'package:electrum_adapter/electrum_adapter.dart';
import 'package:electrum_adapter/methods/specific/firo.dart';
import 'package:event_bus/event_bus.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:mutex/mutex.dart';
import 'package:stream_channel/stream_channel.dart';
@@ -922,7 +920,7 @@ class ElectrumXClient {
Logging.instance.log(
"Finished ElectrumXClient.getSparkAnonymitySet(coinGroupId"
"=$coinGroupId, startBlockHash=$startBlockHash). "
""
"coins.length: ${(response["coins"] as List?)?.length}"
"Duration=${DateTime.now().difference(start)}",
level: LogLevel.Info,
);
@@ -934,16 +932,12 @@ class ElectrumXClient {
/// Takes [startNumber]; if it is 0, the full set is returned,
/// otherwise only the used tags after that number.
Future<Set<String>> getSparkUsedCoinsTags({
Future<List<String>> getSparkUnhashedUsedCoinsTags({
String? requestID,
required int startNumber,
}) async {
try {
// Use electrum_adapter package's getSparkUsedCoinsTags method.
Logging.instance.log(
"attempting to fetch spark.getusedcoinstags...",
level: LogLevel.Info,
);
final start = DateTime.now();
await _checkElectrumAdapter();
final Map<String, dynamic> response =
await (getElectrumAdapter() as FiroElectrumClient)
@@ -955,8 +949,16 @@ class ElectrumXClient {
level: LogLevel.Info,
);
final map = Map<String, dynamic>.from(response);
final set = Set<String>.from(map["tags"] as List);
return await compute(_ffiHashTagsComputeWrapper, set);
final tags = List<String>.from(map["tags"] as List);
Logging.instance.log(
"Finished ElectrumXClient.getSparkUnhashedUsedCoinsTags(startNumber"
"=$startNumber). "
"Duration=${DateTime.now().difference(start)}",
level: LogLevel.Info,
);
return tags;
} catch (e) {
Logging.instance.log(e, level: LogLevel.Error);
rethrow;
@@ -1093,7 +1095,3 @@ class ElectrumXClient {
}
}
}
Set<String> _ffiHashTagsComputeWrapper(Set<String> base64Tags) {
return LibSpark.hashTags(base64Tags: base64Tags);
}


@@ -13,5 +13,6 @@ export 'impl/box_shadow.dart';
export 'impl/cl_transaction.dart';
export 'impl/contract_abi.dart';
export 'impl/gradient.dart';
export 'impl/list.dart';
export 'impl/string.dart';
export 'impl/uint8_list.dart';


@@ -588,7 +588,9 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
@override
Future<void> recover({required bool isRescan}) async {
// reset last checked values
groupIdTimestampUTCMap = {};
final start = DateTime.now();
final root = await getRootHDNode();
@@ -633,8 +635,8 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
);
}
final sparkUsedCoinTagsFuture =
electrumXCachedClient.getSparkUsedCoinsTags(
cryptoCurrency: info.coin,
FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
electrumXClient,
);
// receiving addresses
@@ -754,9 +756,6 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
final usedSerialsSet = (futureResults[0] as List<String>).toSet();
final setDataMap = futureResults[1] as Map<dynamic, dynamic>;
// spark
final sparkSpentCoinTags = futureResults[2] as Set<String>;
if (Util.isDesktop) {
await Future.wait([
recoverLelantusWallet(
@@ -765,7 +764,6 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
setDataMap: setDataMap,
),
recoverSparkWallet(
spentCoinTags: sparkSpentCoinTags,
latestSparkCoinId: latestSparkCoinId,
),
]);
@@ -776,7 +774,6 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
setDataMap: setDataMap,
);
await recoverSparkWallet(
spentCoinTags: sparkSpentCoinTags,
latestSparkCoinId: latestSparkCoinId,
);
}


@@ -631,12 +631,41 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
Future<void> refreshSparkData() async {
try {
final spentCoinTags = await electrumXCachedClient.getSparkUsedCoinsTags(
cryptoCurrency: info.coin,
// start by checking if any previous sets are missing from db and add the
// missing groupIds to the list of sets to check and update
final latestGroupId = await electrumXClient.getSparkLatestCoinId();
final List<int> groupIds = [];
if (latestGroupId > 1) {
for (int id = 1; id < latestGroupId; id++) {
final setExists =
await FiroCacheCoordinator.checkSetInfoForGroupIdExists(
id,
);
if (!setExists) {
groupIds.add(id);
}
}
}
groupIds.add(latestGroupId);
// start fetch and update process for each set groupId as required
final possibleFutures = groupIds.map(
(e) =>
FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
e,
electrumXClient,
),
);
await _checkAndUpdateCoins(spentCoinTags, true);
// wait for each fetch and update to complete
await Future.wait([
...possibleFutures,
FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
electrumXClient,
),
]);
await _checkAndUpdateCoins();
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) {
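A worked example of the gap check above: with latestGroupId = 4 and set 2 missing locally, the loop yields [2], and the latest id is always appended, so sets [2, 4] are fetched. A minimal sketch of just that selection logic (groupIdsToFetch and setExists are hypothetical stand-ins for the coordinator calls):

List<int> groupIdsToFetch(int latestGroupId, bool Function(int) setExists) {
  final ids = <int>[];
  for (int id = 1; id < latestGroupId; id++) {
    if (!setExists(id)) ids.add(id); // re-fetch any missing earlier set
  }
  ids.add(latestGroupId); // the latest set is always refreshed
  return ids;
}

void main() {
  print(groupIdsToFetch(4, (id) => id != 2)); // [2, 4]
}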
@@ -697,7 +726,6 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
Future<void> recoverSparkWallet({
required Set<String> spentCoinTags,
required int latestSparkCoinId,
}) async {
// generate spark addresses if they do not exist yet
@@ -707,7 +735,7 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
}
try {
await _checkAndUpdateCoins(spentCoinTags, false);
await _checkAndUpdateCoins();
// refresh spark balance
await refreshSparkBalance();
@@ -720,10 +748,7 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
}
}
Future<void> _checkAndUpdateCoins(
Set<String> spentCoinTags,
bool checkUseds,
) async {
Future<void> _checkAndUpdateCoins() async {
final sparkAddresses = await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
@@ -737,15 +762,7 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
)
.toSet();
List<SparkCoin>? currentCoins;
if (checkUseds) {
currentCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
}
final Map<int, List<List<String>>> rawCoinsBySetId = {};
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
for (int i = 1; i <= latestSparkCoinId; i++) {
@@ -769,34 +786,62 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
.toList();
if (coinsRaw.isNotEmpty) {
final myCoins = await compute(
_identifyCoins,
(
anonymitySetCoins: coinsRaw,
groupId: i,
spentCoinTags: spentCoinTags,
privateKeyHexSet: privateKeyHexSet,
walletId: walletId,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
),
);
if (checkUseds && currentCoins != null) {
for (final coin in currentCoins) {
if (spentCoinTags.contains(coin.lTagHash)) {
myCoins.add(coin.copyWith(isUsed: true));
}
}
rawCoinsBySetId[i] = coinsRaw;
}
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
}
groupIdTimestampUTCMap[i] = max(
lastCheckedTimeStampUTC,
info?.timestampUTC ?? lastCheckedTimeStampUTC,
);
}
final List<SparkCoin> newlyIdCoins = [];
for (final groupId in rawCoinsBySetId.keys) {
final myCoins = await compute(
_identifyCoins,
(
anonymitySetCoins: rawCoinsBySetId[groupId]!,
groupId: groupId,
privateKeyHexSet: privateKeyHexSet,
walletId: walletId,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
),
);
newlyIdCoins.addAll(myCoins);
}
await _checkAndMarkCoinsUsedInDB(coinsNotInDbYet: newlyIdCoins);
}
Future<void> _checkAndMarkCoinsUsedInDB({
List<SparkCoin> coinsNotInDbYet = const [],
}) async {
final List<SparkCoin> coins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final List<SparkCoin> coinsToWrite = [];
final spentCoinTags = await FiroCacheCoordinator.getUsedCoinTags(0);
for (final coin in coins) {
if (spentCoinTags.contains(coin.lTagHash)) {
coinsToWrite.add(coin.copyWith(isUsed: true));
}
}
for (final coin in coinsNotInDbYet) {
if (spentCoinTags.contains(coin.lTagHash)) {
coinsToWrite.add(coin.copyWith(isUsed: true));
} else {
coinsToWrite.add(coin);
}
}
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(coinsToWrite);
}
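The rewrite above splits identification from spent-marking: _identifyCoins no longer receives spentCoinTags (see the change at the bottom of this diff), and isUsed is instead derived in a second pass against the sqlite tag cache. A reduced sketch of that marking rule (CoinStub is a hypothetical stand-in for SparkCoin):

class CoinStub {
  final String lTagHash;
  final bool isUsed;
  const CoinStub(this.lTagHash, {this.isUsed = false});
  CoinStub copyWith({bool? isUsed}) =>
      CoinStub(lTagHash, isUsed: isUsed ?? this.isUsed);
}

// coins whose lTagHash appears in the cached spent-tag set get isUsed = true
List<CoinStub> markUsed(Iterable<CoinStub> coins, Set<String> spentTags) => [
      for (final c in coins)
        spentTags.contains(c.lTagHash) ? c.copyWith(isUsed: true) : c,
    ];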
// modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
@@ -1713,7 +1758,6 @@ Future<List<SparkCoin>> _identifyCoins(
({
List<dynamic> anonymitySetCoins,
int groupId,
Set<String> spentCoinTags,
Set<String> privateKeyHexSet,
String walletId,
bool isTestNet,
@@ -1756,7 +1800,7 @@ Future<List<SparkCoin>> _identifyCoins(
SparkCoin(
walletId: args.walletId,
type: coinType,
isUsed: args.spentCoinTags.contains(coin.lTagHash!),
isUsed: false,
groupId: args.groupId,
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,