diff --git a/lib/db/db_version_migration.dart b/lib/db/db_version_migration.dart
index 8fc4c6963..b5da9f505 100644
--- a/lib/db/db_version_migration.dart
+++ b/lib/db/db_version_migration.dart
@@ -717,11 +717,13 @@ class DbVersionMigrator with WalletDB {
   }
 
   Future<void> _v12(SecureStorageInterface secureStore) async {
-    await DB.instance.deleteBoxFromDisk(
-      boxName: "firo_anonymitySetSparkCache",
-    );
-    await DB.instance.deleteBoxFromDisk(
-      boxName: "firoTestNet_anonymitySetSparkCache",
-    );
+    for (final identifier in ["firo", "firoTestNet"]) {
+      await DB.instance.deleteBoxFromDisk(
+        boxName: "${identifier}_anonymitySetSparkCache",
+      );
+      await DB.instance.deleteBoxFromDisk(
+        boxName: "${identifier}_sparkUsedCoinsTagsCache",
+      );
+    }
   }
 }
diff --git a/lib/db/hive/db.dart b/lib/db/hive/db.dart
index 7fe515c4a..2e9b5435b 100644
--- a/lib/db/hive/db.dart
+++ b/lib/db/hive/db.dart
@@ -58,8 +58,6 @@ class DB {
       "${currency.identifier}_anonymitySetCache";
   String _boxNameUsedSerialsCache({required CryptoCurrency currency}) =>
       "${currency.identifier}_usedSerialsCache";
-  String _boxNameSparkUsedCoinsTagsCache({required CryptoCurrency currency}) =>
-      "${currency.identifier}_sparkUsedCoinsTagsCache";
 
   Box<dynamic>? _boxNodeModels;
   Box<dynamic>? _boxPrimaryNodes;
@@ -229,18 +227,6 @@ class DB {
     );
   }
 
-  Future<Box<dynamic>> getSparkUsedCoinsTagsCacheBox({
-    required CryptoCurrency currency,
-  }) async {
-    if (_getSparkUsedCoinsTagsCacheBoxes[currency.identifier]?.isOpen != true) {
-      _getSparkUsedCoinsTagsCacheBoxes.remove(currency.identifier);
-    }
-    return _getSparkUsedCoinsTagsCacheBoxes[currency.identifier] ??=
-        await Hive.openBox(
-      _boxNameSparkUsedCoinsTagsCache(currency: currency),
-    );
-  }
-
   Future<void> closeUsedSerialsCacheBox({
     required CryptoCurrency currency,
   }) async {
@@ -257,9 +243,6 @@ class DB {
     await deleteAll(
       boxName: _boxNameUsedSerialsCache(currency: currency),
     );
-    await deleteAll(
-      boxName: _boxNameSparkUsedCoinsTagsCache(currency: currency),
-    );
   }
 }
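For reference, the _v12 loop above expands to four Hive box deletions: the old anonymity-set cache and the now-removed used-coins-tags cache, for both mainnet and testnet. A standalone sketch (box names taken from the diff; the main() wrapper is illustrative only):

    // Illustrative only -- the box names removed by the v12 migration.
    void main() {
      final boxNames = [
        for (final identifier in ["firo", "firoTestNet"]) ...[
          "${identifier}_anonymitySetSparkCache",
          "${identifier}_sparkUsedCoinsTagsCache",
        ],
      ];
      boxNames.forEach(print);
    }
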
diff --git a/lib/db/sqlite/firo_cache.dart b/lib/db/sqlite/firo_cache.dart
index f77e4da15..a6c2882a0 100644
--- a/lib/db/sqlite/firo_cache.dart
+++ b/lib/db/sqlite/firo_cache.dart
@@ -2,9 +2,11 @@
 import 'dart:async';
 import 'dart:io';
 
 import 'package:flutter/foundation.dart';
+import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
 import 'package:sqlite3/sqlite3.dart';
 
 import '../../electrumx_rpc/electrumx_client.dart';
+import '../../utilities/extensions/extensions.dart';
 import '../../utilities/logger.dart';
 import '../../utilities/stack_file_system.dart';
@@ -18,11 +20,31 @@ void _debugLog(Object? object) {
   }
 }
 
-/// Wrapper class for [FiroCache] as [FiroCache] should eventually be handled in a
+List<String> _ffiHashTagsComputeWrapper(List<String> base64Tags) {
+  return LibSpark.hashTags(base64Tags: base64Tags);
+}
+
+/// Wrapper class for [_FiroCache] as [_FiroCache] should eventually be handled in a
 /// background isolate and [FiroCacheCoordinator] should manage that isolate
 abstract class FiroCacheCoordinator {
   static Future<void> init() => _FiroCache.init();
 
+  static Future<void> runFetchAndUpdateSparkUsedCoinTags(
+    ElectrumXClient client,
+  ) async {
+    final count = await FiroCacheCoordinator.getUsedCoinTagsLastAddedRowId();
+    final unhashedTags = await client.getSparkUnhashedUsedCoinsTags(
+      startNumber: count,
+    );
+    if (unhashedTags.isNotEmpty) {
+      final hashedTags = await compute(
+        _ffiHashTagsComputeWrapper,
+        unhashedTags,
+      );
+      await _FiroCache._updateSparkUsedTagsWith(hashedTags);
+    }
+  }
+
   static Future<void> runFetchAndUpdateSparkAnonSetCacheForGroupId(
     int groupId,
     ElectrumXClient client,
@@ -38,7 +60,36 @@ abstract class FiroCacheCoordinator {
       startBlockHash: blockHash.toHexReversedFromBase64,
     );
 
-    await _FiroCache._updateWith(json, groupId);
+    await _FiroCache._updateSparkAnonSetCoinsWith(json, groupId);
+  }
+
+  // ===========================================================================
+
+  static Future<Set<String>> getUsedCoinTags(int startNumber) async {
+    final result = await _FiroCache._getSparkUsedCoinTags(
+      startNumber,
+    );
+    return result.map((e) => e["tag"] as String).toSet();
+  }
+
+  /// This should be the equivalent of counting the number of tags in the db,
+  /// assuming the integrity of the data. Faster than actually calling count on
+  /// a table where no records have been deleted. None should be deleted from
+  /// this table in practice.
+  static Future<int> getUsedCoinTagsLastAddedRowId() async {
+    final result = await _FiroCache._getUsedCoinTagsLastAddedRowId();
+    if (result.isEmpty) {
+      return 0;
+    }
+    return result.first["highestId"] as int? ?? 0;
+  }
+
+  static Future<bool> checkTagIsUsed(
+    String tag,
+  ) async {
+    return await _FiroCache._checkTagIsUsed(
+      tag,
+    );
   }
 
   static Future<ResultSet> getSetCoinsForGroupId(
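Taken together, the coordinator now exposes one write path (runFetchAndUpdateSparkUsedCoinTags, which resumes from the last stored row id) and read paths (getUsedCoinTags, getUsedCoinTagsLastAddedRowId, checkTagIsUsed). A minimal usage sketch (illustrative, not part of the patch; assumes an initialized FiroCacheCoordinator and a connected ElectrumXClient):

    // Illustrative only: refresh the used-tags cache, then read it back.
    Future<void> refreshAndReadTags(ElectrumXClient client) async {
      await FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(client);
      final tags = await FiroCacheCoordinator.getUsedCoinTags(0);
      print("cached spark used coin tags: ${tags.length}");
    }
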
0; + } + + static Future checkTagIsUsed( + String tag, + ) async { + return await _FiroCache._checkTagIsUsed( + tag, + ); } static Future getSetCoinsForGroupId( @@ -71,6 +122,14 @@ abstract class FiroCacheCoordinator { timestampUTC: result.first["timestampUTC"] as int, ); } + + static Future checkSetInfoForGroupIdExists( + int groupId, + ) async { + return await _FiroCache._checkSetInfoForGroupIdExists( + groupId, + ); + } } abstract class _FiroCache { @@ -137,6 +196,11 @@ abstract class _FiroCache { FOREIGN KEY (setId) REFERENCES SparkSet(id), FOREIGN KEY (coinId) REFERENCES SparkCoin(id) ); + + CREATE TABLE SparkUsedCoinTags ( + id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, + tag TEXT NOT NULL UNIQUE + ); """, ); @@ -181,10 +245,64 @@ abstract class _FiroCache { return db.select("$query;"); } - // =========================================================================== - // =========================================================================== + static Future _checkSetInfoForGroupIdExists( + int groupId, + ) async { + final query = """ + SELECT EXISTS ( + SELECT 1 + FROM SparkSet + WHERE groupId = $groupId + ) AS setExists; + """; - static int _upCount = 0; + return db.select("$query;").first["setExists"] == 1; + } + + // =========================================================================== + // =============== Spark used coin tags queries ============================== + + static Future _getSparkUsedCoinTags( + int startNumber, + ) async { + String query = """ + SELECT tag + FROM SparkUsedCoinTags + """; + + if (startNumber > 0) { + query += " WHERE id >= $startNumber"; + } + + return db.select("$query;"); + } + + static Future _getUsedCoinTagsLastAddedRowId() async { + const query = """ + SELECT MAX(id) AS highestId + FROM SparkUsedCoinTags; + """; + + return db.select("$query;"); + } + + static Future _checkTagIsUsed(String tag) async { + final query = """ + SELECT EXISTS ( + SELECT 1 + FROM SparkUsedCoinTags + WHERE tag = '$tag' + ) AS tagExists; + """; + + return db.select("$query;").first["tagExists"] == 1; + } + + // =========================================================================== + // ================== write to spark used tags cache ========================= + + // debug log counter var + static int _updateTagsCount = 0; /// update the sqlite cache /// Expected json format: @@ -201,20 +319,123 @@ abstract class _FiroCache { /// } /// /// returns true if successful, otherwise false - static Future _updateWith( + static Future _updateSparkUsedTagsWith( + List tags, + ) async { + final start = DateTime.now(); + _updateTagsCount++; + + if (tags.isEmpty) { + _debugLog( + "$_updateTagsCount _updateSparkUsedTagsWith(tags) called " + "where tags is empty", + ); + _debugLog( + "$_updateTagsCount _updateSparkUsedTagsWith() " + "duration = ${DateTime.now().difference(start)}", + ); + // nothing to add, return early + return true; + } else if (tags.length <= 10) { + _debugLog("$_updateTagsCount _updateSparkUsedTagsWith() called where " + "tags.length=${tags.length}, tags: $tags,"); + } else { + _debugLog( + "$_updateTagsCount _updateSparkUsedTagsWith() called where" + " tags.length=${tags.length}," + " first 5 tags: ${tags.sublist(0, 5)}," + " last 5 tags: ${tags.sublist(tags.length - 5, tags.length)}", + ); + } + + db.execute("BEGIN;"); + try { + for (final tag in tags) { + db.execute( + """ + INSERT OR IGNORE INTO SparkUsedCoinTags (tag) + VALUES (?); + """, + [tag], + ); + } + + db.execute("COMMIT;"); + _debugLog("$_updateTagsCount 
+
+  // ===========================================================================
+  // ================== write to spark used tags cache =========================
+
+  // debug log counter var
+  static int _updateTagsCount = 0;
 
   /// update the sqlite cache
   /// Expected json format:
   /// {
   ///   "blockHash": "someBlockHash",
   ///   "setHash": "someSetHash",
   ///   "coins": [
   ///     ["serliazed1", "hash1", "context1"],
   ///     ["serliazed2", "hash2", "context2"],
   ///     ...
   ///     ["serliazed3", "hash3", "context3"],
   ///     ["serliazed4", "hash4", "context4"],
   ///   ],
   /// }
   ///
   /// returns true if successful, otherwise false
-  static Future<bool> _updateWith(
+  static Future<bool> _updateSparkUsedTagsWith(
+    List<String> tags,
+  ) async {
+    final start = DateTime.now();
+    _updateTagsCount++;
+
+    if (tags.isEmpty) {
+      _debugLog(
+        "$_updateTagsCount _updateSparkUsedTagsWith(tags) called "
+        "where tags is empty",
+      );
+      _debugLog(
+        "$_updateTagsCount _updateSparkUsedTagsWith() "
+        "duration = ${DateTime.now().difference(start)}",
+      );
+      // nothing to add, return early
+      return true;
+    } else if (tags.length <= 10) {
+      _debugLog("$_updateTagsCount _updateSparkUsedTagsWith() called where "
+          "tags.length=${tags.length}, tags: $tags,");
+    } else {
+      _debugLog(
+        "$_updateTagsCount _updateSparkUsedTagsWith() called where"
+        " tags.length=${tags.length},"
+        " first 5 tags: ${tags.sublist(0, 5)},"
+        " last 5 tags: ${tags.sublist(tags.length - 5, tags.length)}",
+      );
+    }
+
+    db.execute("BEGIN;");
+    try {
+      for (final tag in tags) {
+        db.execute(
+          """
+            INSERT OR IGNORE INTO SparkUsedCoinTags (tag)
+            VALUES (?);
+          """,
+          [tag],
+        );
+      }
+
+      db.execute("COMMIT;");
+      _debugLog("$_updateTagsCount _updateSparkUsedTagsWith() COMMITTED");
+      _debugLog(
+        "$_updateTagsCount _updateSparkUsedTagsWith() "
+        "duration = ${DateTime.now().difference(start)}",
+      );
+      return true;
+    } catch (e, s) {
+      db.execute("ROLLBACK;");
+      _debugLog("$_updateTagsCount _updateSparkUsedTagsWith() ROLLBACK");
+      _debugLog(
+        "$_updateTagsCount _updateSparkUsedTagsWith() "
+        "duration = ${DateTime.now().difference(start)}",
+      );
+      // NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
+      Logging.instance.log(
+        "$e\n$s",
+        level: LogLevel.Error,
+      );
+    }
+
+    return false;
+  }
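Two details of the insert loop above are worth noting: a single BEGIN/COMMIT wraps all inserts in one transaction (one write lock and sync instead of one per tag), and INSERT OR IGNORE against the UNIQUE tag column makes the update idempotent, so refetching an overlapping tag range is harmless. The same pattern, reduced to a sketch (illustrative, using the sqlite3 package API as in this file):

    // Illustrative only: batched, idempotent inserts.
    void insertTags(Database db, List<String> tags) {
      db.execute("BEGIN;");
      try {
        for (final tag in tags) {
          db.execute(
            "INSERT OR IGNORE INTO SparkUsedCoinTags (tag) VALUES (?);",
            [tag],
          );
        }
        db.execute("COMMIT;");
      } catch (_) {
        db.execute("ROLLBACK;");
        rethrow;
      }
    }
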
+
+  // ===========================================================================
+  // ================== write to spark anon set cache ==========================
+
+  // debug log counter var
+  static int _updateAnonSetCount = 0;
+
+  /// update the sqlite cache
+  /// Expected json format:
+  /// {
+  ///   "blockHash": "someBlockHash",
+  ///   "setHash": "someSetHash",
+  ///   "coins": [
+  ///     ["serialized1", "hash1", "context1"],
+  ///     ["serialized2", "hash2", "context2"],
+  ///     ...
+  ///     ["serialized3", "hash3", "context3"],
+  ///     ["serialized4", "hash4", "context4"],
+  ///   ],
+  /// }
+  ///
+  /// returns true if successful, otherwise false
+  static Future<bool> _updateSparkAnonSetCoinsWith(
     Map<String, dynamic> json,
     int groupId,
   ) async {
     final start = DateTime.now();
-    _upCount++;
+    _updateAnonSetCount++;
 
     final blockHash = json["blockHash"] as String;
     final setHash = json["setHash"] as String;
+    final coinsRaw = json["coins"] as List;
 
     _debugLog(
-      "$_upCount _updateWith() called where groupId=$groupId,"
-      " blockHash=$blockHash, setHash=$setHash",
+      "$_updateAnonSetCount _updateSparkAnonSetCoinsWith() "
+      "called where groupId=$groupId, "
+      "blockHash=$blockHash (${blockHash.toHexReversedFromBase64}), "
+      "setHash=$setHash, "
+      "coins.length: ${coinsRaw.isEmpty ? 0 : coinsRaw.length}",
     );
 
+    if ((json["coins"] as List).isEmpty) {
+      _debugLog(
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
+        " called where json[coins] is Empty",
+      );
+      _debugLog(
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
+        " duration = ${DateTime.now().difference(start)}",
+      );
+      // no coins to actually insert
+      return true;
+    }
+
     final checkResult = db.select(
       """
         SELECT *
@@ -228,26 +449,21 @@ abstract class _FiroCache {
       ],
     );
 
-    _debugLog("$_upCount _updateWith() called where checkResult=$checkResult");
+    _debugLog(
+      "$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
+      " called where checkResult=$checkResult",
+    );
 
     if (checkResult.isNotEmpty) {
       _debugLog(
-        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
+        " duration = ${DateTime.now().difference(start)}",
       );
       // already up to date
       return true;
     }
 
-    if ((json["coins"] as List).isEmpty) {
-      _debugLog("$_upCount _updateWith() called where json[coins] is Empty");
-      _debugLog(
-        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
-      );
-      // no coins to actually insert
-      return true;
-    }
-
-    final coins = (json["coins"] as List)
+    final coins = coinsRaw
         .map(
           (e) => [
             e[0] as String,
@@ -307,16 +523,20 @@ abstract class _FiroCache {
       }
 
       db.execute("COMMIT;");
-      _debugLog("$_upCount _updateWith() COMMITTED");
       _debugLog(
-        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith() COMMITTED",
+      );
+      _debugLog(
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith() duration"
+        " = ${DateTime.now().difference(start)}",
       );
       return true;
     } catch (e, s) {
       db.execute("ROLLBACK;");
-      _debugLog("$_upCount _updateWith() ROLLBACK");
+      _debugLog("$_updateAnonSetCount _updateSparkAnonSetCoinsWith() ROLLBACK");
       _debugLog(
-        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+        "$_updateAnonSetCount _updateSparkAnonSetCoinsWith()"
+        " duration = ${DateTime.now().difference(start)}",
       );
       // NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
       Logging.instance.log(
diff --git a/lib/electrumx_rpc/cached_electrumx_client.dart b/lib/electrumx_rpc/cached_electrumx_client.dart
index c2dbbb6b8..8b1ff10c8 100644
--- a/lib/electrumx_rpc/cached_electrumx_client.dart
+++ b/lib/electrumx_rpc/cached_electrumx_client.dart
@@ -220,53 +220,6 @@ class CachedElectrumXClient {
     }
   }
 
-  Future<Set<String>> getSparkUsedCoinsTags({
-    required CryptoCurrency cryptoCurrency,
-  }) async {
-    try {
-      final box = await DB.instance.getSparkUsedCoinsTagsCacheBox(
-        currency: cryptoCurrency,
-      );
-
-      final _list = box.get("tags") as List?;
-
-      final Set cachedTags =
-          _list == null ? {} : List.from(_list).toSet();
-
-      final startNumber = max(
-        0,
-        cachedTags.length - 100, // 100 being some arbitrary buffer
-      );
-
-      final newTags = await electrumXClient.getSparkUsedCoinsTags(
-        startNumber: startNumber,
-      );
-
-      // ensure we are getting some overlap so we know we are not missing any
-      if (cachedTags.isNotEmpty && newTags.isNotEmpty) {
-        assert(cachedTags.intersection(newTags).isNotEmpty);
-      }
-
-      // Make newTags an Iterable.
-      final Iterable iterableTags = newTags.map((e) => e.toString());
-
-      cachedTags.addAll(iterableTags);
-
-      await box.put(
-        "tags",
-        cachedTags.toList(),
-      );
-
-      return cachedTags;
-    } catch (e, s) {
-      Logging.instance.log(
-        "Failed to process CachedElectrumX.getSparkUsedCoinsTags(): $e\n$s",
-        level: LogLevel.Error,
-      );
-      rethrow;
-    }
-  }
-
   /// Clear all cached transactions for the specified coin
   Future<void> clearSharedTransactionCache({
     required CryptoCurrency cryptoCurrency,
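The deleted Hive-based cache above guarded against gaps by refetching from cachedTags.length - 100 and asserting overlap between the cached and newly fetched tags. The sqlite replacement gets the same guarantee structurally: SparkUsedCoinTags ids are append-only, so resuming from getUsedCoinTagsLastAddedRowId() cannot skip tags. The old invariant, for comparison (illustrative, simplified from the removed lines):

    // Illustrative only: the overlap invariant the removed code asserted.
    bool hasOverlap(Set<String> cachedTags, Set<String> newTags) =>
        cachedTags.isEmpty ||
        newTags.isEmpty ||
        cachedTags.intersection(newTags).isNotEmpty;
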
diff --git a/lib/electrumx_rpc/electrumx_client.dart b/lib/electrumx_rpc/electrumx_client.dart
index 6d473a71f..1e6018a95 100644
--- a/lib/electrumx_rpc/electrumx_client.dart
+++ b/lib/electrumx_rpc/electrumx_client.dart
@@ -17,8 +17,6 @@
 import 'package:electrum_adapter/electrum_adapter.dart' as electrum_adapter;
 import 'package:electrum_adapter/electrum_adapter.dart';
 import 'package:electrum_adapter/methods/specific/firo.dart';
 import 'package:event_bus/event_bus.dart';
-import 'package:flutter/foundation.dart';
-import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
 import 'package:mutex/mutex.dart';
 import 'package:stream_channel/stream_channel.dart';
@@ -922,7 +920,7 @@ class ElectrumXClient {
       Logging.instance.log(
         "Finished ElectrumXClient.getSparkAnonymitySet(coinGroupId"
         "=$coinGroupId, startBlockHash=$startBlockHash). "
-        ""
+        "coins.length: ${(response["coins"] as List?)?.length}. "
         "Duration=${DateTime.now().difference(start)}",
         level: LogLevel.Info,
       );
@@ -934,16 +932,12 @@ class ElectrumXClient {
 
   /// Takes [startNumber], if it is 0, we get the full set,
   /// otherwise the used tags after that number
-  Future<Set<String>> getSparkUsedCoinsTags({
+  Future<List<String>> getSparkUnhashedUsedCoinsTags({
     String? requestID,
     required int startNumber,
   }) async {
     try {
-      // Use electrum_adapter package's getSparkUsedCoinsTags method.
-      Logging.instance.log(
-        "attempting to fetch spark.getusedcoinstags...",
-        level: LogLevel.Info,
-      );
+      final start = DateTime.now();
       await _checkElectrumAdapter();
       final Map<String, dynamic> response =
           await (getElectrumAdapter() as FiroElectrumClient)
@@ -955,8 +949,16 @@ class ElectrumXClient {
         level: LogLevel.Info,
       );
       final map = Map.from(response);
-      final set = Set<String>.from(map["tags"] as List);
-      return await compute(_ffiHashTagsComputeWrapper, set);
+      final tags = List<String>.from(map["tags"] as List);
+
+      Logging.instance.log(
+        "Finished ElectrumXClient.getSparkUnhashedUsedCoinsTags(startNumber"
+        "=$startNumber). "
+        "Duration=${DateTime.now().difference(start)}",
+        level: LogLevel.Info,
+      );
+
+      return tags;
     } catch (e) {
       Logging.instance.log(e, level: LogLevel.Error);
       rethrow;
@@ -1093,7 +1095,3 @@ class ElectrumXClient {
     }
   }
 }
-
-Set<String> _ffiHashTagsComputeWrapper(Set<String> base64Tags) {
-  return LibSpark.hashTags(base64Tags: base64Tags);
-}
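With this change the client returns raw, unhashed tags and drops its flutter/foundation and flutter_libsparkmobile dependencies; hashing moves to the call site in firo_cache.dart, still off the main isolate via compute. The new call sequence, sketched (illustrative; _ffiHashTagsComputeWrapper is the wrapper added in firo_cache.dart above):

    // Illustrative only: fetch raw tags, then hash them on a background isolate.
    Future<List<String>> fetchHashedTags(ElectrumXClient client) async {
      final unhashed = await client.getSparkUnhashedUsedCoinsTags(startNumber: 0);
      return compute(_ffiHashTagsComputeWrapper, unhashed);
    }
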
" + "Duration=${DateTime.now().difference(start)}", + level: LogLevel.Info, + ); + + return tags; } catch (e) { Logging.instance.log(e, level: LogLevel.Error); rethrow; @@ -1093,7 +1095,3 @@ class ElectrumXClient { } } } - -Set _ffiHashTagsComputeWrapper(Set base64Tags) { - return LibSpark.hashTags(base64Tags: base64Tags); -} diff --git a/lib/utilities/extensions/extensions.dart b/lib/utilities/extensions/extensions.dart index a798c002a..678e12844 100644 --- a/lib/utilities/extensions/extensions.dart +++ b/lib/utilities/extensions/extensions.dart @@ -13,5 +13,6 @@ export 'impl/box_shadow.dart'; export 'impl/cl_transaction.dart'; export 'impl/contract_abi.dart'; export 'impl/gradient.dart'; +export 'impl/list.dart'; export 'impl/string.dart'; export 'impl/uint8_list.dart'; diff --git a/lib/wallets/wallet/impl/firo_wallet.dart b/lib/wallets/wallet/impl/firo_wallet.dart index 918ec563b..03f175b99 100644 --- a/lib/wallets/wallet/impl/firo_wallet.dart +++ b/lib/wallets/wallet/impl/firo_wallet.dart @@ -588,7 +588,9 @@ class FiroWallet extends Bip39HDWallet @override Future recover({required bool isRescan}) async { + // reset last checked values groupIdTimestampUTCMap = {}; + final start = DateTime.now(); final root = await getRootHDNode(); @@ -633,8 +635,8 @@ class FiroWallet extends Bip39HDWallet ); } final sparkUsedCoinTagsFuture = - electrumXCachedClient.getSparkUsedCoinsTags( - cryptoCurrency: info.coin, + FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags( + electrumXClient, ); // receiving addresses @@ -754,9 +756,6 @@ class FiroWallet extends Bip39HDWallet final usedSerialsSet = (futureResults[0] as List).toSet(); final setDataMap = futureResults[1] as Map; - // spark - final sparkSpentCoinTags = futureResults[2] as Set; - if (Util.isDesktop) { await Future.wait([ recoverLelantusWallet( @@ -765,7 +764,6 @@ class FiroWallet extends Bip39HDWallet setDataMap: setDataMap, ), recoverSparkWallet( - spentCoinTags: sparkSpentCoinTags, latestSparkCoinId: latestSparkCoinId, ), ]); @@ -776,7 +774,6 @@ class FiroWallet extends Bip39HDWallet setDataMap: setDataMap, ); await recoverSparkWallet( - spentCoinTags: sparkSpentCoinTags, latestSparkCoinId: latestSparkCoinId, ); } diff --git a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart index 8b94e941d..5b2c2c601 100644 --- a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart +++ b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart @@ -631,12 +631,41 @@ mixin SparkInterface Future refreshSparkData() async { try { - final spentCoinTags = await electrumXCachedClient.getSparkUsedCoinsTags( - cryptoCurrency: info.coin, + // start by checking if any previous sets are missing from db and add the + // missing groupIds to the list if sets to check and update + final latestGroupId = await electrumXClient.getSparkLatestCoinId(); + final List groupIds = []; + if (latestGroupId > 1) { + for (int id = 1; id < latestGroupId; id++) { + final setExists = + await FiroCacheCoordinator.checkSetInfoForGroupIdExists( + id, + ); + if (!setExists) { + groupIds.add(id); + } + } + } + groupIds.add(latestGroupId); + + // start fetch and update process for each set groupId as required + final possibleFutures = groupIds.map( + (e) => + FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId( + e, + electrumXClient, + ), ); - await _checkAndUpdateCoins(spentCoinTags, true); + // wait for each fetch and update to complete + await Future.wait([ + ...possibleFutures, 
diff --git a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
index 8b94e941d..5b2c2c601 100644
--- a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
+++ b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
@@ -631,12 +631,41 @@ mixin SparkInterface
 
   Future<void> refreshSparkData() async {
     try {
-      final spentCoinTags = await electrumXCachedClient.getSparkUsedCoinsTags(
-        cryptoCurrency: info.coin,
+      // start by checking if any previous sets are missing from db and add the
+      // missing groupIds to the list of sets to check and update
+      final latestGroupId = await electrumXClient.getSparkLatestCoinId();
+      final List<int> groupIds = [];
+      if (latestGroupId > 1) {
+        for (int id = 1; id < latestGroupId; id++) {
+          final setExists =
+              await FiroCacheCoordinator.checkSetInfoForGroupIdExists(
+            id,
+          );
+          if (!setExists) {
+            groupIds.add(id);
+          }
+        }
+      }
+      groupIds.add(latestGroupId);
+
+      // start fetch and update process for each set groupId as required
+      final possibleFutures = groupIds.map(
+        (e) =>
+            FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
+          e,
+          electrumXClient,
+        ),
       );
 
-      await _checkAndUpdateCoins(spentCoinTags, true);
+      // wait for each fetch and update to complete
+      await Future.wait([
+        ...possibleFutures,
+        FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
+          electrumXClient,
+        ),
+      ]);
+
+      await _checkAndUpdateCoins();
 
       // refresh spark balance
       await refreshSparkBalance();
     } catch (e, s) {
@@ -697,7 +726,6 @@ mixin SparkInterface
   /// Should only be called within the standard wallet [recover] function due to
   /// mutex locking. Otherwise behaviour MAY be undefined.
   Future<void> recoverSparkWallet({
-    required Set spentCoinTags,
     required int latestSparkCoinId,
   }) async {
     // generate spark addresses if non existing
@@ -707,7 +735,7 @@ mixin SparkInterface
     }
 
     try {
-      await _checkAndUpdateCoins(spentCoinTags, false);
+      await _checkAndUpdateCoins();
 
       // refresh spark balance
       await refreshSparkBalance();
@@ -720,10 +748,7 @@ mixin SparkInterface
     }
   }
 
-  Future<void> _checkAndUpdateCoins(
-    Set spentCoinTags,
-    bool checkUseds,
-  ) async {
+  Future<void> _checkAndUpdateCoins() async {
     final sparkAddresses = await mainDB.isar.addresses
         .where()
         .walletIdEqualTo(walletId)
@@ -737,15 +762,7 @@ mixin SparkInterface
         )
         .toSet();
 
-    List? currentCoins;
-    if (checkUseds) {
-      currentCoins = await mainDB.isar.sparkCoins
-          .where()
-          .walletIdEqualToAnyLTagHash(walletId)
-          .filter()
-          .isUsedEqualTo(false)
-          .findAll();
-    }
+    final Map<int, List<List<String>>> rawCoinsBySetId = {};
 
     final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
     for (int i = 1; i <= latestSparkCoinId; i++) {
@@ -769,34 +786,62 @@ mixin SparkInterface
           .toList();
 
       if (coinsRaw.isNotEmpty) {
-        final myCoins = await compute(
-          _identifyCoins,
-          (
-            anonymitySetCoins: coinsRaw,
-            groupId: i,
-            spentCoinTags: spentCoinTags,
-            privateKeyHexSet: privateKeyHexSet,
-            walletId: walletId,
-            isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
-          ),
-        );
-
-        if (checkUseds && currentCoins != null) {
-          for (final coin in currentCoins) {
-            if (spentCoinTags.contains(coin.lTagHash)) {
-              myCoins.add(coin.copyWith(isUsed: true));
-            }
-          }
-        }
-
-        // update wallet spark coins in isar
-        await _addOrUpdateSparkCoins(myCoins);
+        rawCoinsBySetId[i] = coinsRaw;
       }
 
       groupIdTimestampUTCMap[i] = max(
         lastCheckedTimeStampUTC,
         info?.timestampUTC ?? lastCheckedTimeStampUTC,
      );
     }
+
+    final List<SparkCoin> newlyIdCoins = [];
+    for (final groupId in rawCoinsBySetId.keys) {
+      final myCoins = await compute(
+        _identifyCoins,
+        (
+          anonymitySetCoins: rawCoinsBySetId[groupId]!,
+          groupId: groupId,
+          privateKeyHexSet: privateKeyHexSet,
+          walletId: walletId,
+          isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
+        ),
+      );
+      newlyIdCoins.addAll(myCoins);
+    }
+
+    await _checkAndMarkCoinsUsedInDB(coinsNotInDbYet: newlyIdCoins);
+  }
+
+  Future<void> _checkAndMarkCoinsUsedInDB({
+    List<SparkCoin> coinsNotInDbYet = const [],
+  }) async {
+    final List<SparkCoin> coins = await mainDB.isar.sparkCoins
+        .where()
+        .walletIdEqualToAnyLTagHash(walletId)
+        .filter()
+        .isUsedEqualTo(false)
+        .findAll();
+
+    final List<SparkCoin> coinsToWrite = [];
+
+    final spentCoinTags = await FiroCacheCoordinator.getUsedCoinTags(0);
+
+    for (final coin in coins) {
+      if (spentCoinTags.contains(coin.lTagHash)) {
+        coinsToWrite.add(coin.copyWith(isUsed: true));
+      }
+    }
+    for (final coin in coinsNotInDbYet) {
+      if (spentCoinTags.contains(coin.lTagHash)) {
+        coinsToWrite.add(coin.copyWith(isUsed: true));
+      } else {
+        coinsToWrite.add(coin);
+      }
+    }
+
+    // update wallet spark coins in isar
+    await _addOrUpdateSparkCoins(coinsToWrite);
   }
 
   // modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
@@ -1713,7 +1758,6 @@ Future<List<SparkCoin>> _identifyCoins(
   ({
     List anonymitySetCoins,
     int groupId,
-    Set spentCoinTags,
     Set privateKeyHexSet,
     String walletId,
     bool isTestNet,
@@ -1756,7 +1800,7 @@ Future<List<SparkCoin>> _identifyCoins(
           SparkCoin(
             walletId: args.walletId,
             type: coinType,
-            isUsed: args.spentCoinTags.contains(coin.lTagHash!),
+            isUsed: false,
             groupId: args.groupId,
             nonce: coin.nonceHex?.toUint8ListFromHex,
             address: coin.address!,
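The net effect of the refactor: _identifyCoins no longer needs the spent-tag set at all (newly identified coins start as isUsed: false), and _checkAndMarkCoinsUsedInDB applies a single rule to both existing and newly identified coins. That rule, reduced to a sketch (illustrative; SparkCoin and copyWith as used in the diff above):

    // Illustrative only: a coin is spent iff its lTagHash is in the used-tags set.
    SparkCoin markIfUsed(SparkCoin coin, Set<String> spentCoinTags) =>
        spentCoinTags.contains(coin.lTagHash)
            ? coin.copyWith(isUsed: true)
            : coin;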