diff --git a/lib/db/db_version_migration.dart b/lib/db/db_version_migration.dart
index fe563f5ea..8fc4c6963 100644
--- a/lib/db/db_version_migration.dart
+++ b/lib/db/db_version_migration.dart
@@ -422,6 +422,20 @@ class DbVersionMigrator with WalletDB {
         // try to continue migrating
         return await migrate(12, secureStore: secureStore);
 
+      case 12:
+        // migrate
+        await _v12(secureStore);
+
+        // update version
+        await DB.instance.put<dynamic>(
+          boxName: DB.boxNameDBInfo,
+          key: "hive_data_version",
+          value: 13,
+        );
+
+        // try to continue migrating
+        return await migrate(13, secureStore: secureStore);
+
       default:
         // finally return
         return;
@@ -701,4 +715,13 @@ class DbVersionMigrator with WalletDB {
   Future<void> _v11(SecureStorageInterface secureStore) async {
     await migrateWalletsToIsar(secureStore: secureStore);
   }
+
+  Future<void> _v12(SecureStorageInterface secureStore) async {
+    await DB.instance.deleteBoxFromDisk(
+      boxName: "firo_anonymitySetSparkCache",
+    );
+    await DB.instance.deleteBoxFromDisk(
+      boxName: "firoTestNet_anonymitySetSparkCache",
+    );
+  }
 }
diff --git a/lib/db/hive/db.dart b/lib/db/hive/db.dart
index 3f1c86cb7..7fe515c4a 100644
--- a/lib/db/hive/db.dart
+++ b/lib/db/hive/db.dart
@@ -13,6 +13,7 @@ import 'dart:isolate';
 import 'package:cw_core/wallet_info.dart' as xmr;
 import 'package:hive/hive.dart';
 import 'package:mutex/mutex.dart';
+
 import '../../app_config.dart';
 import '../../models/exchange/response_objects/trade.dart';
 import '../../models/node_model.dart';
@@ -55,8 +56,6 @@ class DB {
   // firo only
   String _boxNameSetCache({required CryptoCurrency currency}) =>
       "${currency.identifier}_anonymitySetCache";
-  String _boxNameSetSparkCache({required CryptoCurrency currency}) =>
-      "${currency.identifier}_anonymitySetSparkCache";
   String _boxNameUsedSerialsCache({required CryptoCurrency currency}) =>
       "${currency.identifier}_usedSerialsCache";
   String _boxNameSparkUsedCoinsTagsCache({required CryptoCurrency currency}) =>
@@ -81,7 +80,6 @@ class DB {
 
   final Map<String, Box<dynamic>> _txCacheBoxes = {};
   final Map<String, Box<dynamic>> _setCacheBoxes = {};
-  final Map<String, Box<dynamic>> _setSparkCacheBoxes = {};
   final Map<String, Box<dynamic>> _usedSerialsCacheBoxes = {};
   final Map<String, Box<dynamic>> _getSparkUsedCoinsTagsCacheBoxes = {};
 
@@ -213,16 +211,6 @@ class DB {
         await Hive.openBox(_boxNameSetCache(currency: currency));
   }
 
-  Future<Box<dynamic>> getSparkAnonymitySetCacheBox({
-    required CryptoCurrency currency,
-  }) async {
-    if (_setSparkCacheBoxes[currency.identifier]?.isOpen != true) {
-      _setSparkCacheBoxes.remove(currency.identifier);
-    }
-    return _setSparkCacheBoxes[currency.identifier] ??=
-        await Hive.openBox(_boxNameSetSparkCache(currency: currency));
-  }
-
   Future<void> closeAnonymitySetCacheBox({
     required CryptoCurrency currency,
   }) async {
@@ -266,9 +254,6 @@ class DB {
       await deleteAll(boxName: _boxNameTxCache(currency: currency));
       if (currency is Firo) {
         await deleteAll(boxName: _boxNameSetCache(currency: currency));
-        await deleteAll(
-          boxName: _boxNameSetSparkCache(currency: currency),
-        );
         await deleteAll(
           boxName: _boxNameUsedSerialsCache(currency: currency),
         );
diff --git a/lib/db/sqlite/firo_cache.dart b/lib/db/sqlite/firo_cache.dart
new file mode 100644
index 000000000..c8ac015e8
--- /dev/null
+++ b/lib/db/sqlite/firo_cache.dart
@@ -0,0 +1,330 @@
+import 'dart:async';
+import 'dart:io';
+
+import 'package:flutter/foundation.dart';
+import 'package:sqlite3/sqlite3.dart';
+
+import '../../electrumx_rpc/electrumx_client.dart';
+import '../../utilities/logger.dart';
+import '../../utilities/stack_file_system.dart';
+
+/// Temporary debugging log function for this file
+void _debugLog(Object? object) {
+  if (kDebugMode) {
+    Logging.instance.log(
+      object,
+      level: LogLevel.Fatal,
+    );
+  }
+}
+
+/// Wrapper class for [FiroCache] as [FiroCache] should eventually be handled
+/// in a background isolate and [FiroCacheCoordinator] should manage that
+/// isolate
+abstract class FiroCacheCoordinator {
+  static Future<void> init() => _FiroCache.init();
+
+  static Future<void> runFetchAndUpdateSparkAnonSetCacheForGroupId(
+    int groupId,
+    ElectrumXClient client,
+  ) async {
+    final blockhashResult =
+        await FiroCacheCoordinator.getLatestSetInfoForGroupId(
+      groupId,
+    );
+    final blockHash = blockhashResult?.blockHash ?? "";
+
+    final json = await client.getSparkAnonymitySet(
+      coinGroupId: groupId.toString(),
+      startBlockHash: blockHash,
+    );
+
+    await _FiroCache._updateWith(json, groupId);
+  }
+
+  static Future<ResultSet> getSetCoinsForGroupId(
+    int groupId, {
+    int? newerThanTimeStamp,
+  }) async {
+    return await _FiroCache._getSetCoinsForGroupId(
+      groupId,
+      newerThanTimeStamp: newerThanTimeStamp,
+    );
+  }
+
+  static Future<
+      ({
+        String blockHash,
+        String setHash,
+        int timestampUTC,
+      })?> getLatestSetInfoForGroupId(
+    int groupId,
+  ) async {
+    final result = await _FiroCache._getLatestSetInfoForGroupId(groupId);
+
+    if (result.isEmpty) {
+      return null;
+    }
+
+    return (
+      blockHash: result.first["blockHash"] as String,
+      setHash: result.first["setHash"] as String,
+      timestampUTC: result.first["timestampUTC"] as int,
+    );
+  }
+}
+
+abstract class _FiroCache {
+  static const String sqliteDbFileName = "firo_ex_cache.sqlite3";
+
+  static Database? _db;
+  static Database get db {
+    if (_db == null) {
+      throw Exception(
+        "FiroCache.init() must be called before accessing FiroCache.db!",
+      );
+    }
+    return _db!;
+  }
+
+  static Future<void>? _initFuture;
+  static Future<void> init() => _initFuture ??= _init();
+
+  static Future<void> _init() async {
+    final sqliteDir = await StackFileSystem.applicationSQLiteDirectory();
+
+    final file = File("${sqliteDir.path}/$sqliteDbFileName");
+
+    final exists = await file.exists();
+    if (!exists) {
+      await _createDb(file.path);
+    }
+
+    _db = sqlite3.open(
+      file.path,
+      mode: OpenMode.readWrite,
+    );
+  }
+
+  static Future<void> _createDb(String file) async {
+    final db = sqlite3.open(
+      file,
+      mode: OpenMode.readWriteCreate,
+    );
+
+    db.execute(
+      """
+      CREATE TABLE SparkSet (
+        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+        blockHash TEXT NOT NULL,
+        setHash TEXT NOT NULL,
+        groupId INTEGER NOT NULL,
+        UNIQUE (blockHash, setHash, groupId)
+      );
+
+      CREATE TABLE SparkCoin (
+        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+        serialized TEXT NOT NULL,
+        txHash TEXT NOT NULL,
+        context TEXT NOT NULL,
+        UNIQUE(serialized, txHash, context)
+      );
+
+      CREATE TABLE SparkSetCoins (
+        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
+        timestampUTC INTEGER NOT NULL,
+        setId INTEGER NOT NULL,
+        coinId INTEGER NOT NULL,
+        FOREIGN KEY (setId) REFERENCES SparkSet(id),
+        FOREIGN KEY (coinId) REFERENCES SparkCoin(id)
+      );
+      """,
+    );
+
+    db.dispose();
+  }
+
+  // ===========================================================================
+  // =============== Spark anonymity set queries ===============================
+
+  static Future<ResultSet> _getSetCoinsForGroupId(
+    int groupId, {
+    int? newerThanTimeStamp,
+  }) async {
+    String query = """
+      SELECT sc.id, sc.serialized, sc.txHash, sc.context
+      FROM SparkSetCoins AS ssc
+      JOIN SparkSet AS ss ON ssc.setId = ss.id
+      JOIN SparkCoin AS sc ON ssc.coinId = sc.id
+      WHERE ss.groupId = $groupId
+    """;
+
+    if (newerThanTimeStamp != null) {
+      query += " AND ssc.timestampUTC"
+          " > $newerThanTimeStamp";
+    }
+
+    return db.select("$query;");
+  }
+
+  static Future<ResultSet> _getLatestSetInfoForGroupId(
+    int groupId,
+  ) async {
+    final query = """
+      SELECT ss.blockHash, ss.setHash, ssc.timestampUTC
+      FROM SparkSet ss
+      JOIN SparkSetCoins ssc ON ss.id = ssc.setId
+      WHERE ss.groupId = $groupId
+      ORDER BY ssc.timestampUTC DESC
+      LIMIT 1;
+    """;
+
+    return db.select("$query;");
+  }
+
+  // ===========================================================================
+  // ===========================================================================
+
+  static int _upCount = 0;
+
+  /// update the sqlite cache
+  /// Expected json format:
+  /// {
+  ///   "blockHash": "someBlockHash",
+  ///   "setHash": "someSetHash",
+  ///   "coins": [
+  ///     ["serialized1", "hash1", "context1"],
+  ///     ["serialized2", "hash2", "context2"],
+  ///     ...
+  ///     ["serialized3", "hash3", "context3"],
+  ///     ["serialized4", "hash4", "context4"],
+  ///   ],
+  /// }
+  ///
+  /// returns true if successful, otherwise false
+  static Future<bool> _updateWith(
+    Map<String, dynamic> json,
+    int groupId,
+  ) async {
+    final start = DateTime.now();
+    _upCount++;
+    final blockHash = json["blockHash"] as String;
+    final setHash = json["setHash"] as String;
+
+    _debugLog(
+      "$_upCount _updateWith() called where groupId=$groupId,"
+      " blockHash=$blockHash, setHash=$setHash",
+    );
+
+    final checkResult = db.select(
+      """
+        SELECT *
+        FROM SparkSet
+        WHERE blockHash = ? AND setHash = ? AND groupId = ?;
+      """,
+      [
+        blockHash,
+        setHash,
+        groupId,
+      ],
+    );
+
+    _debugLog("$_upCount _updateWith() called where checkResult=$checkResult");
+
+    if (checkResult.isNotEmpty) {
+      _debugLog(
+        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+      );
+      // already up to date
+      return true;
+    }
+
+    if ((json["coins"] as List).isEmpty) {
+      _debugLog("$_upCount _updateWith() called where json[coins] is Empty");
+      _debugLog(
+        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+      );
+      // no coins to actually insert
+      return true;
+    }
+
+    final coins = (json["coins"] as List)
+        .map(
+          (e) => [
+            e[0] as String,
+            e[1] as String,
+            e[2] as String,
+          ],
+        )
+        .toList();
+
+    final timestamp = DateTime.now().toUtc().millisecondsSinceEpoch ~/ 1000;
+
+    db.execute("BEGIN;");
+    try {
+      db.execute(
+        """
+          INSERT INTO SparkSet (blockHash, setHash, groupId)
+          VALUES (?, ?, ?);
+        """,
+        [blockHash, setHash, groupId],
+      );
+      final setId = db.lastInsertRowId;
+
+      for (final coin in coins) {
+        int coinId;
+        try {
+          db.execute(
+            """
+              INSERT INTO SparkCoin (serialized, txHash, context)
+              VALUES (?, ?, ?);
+            """,
+            coin,
+          );
+          coinId = db.lastInsertRowId;
+        } on SqliteException catch (e) {
+          if (e.extendedResultCode == 2067) {
+            final result = db.select(
+              """
+                SELECT id
+                FROM SparkCoin
+                WHERE serialized = ? AND txHash = ? AND context = ?;
+              """,
+              coin,
+            );
+            coinId = result.first["id"] as int;
+          } else {
+            rethrow;
+          }
+        }
+
+        db.execute(
+          """
+            INSERT INTO SparkSetCoins (timestampUTC, setId, coinId)
+            VALUES (?, ?, ?);
+          """,
+          [timestamp, setId, coinId],
+        );
+      }
+
+      db.execute("COMMIT;");
+      _debugLog("$_upCount _updateWith() COMMITTED");
+      _debugLog(
+        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+      );
+      return true;
+    } catch (e, s) {
+      db.execute("ROLLBACK;");
+      _debugLog("$_upCount _updateWith() ROLLBACK");
+      _debugLog(
+        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
+      );
+      // NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
+      Logging.instance.log(
+        "$e\n$s",
+        level: LogLevel.Error,
+      );
+    }
+
+    return false;
+  }
+}
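A quick sketch of how the new coordinator is meant to be driven (a reviewer-style note, not part of the patch): all calls below are the ones defined in firo_cache.dart above, while `exampleHydrate` and the use of group 1 are illustrative only. Re-running the fetch for an unchanged set is effectively a no-op because _updateWith() bails out on the UNIQUE (blockHash, setHash, groupId) check before inserting anything.

    // Hypothetical driver for the coordinator API above (illustrative only).
    Future<void> exampleHydrate(ElectrumXClient client) async {
      // Memoized: repeated/concurrent calls share one init future.
      await FiroCacheCoordinator.init();

      // Fetch the delta since the last cached block hash and persist it.
      await FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
        1,
        client,
      );

      // Read back what is now cached for group 1.
      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(1);
      final coins = await FiroCacheCoordinator.getSetCoinsForGroupId(1);
      print("group 1 @ block ${info?.blockHash}: ${coins.length} coins");
    }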
diff --git a/lib/electrumx_rpc/cached_electrumx_client.dart b/lib/electrumx_rpc/cached_electrumx_client.dart
index fb4ff4a87..c2dbbb6b8 100644
--- a/lib/electrumx_rpc/cached_electrumx_client.dart
+++ b/lib/electrumx_rpc/cached_electrumx_client.dart
@@ -116,70 +116,6 @@ class CachedElectrumXClient {
     }
   }
 
-  Future<Map<String, dynamic>> getSparkAnonymitySet({
-    required String groupId,
-    String blockhash = "",
-    required CryptoCurrency cryptoCurrency,
-    required bool useOnlyCacheIfNotEmpty,
-  }) async {
-    try {
-      final box = await DB.instance.getSparkAnonymitySetCacheBox(
-        currency: cryptoCurrency,
-      );
-      final cachedSet = box.get(groupId) as Map?;
-
-      Map<String, dynamic> set;
-
-      // null check to see if there is a cached set
-      if (cachedSet == null) {
-        set = {
-          "coinGroupID": int.parse(groupId),
-          "blockHash": blockhash,
-          "setHash": "",
-          "coins": [],
-        };
-      } else {
-        set = Map<String, dynamic>.from(cachedSet);
-        if (useOnlyCacheIfNotEmpty) {
-          return set;
-        }
-      }
-
-      final newSet = await electrumXClient.getSparkAnonymitySet(
-        coinGroupId: groupId,
-        startBlockHash: set["blockHash"] as String,
-      );
-
-      // update set with new data
-      if (newSet["setHash"] != "" && set["setHash"] != newSet["setHash"]) {
-        set["setHash"] = newSet["setHash"];
-        set["blockHash"] = newSet["blockHash"];
-        for (int i = (newSet["coins"] as List).length - 1; i >= 0; i--) {
-          // TODO verify this is correct (or append?)
-          if ((set["coins"] as List)
-              .where((e) => e[0] == newSet["coins"][i][0])
-              .isEmpty) {
-            set["coins"].insert(0, newSet["coins"][i]);
-          }
-        }
-        // save set to db
-        await box.put(groupId, set);
-        Logging.instance.log(
-          "Updated current anonymity set for ${cryptoCurrency.identifier} with group ID $groupId",
-          level: LogLevel.Info,
-        );
-      }
-
-      return set;
-    } catch (e, s) {
-      Logging.instance.log(
-        "Failed to process CachedElectrumX.getSparkAnonymitySet(): $e\n$s",
-        level: LogLevel.Error,
-      );
-      rethrow;
-    }
-  }
-
   String base64ToHex(String source) =>
       base64Decode(LineSplitter.split(source).join())
           .map((e) => e.toRadixString(16).padLeft(2, '0'))
diff --git a/lib/electrumx_rpc/electrumx_client.dart b/lib/electrumx_rpc/electrumx_client.dart
index afe44ec14..6d473a71f 100644
--- a/lib/electrumx_rpc/electrumx_client.dart
+++ b/lib/electrumx_rpc/electrumx_client.dart
@@ -20,7 +20,8 @@ import 'package:event_bus/event_bus.dart';
 import 'package:flutter/foundation.dart';
 import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
 import 'package:mutex/mutex.dart';
-import 'client_manager.dart';
+import 'package:stream_channel/stream_channel.dart';
+
 import '../exceptions/electrumx/no_such_transaction.dart';
 import '../services/event_bus/events/global/tor_connection_status_changed_event.dart';
 import '../services/event_bus/events/global/tor_status_changed_event.dart';
@@ -29,7 +30,7 @@ import '../services/tor_service.dart';
 import '../utilities/logger.dart';
 import '../utilities/prefs.dart';
 import '../wallets/crypto_currency/crypto_currency.dart';
-import 'package:stream_channel/stream_channel.dart';
+import 'client_manager.dart';
 
 class WifiOnlyException implements Exception {}
 
@@ -910,10 +911,7 @@ class ElectrumXClient {
     String? requestID,
   }) async {
     try {
-      Logging.instance.log(
-        "attempting to fetch spark.getsparkanonymityset...",
-        level: LogLevel.Info,
-      );
+      final start = DateTime.now();
       await _checkElectrumAdapter();
       final Map<String, dynamic> response =
           await (getElectrumAdapter() as FiroElectrumClient)
              .getSparkAnonymitySet(
        coinGroupId: coinGroupId,
        startBlockHash: startBlockHash,
      );
       Logging.instance.log(
-        "Fetching spark.getsparkanonymityset finished",
+        "Finished ElectrumXClient.getSparkAnonymitySet(coinGroupId"
+        "=$coinGroupId, startBlockHash=$startBlockHash). "
+        "Duration=${DateTime.now().difference(start)}",
         level: LogLevel.Info,
       );
       return response;
diff --git a/lib/main.dart b/lib/main.dart
index 52370d13d..ae7d26466 100644
--- a/lib/main.dart
+++ b/lib/main.dart
@@ -35,6 +35,7 @@ import 'app_config.dart';
 import 'db/db_version_migration.dart';
 import 'db/hive/db.dart';
 import 'db/isar/main_db.dart';
+import 'db/sqlite/firo_cache.dart';
 import 'models/exchange/change_now/exchange_transaction.dart';
 import 'models/exchange/change_now/exchange_transaction_status.dart';
 import 'models/exchange/response_objects/trade.dart';
@@ -200,6 +201,7 @@ void main(List<String> args) async {
   }
 
   await StackFileSystem.initThemesDir();
+  await FiroCacheCoordinator.init();
 
   // Desktop migrate handled elsewhere (currently desktop_login_view.dart)
   if (!Util.isDesktop) {
diff --git a/lib/pages/settings_views/global_settings_view/hidden_settings.dart b/lib/pages/settings_views/global_settings_view/hidden_settings.dart
index 973dab913..b389f31a3 100644
--- a/lib/pages/settings_views/global_settings_view/hidden_settings.dart
+++ b/lib/pages/settings_views/global_settings_view/hidden_settings.dart
@@ -13,6 +13,7 @@ import 'dart:async';
 import 'package:flutter/material.dart';
 import 'package:flutter_riverpod/flutter_riverpod.dart';
 import 'package:flutter_svg/flutter_svg.dart';
+
 import '../../../notifications/show_flush_bar.dart';
 import '../../../providers/global/debug_service_provider.dart';
 import '../../../providers/providers.dart';
@@ -284,28 +285,33 @@ class HiddenSettings extends StatelessWidget {
                         );
                       },
                     ),
-                    const SizedBox(
-                      height: 12,
-                    ),
-                    Consumer(
-                      builder: (_, ref, __) {
-                        return GestureDetector(
-                          onTap: () async {
-                            //
-                          },
-                          child: RoundedWhiteContainer(
-                            child: Text(
-                              "Do nothing",
-                              style: STextStyles.button(context).copyWith(
-                                color: Theme.of(context)
-                                    .extension<StackColors>()!
-                                    .accentColorDark,
-                              ),
-                            ),
-                          ),
-                        );
-                      },
-                    ),
+                    // const SizedBox(
+                    //   height: 12,
+                    // ),
+                    // Consumer(
+                    //   builder: (_, ref, __) {
+                    //     return GestureDetector(
+                    //       onTap: () async {
+                    //         await showLoading(
+                    //           whileFuture: FiroCache.init(),
+                    //           context: context,
+                    //           rootNavigator: true,
+                    //           message: "initializing firo cache",
+                    //         );
+                    //       },
+                    //       child: RoundedWhiteContainer(
+                    //         child: Text(
+                    //           "init firo_cache",
+                    //           style: STextStyles.button(context).copyWith(
+                    //             color: Theme.of(context)
+                    //                 .extension<StackColors>()!
+                    //                 .accentColorDark,
+                    //           ),
+                    //         ),
+                    //       ),
+                    //     );
+                    //   },
+                    // ),
                   ],
                 ),
               ),
diff --git a/lib/utilities/constants.dart b/lib/utilities/constants.dart
index d68760139..6b64d4a73 100644
--- a/lib/utilities/constants.dart
+++ b/lib/utilities/constants.dart
@@ -40,7 +40,7 @@ abstract class Constants {
   // Enable Logger.print statements
   static const bool disableLogger = false;
 
-  static const int currentDataVersion = 12;
+  static const int currentDataVersion = 13;
 
   static const int rescanV1 = 1;
 
diff --git a/lib/utilities/stack_file_system.dart b/lib/utilities/stack_file_system.dart
index 3675813d9..281bd2a8f 100644
--- a/lib/utilities/stack_file_system.dart
+++ b/lib/utilities/stack_file_system.dart
@@ -91,6 +91,19 @@ abstract class StackFileSystem {
     }
   }
 
+  static Future<Directory> applicationSQLiteDirectory() async {
+    final root = await applicationRootDirectory();
+    if (Util.isDesktop) {
+      final dir = Directory("${root.path}/sqlite");
+      if (!dir.existsSync()) {
+        await dir.create();
+      }
+      return dir;
+    } else {
+      return root;
+    }
+  }
+
   static Future<Directory> applicationTorDirectory() async {
     final root = await applicationRootDirectory();
     if (Util.isDesktop) {
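Given applicationSQLiteDirectory() above, the cache database opened by _FiroCache lands in a per-platform location. A small sketch of the resolved path, assuming sqliteDbFileName stays "firo_ex_cache.sqlite3" as defined in firo_cache.dart:

    import 'dart:io';

    // Desktop: <appRoot>/sqlite/firo_ex_cache.sqlite3 (subdir created on demand).
    // Mobile:  <appRoot>/firo_ex_cache.sqlite3 (the app root itself is returned).
    Future<File> exampleCacheFile() async {
      final dir = await StackFileSystem.applicationSQLiteDirectory();
      return File("${dir.path}/firo_ex_cache.sqlite3");
    }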
diff --git a/lib/wallets/wallet/impl/firo_wallet.dart b/lib/wallets/wallet/impl/firo_wallet.dart
index dce8cd9fa..918ec563b 100644
--- a/lib/wallets/wallet/impl/firo_wallet.dart
+++ b/lib/wallets/wallet/impl/firo_wallet.dart
@@ -6,6 +6,7 @@ import 'package:decimal/decimal.dart';
 import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
 import 'package:isar/isar.dart';
 
+import '../../../db/sqlite/firo_cache.dart';
 import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
 import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
 import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';
@@ -587,6 +588,8 @@ class FiroWallet extends Bip39HDWallet
 
   @override
   Future<void> recover({required bool isRescan}) async {
+    groupIdTimestampUTCMap = {};
+    final start = DateTime.now();
     final root = await getRootHDNode();
 
     final List<Future<({int index, List<Address> addresses})>> receiveFutures =
@@ -620,11 +623,15 @@ class FiroWallet extends Bip39HDWallet
 
       // spark
       final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
-      final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
-        groupId: latestSparkCoinId.toString(),
-        cryptoCurrency: info.coin,
-        useOnlyCacheIfNotEmpty: false,
-      );
+      final List<Future<void>> sparkAnonSetFutures = [];
+      for (int i = 1; i <= latestSparkCoinId; i++) {
+        sparkAnonSetFutures.add(
+          FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
+            i,
+            electrumXClient,
+          ),
+        );
+      }
+
       final sparkUsedCoinTagsFuture =
           electrumXCachedClient.getSparkUsedCoinsTags(
         cryptoCurrency: info.coin,
@@ -739,8 +746,8 @@ class FiroWallet extends Bip39HDWallet
       final futureResults = await Future.wait([
         usedSerialNumbersFuture,
         setDataMapFuture,
-        sparkAnonSetFuture,
         sparkUsedCoinTagsFuture,
+        ...sparkAnonSetFutures,
       ]);
 
       // lelantus
@@ -748,8 +755,7 @@ class FiroWallet extends Bip39HDWallet
       final setDataMap = futureResults[1] as Map;
 
       // spark
-      final sparkAnonymitySet = futureResults[2] as Map;
-      final sparkSpentCoinTags = futureResults[3] as Set<String>;
+      final sparkSpentCoinTags = futureResults[2] as Set<String>;
 
       if (Util.isDesktop) {
         await Future.wait([
           recoverLelantusWallet(
             setDataMap: setDataMap,
           ),
           recoverSparkWallet(
-            anonymitySet: sparkAnonymitySet,
             spentCoinTags: sparkSpentCoinTags,
+            latestSparkCoinId: latestSparkCoinId,
           ),
         ]);
       } else {
         await recoverLelantusWallet(
           setDataMap: setDataMap,
         );
         await recoverSparkWallet(
-          anonymitySet: sparkAnonymitySet,
           spentCoinTags: sparkSpentCoinTags,
+          latestSparkCoinId: latestSparkCoinId,
         );
       }
     });
 
     unawaited(refresh());
+
+    Logging.instance.log(
+      "Firo recover for "
+      "${info.name}: ${DateTime.now().difference(start)}",
+      level: LogLevel.Info,
+    );
   } catch (e, s) {
     Logging.instance.log(
       "Exception rethrown from electrumx_mixin recover(): $e\n$s",
diff --git a/lib/wallets/wallet/wallet.dart b/lib/wallets/wallet/wallet.dart
index 8fa0b9ddf..dd96aa1f4 100644
--- a/lib/wallets/wallet/wallet.dart
+++ b/lib/wallets/wallet/wallet.dart
@@ -473,6 +473,7 @@ abstract class Wallet {
     if (refreshMutex.isLocked) {
       return;
     }
+    final start = DateTime.now();
 
     try {
       // this acquire should be almost instant due to above check.
@@ -608,6 +609,12 @@ abstract class Wallet {
       );
     } finally {
       refreshMutex.release();
+
+      Logging.instance.log(
+        "Refresh for "
+        "${info.name}: ${DateTime.now().difference(start)}",
+        level: LogLevel.Info,
+      );
     }
   }
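One subtlety in the firo_wallet.dart hunks above: Future.wait returns its results in input order, so removing sparkAnonSetFuture from slot 2 and spreading the spark cache futures at the tail of the list is what makes futureResults[2] the used-coin-tags set. A minimal illustration with placeholder values (the string values and function name are hypothetical):

    // Future.wait preserves input order; appended futures land at the tail.
    Future<void> exampleOrdering() async {
      final results = await Future.wait<Object?>([
        Future.value("serials"), // index 0: usedSerialNumbersFuture
        Future.value("setData"), // index 1: setDataMapFuture
        Future.value("tags"), // index 2: sparkUsedCoinTagsFuture (was index 3)
        Future.value(null), // appended: ...sparkAnonSetFutures (Future<void>)
      ]);
      assert(results[2] == "tags");
    }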
diff --git a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
index 591869cfe..8b94e941d 100644
--- a/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
+++ b/lib/wallets/wallet/wallet_mixin_interfaces/spark_interface.dart
@@ -7,6 +7,7 @@ import 'package:flutter/foundation.dart';
 import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
 import 'package:isar/isar.dart';
 
+import '../../../db/sqlite/firo_cache.dart';
 import '../../../models/balance.dart';
 import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
 import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
@@ -259,17 +260,39 @@ mixin SparkInterface
     final List<Map<String, dynamic>> setMaps = [];
     final List<({int groupId, String blockHash})> idAndBlockHashes = [];
     for (int i = 1; i <= currentId; i++) {
-      final set = await electrumXCachedClient.getSparkAnonymitySet(
-        groupId: i.toString(),
-        cryptoCurrency: info.coin,
-        useOnlyCacheIfNotEmpty: true,
+      final resultSet = await FiroCacheCoordinator.getSetCoinsForGroupId(i);
+      if (resultSet.isEmpty) {
+        continue;
+      }
+
+      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
+        i,
       );
-      set["coinGroupID"] = i;
-      setMaps.add(set);
+      if (info == null) {
+        throw Exception("The `info` should never be null here");
+      }
+
+      final Map<String, dynamic> setData = {
+        "blockHash": info.blockHash,
+        "setHash": info.setHash,
+        "coinGroupID": i,
+        "coins": resultSet
+            .map(
+              (row) => [
+                row["serialized"] as String,
+                row["txHash"] as String,
+                row["context"] as String,
+              ],
+            )
+            .toList(),
+      };
+
+      setData["coinGroupID"] = i;
+      setMaps.add(setData);
       idAndBlockHashes.add(
         (
           groupId: i,
-          blockHash: set["blockHash"] as String,
+          blockHash: setData["blockHash"] as String,
         ),
       );
     }
@@ -607,78 +630,12 @@ mixin SparkInterface
   }
 
   Future<void> refreshSparkData() async {
-    final sparkAddresses = await mainDB.isar.addresses
-        .where()
-        .walletIdEqualTo(walletId)
-        .filter()
-        .typeEqualTo(AddressType.spark)
-        .findAll();
-
-    final Set<String> paths =
-        sparkAddresses.map((e) => e.derivationPath!.value).toSet();
-
     try {
-      final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
-
-      final anonymitySetFuture = electrumXCachedClient.getSparkAnonymitySet(
-        groupId: latestSparkCoinId.toString(),
-        cryptoCurrency: info.coin,
-        useOnlyCacheIfNotEmpty: false,
-      );
-
-      final spentCoinTagsFuture = electrumXCachedClient.getSparkUsedCoinsTags(
+      final spentCoinTags = await electrumXCachedClient.getSparkUsedCoinsTags(
         cryptoCurrency: info.coin,
       );
-      final futureResults = await Future.wait([
-        anonymitySetFuture,
-        spentCoinTagsFuture,
-      ]);
-
-      final anonymitySet = futureResults[0] as Map<String, dynamic>;
-      final spentCoinTags = futureResults[1] as Set<String>;
-
-      final List<SparkCoin> myCoins = [];
-
-      if (anonymitySet["coins"] is List &&
-          (anonymitySet["coins"] as List).isNotEmpty) {
-        final root = await getRootHDNode();
-        final privateKeyHexSet = paths
-            .map(
-              (e) => root.derivePath(e).privateKey.data.toHex,
-            )
-            .toSet();
-
-        final identifiedCoins = await compute(
-          _identifyCoins,
-          (
-            anonymitySetCoins: anonymitySet["coins"] as List,
-            groupId: latestSparkCoinId,
-            spentCoinTags: spentCoinTags,
-            privateKeyHexSet: privateKeyHexSet,
-            walletId: walletId,
-            isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
-          ),
-        );
-
-        myCoins.addAll(identifiedCoins);
-      }
-
-      // check current coins
-      final currentCoins = await mainDB.isar.sparkCoins
-          .where()
-          .walletIdEqualToAnyLTagHash(walletId)
-          .filter()
-          .isUsedEqualTo(false)
-          .findAll();
-      for (final coin in currentCoins) {
-        if (spentCoinTags.contains(coin.lTagHash)) {
-          myCoins.add(coin.copyWith(isUsed: true));
-        }
-      }
-
-      // update wallet spark coins in isar
-      await _addOrUpdateSparkCoins(myCoins);
+      await _checkAndUpdateCoins(spentCoinTags, true);
 
       // refresh spark balance
       await refreshSparkBalance();
@@ -734,11 +691,14 @@ mixin SparkInterface
     );
   }
 
+  // TODO: look into persistence for this?
+  Map<int, int> groupIdTimestampUTCMap = {};
+
   /// Should only be called within the standard wallet [recover] function due to
   /// mutex locking. Otherwise behaviour MAY be undefined.
   Future<void> recoverSparkWallet({
-    required Map anonymitySet,
     required Set<String> spentCoinTags,
+    required int latestSparkCoinId,
   }) async {
     // generate spark addresses if non existing
     if (await getCurrentReceivingSparkAddress() == null) {
       final address = await generateNextSparkAddress();
       await mainDB.putAddress(address);
     }
 
-    final sparkAddresses = await mainDB.isar.addresses
-        .where()
-        .walletIdEqualTo(walletId)
-        .filter()
-        .typeEqualTo(AddressType.spark)
-        .findAll();
-
-    final Set<String> paths =
-        sparkAddresses.map((e) => e.derivationPath!.value).toSet();
-
     try {
-      final root = await getRootHDNode();
-      final privateKeyHexSet =
-          paths.map((e) => root.derivePath(e).privateKey.data.toHex).toSet();
-
-      final myCoins = await compute(
-        _identifyCoins,
-        (
-          anonymitySetCoins: anonymitySet["coins"] as List,
-          groupId: anonymitySet["coinGroupID"] as int,
-          spentCoinTags: spentCoinTags,
-          privateKeyHexSet: privateKeyHexSet,
-          walletId: walletId,
-          isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
-        ),
-      );
-
-      // update wallet spark coins in isar
-      await _addOrUpdateSparkCoins(myCoins);
+      await _checkAndUpdateCoins(spentCoinTags, false);
 
       // refresh spark balance
       await refreshSparkBalance();
@@ -787,6 +720,85 @@ mixin SparkInterface
     }
   }
 
+  Future<void> _checkAndUpdateCoins(
+    Set<String> spentCoinTags,
+    bool checkUseds,
+  ) async {
+    final sparkAddresses = await mainDB.isar.addresses
+        .where()
+        .walletIdEqualTo(walletId)
+        .filter()
+        .typeEqualTo(AddressType.spark)
+        .findAll();
+    final root = await getRootHDNode();
+    final Set<String> privateKeyHexSet = sparkAddresses
+        .map(
+          (e) => root.derivePath(e.derivationPath!.value).privateKey.data.toHex,
+        )
+        .toSet();
+
+    List<SparkCoin>? currentCoins;
+    if (checkUseds) {
+      currentCoins = await mainDB.isar.sparkCoins
+          .where()
+          .walletIdEqualToAnyLTagHash(walletId)
+          .filter()
+          .isUsedEqualTo(false)
+          .findAll();
+    }
+
+    final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
+    for (int i = 1; i <= latestSparkCoinId; i++) {
+      final lastCheckedTimeStampUTC = groupIdTimestampUTCMap[i] ?? 0;
+      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
+        i,
+      );
+      final anonymitySetResult =
+          await FiroCacheCoordinator.getSetCoinsForGroupId(
+        i,
+        newerThanTimeStamp: lastCheckedTimeStampUTC,
+      );
+      final coinsRaw = anonymitySetResult
+          .map(
+            (row) => [
+              row["serialized"] as String,
+              row["txHash"] as String,
+              row["context"] as String,
+            ],
+          )
+          .toList();
+
+      if (coinsRaw.isNotEmpty) {
+        final myCoins = await compute(
+          _identifyCoins,
+          (
+            anonymitySetCoins: coinsRaw,
+            groupId: i,
+            spentCoinTags: spentCoinTags,
+            privateKeyHexSet: privateKeyHexSet,
+            walletId: walletId,
+            isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
+          ),
+        );
+
+        if (checkUseds && currentCoins != null) {
+          for (final coin in currentCoins) {
+            if (spentCoinTags.contains(coin.lTagHash)) {
+              myCoins.add(coin.copyWith(isUsed: true));
+            }
+          }
+        }
+
+        // update wallet spark coins in isar
+        await _addOrUpdateSparkCoins(myCoins);
+      }
+      groupIdTimestampUTCMap[i] = max(
+        lastCheckedTimeStampUTC,
+        info?.timestampUTC ?? lastCheckedTimeStampUTC,
+      );
+    }
+  }
+
   // modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
   Future<List<TxData>> _createSparkMintTransactions({
     required List<UTXO> availableUtxos,
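A note on the watermark logic in _checkAndUpdateCoins() above: groupIdTimestampUTCMap is what keeps scans incremental, since only SparkSetCoins rows inserted after the last successful pass for a group are re-identified, and the watermark still advances when nothing new matched. The pattern reduced to its core (identifyOwnership is a hypothetical stand-in for the compute(_identifyCoins, ...) call):

    import 'dart:math';

    // Watermark pattern: remember the newest timestamp seen per group and only
    // process rows newer than it on the next pass.
    final Map<int, int> watermark = {};

    Future<void> scanGroup(int groupId) async {
      final last = watermark[groupId] ?? 0;

      final rows = await FiroCacheCoordinator.getSetCoinsForGroupId(
        groupId,
        newerThanTimeStamp: last,
      );
      if (rows.isNotEmpty) {
        // identifyOwnership(rows); // hypothetical wallet-side coin matching
      }

      // Advance monotonically, even when nothing new matched.
      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(groupId);
      watermark[groupId] = max(last, info?.timestampUTC ?? last);
    }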