Mirror of https://github.com/cypherstack/stack_wallet.git (synced 2024-12-22 19:39:22 +00:00)
Cache Spark anonymity sets in sqlite
This commit is contained in:
parent fb429b12f8, commit 3eb5a0a39c

12 changed files with 550 additions and 224 deletions
@@ -422,6 +422,20 @@ class DbVersionMigrator with WalletDB {
        // try to continue migrating
        return await migrate(12, secureStore: secureStore);

      case 12:
        // migrate
        await _v12(secureStore);

        // update version
        await DB.instance.put<dynamic>(
          boxName: DB.boxNameDBInfo,
          key: "hive_data_version",
          value: 13,
        );

        // try to continue migrating
        return await migrate(13, secureStore: secureStore);

      default:
        // finally return
        return;

@@ -701,4 +715,13 @@ class DbVersionMigrator with WalletDB {
  Future<void> _v11(SecureStorageInterface secureStore) async {
    await migrateWalletsToIsar(secureStore: secureStore);
  }

  Future<void> _v12(SecureStorageInterface secureStore) async {
    await DB.instance.deleteBoxFromDisk(
      boxName: "firo_anonymitySetSparkCache",
    );
    await DB.instance.deleteBoxFromDisk(
      boxName: "firoTestNet_anonymitySetSparkCache",
    );
  }
}
@@ -13,6 +13,7 @@ import 'dart:isolate';
import 'package:cw_core/wallet_info.dart' as xmr;
import 'package:hive/hive.dart';
import 'package:mutex/mutex.dart';

import '../../app_config.dart';
import '../../models/exchange/response_objects/trade.dart';
import '../../models/node_model.dart';

@@ -55,8 +56,6 @@ class DB {
  // firo only
  String _boxNameSetCache({required CryptoCurrency currency}) =>
      "${currency.identifier}_anonymitySetCache";
  String _boxNameSetSparkCache({required CryptoCurrency currency}) =>
      "${currency.identifier}_anonymitySetSparkCache";
  String _boxNameUsedSerialsCache({required CryptoCurrency currency}) =>
      "${currency.identifier}_usedSerialsCache";
  String _boxNameSparkUsedCoinsTagsCache({required CryptoCurrency currency}) =>

@@ -81,7 +80,6 @@ class DB {

  final Map<String, Box<dynamic>> _txCacheBoxes = {};
  final Map<String, Box<dynamic>> _setCacheBoxes = {};
  final Map<String, Box<dynamic>> _setSparkCacheBoxes = {};
  final Map<String, Box<dynamic>> _usedSerialsCacheBoxes = {};
  final Map<String, Box<dynamic>> _getSparkUsedCoinsTagsCacheBoxes = {};

@@ -213,16 +211,6 @@ class DB {
    await Hive.openBox<dynamic>(_boxNameSetCache(currency: currency));
  }

  Future<Box<dynamic>> getSparkAnonymitySetCacheBox({
    required CryptoCurrency currency,
  }) async {
    if (_setSparkCacheBoxes[currency.identifier]?.isOpen != true) {
      _setSparkCacheBoxes.remove(currency.identifier);
    }
    return _setSparkCacheBoxes[currency.identifier] ??=
        await Hive.openBox<dynamic>(_boxNameSetSparkCache(currency: currency));
  }

  Future<void> closeAnonymitySetCacheBox({
    required CryptoCurrency currency,
  }) async {

@@ -266,9 +254,6 @@ class DB {
    await deleteAll<dynamic>(boxName: _boxNameTxCache(currency: currency));
    if (currency is Firo) {
      await deleteAll<dynamic>(boxName: _boxNameSetCache(currency: currency));
      await deleteAll<dynamic>(
        boxName: _boxNameSetSparkCache(currency: currency),
      );
      await deleteAll<dynamic>(
        boxName: _boxNameUsedSerialsCache(currency: currency),
      );
lib/db/sqlite/firo_cache.dart (new file, 330 lines)
@@ -0,0 +1,330 @@
import 'dart:async';
import 'dart:io';

import 'package:flutter/foundation.dart';
import 'package:sqlite3/sqlite3.dart';

import '../../electrumx_rpc/electrumx_client.dart';
import '../../utilities/logger.dart';
import '../../utilities/stack_file_system.dart';

/// Temporary debugging log function for this file
void _debugLog(Object? object) {
  if (kDebugMode) {
    Logging.instance.log(
      object,
      level: LogLevel.Fatal,
    );
  }
}

/// Wrapper for [_FiroCache], which should eventually be handled in a
/// background isolate, with [FiroCacheCoordinator] managing that isolate.
abstract class FiroCacheCoordinator {
  static Future<void> init() => _FiroCache.init();

  static Future<void> runFetchAndUpdateSparkAnonSetCacheForGroupId(
    int groupId,
    ElectrumXClient client,
  ) async {
    final blockhashResult =
        await FiroCacheCoordinator.getLatestSetInfoForGroupId(
      groupId,
    );
    final blockHash = blockhashResult?.blockHash ?? "";

    final json = await client.getSparkAnonymitySet(
      coinGroupId: groupId.toString(),
      startBlockHash: blockHash,
    );

    await _FiroCache._updateWith(json, groupId);
  }

  static Future<ResultSet> getSetCoinsForGroupId(
    int groupId, {
    int? newerThanTimeStamp,
  }) async {
    return await _FiroCache._getSetCoinsForGroupId(
      groupId,
      newerThanTimeStamp: newerThanTimeStamp,
    );
  }

  static Future<
      ({
        String blockHash,
        String setHash,
        int timestampUTC,
      })?> getLatestSetInfoForGroupId(
    int groupId,
  ) async {
    final result = await _FiroCache._getLatestSetInfoForGroupId(groupId);

    if (result.isEmpty) {
      return null;
    }

    return (
      blockHash: result.first["blockHash"] as String,
      setHash: result.first["setHash"] as String,
      timestampUTC: result.first["timestampUTC"] as int,
    );
  }
}
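For orientation, a minimal sketch (not part of the commit) of how this coordinator API is intended to be driven, assuming an already-configured ElectrumXClient; getSparkLatestCoinId is the same client call the wallet code further down in this diff uses:

Future<void> syncSparkSets(ElectrumXClient client) async {
  await FiroCacheCoordinator.init();

  final latestGroupId = await client.getSparkLatestCoinId();
  for (int i = 1; i <= latestGroupId; i++) {
    // fetch anything newer than what sqlite already has for this group
    await FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
      i,
      client,
    );

    // read the cached set back out
    final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(i);
    final coins = await FiroCacheCoordinator.getSetCoinsForGroupId(i);
    print("group $i: ${coins.length} coins as of ${info?.timestampUTC}");
  }
}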
abstract class _FiroCache {
  static const String sqliteDbFileName = "firo_ex_cache.sqlite3";

  static Database? _db;
  static Database get db {
    if (_db == null) {
      throw Exception(
        "FiroCache.init() must be called before accessing FiroCache.db!",
      );
    }
    return _db!;
  }

  static Future<void>? _initFuture;
  static Future<void> init() => _initFuture ??= _init();

  static Future<void> _init() async {
    final sqliteDir = await StackFileSystem.applicationSQLiteDirectory();

    final file = File("${sqliteDir.path}/$sqliteDbFileName");

    final exists = await file.exists();
    if (!exists) {
      await _createDb(file.path);
    }

    _db = sqlite3.open(
      file.path,
      mode: OpenMode.readWrite,
    );
  }

  static Future<void> _createDb(String file) async {
    final db = sqlite3.open(
      file,
      mode: OpenMode.readWriteCreate,
    );

    db.execute(
      """
      CREATE TABLE SparkSet (
        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
        blockHash TEXT NOT NULL,
        setHash TEXT NOT NULL,
        groupId INTEGER NOT NULL,
        UNIQUE (blockHash, setHash, groupId)
      );

      CREATE TABLE SparkCoin (
        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
        serialized TEXT NOT NULL,
        txHash TEXT NOT NULL,
        context TEXT NOT NULL,
        UNIQUE(serialized, txHash, context)
      );

      CREATE TABLE SparkSetCoins (
        id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
        timestampUTC INTEGER NOT NULL,
        setId INTEGER NOT NULL,
        coinId INTEGER NOT NULL,
        FOREIGN KEY (setId) REFERENCES SparkSet(id),
        FOREIGN KEY (coinId) REFERENCES SparkCoin(id)
      );
      """,
    );

    db.dispose();
  }
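The schema above is a simple many-to-many layout: SparkSet and SparkCoin rows are each deduplicated by their UNIQUE constraints, and SparkSetCoins links them together with the fetch timestamp. A self-contained sketch (not from the repo) exercising that layout against an in-memory database:

import 'package:sqlite3/sqlite3.dart';

void main() {
  final db = sqlite3.openInMemory();
  db.execute("""
    CREATE TABLE SparkSet (
      id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
      blockHash TEXT NOT NULL, setHash TEXT NOT NULL, groupId INTEGER NOT NULL,
      UNIQUE (blockHash, setHash, groupId));
    CREATE TABLE SparkCoin (
      id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
      serialized TEXT NOT NULL, txHash TEXT NOT NULL, context TEXT NOT NULL,
      UNIQUE(serialized, txHash, context));
    CREATE TABLE SparkSetCoins (
      id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
      timestampUTC INTEGER NOT NULL, setId INTEGER NOT NULL, coinId INTEGER NOT NULL,
      FOREIGN KEY (setId) REFERENCES SparkSet(id),
      FOREIGN KEY (coinId) REFERENCES SparkCoin(id));
  """);

  // Two snapshots of group 1 share one deduplicated coin row.
  db.execute(
    "INSERT INTO SparkSet (blockHash, setHash, groupId) "
    "VALUES ('blockA', 'setA', 1), ('blockB', 'setB', 1);",
  );
  db.execute(
    "INSERT INTO SparkCoin (serialized, txHash, context) "
    "VALUES ('ser1', 'tx1', 'ctx1');",
  );
  db.execute(
    "INSERT INTO SparkSetCoins (timestampUTC, setId, coinId) "
    "VALUES (1000, 1, 1), (2000, 2, 1);",
  );

  final rows = db.select(
    "SELECT ss.setHash, sc.serialized, ssc.timestampUTC "
    "FROM SparkSetCoins ssc "
    "JOIN SparkSet ss ON ssc.setId = ss.id "
    "JOIN SparkCoin sc ON ssc.coinId = sc.id "
    "WHERE ss.groupId = 1;",
  );
  print(rows); // one row per snapshot, same serialized coin in both

  db.dispose();
}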
  // ===========================================================================
  // =============== Spark anonymity set queries ===============================

  static Future<ResultSet> _getSetCoinsForGroupId(
    int groupId, {
    int? newerThanTimeStamp,
  }) async {
    String query = """
      SELECT sc.id, sc.serialized, sc.txHash, sc.context
      FROM SparkSetCoins AS ssc
      JOIN SparkSet AS ss ON ssc.setId = ss.id
      JOIN SparkCoin AS sc ON ssc.coinId = sc.id
      WHERE ss.groupId = $groupId
    """;

    if (newerThanTimeStamp != null) {
      query += " AND ssc.timestampUTC"
          " > $newerThanTimeStamp";
    }

    return db.select("$query;");
  }

  static Future<ResultSet> _getLatestSetInfoForGroupId(
    int groupId,
  ) async {
    final query = """
      SELECT ss.blockHash, ss.setHash, ssc.timestampUTC
      FROM SparkSet ss
      JOIN SparkSetCoins ssc ON ss.id = ssc.setId
      WHERE ss.groupId = $groupId
      ORDER BY ssc.timestampUTC DESC
      LIMIT 1;
    """;

    return db.select("$query;");
  }
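Both queries interpolate their arguments straight into the SQL string, which is injection-safe here only because groupId and newerThanTimeStamp are typed int. A hypothetical parameterized variant (not in the commit) that binds the values instead, as a safer habit:

ResultSet setCoinsForGroup(Database db, int groupId, int newerThan) {
  return db.select(
    """
    SELECT sc.id, sc.serialized, sc.txHash, sc.context
    FROM SparkSetCoins AS ssc
    JOIN SparkSet AS ss ON ssc.setId = ss.id
    JOIN SparkCoin AS sc ON ssc.coinId = sc.id
    WHERE ss.groupId = ? AND ssc.timestampUTC > ?;
    """,
    [groupId, newerThan], // bound, not interpolated
  );
}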
  // ===========================================================================
  // ===========================================================================

  static int _upCount = 0;

  /// Update the sqlite cache.
  /// Expected json format:
  /// {
  ///   "blockHash": "someBlockHash",
  ///   "setHash": "someSetHash",
  ///   "coins": [
  ///     ["serialized1", "hash1", "context1"],
  ///     ["serialized2", "hash2", "context2"],
  ///     ...
  ///   ],
  /// }
  ///
  /// Returns true if successful, otherwise false.
  static Future<bool> _updateWith(
    Map<String, dynamic> json,
    int groupId,
  ) async {
    final start = DateTime.now();
    _upCount++;
    final blockHash = json["blockHash"] as String;
    final setHash = json["setHash"] as String;

    _debugLog(
      "$_upCount _updateWith() called where groupId=$groupId,"
      " blockHash=$blockHash, setHash=$setHash",
    );

    final checkResult = db.select(
      """
        SELECT *
        FROM SparkSet
        WHERE blockHash = ? AND setHash = ? AND groupId = ?;
      """,
      [
        blockHash,
        setHash,
        groupId,
      ],
    );

    _debugLog("$_upCount _updateWith() called where checkResult=$checkResult");

    if (checkResult.isNotEmpty) {
      _debugLog(
        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
      );
      // already up to date
      return true;
    }

    if ((json["coins"] as List).isEmpty) {
      _debugLog("$_upCount _updateWith() called where json[coins] is Empty");
      _debugLog(
        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
      );
      // no coins to actually insert
      return true;
    }

    final coins = (json["coins"] as List)
        .map(
          (e) => [
            e[0] as String,
            e[1] as String,
            e[2] as String,
          ],
        )
        .toList();

    final timestamp = DateTime.now().toUtc().millisecondsSinceEpoch ~/ 1000;

    db.execute("BEGIN;");
    try {
      db.execute(
        """
          INSERT INTO SparkSet (blockHash, setHash, groupId)
          VALUES (?, ?, ?);
        """,
        [blockHash, setHash, groupId],
      );
      final setId = db.lastInsertRowId;

      for (final coin in coins) {
        int coinId;
        try {
          db.execute(
            """
              INSERT INTO SparkCoin (serialized, txHash, context)
              VALUES (?, ?, ?);
            """,
            coin,
          );
          coinId = db.lastInsertRowId;
        } on SqliteException catch (e) {
          // 2067 is SQLITE_CONSTRAINT_UNIQUE: the coin already exists,
          // so look up its id instead of inserting a duplicate row.
          if (e.extendedResultCode == 2067) {
            final result = db.select(
              """
                SELECT id
                FROM SparkCoin
                WHERE serialized = ? AND txHash = ? AND context = ?;
              """,
              coin,
            );
            coinId = result.first["id"] as int;
          } else {
            rethrow;
          }
        }

        db.execute(
          """
            INSERT INTO SparkSetCoins (timestampUTC, setId, coinId)
            VALUES (?, ?, ?);
          """,
          [timestamp, setId, coinId],
        );
      }

      db.execute("COMMIT;");
      _debugLog("$_upCount _updateWith() COMMITTED");
      _debugLog(
        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
      );
      return true;
    } catch (e, s) {
      db.execute("ROLLBACK;");
      _debugLog("$_upCount _updateWith() ROLLBACK");
      _debugLog(
        "$_upCount _updateWith() duration = ${DateTime.now().difference(start)}",
      );
      // NOTE THIS LOGGER MUST BE CALLED ON MAIN ISOLATE FOR NOW
      Logging.instance.log(
        "$e\n$s",
        level: LogLevel.Error,
      );
    }

    return false;
  }
}
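For reference, a minimal payload of the documented shape, as _updateWith might receive it from spark.getsparkanonymityset (all values are illustrative placeholders):

final Map<String, dynamic> exampleJson = {
  "blockHash": "someBlockHash",
  "setHash": "someSetHash",
  "coins": [
    ["serializedCoinHex", "txHashHex", "contextHex"],
  ],
};

// await _FiroCache._updateWith(exampleJson, 1);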
@@ -116,70 +116,6 @@ class CachedElectrumXClient {
    }
  }

  Future<Map<String, dynamic>> getSparkAnonymitySet({
    required String groupId,
    String blockhash = "",
    required CryptoCurrency cryptoCurrency,
    required bool useOnlyCacheIfNotEmpty,
  }) async {
    try {
      final box = await DB.instance.getSparkAnonymitySetCacheBox(
        currency: cryptoCurrency,
      );
      final cachedSet = box.get(groupId) as Map?;

      Map<String, dynamic> set;

      // null check to see if there is a cached set
      if (cachedSet == null) {
        set = {
          "coinGroupID": int.parse(groupId),
          "blockHash": blockhash,
          "setHash": "",
          "coins": <dynamic>[],
        };
      } else {
        set = Map<String, dynamic>.from(cachedSet);
        if (useOnlyCacheIfNotEmpty) {
          return set;
        }
      }

      final newSet = await electrumXClient.getSparkAnonymitySet(
        coinGroupId: groupId,
        startBlockHash: set["blockHash"] as String,
      );

      // update set with new data
      if (newSet["setHash"] != "" && set["setHash"] != newSet["setHash"]) {
        set["setHash"] = newSet["setHash"];
        set["blockHash"] = newSet["blockHash"];
        for (int i = (newSet["coins"] as List).length - 1; i >= 0; i--) {
          // TODO verify this is correct (or append?)
          if ((set["coins"] as List)
              .where((e) => e[0] == newSet["coins"][i][0])
              .isEmpty) {
            set["coins"].insert(0, newSet["coins"][i]);
          }
        }
        // save set to db
        await box.put(groupId, set);
        Logging.instance.log(
          "Updated current anonymity set for ${cryptoCurrency.identifier} with group ID $groupId",
          level: LogLevel.Info,
        );
      }

      return set;
    } catch (e, s) {
      Logging.instance.log(
        "Failed to process CachedElectrumX.getSparkAnonymitySet(): $e\n$s",
        level: LogLevel.Error,
      );
      rethrow;
    }
  }

  String base64ToHex(String source) =>
      base64Decode(LineSplitter.split(source).join())
          .map((e) => e.toRadixString(16).padLeft(2, '0'))
@@ -20,7 +20,8 @@ import 'package:event_bus/event_bus.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:mutex/mutex.dart';
import 'client_manager.dart';
import 'package:stream_channel/stream_channel.dart';

import '../exceptions/electrumx/no_such_transaction.dart';
import '../services/event_bus/events/global/tor_connection_status_changed_event.dart';
import '../services/event_bus/events/global/tor_status_changed_event.dart';

@@ -29,7 +30,7 @@ import '../services/tor_service.dart';
import '../utilities/logger.dart';
import '../utilities/prefs.dart';
import '../wallets/crypto_currency/crypto_currency.dart';
import 'package:stream_channel/stream_channel.dart';
import 'client_manager.dart';

class WifiOnlyException implements Exception {}

@@ -910,10 +911,7 @@ class ElectrumXClient {
    String? requestID,
  }) async {
    try {
      Logging.instance.log(
        "attempting to fetch spark.getsparkanonymityset...",
        level: LogLevel.Info,
      );
      final start = DateTime.now();
      await _checkElectrumAdapter();
      final Map<String, dynamic> response =
          await (getElectrumAdapter() as FiroElectrumClient)

@@ -922,7 +920,10 @@ class ElectrumXClient {
          startBlockHash: startBlockHash,
        );
      Logging.instance.log(
        "Fetching spark.getsparkanonymityset finished",
        "Finished ElectrumXClient.getSparkAnonymitySet(coinGroupId"
        "=$coinGroupId, startBlockHash=$startBlockHash). "
        "Duration=${DateTime.now().difference(start)}",
        level: LogLevel.Info,
      );
      return response;
@@ -35,6 +35,7 @@ import 'app_config.dart';
import 'db/db_version_migration.dart';
import 'db/hive/db.dart';
import 'db/isar/main_db.dart';
import 'db/sqlite/firo_cache.dart';
import 'models/exchange/change_now/exchange_transaction.dart';
import 'models/exchange/change_now/exchange_transaction_status.dart';
import 'models/exchange/response_objects/trade.dart';

@@ -200,6 +201,7 @@ void main(List<String> args) async {
  }

  await StackFileSystem.initThemesDir();
  await FiroCacheCoordinator.init();

  // Desktop migrate handled elsewhere (currently desktop_login_view.dart)
  if (!Util.isDesktop) {
@@ -13,6 +13,7 @@ import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:flutter_svg/flutter_svg.dart';

import '../../../notifications/show_flush_bar.dart';
import '../../../providers/global/debug_service_provider.dart';
import '../../../providers/providers.dart';

@@ -284,28 +285,33 @@ class HiddenSettings extends StatelessWidget {
                      );
                    },
                  ),
                  const SizedBox(
                    height: 12,
                  ),
                  Consumer(
                    builder: (_, ref, __) {
                      return GestureDetector(
                        onTap: () async {
                          //
                        },
                        child: RoundedWhiteContainer(
                          child: Text(
                            "Do nothing",
                            style: STextStyles.button(context).copyWith(
                              color: Theme.of(context)
                                  .extension<StackColors>()!
                                  .accentColorDark,
                            ),
                          ),
                        ),
                      );
                    },
                  ),
                  // const SizedBox(
                  //   height: 12,
                  // ),
                  // Consumer(
                  //   builder: (_, ref, __) {
                  //     return GestureDetector(
                  //       onTap: () async {
                  //         await showLoading(
                  //           whileFuture: FiroCache.init(),
                  //           context: context,
                  //           rootNavigator: true,
                  //           message: "initializing firo cache",
                  //         );
                  //       },
                  //       child: RoundedWhiteContainer(
                  //         child: Text(
                  //           "init firo_cache",
                  //           style: STextStyles.button(context).copyWith(
                  //             color: Theme.of(context)
                  //                 .extension<StackColors>()!
                  //                 .accentColorDark,
                  //           ),
                  //         ),
                  //       ),
                  //     );
                  //   },
                  // ),
                ],
              ),
            ),
@@ -40,7 +40,7 @@ abstract class Constants {
  // Enable Logger.print statements
  static const bool disableLogger = false;

  static const int currentDataVersion = 12;
  static const int currentDataVersion = 13;

  static const int rescanV1 = 1;
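Bumping this constant to 13 is what routes existing installs through the new case 12: branch of DbVersionMigrator at the top of this diff. A sketch of the usual startup gate (hypothetical code, not shown in this diff; DB.instance.get is assumed to mirror the put call used by the migrator):

final stored = DB.instance.get<dynamic>(
      boxName: DB.boxNameDBInfo,
      key: "hive_data_version",
    ) as int? ??
    0;

if (stored < Constants.currentDataVersion) {
  await DbVersionMigrator().migrate(stored, secureStore: secureStore);
}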
@@ -91,6 +91,19 @@ abstract class StackFileSystem {
    }
  }

  static Future<Directory> applicationSQLiteDirectory() async {
    final root = await applicationRootDirectory();
    if (Util.isDesktop) {
      final dir = Directory("${root.path}/sqlite");
      if (!dir.existsSync()) {
        await dir.create();
      }
      return dir;
    } else {
      return root;
    }
  }

  static Future<Directory> applicationTorDirectory() async {
    final root = await applicationRootDirectory();
    if (Util.isDesktop) {
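A small usage sketch (assuming only this helper plus the sqliteDbFileName constant from firo_cache.dart above) showing where the cache database lands on disk:

Future<void> printCachePath() async {
  final dir = await StackFileSystem.applicationSQLiteDirectory();
  // Desktop: <appRoot>/sqlite/firo_ex_cache.sqlite3
  // Mobile:  <appRoot>/firo_ex_cache.sqlite3
  print("${dir.path}/firo_ex_cache.sqlite3");
}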
@@ -6,6 +6,7 @@ import 'package:decimal/decimal.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';

import '../../../db/sqlite/firo_cache.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';

@@ -587,6 +588,8 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>

  @override
  Future<void> recover({required bool isRescan}) async {
    groupIdTimestampUTCMap = {};
    final start = DateTime.now();
    final root = await getRootHDNode();

    final List<Future<({int index, List<Address> addresses})>> receiveFutures =

@@ -620,11 +623,15 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>

    // spark
    final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
    final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
      groupId: latestSparkCoinId.toString(),
      cryptoCurrency: info.coin,
      useOnlyCacheIfNotEmpty: false,
    );
    final List<Future<void>> sparkAnonSetFutures = [];
    for (int i = 1; i <= latestSparkCoinId; i++) {
      sparkAnonSetFutures.add(
        FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
          i,
          electrumXClient,
        ),
      );
    }
    final sparkUsedCoinTagsFuture =
        electrumXCachedClient.getSparkUsedCoinsTags(
      cryptoCurrency: info.coin,

@@ -739,8 +746,8 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
    final futureResults = await Future.wait([
      usedSerialNumbersFuture,
      setDataMapFuture,
      sparkAnonSetFuture,
      sparkUsedCoinTagsFuture,
      ...sparkAnonSetFutures,
    ]);

    // lelantus

@@ -748,8 +755,7 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
    final setDataMap = futureResults[1] as Map<dynamic, dynamic>;

    // spark
    final sparkAnonymitySet = futureResults[2] as Map<String, dynamic>;
    final sparkSpentCoinTags = futureResults[3] as Set<String>;
    final sparkSpentCoinTags = futureResults[2] as Set<String>;

    if (Util.isDesktop) {
      await Future.wait([

@@ -759,8 +765,8 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
          setDataMap: setDataMap,
        ),
        recoverSparkWallet(
          anonymitySet: sparkAnonymitySet,
          spentCoinTags: sparkSpentCoinTags,
          latestSparkCoinId: latestSparkCoinId,
        ),
      ]);
    } else {

@@ -770,13 +776,18 @@ class FiroWallet<T extends ElectrumXCurrencyInterface> extends Bip39HDWallet<T>
        setDataMap: setDataMap,
      );
      await recoverSparkWallet(
        anonymitySet: sparkAnonymitySet,
        spentCoinTags: sparkSpentCoinTags,
        latestSparkCoinId: latestSparkCoinId,
      );
    }
  });

  unawaited(refresh());
  Logging.instance.log(
    "Firo recover for "
    "${info.name}: ${DateTime.now().difference(start)}",
    level: LogLevel.Info,
  );
} catch (e, s) {
  Logging.instance.log(
    "Exception rethrown from electrumx_mixin recover(): $e\n$s",
@@ -473,6 +473,7 @@ abstract class Wallet<T extends CryptoCurrency> {
    if (refreshMutex.isLocked) {
      return;
    }
    final start = DateTime.now();

    try {
      // this acquire should be almost instant due to above check.

@@ -608,6 +609,12 @@ abstract class Wallet<T extends CryptoCurrency> {
      );
    } finally {
      refreshMutex.release();

      Logging.instance.log(
        "Refresh for "
        "${info.name}: ${DateTime.now().difference(start)}",
        level: LogLevel.Info,
      );
    }
  }
@@ -7,6 +7,7 @@ import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';

import '../../../db/sqlite/firo_cache.dart';
import '../../../models/balance.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';

@@ -259,17 +260,39 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
    final List<Map<String, dynamic>> setMaps = [];
    final List<({int groupId, String blockHash})> idAndBlockHashes = [];
    for (int i = 1; i <= currentId; i++) {
      final set = await electrumXCachedClient.getSparkAnonymitySet(
        groupId: i.toString(),
        cryptoCurrency: info.coin,
        useOnlyCacheIfNotEmpty: true,
      final resultSet = await FiroCacheCoordinator.getSetCoinsForGroupId(i);
      if (resultSet.isEmpty) {
        continue;
      }

      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
        i,
      );
      set["coinGroupID"] = i;
      setMaps.add(set);
      if (info == null) {
        throw Exception("The `info` should never be null here");
      }

      final Map<String, dynamic> setData = {
        "blockHash": info.blockHash,
        "setHash": info.setHash,
        "coinGroupID": i,
        "coins": resultSet
            .map(
              (row) => [
                row["serialized"] as String,
                row["txHash"] as String,
                row["context"] as String,
              ],
            )
            .toList(),
      };

      setData["coinGroupID"] = i;
      setMaps.add(setData);
      idAndBlockHashes.add(
        (
          groupId: i,
          blockHash: set["blockHash"] as String,
          blockHash: setData["blockHash"] as String,
        ),
      );
    }

@@ -607,78 +630,12 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
  }

  Future<void> refreshSparkData() async {
    final sparkAddresses = await mainDB.isar.addresses
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .typeEqualTo(AddressType.spark)
        .findAll();

    final Set<String> paths =
        sparkAddresses.map((e) => e.derivationPath!.value).toSet();

    try {
      final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();

      final anonymitySetFuture = electrumXCachedClient.getSparkAnonymitySet(
        groupId: latestSparkCoinId.toString(),
        cryptoCurrency: info.coin,
        useOnlyCacheIfNotEmpty: false,
      );

      final spentCoinTagsFuture = electrumXCachedClient.getSparkUsedCoinsTags(
      final spentCoinTags = await electrumXCachedClient.getSparkUsedCoinsTags(
        cryptoCurrency: info.coin,
      );

      final futureResults = await Future.wait([
        anonymitySetFuture,
        spentCoinTagsFuture,
      ]);

      final anonymitySet = futureResults[0] as Map<String, dynamic>;
      final spentCoinTags = futureResults[1] as Set<String>;

      final List<SparkCoin> myCoins = [];

      if (anonymitySet["coins"] is List &&
          (anonymitySet["coins"] as List).isNotEmpty) {
        final root = await getRootHDNode();
        final privateKeyHexSet = paths
            .map(
              (e) => root.derivePath(e).privateKey.data.toHex,
            )
            .toSet();

        final identifiedCoins = await compute(
          _identifyCoins,
          (
            anonymitySetCoins: anonymitySet["coins"] as List,
            groupId: latestSparkCoinId,
            spentCoinTags: spentCoinTags,
            privateKeyHexSet: privateKeyHexSet,
            walletId: walletId,
            isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
          ),
        );

        myCoins.addAll(identifiedCoins);
      }

      // check current coins
      final currentCoins = await mainDB.isar.sparkCoins
          .where()
          .walletIdEqualToAnyLTagHash(walletId)
          .filter()
          .isUsedEqualTo(false)
          .findAll();
      for (final coin in currentCoins) {
        if (spentCoinTags.contains(coin.lTagHash)) {
          myCoins.add(coin.copyWith(isUsed: true));
        }
      }

      // update wallet spark coins in isar
      await _addOrUpdateSparkCoins(myCoins);
      await _checkAndUpdateCoins(spentCoinTags, true);

      // refresh spark balance
      await refreshSparkBalance();

@@ -734,11 +691,14 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
    );
  }

  // TODO: look into persistence for this?
  Map<int, int> groupIdTimestampUTCMap = {};

  /// Should only be called within the standard wallet [recover] function due to
  /// mutex locking. Otherwise behaviour MAY be undefined.
  Future<void> recoverSparkWallet({
    required Map<dynamic, dynamic> anonymitySet,
    required Set<String> spentCoinTags,
    required int latestSparkCoinId,
  }) async {
    // generate spark addresses if non existing
    if (await getCurrentReceivingSparkAddress() == null) {

@@ -746,35 +706,8 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
      await mainDB.putAddress(address);
    }

    final sparkAddresses = await mainDB.isar.addresses
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .typeEqualTo(AddressType.spark)
        .findAll();

    final Set<String> paths =
        sparkAddresses.map((e) => e.derivationPath!.value).toSet();

    try {
      final root = await getRootHDNode();
      final privateKeyHexSet =
          paths.map((e) => root.derivePath(e).privateKey.data.toHex).toSet();

      final myCoins = await compute(
        _identifyCoins,
        (
          anonymitySetCoins: anonymitySet["coins"] as List,
          groupId: anonymitySet["coinGroupID"] as int,
          spentCoinTags: spentCoinTags,
          privateKeyHexSet: privateKeyHexSet,
          walletId: walletId,
          isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
        ),
      );

      // update wallet spark coins in isar
      await _addOrUpdateSparkCoins(myCoins);
      await _checkAndUpdateCoins(spentCoinTags, false);

      // refresh spark balance
      await refreshSparkBalance();

@@ -787,6 +720,85 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
    }
  }

  Future<void> _checkAndUpdateCoins(
    Set<String> spentCoinTags,
    bool checkUseds,
  ) async {
    final sparkAddresses = await mainDB.isar.addresses
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .typeEqualTo(AddressType.spark)
        .findAll();
    final root = await getRootHDNode();
    final Set<String> privateKeyHexSet = sparkAddresses
        .map(
          (e) => root.derivePath(e.derivationPath!.value).privateKey.data.toHex,
        )
        .toSet();

    List<SparkCoin>? currentCoins;
    if (checkUseds) {
      currentCoins = await mainDB.isar.sparkCoins
          .where()
          .walletIdEqualToAnyLTagHash(walletId)
          .filter()
          .isUsedEqualTo(false)
          .findAll();
    }

    final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
    for (int i = 1; i <= latestSparkCoinId; i++) {
      final lastCheckedTimeStampUTC = groupIdTimestampUTCMap[i] ?? 0;
      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
        i,
      );
      final anonymitySetResult =
          await FiroCacheCoordinator.getSetCoinsForGroupId(
        i,
        newerThanTimeStamp: lastCheckedTimeStampUTC,
      );
      final coinsRaw = anonymitySetResult
          .map(
            (row) => [
              row["serialized"] as String,
              row["txHash"] as String,
              row["context"] as String,
            ],
          )
          .toList();

      if (coinsRaw.isNotEmpty) {
        final myCoins = await compute(
          _identifyCoins,
          (
            anonymitySetCoins: coinsRaw,
            groupId: i,
            spentCoinTags: spentCoinTags,
            privateKeyHexSet: privateKeyHexSet,
            walletId: walletId,
            isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
          ),
        );

        if (checkUseds && currentCoins != null) {
          for (final coin in currentCoins) {
            if (spentCoinTags.contains(coin.lTagHash)) {
              myCoins.add(coin.copyWith(isUsed: true));
            }
          }
        }

        // update wallet spark coins in isar
        await _addOrUpdateSparkCoins(myCoins);
      }
      groupIdTimestampUTCMap[i] = max(
        lastCheckedTimeStampUTC,
        info?.timestampUTC ?? lastCheckedTimeStampUTC,
      );
    }
  }

  // modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
  Future<List<TxData>> _createSparkMintTransactions({
    required List<UTXO> availableUtxos,
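A closing note on _checkAndUpdateCoins above: groupIdTimestampUTCMap is a per-group watermark, so each pass only pulls and identifies coins newer than the last scan. A stand-alone sketch of that pattern (the fetchCoinsNewerThan parameter is a hypothetical stand-in for the sqlite query):

import 'dart:math';

final Map<int, int> groupIdTimestampUTCMap = {};

Future<void> scanGroup(
  int groupId,
  Future<List<({int timestampUTC, String serialized})>> Function(
    int groupId,
    int newerThan,
  ) fetchCoinsNewerThan,
) async {
  final last = groupIdTimestampUTCMap[groupId] ?? 0;
  final rows = await fetchCoinsNewerThan(groupId, last);

  int newest = last;
  for (final row in rows) {
    // ... identify/process row.serialized here ...
    newest = max(newest, row.timestampUTC);
  }

  // Advance the watermark so the next scan skips everything already seen.
  groupIdTimestampUTCMap[groupId] = newest;
}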