speed up spark sends

julian 2024-05-10 14:32:15 -06:00
parent cb70b5c92f
commit d92b712146
3 changed files with 13 additions and 53 deletions

View file

@@ -109,6 +109,7 @@ class CachedElectrumXClient {
     required String groupId,
     String blockhash = "",
     required Coin coin,
+    required bool useOnlyCacheIfNotEmpty,
   }) async {
     try {
       final box = await DB.instance.getSparkAnonymitySetCacheBox(coin: coin);
@@ -126,6 +127,9 @@
         };
       } else {
         set = Map<String, dynamic>.from(cachedSet);
+        if (useOnlyCacheIfNotEmpty) {
+          return set;
+        }
       }

       final newSet = await electrumXClient.getSparkAnonymitySet(
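
The hunk above adds a fast path to the cached client: when useOnlyCacheIfNotEmpty is true and a set is already cached, the cached copy is returned immediately instead of being refreshed from the server. A minimal, standalone sketch of that control flow follows; the class name, the _cache field, and the fetchLatest callback are illustrative stand-ins, not the wallet's real API.

// Sketch only: SparkSetCacheSketch, _cache, and fetchLatest are hypothetical
// stand-ins used to illustrate the short-circuit added in this commit.
class SparkSetCacheSketch {
  final Map<String, Map<String, dynamic>> _cache = {};

  Future<Map<String, dynamic>> getSparkAnonymitySet({
    required String groupId,
    required bool useOnlyCacheIfNotEmpty,
    required Future<Map<String, dynamic>> Function() fetchLatest,
  }) async {
    final cached = _cache[groupId];
    Map<String, dynamic> set;
    if (cached == null) {
      // Nothing cached yet; start from an empty set and fall through to fetch.
      set = {"coinGroupID": int.parse(groupId), "coins": <dynamic>[]};
    } else {
      set = Map<String, dynamic>.from(cached);
      if (useOnlyCacheIfNotEmpty) {
        // Fast path: skip the network round trip and accept possibly stale data.
        return set;
      }
    }
    // Slow path: refresh from the server and update the cache.
    set = await fetchLatest();
    _cache[groupId] = set;
    return set;
  }
}

The call sites below choose the trade-off per path: the set building used when creating spark spends passes useOnlyCacheIfNotEmpty: true so it never blocks on a full refetch, while the refresh paths pass false to keep the cache current.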

View file

@@ -621,6 +621,7 @@ class FiroWallet extends Bip39HDWallet
       final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
         groupId: latestSparkCoinId.toString(),
         coin: info.coin,
+        useOnlyCacheIfNotEmpty: false,
       );
       final sparkUsedCoinTagsFuture =
           electrumXCachedClient.getSparkUsedCoinsTags(

View file

@@ -259,6 +259,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
       final set = await electrumXCachedClient.getSparkAnonymitySet(
         groupId: i.toString(),
         coin: info.coin,
+        useOnlyCacheIfNotEmpty: true,
       );
       set["coinGroupID"] = i;
       setMaps.add(set);
@@ -616,22 +617,14 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
     try {
       final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();

-      final blockHash = await _getCachedSparkBlockHash();
-      final startNumber = await _getSparkCoinsStartNumber();
-
-      final anonymitySetFuture = blockHash == null
-          ? electrumXCachedClient.getSparkAnonymitySet(
-              groupId: latestSparkCoinId.toString(),
-              coin: info.coin,
-            )
-          : electrumXClient.getSparkAnonymitySet(
-              coinGroupId: latestSparkCoinId.toString(),
-              startBlockHash: blockHash,
-            );
-      final spentCoinTagsFuture = startNumber == null
-          ? electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin)
-          : electrumXClient.getSparkUsedCoinsTags(startNumber: startNumber);
+      final anonymitySetFuture = electrumXCachedClient.getSparkAnonymitySet(
+        groupId: latestSparkCoinId.toString(),
+        coin: info.coin,
+        useOnlyCacheIfNotEmpty: false,
+      );
+      final spentCoinTagsFuture =
+          electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);

       final futureResults = await Future.wait([
         anonymitySetFuture,
@@ -665,15 +658,6 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
         );

         myCoins.addAll(identifiedCoins);
-
-        // update blockHash in cache
-        final String newBlockHash =
-            base64ToReverseHex(anonymitySet["blockHash"] as String);
-        await Future.wait([
-          _setCachedSparkBlockHash(newBlockHash),
-          _setSparkCoinsStartNumber(spentCoinTags.length - 1),
-        ]);
       }

       // check current coins
@@ -788,10 +772,6 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
       // update wallet spark coins in isar
       await _addOrUpdateSparkCoins(myCoins);

-      // update blockHash in cache
-      final String newBlockHash = anonymitySet["blockHash"] as String;
-      await _setCachedSparkBlockHash(newBlockHash);
-
       // refresh spark balance
       await refreshSparkBalance();
     } catch (e, s) {
@@ -1602,31 +1582,6 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {

  // ====================== Private ============================================

-  final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";
-  final _kSparkCoinsStartNumberKey = "SparkCoinsStartNumberKey";
-
-  Future<String?> _getCachedSparkBlockHash() async {
-    return info.otherData[_kSparkAnonSetCachedBlockHashKey] as String?;
-  }
-
-  Future<void> _setCachedSparkBlockHash(String blockHash) async {
-    await info.updateOtherData(
-      newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
-      isar: mainDB.isar,
-    );
-  }
-
-  Future<int?> _getSparkCoinsStartNumber() async {
-    return info.otherData[_kSparkCoinsStartNumberKey] as int?;
-  }
-
-  Future<void> _setSparkCoinsStartNumber(int startNumber) async {
-    await info.updateOtherData(
-      newEntries: {_kSparkCoinsStartNumberKey: startNumber},
-      isar: mainDB.isar,
-    );
-  }
-
   Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
     if (coins.isNotEmpty) {
       await mainDB.isar.writeTxn(() async {
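
With the incremental-fetch branches and their block-hash / start-number bookkeeping removed, the refresh step reduces to two concurrent cached-client requests joined by a single Future.wait. A standalone sketch of that shape follows; refreshSparkDataSketch, anonFetcher, and tagsFetcher are hypothetical names standing in for the real calls to electrumXCachedClient.getSparkAnonymitySet(useOnlyCacheIfNotEmpty: false) and electrumXCachedClient.getSparkUsedCoinsTags.

// Sketch only: the function and its two callback parameters are hypothetical.
Future<void> refreshSparkDataSketch(
  Future<Map<String, dynamic>> Function() anonFetcher,
  Future<Set<String>> Function() tagsFetcher,
) async {
  // Start both requests together and wait for both, as the new code does.
  final futureResults = await Future.wait<dynamic>([
    anonFetcher(),
    tagsFetcher(),
  ]);

  final anonymitySet = futureResults[0] as Map<String, dynamic>;
  final spentCoinTags = futureResults[1] as Set<String>;

  // ...from here the wallet identifies its own coins in anonymitySet, marks
  // the ones whose tags appear in spentCoinTags as used, persists them, and
  // refreshes the spark balance.
}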