hash used spark tags

julian 2023-12-20 17:46:48 -06:00
parent c16c97d74d
commit f61acd90b7
3 changed files with 73 additions and 10 deletions

View file

@@ -15,6 +15,8 @@ import 'dart:io';
import 'package:connectivity_plus/connectivity_plus.dart';
import 'package:decimal/decimal.dart';
import 'package:event_bus/event_bus.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:mutex/mutex.dart';
import 'package:stackwallet/electrumx_rpc/rpc.dart';
import 'package:stackwallet/exceptions/electrumx/no_such_transaction.dart';
@@ -922,7 +924,8 @@ class ElectrumXClient {
requestTimeout: const Duration(minutes: 2),
);
final map = Map<String, dynamic>.from(response["result"] as Map);
return Set<String>.from(map["tags"] as List);
final set = Set<String>.from(map["tags"] as List);
return await compute(_ffiHashTagsComputeWrapper, set);
} catch (e) {
Logging.instance.log(e, level: LogLevel.Error);
rethrow;
@@ -1036,3 +1039,7 @@ class ElectrumXClient {
}
}
}
Set<String> _ffiHashTagsComputeWrapper(Set<String> base64Tags) {
return LibSpark.hashTags(base64Tags: base64Tags);
}
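The hashing of the returned tags is moved off the main isolate with Flutter's compute, which only accepts top-level or static functions; that is why the diff adds the _ffiHashTagsComputeWrapper free function rather than passing a closure. A minimal sketch of the same pattern, with a trivial stand-in hash in place of the LibSpark.hashTags FFI call:

import 'dart:convert';
import 'package:flutter/foundation.dart';

// Stand-in for the LibSpark.hashTags FFI call (not the real tag hash).
// compute() requires a top-level or static function, so the real code wraps
// the call in _ffiHashTagsComputeWrapper instead of passing a closure.
Set<String> _hashTagsStandIn(Set<String> base64Tags) {
  return base64Tags
      .map((tag) =>
          base64Decode(tag).fold<int>(17, (h, b) => h * 31 + b).toRadixString(16))
      .toSet();
}

Future<Set<String>> hashTagsOffUiIsolate(Set<String> base64Tags) {
  // Runs the hashing in a short-lived background isolate so a large tag set
  // never blocks the UI isolate.
  return compute(_hashTagsStandIn, base64Tags);
}

compute serializes the argument and the result between isolates, so Set<String> works directly as the message type here.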

View file

@@ -131,6 +131,14 @@ class _SparkCoinsViewState extends ConsumerState<SparkCoinsView> {
textAlign: TextAlign.left,
),
),
Expanded(
flex: 9,
child: Text(
"LTag Hash",
style: STextStyles.itemSubtitle(context),
textAlign: TextAlign.left,
),
),
Expanded(
flex: 3,
child: Text(
@@ -147,6 +155,14 @@ class _SparkCoinsViewState extends ConsumerState<SparkCoinsView> {
textAlign: TextAlign.right,
),
),
Expanded(
flex: 2,
child: Text(
"Group Id",
style: STextStyles.itemSubtitle(context),
textAlign: TextAlign.right,
),
),
Expanded(
flex: 2,
child: Text(
@@ -190,6 +206,13 @@ class _SparkCoinsViewState extends ConsumerState<SparkCoinsView> {
style: STextStyles.itemSubtitle12(context),
),
),
Expanded(
flex: 9,
child: SelectableText(
_coins[index].lTagHash,
style: STextStyles.itemSubtitle12(context),
),
),
Expanded(
flex: 3,
child: SelectableText(
@@ -206,6 +229,14 @@ class _SparkCoinsViewState extends ConsumerState<SparkCoinsView> {
textAlign: TextAlign.right,
),
),
Expanded(
flex: 2,
child: SelectableText(
_coins[index].groupId.toString(),
style: STextStyles.itemSubtitle12(context),
textAlign: TextAlign.right,
),
),
Expanded(
flex: 2,
child: SelectableText(
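The two new columns follow the view's existing layout convention: a header Text and a per-row SelectableText, each wrapped in Expanded with the same flex value (9 for the lTag hash, 2 for the group id) so header and data cells stay aligned. A small, self-contained sketch of that pairing, with purely illustrative widget and value names:

import 'package:flutter/material.dart';

// Illustrative only: the header row and the data row stay column-aligned
// because each column uses the same flex value in both rows, mirroring the
// debug view's existing columns.
class SparkCoinColumnsSketch extends StatelessWidget {
  const SparkCoinColumnsSketch({super.key});

  @override
  Widget build(BuildContext context) {
    return Column(
      children: [
        Row(
          children: const [
            Expanded(flex: 9, child: Text("LTag Hash")),
            Expanded(
              flex: 2,
              child: Text("Group Id", textAlign: TextAlign.right),
            ),
          ],
        ),
        Row(
          children: [
            Expanded(flex: 9, child: SelectableText("3f9c0a")),
            Expanded(
              flex: 2,
              child: SelectableText("1", textAlign: TextAlign.right),
            ),
          ],
        ),
      ],
    );
  }
}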

View file

@@ -456,21 +456,31 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
final blockHash = await _getCachedSparkBlockHash();
final anonymitySet = blockHash == null
? await electrumXCachedClient.getSparkAnonymitySet(
final anonymitySetFuture = blockHash == null
? electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
)
: await electrumXClient.getSparkAnonymitySet(
: electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
startBlockHash: blockHash,
);
final spentCoinTagsFuture =
electrumXClient.getSparkUsedCoinsTags(startNumber: 0);
// electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);
final futureResults = await Future.wait([
anonymitySetFuture,
spentCoinTagsFuture,
]);
final anonymitySet = futureResults[0] as Map<String, dynamic>;
final spentCoinTags = futureResults[1] as Set<String>;
final List<SparkCoin> myCoins = [];
if (anonymitySet["coins"] is List &&
(anonymitySet["coins"] as List).isNotEmpty) {
final spentCoinTags =
await electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);
final root = await getRootHDNode();
final privateKeyHexSet = paths
.map(
@@ -478,7 +488,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
)
.toSet();
final myCoins = await compute(
final identifiedCoins = await compute(
_identifyCoins,
(
anonymitySetCoins: anonymitySet["coins"] as List,
@@ -490,8 +500,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
),
);
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
myCoins.addAll(identifiedCoins);
// update blockHash in cache
final String newBlockHash =
@@ -499,6 +508,22 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
await _setCachedSparkBlockHash(newBlockHash);
}
// check current coins
final currentCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
for (final coin in currentCoins) {
if (spentCoinTags.contains(coin.lTagHash)) {
myCoins.add(coin.copyWith(isUsed: true));
}
}
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) {
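Taken together, these hunks restructure the sync path: the anonymity-set request and the used-coin-tags request are started together and awaited with Future.wait, newly identified coins and already-stored coins whose lTagHash appears in the (now hashed) spent-tag set are collected into a single myCoins list, and that list is persisted once with _addOrUpdateSparkCoins before the balance refresh. A rough sketch of that flow in plain Dart, with hypothetical stand-ins for the electrum client, the isar query, and the coin model (none of these names are the wallet's real API):

// Illustrative coin model standing in for the isar SparkCoin collection.
class Coin {
  final String lTagHash;
  final bool isUsed;
  const Coin(this.lTagHash, {this.isUsed = false});
  Coin copyWith({bool? isUsed}) =>
      Coin(lTagHash, isUsed: isUsed ?? this.isUsed);
}

// Hypothetical stand-ins for the electrumX calls and the database access.
Future<Map<String, dynamic>> fetchAnonymitySet() async => {"coins": <dynamic>[]};
Future<Set<String>> fetchSpentCoinTagHashes() async => {"aa11", "bb22"};
Future<List<Coin>> loadUnusedCoins() async => const [Coin("aa11"), Coin("cc33")];
Future<void> saveCoins(List<Coin> coins) async {}

Future<void> refreshSparkCoins() async {
  // Start both network requests before awaiting either (the Future.wait step).
  final anonymitySetFuture = fetchAnonymitySet();
  final spentTagsFuture = fetchSpentCoinTagHashes();
  final results = await Future.wait([anonymitySetFuture, spentTagsFuture]);
  final anonymitySet = results[0] as Map<String, dynamic>;
  final spentCoinTags = results[1] as Set<String>;

  final List<Coin> myCoins = [];

  // 1. Identify any new coins from the anonymity set (elided here; the
  //    wallet does this in a compute isolate via _identifyCoins).
  if ((anonymitySet["coins"] as List).isNotEmpty) {
    // myCoins.addAll(identifiedCoins);
  }

  // 2. Re-check coins already in the database: any unused coin whose
  //    lTagHash is in the spent-tag set gets flagged as used.
  for (final coin in await loadUnusedCoins()) {
    if (spentCoinTags.contains(coin.lTagHash)) {
      myCoins.add(coin.copyWith(isUsed: true));
    }
  }

  // 3. Persist everything in one write, then the balance can be refreshed.
  await saveCoins(myCoins);
}

Future.wait over futures with different result types infers a List of Object, which is why both the diff and this sketch cast each element back to its concrete type.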