mirror of https://github.com/cypherstack/stack_wallet.git
synced 2024-12-22 19:39:22 +00:00
fix spark cache coins order
This commit is contained in:
parent eb13c2dc00
commit 1ffddc6781
4 changed files with 31 additions and 21 deletions
@@ -120,15 +120,32 @@ abstract class FiroCacheCoordinator {
     );
   }
 
-  static Future<ResultSet> getSetCoinsForGroupId(
+  static Future<
+      List<
+          ({
+            String serialized,
+            String txHash,
+            String context,
+          })>> getSetCoinsForGroupId(
     int groupId, {
     int? newerThanTimeStamp,
   }) async {
-    return await _Reader._getSetCoinsForGroupId(
+    final resultSet = await _Reader._getSetCoinsForGroupId(
       groupId,
       db: _FiroCache.setCacheDB,
       newerThanTimeStamp: newerThanTimeStamp,
     );
+    return resultSet
+        .map(
+          (row) => (
+            serialized: row["serialized"] as String,
+            txHash: row["txHash"] as String,
+            context: row["context"] as String,
+          ),
+        )
+        .toList()
+        .reversed
+        .toList();
   }
 
   static Future<
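The new getSetCoinsForGroupId maps each cached row into a record and reverses the list before returning it, which is the "spark cache coins order" fix named in the commit title. A minimal, self-contained Dart sketch of that ordering effect, using hypothetical row data in place of the real setCacheDB result set:

// Sketch only, not part of this commit: shows how .toList().reversed.toList()
// hands the mapped coins back in the opposite order to the raw query result.
void main() {
  final resultSet = [
    {"serialized": "coinA", "txHash": "hash1", "context": "ctx1"},
    {"serialized": "coinB", "txHash": "hash2", "context": "ctx2"},
  ];

  final coins = resultSet
      .map(
        (row) => (
          serialized: row["serialized"] as String,
          txHash: row["txHash"] as String,
          context: row["context"] as String,
        ),
      )
      .toList()
      .reversed
      .toList();

  // Prints "coinB": the last row of the query result is now first.
  print(coins.first.serialized);
}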
@@ -104,7 +104,8 @@ FCResult _updateSparkAnonSetCoinsWith(
           e[2] as String,
         ],
       )
-      .toList();
+      .toList()
+      .reversed;
 
   final timestamp = DateTime.now().toUtc().millisecondsSinceEpoch ~/ 1000;
 
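One type-level detail worth noting: chaining .reversed after .toList() means the expression is now a lazy Iterable view rather than a List. A small sketch of that behaviour, with hypothetical values standing in for the parsed coin rows:

// Sketch only: List.reversed returns an Iterable<List<String>> view that is
// only walked when something iterates it (a for-loop, .toList(), etc.).
void main() {
  final rows = [
    ["serialized1", "txHash1", "context1"],
    ["serialized2", "txHash2", "context2"],
  ];

  final reversedRows = rows.reversed;

  print(reversedRows.first[0]); // serialized2
}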
@@ -1,5 +1,4 @@
 import 'dart:async';
-import 'dart:convert';
 
 import 'package:isar/isar.dart';
 import 'package:meta/meta.dart';
@@ -560,15 +559,8 @@ abstract class Wallet<T extends CryptoCurrency> {
 
       // TODO: [prio=low] handle this differently. Extra modification of this file for coin specific functionality should be avoided.
       if (this is LelantusInterface) {
-        // Parse otherDataJsonString to get the enableLelantusScanning value.
-        bool enableLelantusScanning = false;
-        if (this.info.otherDataJsonString != null) {
-          final otherDataJson = json.decode(this.info.otherDataJsonString!);
-          enableLelantusScanning =
-              otherDataJson[WalletInfoKeys.enableLelantusScanning] as bool? ??
-                  false;
-        }
-        if (enableLelantusScanning) {
+        if (info.otherData[WalletInfoKeys.enableLelantusScanning] as bool? ??
+            false) {
           await (this as LelantusInterface).refreshLelantusData();
         }
       }
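This hunk drops the hand-rolled json.decode of otherDataJsonString and reads the flag straight from the already-decoded otherData map, which also removes the need for the dart:convert import deleted above. A sketch of the pattern, with a plain hypothetical map standing in for the wallet's real otherData and the string key standing in for the WalletInfoKeys constant:

// Sketch only: read an optional bool flag from a decoded map with a default.
void main() {
  final otherData = <String, dynamic>{"enableLelantusScanning": true};

  // A missing key or a null value falls back to false, matching the default
  // the old parsing code used.
  final enable = otherData["enableLelantusScanning"] as bool? ?? false;

  print(enable); // true
}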
@@ -297,10 +297,10 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
         "coinGroupID": i,
         "coins": resultSet
             .map(
-              (row) => [
-                row["serialized"] as String,
-                row["txHash"] as String,
-                row["context"] as String,
+              (e) => [
+                e.serialized,
+                e.txHash,
+                e.context,
               ],
             )
             .toList(),
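Because getSetCoinsForGroupId now returns typed records instead of raw SQLite rows, this call site and the nearly identical one in the following hunk can read named fields directly instead of indexing a row map and casting. A minimal sketch of the difference, using hypothetical coin values:

// Sketch only: map lookup plus cast (old shape) versus record field access
// (new shape); both produce the same [serialized, txHash, context] list.
void main() {
  final row = <String, Object?>{
    "serialized": "coinA",
    "txHash": "hash1",
    "context": "ctx1",
  };
  final oldCoin = [
    row["serialized"] as String,
    row["txHash"] as String,
    row["context"] as String,
  ];

  final e = (serialized: "coinA", txHash: "hash1", context: "ctx1");
  final newCoin = [e.serialized, e.txHash, e.context];

  print(oldCoin.join(",") == newCoin.join(",")); // true
}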
@@ -799,10 +799,10 @@ mixin SparkInterface<T extends ElectrumXCurrencyInterface>
       );
       final coinsRaw = anonymitySetResult
           .map(
-            (row) => [
-              row["serialized"] as String,
-              row["txHash"] as String,
-              row["context"] as String,
+            (e) => [
+              e.serialized,
+              e.txHash,
+              e.context,
             ],
           )
           .toList();