optimize spark coin refresh, refactor and clean up spark wallet recovery, and add extra data fields to the spark coin schema

julian 2023-12-18 14:05:22 -06:00
parent 1c0b9bec1b
commit f8a5e44d7b
5 changed files with 736 additions and 226 deletions


@@ -484,6 +484,12 @@ class MainDB {
// .findAll();
// await isar.lelantusCoins.deleteAll(lelantusCoinIds);
// }
// spark coins
await isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.deleteAll();
});
}
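
The deletion hunk above is the schema change's cleanup counterpart: wiping a wallet's blockchain data now also clears its spark coins. A minimal standalone sketch of the same call, assuming the `sparkCoins` collection accessor and the generated `walletIdEqualToAnyLTagHash` where clause from the regenerated schema later in this commit; `deleteSparkCoinsFor` is a hypothetical helper name.

import 'package:isar/isar.dart';

// Delete all spark coins belonging to one wallet in a write transaction.
Future<void> deleteSparkCoinsFor(Isar isar, String walletId) async {
  await isar.writeTxn(() async {
    await isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .deleteAll();
  });
}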


@@ -49,6 +49,9 @@ class SparkCoin {
final int? height;
final String? serializedCoinB64;
final String? contextB64;
@ignore
BigInt get value => BigInt.parse(valueIntString);
@@ -71,6 +74,8 @@ class SparkCoin {
this.tag,
required this.lTagHash,
this.height,
this.serializedCoinB64,
this.contextB64,
});
SparkCoin copyWith({
@@ -88,6 +93,8 @@ class SparkCoin {
List<int>? tag,
String? lTagHash,
int? height,
String? serializedCoinB64,
String? contextB64,
}) {
return SparkCoin(
walletId: walletId,
@@ -106,6 +113,8 @@ class SparkCoin {
tag: tag ?? this.tag,
lTagHash: lTagHash ?? this.lTagHash,
height: height ?? this.height,
serializedCoinB64: serializedCoinB64 ?? this.serializedCoinB64,
contextB64: contextB64 ?? this.contextB64,
);
}
@@ -127,6 +136,8 @@ class SparkCoin {
', tag: $tag'
', lTagHash: $lTagHash'
', height: $height'
', serializedCoinB64: $serializedCoinB64'
', contextB64: $contextB64'
')';
}
}
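
Both new fields are optional and flow through the constructor, `copyWith`, and `toString`. A short hedged usage sketch (the values would come from an anonymity set entry, as the wallet code later in this commit shows):

// `coin` is any existing SparkCoin; the placeholder strings are illustrative.
final SparkCoin updated = coin.copyWith(
  serializedCoinB64: "...", // base64 serialized coin from the set
  contextB64: "...",        // base64 serial context from the set
);
print(updated.value); // BigInt parsed from valueIntString by the getter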


@@ -22,74 +22,84 @@ const SparkCoinSchema = CollectionSchema(
name: r'address',
type: IsarType.string,
),
r'diversifierIntString': PropertySchema(
r'contextB64': PropertySchema(
id: 1,
name: r'contextB64',
type: IsarType.string,
),
r'diversifierIntString': PropertySchema(
id: 2,
name: r'diversifierIntString',
type: IsarType.string,
),
r'encryptedDiversifier': PropertySchema(
id: 2,
id: 3,
name: r'encryptedDiversifier',
type: IsarType.longList,
),
r'height': PropertySchema(
id: 3,
id: 4,
name: r'height',
type: IsarType.long,
),
r'isUsed': PropertySchema(
id: 4,
id: 5,
name: r'isUsed',
type: IsarType.bool,
),
r'lTagHash': PropertySchema(
id: 5,
id: 6,
name: r'lTagHash',
type: IsarType.string,
),
r'memo': PropertySchema(
id: 6,
id: 7,
name: r'memo',
type: IsarType.string,
),
r'nonce': PropertySchema(
id: 7,
id: 8,
name: r'nonce',
type: IsarType.longList,
),
r'serial': PropertySchema(
id: 8,
id: 9,
name: r'serial',
type: IsarType.longList,
),
r'serialContext': PropertySchema(
id: 9,
id: 10,
name: r'serialContext',
type: IsarType.longList,
),
r'serializedCoinB64': PropertySchema(
id: 11,
name: r'serializedCoinB64',
type: IsarType.string,
),
r'tag': PropertySchema(
id: 10,
id: 12,
name: r'tag',
type: IsarType.longList,
),
r'txHash': PropertySchema(
id: 11,
id: 13,
name: r'txHash',
type: IsarType.string,
),
r'type': PropertySchema(
id: 12,
id: 14,
name: r'type',
type: IsarType.byte,
enumMap: _SparkCointypeEnumValueMap,
),
r'valueIntString': PropertySchema(
id: 13,
id: 15,
name: r'valueIntString',
type: IsarType.string,
),
r'walletId': PropertySchema(
id: 14,
id: 16,
name: r'walletId',
type: IsarType.string,
)
@@ -134,6 +144,12 @@ int _sparkCoinEstimateSize(
) {
var bytesCount = offsets.last;
bytesCount += 3 + object.address.length * 3;
{
final value = object.contextB64;
if (value != null) {
bytesCount += 3 + value.length * 3;
}
}
bytesCount += 3 + object.diversifierIntString.length * 3;
{
final value = object.encryptedDiversifier;
@@ -166,6 +182,12 @@ int _sparkCoinEstimateSize(
bytesCount += 3 + value.length * 8;
}
}
{
final value = object.serializedCoinB64;
if (value != null) {
bytesCount += 3 + value.length * 3;
}
}
{
final value = object.tag;
if (value != null) {
@@ -185,20 +207,22 @@ void _sparkCoinSerialize(
Map<Type, List<int>> allOffsets,
) {
writer.writeString(offsets[0], object.address);
writer.writeString(offsets[1], object.diversifierIntString);
writer.writeLongList(offsets[2], object.encryptedDiversifier);
writer.writeLong(offsets[3], object.height);
writer.writeBool(offsets[4], object.isUsed);
writer.writeString(offsets[5], object.lTagHash);
writer.writeString(offsets[6], object.memo);
writer.writeLongList(offsets[7], object.nonce);
writer.writeLongList(offsets[8], object.serial);
writer.writeLongList(offsets[9], object.serialContext);
writer.writeLongList(offsets[10], object.tag);
writer.writeString(offsets[11], object.txHash);
writer.writeByte(offsets[12], object.type.index);
writer.writeString(offsets[13], object.valueIntString);
writer.writeString(offsets[14], object.walletId);
writer.writeString(offsets[1], object.contextB64);
writer.writeString(offsets[2], object.diversifierIntString);
writer.writeLongList(offsets[3], object.encryptedDiversifier);
writer.writeLong(offsets[4], object.height);
writer.writeBool(offsets[5], object.isUsed);
writer.writeString(offsets[6], object.lTagHash);
writer.writeString(offsets[7], object.memo);
writer.writeLongList(offsets[8], object.nonce);
writer.writeLongList(offsets[9], object.serial);
writer.writeLongList(offsets[10], object.serialContext);
writer.writeString(offsets[11], object.serializedCoinB64);
writer.writeLongList(offsets[12], object.tag);
writer.writeString(offsets[13], object.txHash);
writer.writeByte(offsets[14], object.type.index);
writer.writeString(offsets[15], object.valueIntString);
writer.writeString(offsets[16], object.walletId);
}
SparkCoin _sparkCoinDeserialize(
@@ -209,21 +233,23 @@ SparkCoin _sparkCoinDeserialize(
) {
final object = SparkCoin(
address: reader.readString(offsets[0]),
diversifierIntString: reader.readString(offsets[1]),
encryptedDiversifier: reader.readLongList(offsets[2]),
height: reader.readLongOrNull(offsets[3]),
isUsed: reader.readBool(offsets[4]),
lTagHash: reader.readString(offsets[5]),
memo: reader.readStringOrNull(offsets[6]),
nonce: reader.readLongList(offsets[7]),
serial: reader.readLongList(offsets[8]),
serialContext: reader.readLongList(offsets[9]),
tag: reader.readLongList(offsets[10]),
txHash: reader.readString(offsets[11]),
type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[12])] ??
contextB64: reader.readStringOrNull(offsets[1]),
diversifierIntString: reader.readString(offsets[2]),
encryptedDiversifier: reader.readLongList(offsets[3]),
height: reader.readLongOrNull(offsets[4]),
isUsed: reader.readBool(offsets[5]),
lTagHash: reader.readString(offsets[6]),
memo: reader.readStringOrNull(offsets[7]),
nonce: reader.readLongList(offsets[8]),
serial: reader.readLongList(offsets[9]),
serialContext: reader.readLongList(offsets[10]),
serializedCoinB64: reader.readStringOrNull(offsets[11]),
tag: reader.readLongList(offsets[12]),
txHash: reader.readString(offsets[13]),
type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[14])] ??
SparkCoinType.mint,
valueIntString: reader.readString(offsets[13]),
walletId: reader.readString(offsets[14]),
valueIntString: reader.readString(offsets[15]),
walletId: reader.readString(offsets[16]),
);
object.id = id;
return object;
@@ -239,19 +265,19 @@ P _sparkCoinDeserializeProp<P>(
case 0:
return (reader.readString(offset)) as P;
case 1:
return (reader.readString(offset)) as P;
case 2:
return (reader.readLongList(offset)) as P;
case 3:
return (reader.readLongOrNull(offset)) as P;
case 4:
return (reader.readBool(offset)) as P;
case 5:
return (reader.readString(offset)) as P;
case 6:
return (reader.readStringOrNull(offset)) as P;
case 7:
case 2:
return (reader.readString(offset)) as P;
case 3:
return (reader.readLongList(offset)) as P;
case 4:
return (reader.readLongOrNull(offset)) as P;
case 5:
return (reader.readBool(offset)) as P;
case 6:
return (reader.readString(offset)) as P;
case 7:
return (reader.readStringOrNull(offset)) as P;
case 8:
return (reader.readLongList(offset)) as P;
case 9:
@@ -259,13 +285,17 @@ P _sparkCoinDeserializeProp<P>(
case 10:
return (reader.readLongList(offset)) as P;
case 11:
return (reader.readString(offset)) as P;
return (reader.readStringOrNull(offset)) as P;
case 12:
return (_SparkCointypeValueEnumMap[reader.readByteOrNull(offset)] ??
SparkCoinType.mint) as P;
return (reader.readLongList(offset)) as P;
case 13:
return (reader.readString(offset)) as P;
case 14:
return (_SparkCointypeValueEnumMap[reader.readByteOrNull(offset)] ??
SparkCoinType.mint) as P;
case 15:
return (reader.readString(offset)) as P;
case 16:
return (reader.readString(offset)) as P;
default:
throw IsarError('Unknown property with id $propertyId');
@@ -681,6 +711,157 @@ extension SparkCoinQueryFilter
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64IsNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull(
property: r'contextB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsNotNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNotNull(
property: r'contextB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64EqualTo(
String? value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64GreaterThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
include: include,
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64LessThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.lessThan(
include: include,
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Between(
String? lower,
String? upper, {
bool includeLower = true,
bool includeUpper = true,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.between(
property: r'contextB64',
lower: lower,
includeLower: includeLower,
upper: upper,
includeUpper: includeUpper,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64StartsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.startsWith(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64EndsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.endsWith(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Contains(
String value,
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.contains(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Matches(
String pattern,
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.matches(
property: r'contextB64',
wildcard: pattern,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'contextB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsNotEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
property: r'contextB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
diversifierIntStringEqualTo(
String value, {
@@ -1866,6 +2047,160 @@ extension SparkCoinQueryFilter
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull(
property: r'serializedCoinB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNotNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNotNull(
property: r'serializedCoinB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64EqualTo(
String? value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64GreaterThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
include: include,
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64LessThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.lessThan(
include: include,
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Between(
String? lower,
String? upper, {
bool includeLower = true,
bool includeUpper = true,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.between(
property: r'serializedCoinB64',
lower: lower,
includeLower: includeLower,
upper: upper,
includeUpper: includeUpper,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64StartsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.startsWith(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64EndsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.endsWith(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Contains(String value, {bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.contains(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Matches(String pattern, {bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.matches(
property: r'serializedCoinB64',
wildcard: pattern,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'serializedCoinB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNotEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
property: r'serializedCoinB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> tagIsNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull(
@@ -2491,6 +2826,18 @@ extension SparkCoinQuerySortBy on QueryBuilder<SparkCoin, SparkCoin, QSortBy> {
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByContextB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByContextB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
sortByDiversifierIntString() {
return QueryBuilder.apply(this, (query) {
@@ -2553,6 +2900,19 @@ extension SparkCoinQuerySortBy on QueryBuilder<SparkCoin, SparkCoin, QSortBy> {
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortBySerializedCoinB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
sortBySerializedCoinB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByTxHash() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'txHash', Sort.asc);
@@ -2616,6 +2976,18 @@ extension SparkCoinQuerySortThenBy
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByContextB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByContextB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
thenByDiversifierIntString() {
return QueryBuilder.apply(this, (query) {
@@ -2690,6 +3062,19 @@ extension SparkCoinQuerySortThenBy
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenBySerializedCoinB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
thenBySerializedCoinB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByTxHash() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'txHash', Sort.asc);
@@ -2748,6 +3133,13 @@ extension SparkCoinQueryWhereDistinct
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByContextB64(
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'contextB64', caseSensitive: caseSensitive);
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByDiversifierIntString(
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
@@ -2807,6 +3199,14 @@ extension SparkCoinQueryWhereDistinct
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctBySerializedCoinB64(
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'serializedCoinB64',
caseSensitive: caseSensitive);
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByTag() {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'tag');
@@ -2856,6 +3256,12 @@ extension SparkCoinQueryProperty
});
}
QueryBuilder<SparkCoin, String?, QQueryOperations> contextB64Property() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'contextB64');
});
}
QueryBuilder<SparkCoin, String, QQueryOperations>
diversifierIntStringProperty() {
return QueryBuilder.apply(this, (query) {
@@ -2913,6 +3319,13 @@ extension SparkCoinQueryProperty
});
}
QueryBuilder<SparkCoin, String?, QQueryOperations>
serializedCoinB64Property() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'serializedCoinB64');
});
}
QueryBuilder<SparkCoin, List<int>?, QQueryOperations> tagProperty() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'tag');

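Note on the regenerated file: Isar orders properties alphabetically, so `contextB64` lands at id 1 and `serializedCoinB64` at id 11, shifting every later property id and all serialize/deserialize offsets by one or two; that renumbering accounts for the bulk of this diff. The regeneration also emits the usual filter, sort, distinct, and property helpers for both fields. A hedged query sketch using them, assuming `isar` and `walletId` in scope:

// Fetch this wallet's coins that already carry their serialized form.
final coinsWithPayload = await isar.sparkCoins
    .where()
    .walletIdEqualToAnyLTagHash(walletId)
    .filter()
    .serializedCoinB64IsNotNull()
    .findAll();
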

@@ -485,6 +485,7 @@ class FiroWallet extends Bip39HDWallet
await mainDB.deleteWalletBlockchainData(walletId);
}
// lelantus
final latestSetId = await electrumXClient.getLelantusLatestCoinId();
final setDataMapFuture = getSetDataMap(latestSetId);
final usedSerialNumbersFuture =
@@ -492,6 +493,17 @@ class FiroWallet extends Bip39HDWallet
coin: info.coin,
);
// spark
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
);
final sparkUsedCoinTagsFuture =
electrumXCachedClient.getSparkUsedCoinsTags(
coin: info.coin,
);
// receiving addresses
Logging.instance.log(
"checking receiving addresses...",
@@ -595,16 +607,29 @@ class FiroWallet extends Bip39HDWallet
final futureResults = await Future.wait([
usedSerialNumbersFuture,
setDataMapFuture,
sparkAnonSetFuture,
sparkUsedCoinTagsFuture,
]);
// lelantus
final usedSerialsSet = (futureResults[0] as List<String>).toSet();
final setDataMap = futureResults[1] as Map<dynamic, dynamic>;
await recoverLelantusWallet(
latestSetId: latestSetId,
usedSerialNumbers: usedSerialsSet,
setDataMap: setDataMap,
);
// spark
final sparkAnonymitySet = futureResults[2] as Map<String, dynamic>;
final sparkSpentCoinTags = futureResults[3] as Set<String>;
await Future.wait([
recoverLelantusWallet(
latestSetId: latestSetId,
usedSerialNumbers: usedSerialsSet,
setDataMap: setDataMap,
),
recoverSparkWallet(
anonymitySet: sparkAnonymitySet,
spentCoinTags: sparkSpentCoinTags,
),
]);
});
await refresh();

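Recovery now batches the two lelantus futures with the two new spark futures in one `Future.wait`, then runs `recoverLelantusWallet` and `recoverSparkWallet` concurrently instead of serially. A small self-contained sketch of why the positional `as` casts above are needed:

// Futures of different types erase to dynamic in a single Future.wait,
// so each result is cast back to its known type by list position.
Future<void> demo() async {
  final results = await Future.wait<dynamic>([
    Future.value(<String>["serial"]),        // stand-in: used serials
    Future.value(<String, dynamic>{"k": 1}), // stand-in: set data map
  ]);
  final serials = (results[0] as List<String>).toSet();
  final setData = results[1] as Map<String, dynamic>;
  print("${serials.length} serials, ${setData.length} entries");
}
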

@@ -16,6 +16,8 @@ import 'package:stackwallet/wallets/models/tx_data.dart';
import 'package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart';
import 'package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart';
const kDefaultSparkIndex = 1;
mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
@override
Future<void> init() async {
@@ -68,21 +70,18 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// default to starting at 1 if none found
final int diversifier = (highestStoredDiversifier ?? 0) + 1;
// TODO: check that this stays constant and only the diversifier changes?
const index = 1;
final root = await getRootHDNode();
final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$index";
derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else {
derivationPath = "$kSparkBaseDerivationPath$index";
derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
}
final keys = root.derivePath(derivationPath);
final String addressString = await LibSpark.getAddress(
privateKey: keys.privateKey.data,
index: index,
index: kDefaultSparkIndex,
diversifier: diversifier,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
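
The hard-coded `const index = 1;` that previously appeared in each method is hoisted into the shared `kDefaultSparkIndex`. The invariant the refactor protects, sketched (the base-path constant values are not shown in this diff):

// Address derivation and coin identification must agree on the account
// index, or identifyAndRecoverCoin will never match derived addresses.
final path = "$kSparkBaseDerivationPath$kDefaultSparkIndex";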
@@ -138,14 +137,13 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit
// To generate a spark spend we need to call createSparkSpendTransaction,
// first unlock the wallet and generate all 3 spark keys,
const index = 1;
final root = await getRootHDNode();
final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$index";
derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else {
derivationPath = "$kSparkBaseDerivationPath$index";
derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
}
final privateKey = root.derivePath(derivationPath).privateKey.data;
//
@@ -355,7 +353,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
final spend = LibSpark.createSparkSendTransaction(
privateKeyHex: privateKey.toHex,
index: index,
index: kDefaultSparkIndex,
recipients: [],
privateRecipients: txData.sparkRecipients
?.map((e) => (
@@ -366,7 +364,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
))
.toList() ??
[],
serializedMintMetas: serializedMintMetas,
serializedCoins: serializedCoins,
allAnonymitySets: allAnonymitySets,
);
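
The `serializedMintMetas` to `serializedCoins` rename dovetails with the new `serializedCoinB64` column: the spend path can plausibly assemble its coin list straight from isar instead of rebuilding serialized data. A hedged sketch of that assembly (the exact element shape `createSparkSendTransaction` expects is not shown in this diff):

// Collect base64 payloads of unspent coins with a persisted serialized form.
final serializedCoins = (await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .filter()
        .isUsedEqualTo(false)
        .serializedCoinB64IsNotNull()
        .findAll())
    .map((e) => e.serializedCoinB64!)
    .toList();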
@@ -421,152 +419,41 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try {
const index = 1;
final root = await getRootHDNode();
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final blockHash = await _getCachedSparkBlockHash();
final futureResults = await Future.wait([
electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
),
blockHash == null
? electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
)
: electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
startBlockHash: blockHash,
),
electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin),
]);
final anonymitySet = futureResults[0] as Map<String, dynamic>;
final spentCoinTags = futureResults[1] as Set<String>;
// find our coins
final List<SparkCoin> myCoins = [];
for (final path in paths) {
final keys = root.derivePath(path);
final privateKeyHex = keys.privateKey.data.toHex;
for (final dynData in anonymitySet["coins"] as List) {
final data = List<String>.from(dynData as List);
if (data.length != 3) {
throw Exception("Unexpected serialized coin info found");
}
final serializedCoin = data[0];
final txHash = base64ToReverseHex(data[1]);
final coin = LibSpark.identifyAndRecoverCoin(
serializedCoin,
privateKeyHex: privateKeyHex,
index: index,
context: base64Decode(data[2]),
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
// it's ours
if (coin != null) {
final SparkCoinType coinType;
switch (coin.type.value) {
case 0:
coinType = SparkCoinType.mint;
case 1:
coinType = SparkCoinType.spend;
default:
throw Exception("Unknown spark coin type detected");
}
myCoins.add(
SparkCoin(
walletId: walletId,
type: coinType,
isUsed: spentCoinTags.contains(coin.lTagHash!),
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,
txHash: txHash,
valueIntString: coin.value!.toString(),
memo: coin.memo,
serialContext: coin.serialContext,
diversifierIntString: coin.diversifier!.toString(),
encryptedDiversifier: coin.encryptedDiversifier,
serial: coin.serial,
tag: coin.tag,
lTagHash: coin.lTagHash!,
height: coin.height,
),
);
}
}
}
final myCoins = await _identifyCoins(
anonymitySet: anonymitySet,
spentCoinTags: spentCoinTags,
sparkAddressDerivationPaths: paths,
);
// update wallet spark coins in isar
if (myCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(myCoins);
});
}
await _addOrUpdateSparkCoins(myCoins);
// update wallet spark coin height
final coinsToCheck = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
coin: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
// update blockHash in cache
final String newBlockHash = anonymitySet["blockHash"] as String;
await _setCachedSparkBlockHash(newBlockHash);
// refresh spark balance
final currentHeight = await chainHeight;
final unusedCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final total = Amount(
rawValue: unusedCoins
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final spendable = Amount(
rawValue: unusedCoins
.where((e) =>
e.height != null &&
e.height! + cryptoCurrency.minConfirms >= currentHeight)
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final sparkBalance = Balance(
total: total,
spendable: spendable,
blockedTotal: Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
),
pendingSpendable: total - spendable,
);
await info.updateBalanceTertiary(
newBalance: sparkBalance,
isar: mainDB.isar,
);
await refreshSparkBalance();
} catch (e, s) {
// todo logging
@@ -574,35 +461,85 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
}
}
Future<void> refreshSparkBalance() async {
final currentHeight = await chainHeight;
final unusedCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final total = Amount(
rawValue: unusedCoins
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final spendable = Amount(
rawValue: unusedCoins
.where((e) =>
e.height != null &&
e.height! + cryptoCurrency.minConfirms >= currentHeight)
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final sparkBalance = Balance(
total: total,
spendable: spendable,
blockedTotal: Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
),
pendingSpendable: total - spendable,
);
await info.updateBalanceTertiary(
newBalance: sparkBalance,
isar: mainDB.isar,
);
}
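
For clarity on the split above: `total` counts every unused coin, `spendable` only those passing the height check, and the remainder is reported as pending rather than tracked separately:

// pendingSpendable is derived: unused value not yet counted as spendable.
final pendingSpendable = total - spendable;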
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
Future<void> recoverSparkWallet(
// {
// required int latestSetId,
// required Map<dynamic, dynamic> setDataMap,
// required Set<String> usedSerialNumbers,
// }
) async {
Future<void> recoverSparkWallet({
required Map<dynamic, dynamic> anonymitySet,
required Set<String> spentCoinTags,
}) async {
// generate spark addresses if none exist yet
if (await getCurrentReceivingSparkAddress() == null) {
final address = await generateNextSparkAddress();
await mainDB.putAddress(address);
}
final sparkAddresses = await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
.typeEqualTo(AddressType.spark)
.findAll();
final Set<String> paths =
sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try {
// do we need to generate any spark address(es) here?
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final anonymitySet = await electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
final myCoins = await _identifyCoins(
anonymitySet: anonymitySet,
spentCoinTags: spentCoinTags,
sparkAddressDerivationPaths: paths,
);
// TODO loop over set and see which coins are ours using the FFI call `identifyCoin`
List myCoins = [];
// fetch metadata for myCoins
// create list of Spark Coin isar objects
// update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
throw UnimplementedError();
// update blockHash in cache
final String newBlockHash = anonymitySet["blockHash"] as String;
await _setCachedSparkBlockHash(newBlockHash);
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) {
// todo logging
@@ -826,6 +763,124 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// wait for normalBalanceFuture to complete before returning
await normalBalanceFuture;
}
// ====================== Private ============================================
final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";
Future<String?> _getCachedSparkBlockHash() async {
return info.otherData[_kSparkAnonSetCachedBlockHashKey] as String?;
}
Future<void> _setCachedSparkBlockHash(String blockHash) async {
await info.updateOtherData(
newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
isar: mainDB.isar,
);
}
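
These two helpers carry the refresh optimization named in the commit message: the block hash of the last processed anonymity set is stored in the wallet's `otherData`, and the next refresh requests only the delta since that hash. The decision, restated in miniature from the refresh hunk above (all names taken from this diff):

// First refresh: no cached hash, fetch the full set via the cached client.
// Later refreshes: fetch only coins added since blockHash.
final blockHash = await _getCachedSparkBlockHash();
final anonymitySet = blockHash == null
    ? await electrumXCachedClient.getSparkAnonymitySet(
        groupId: latestSparkCoinId.toString(),
        coin: info.coin,
      )
    : await electrumXClient.getSparkAnonymitySet(
        coinGroupId: latestSparkCoinId.toString(),
        startBlockHash: blockHash,
      );
await _setCachedSparkBlockHash(anonymitySet["blockHash"] as String);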
Future<List<SparkCoin>> _identifyCoins({
required Map<dynamic, dynamic> anonymitySet,
required Set<String> spentCoinTags,
required Set<String> sparkAddressDerivationPaths,
}) async {
final root = await getRootHDNode();
final List<SparkCoin> myCoins = [];
for (final path in sparkAddressDerivationPaths) {
final keys = root.derivePath(path);
final privateKeyHex = keys.privateKey.data.toHex;
for (final dynData in anonymitySet["coins"] as List) {
final data = List<String>.from(dynData as List);
if (data.length != 3) {
throw Exception("Unexpected serialized coin info found");
}
final serializedCoinB64 = data[0];
final txHash = base64ToReverseHex(data[1]);
final contextB64 = data[2];
final coin = LibSpark.identifyAndRecoverCoin(
serializedCoinB64,
privateKeyHex: privateKeyHex,
index: kDefaultSparkIndex,
context: base64Decode(contextB64),
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
// it's ours
if (coin != null) {
final SparkCoinType coinType;
switch (coin.type.value) {
case 0:
coinType = SparkCoinType.mint;
case 1:
coinType = SparkCoinType.spend;
default:
throw Exception("Unknown spark coin type detected");
}
myCoins.add(
SparkCoin(
walletId: walletId,
type: coinType,
isUsed: spentCoinTags.contains(coin.lTagHash!),
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,
txHash: txHash,
valueIntString: coin.value!.toString(),
memo: coin.memo,
serialContext: coin.serialContext,
diversifierIntString: coin.diversifier!.toString(),
encryptedDiversifier: coin.encryptedDiversifier,
serial: coin.serial,
tag: coin.tag,
lTagHash: coin.lTagHash!,
height: coin.height,
serializedCoinB64: serializedCoinB64,
contextB64: contextB64,
),
);
}
}
}
return myCoins;
}
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
if (coins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(coins);
});
}
// update wallet spark coin height
final coinsToCheck = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
coin: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
}
}
String base64ToReverseHex(String source) =>
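
The diff is cut off above mid-declaration. Based on the helper's name and its use on base64-encoded txids earlier in the file, a plausible reconstruction (an assumption, not taken from this commit) with a worked example:

import 'dart:convert';

// Hedged reconstruction: decode base64, reverse the byte order (txids are
// displayed big-endian but transmitted little-endian), then hex encode.
String base64ToReverseHexSketch(String source) => base64Decode(source)
    .reversed
    .map((e) => e.toRadixString(16).padLeft(2, '0'))
    .join();

void main() {
  // "AAECAw==" -> bytes [0, 1, 2, 3] -> reversed -> "03020100"
  assert(base64ToReverseHexSketch("AAECAw==") == "03020100");
}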