Optimize Spark coin refresh, refactor and clean up Spark wallet recovery, and add extra data fields to the Spark coin schema

This commit is contained in:
julian 2023-12-18 14:05:22 -06:00
parent 1c0b9bec1b
commit f8a5e44d7b
5 changed files with 736 additions and 226 deletions

View file

@ -484,6 +484,12 @@ class MainDB {
// .findAll(); // .findAll();
// await isar.lelantusCoins.deleteAll(lelantusCoinIds); // await isar.lelantusCoins.deleteAll(lelantusCoinIds);
// } // }
// spark coins
await isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.deleteAll();
}); });
} }

View file

@ -49,6 +49,9 @@ class SparkCoin {
final int? height; final int? height;
final String? serializedCoinB64;
final String? contextB64;
@ignore @ignore
BigInt get value => BigInt.parse(valueIntString); BigInt get value => BigInt.parse(valueIntString);
@ -71,6 +74,8 @@ class SparkCoin {
this.tag, this.tag,
required this.lTagHash, required this.lTagHash,
this.height, this.height,
this.serializedCoinB64,
this.contextB64,
}); });
SparkCoin copyWith({ SparkCoin copyWith({
@ -88,6 +93,8 @@ class SparkCoin {
List<int>? tag, List<int>? tag,
String? lTagHash, String? lTagHash,
int? height, int? height,
String? serializedCoinB64,
String? contextB64,
}) { }) {
return SparkCoin( return SparkCoin(
walletId: walletId, walletId: walletId,
@ -106,6 +113,8 @@ class SparkCoin {
tag: tag ?? this.tag, tag: tag ?? this.tag,
lTagHash: lTagHash ?? this.lTagHash, lTagHash: lTagHash ?? this.lTagHash,
height: height ?? this.height, height: height ?? this.height,
serializedCoinB64: serializedCoinB64 ?? this.serializedCoinB64,
contextB64: contextB64 ?? this.contextB64,
); );
} }
@ -127,6 +136,8 @@ class SparkCoin {
', tag: $tag' ', tag: $tag'
', lTagHash: $lTagHash' ', lTagHash: $lTagHash'
', height: $height' ', height: $height'
', serializedCoinB64: $serializedCoinB64'
', contextB64: $contextB64'
')'; ')';
} }
} }

View file

@ -22,74 +22,84 @@ const SparkCoinSchema = CollectionSchema(
name: r'address', name: r'address',
type: IsarType.string, type: IsarType.string,
), ),
r'diversifierIntString': PropertySchema( r'contextB64': PropertySchema(
id: 1, id: 1,
name: r'contextB64',
type: IsarType.string,
),
r'diversifierIntString': PropertySchema(
id: 2,
name: r'diversifierIntString', name: r'diversifierIntString',
type: IsarType.string, type: IsarType.string,
), ),
r'encryptedDiversifier': PropertySchema( r'encryptedDiversifier': PropertySchema(
id: 2, id: 3,
name: r'encryptedDiversifier', name: r'encryptedDiversifier',
type: IsarType.longList, type: IsarType.longList,
), ),
r'height': PropertySchema( r'height': PropertySchema(
id: 3, id: 4,
name: r'height', name: r'height',
type: IsarType.long, type: IsarType.long,
), ),
r'isUsed': PropertySchema( r'isUsed': PropertySchema(
id: 4, id: 5,
name: r'isUsed', name: r'isUsed',
type: IsarType.bool, type: IsarType.bool,
), ),
r'lTagHash': PropertySchema( r'lTagHash': PropertySchema(
id: 5, id: 6,
name: r'lTagHash', name: r'lTagHash',
type: IsarType.string, type: IsarType.string,
), ),
r'memo': PropertySchema( r'memo': PropertySchema(
id: 6, id: 7,
name: r'memo', name: r'memo',
type: IsarType.string, type: IsarType.string,
), ),
r'nonce': PropertySchema( r'nonce': PropertySchema(
id: 7, id: 8,
name: r'nonce', name: r'nonce',
type: IsarType.longList, type: IsarType.longList,
), ),
r'serial': PropertySchema( r'serial': PropertySchema(
id: 8, id: 9,
name: r'serial', name: r'serial',
type: IsarType.longList, type: IsarType.longList,
), ),
r'serialContext': PropertySchema( r'serialContext': PropertySchema(
id: 9, id: 10,
name: r'serialContext', name: r'serialContext',
type: IsarType.longList, type: IsarType.longList,
), ),
r'serializedCoinB64': PropertySchema(
id: 11,
name: r'serializedCoinB64',
type: IsarType.string,
),
r'tag': PropertySchema( r'tag': PropertySchema(
id: 10, id: 12,
name: r'tag', name: r'tag',
type: IsarType.longList, type: IsarType.longList,
), ),
r'txHash': PropertySchema( r'txHash': PropertySchema(
id: 11, id: 13,
name: r'txHash', name: r'txHash',
type: IsarType.string, type: IsarType.string,
), ),
r'type': PropertySchema( r'type': PropertySchema(
id: 12, id: 14,
name: r'type', name: r'type',
type: IsarType.byte, type: IsarType.byte,
enumMap: _SparkCointypeEnumValueMap, enumMap: _SparkCointypeEnumValueMap,
), ),
r'valueIntString': PropertySchema( r'valueIntString': PropertySchema(
id: 13, id: 15,
name: r'valueIntString', name: r'valueIntString',
type: IsarType.string, type: IsarType.string,
), ),
r'walletId': PropertySchema( r'walletId': PropertySchema(
id: 14, id: 16,
name: r'walletId', name: r'walletId',
type: IsarType.string, type: IsarType.string,
) )
@ -134,6 +144,12 @@ int _sparkCoinEstimateSize(
) { ) {
var bytesCount = offsets.last; var bytesCount = offsets.last;
bytesCount += 3 + object.address.length * 3; bytesCount += 3 + object.address.length * 3;
{
final value = object.contextB64;
if (value != null) {
bytesCount += 3 + value.length * 3;
}
}
bytesCount += 3 + object.diversifierIntString.length * 3; bytesCount += 3 + object.diversifierIntString.length * 3;
{ {
final value = object.encryptedDiversifier; final value = object.encryptedDiversifier;
@ -166,6 +182,12 @@ int _sparkCoinEstimateSize(
bytesCount += 3 + value.length * 8; bytesCount += 3 + value.length * 8;
} }
} }
{
final value = object.serializedCoinB64;
if (value != null) {
bytesCount += 3 + value.length * 3;
}
}
{ {
final value = object.tag; final value = object.tag;
if (value != null) { if (value != null) {
@ -185,20 +207,22 @@ void _sparkCoinSerialize(
Map<Type, List<int>> allOffsets, Map<Type, List<int>> allOffsets,
) { ) {
writer.writeString(offsets[0], object.address); writer.writeString(offsets[0], object.address);
writer.writeString(offsets[1], object.diversifierIntString); writer.writeString(offsets[1], object.contextB64);
writer.writeLongList(offsets[2], object.encryptedDiversifier); writer.writeString(offsets[2], object.diversifierIntString);
writer.writeLong(offsets[3], object.height); writer.writeLongList(offsets[3], object.encryptedDiversifier);
writer.writeBool(offsets[4], object.isUsed); writer.writeLong(offsets[4], object.height);
writer.writeString(offsets[5], object.lTagHash); writer.writeBool(offsets[5], object.isUsed);
writer.writeString(offsets[6], object.memo); writer.writeString(offsets[6], object.lTagHash);
writer.writeLongList(offsets[7], object.nonce); writer.writeString(offsets[7], object.memo);
writer.writeLongList(offsets[8], object.serial); writer.writeLongList(offsets[8], object.nonce);
writer.writeLongList(offsets[9], object.serialContext); writer.writeLongList(offsets[9], object.serial);
writer.writeLongList(offsets[10], object.tag); writer.writeLongList(offsets[10], object.serialContext);
writer.writeString(offsets[11], object.txHash); writer.writeString(offsets[11], object.serializedCoinB64);
writer.writeByte(offsets[12], object.type.index); writer.writeLongList(offsets[12], object.tag);
writer.writeString(offsets[13], object.valueIntString); writer.writeString(offsets[13], object.txHash);
writer.writeString(offsets[14], object.walletId); writer.writeByte(offsets[14], object.type.index);
writer.writeString(offsets[15], object.valueIntString);
writer.writeString(offsets[16], object.walletId);
} }
SparkCoin _sparkCoinDeserialize( SparkCoin _sparkCoinDeserialize(
@ -209,21 +233,23 @@ SparkCoin _sparkCoinDeserialize(
) { ) {
final object = SparkCoin( final object = SparkCoin(
address: reader.readString(offsets[0]), address: reader.readString(offsets[0]),
diversifierIntString: reader.readString(offsets[1]), contextB64: reader.readStringOrNull(offsets[1]),
encryptedDiversifier: reader.readLongList(offsets[2]), diversifierIntString: reader.readString(offsets[2]),
height: reader.readLongOrNull(offsets[3]), encryptedDiversifier: reader.readLongList(offsets[3]),
isUsed: reader.readBool(offsets[4]), height: reader.readLongOrNull(offsets[4]),
lTagHash: reader.readString(offsets[5]), isUsed: reader.readBool(offsets[5]),
memo: reader.readStringOrNull(offsets[6]), lTagHash: reader.readString(offsets[6]),
nonce: reader.readLongList(offsets[7]), memo: reader.readStringOrNull(offsets[7]),
serial: reader.readLongList(offsets[8]), nonce: reader.readLongList(offsets[8]),
serialContext: reader.readLongList(offsets[9]), serial: reader.readLongList(offsets[9]),
tag: reader.readLongList(offsets[10]), serialContext: reader.readLongList(offsets[10]),
txHash: reader.readString(offsets[11]), serializedCoinB64: reader.readStringOrNull(offsets[11]),
type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[12])] ?? tag: reader.readLongList(offsets[12]),
txHash: reader.readString(offsets[13]),
type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[14])] ??
SparkCoinType.mint, SparkCoinType.mint,
valueIntString: reader.readString(offsets[13]), valueIntString: reader.readString(offsets[15]),
walletId: reader.readString(offsets[14]), walletId: reader.readString(offsets[16]),
); );
object.id = id; object.id = id;
return object; return object;
@ -239,19 +265,19 @@ P _sparkCoinDeserializeProp<P>(
case 0: case 0:
return (reader.readString(offset)) as P; return (reader.readString(offset)) as P;
case 1: case 1:
return (reader.readString(offset)) as P;
case 2:
return (reader.readLongList(offset)) as P;
case 3:
return (reader.readLongOrNull(offset)) as P;
case 4:
return (reader.readBool(offset)) as P;
case 5:
return (reader.readString(offset)) as P;
case 6:
return (reader.readStringOrNull(offset)) as P; return (reader.readStringOrNull(offset)) as P;
case 7: case 2:
return (reader.readString(offset)) as P;
case 3:
return (reader.readLongList(offset)) as P; return (reader.readLongList(offset)) as P;
case 4:
return (reader.readLongOrNull(offset)) as P;
case 5:
return (reader.readBool(offset)) as P;
case 6:
return (reader.readString(offset)) as P;
case 7:
return (reader.readStringOrNull(offset)) as P;
case 8: case 8:
return (reader.readLongList(offset)) as P; return (reader.readLongList(offset)) as P;
case 9: case 9:
@ -259,13 +285,17 @@ P _sparkCoinDeserializeProp<P>(
case 10: case 10:
return (reader.readLongList(offset)) as P; return (reader.readLongList(offset)) as P;
case 11: case 11:
return (reader.readString(offset)) as P; return (reader.readStringOrNull(offset)) as P;
case 12: case 12:
return (_SparkCointypeValueEnumMap[reader.readByteOrNull(offset)] ?? return (reader.readLongList(offset)) as P;
SparkCoinType.mint) as P;
case 13: case 13:
return (reader.readString(offset)) as P; return (reader.readString(offset)) as P;
case 14: case 14:
return (_SparkCointypeValueEnumMap[reader.readByteOrNull(offset)] ??
SparkCoinType.mint) as P;
case 15:
return (reader.readString(offset)) as P;
case 16:
return (reader.readString(offset)) as P; return (reader.readString(offset)) as P;
default: default:
throw IsarError('Unknown property with id $propertyId'); throw IsarError('Unknown property with id $propertyId');
@ -681,6 +711,157 @@ extension SparkCoinQueryFilter
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64IsNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull(
property: r'contextB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsNotNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNotNull(
property: r'contextB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64EqualTo(
String? value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64GreaterThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
include: include,
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64LessThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.lessThan(
include: include,
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Between(
String? lower,
String? upper, {
bool includeLower = true,
bool includeUpper = true,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.between(
property: r'contextB64',
lower: lower,
includeLower: includeLower,
upper: upper,
includeUpper: includeUpper,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64StartsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.startsWith(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64EndsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.endsWith(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Contains(
String value,
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.contains(
property: r'contextB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> contextB64Matches(
String pattern,
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.matches(
property: r'contextB64',
wildcard: pattern,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'contextB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
contextB64IsNotEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
property: r'contextB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
diversifierIntStringEqualTo( diversifierIntStringEqualTo(
String value, { String value, {
@ -1866,6 +2047,160 @@ extension SparkCoinQueryFilter
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull(
property: r'serializedCoinB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNotNull() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNotNull(
property: r'serializedCoinB64',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64EqualTo(
String? value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64GreaterThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
include: include,
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64LessThan(
String? value, {
bool include = false,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.lessThan(
include: include,
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Between(
String? lower,
String? upper, {
bool includeLower = true,
bool includeUpper = true,
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.between(
property: r'serializedCoinB64',
lower: lower,
includeLower: includeLower,
upper: upper,
includeUpper: includeUpper,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64StartsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.startsWith(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64EndsWith(
String value, {
bool caseSensitive = true,
}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.endsWith(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Contains(String value, {bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.contains(
property: r'serializedCoinB64',
value: value,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64Matches(String pattern, {bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.matches(
property: r'serializedCoinB64',
wildcard: pattern,
caseSensitive: caseSensitive,
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'serializedCoinB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition>
serializedCoinB64IsNotEmpty() {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.greaterThan(
property: r'serializedCoinB64',
value: '',
));
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> tagIsNull() { QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> tagIsNull() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(const FilterCondition.isNull( return query.addFilterCondition(const FilterCondition.isNull(
@ -2491,6 +2826,18 @@ extension SparkCoinQuerySortBy on QueryBuilder<SparkCoin, SparkCoin, QSortBy> {
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByContextB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByContextB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
sortByDiversifierIntString() { sortByDiversifierIntString() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
@ -2553,6 +2900,19 @@ extension SparkCoinQuerySortBy on QueryBuilder<SparkCoin, SparkCoin, QSortBy> {
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortBySerializedCoinB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
sortBySerializedCoinB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByTxHash() { QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByTxHash() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'txHash', Sort.asc); return query.addSortBy(r'txHash', Sort.asc);
@ -2616,6 +2976,18 @@ extension SparkCoinQuerySortThenBy
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByContextB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByContextB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'contextB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
thenByDiversifierIntString() { thenByDiversifierIntString() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
@ -2690,6 +3062,19 @@ extension SparkCoinQuerySortThenBy
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenBySerializedCoinB64() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.asc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy>
thenBySerializedCoinB64Desc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'serializedCoinB64', Sort.desc);
});
}
QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByTxHash() { QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByTxHash() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'txHash', Sort.asc); return query.addSortBy(r'txHash', Sort.asc);
@ -2748,6 +3133,13 @@ extension SparkCoinQueryWhereDistinct
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByContextB64(
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'contextB64', caseSensitive: caseSensitive);
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByDiversifierIntString( QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByDiversifierIntString(
{bool caseSensitive = true}) { {bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
@ -2807,6 +3199,14 @@ extension SparkCoinQueryWhereDistinct
}); });
} }
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctBySerializedCoinB64(
{bool caseSensitive = true}) {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'serializedCoinB64',
caseSensitive: caseSensitive);
});
}
QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByTag() { QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByTag() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'tag'); return query.addDistinctBy(r'tag');
@ -2856,6 +3256,12 @@ extension SparkCoinQueryProperty
}); });
} }
QueryBuilder<SparkCoin, String?, QQueryOperations> contextB64Property() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'contextB64');
});
}
QueryBuilder<SparkCoin, String, QQueryOperations> QueryBuilder<SparkCoin, String, QQueryOperations>
diversifierIntStringProperty() { diversifierIntStringProperty() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
@ -2913,6 +3319,13 @@ extension SparkCoinQueryProperty
}); });
} }
QueryBuilder<SparkCoin, String?, QQueryOperations>
serializedCoinB64Property() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'serializedCoinB64');
});
}
QueryBuilder<SparkCoin, List<int>?, QQueryOperations> tagProperty() { QueryBuilder<SparkCoin, List<int>?, QQueryOperations> tagProperty() {
return QueryBuilder.apply(this, (query) { return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'tag'); return query.addPropertyName(r'tag');

View file

@ -485,6 +485,7 @@ class FiroWallet extends Bip39HDWallet
await mainDB.deleteWalletBlockchainData(walletId); await mainDB.deleteWalletBlockchainData(walletId);
} }
// lelantus
final latestSetId = await electrumXClient.getLelantusLatestCoinId(); final latestSetId = await electrumXClient.getLelantusLatestCoinId();
final setDataMapFuture = getSetDataMap(latestSetId); final setDataMapFuture = getSetDataMap(latestSetId);
final usedSerialNumbersFuture = final usedSerialNumbersFuture =
@ -492,6 +493,17 @@ class FiroWallet extends Bip39HDWallet
coin: info.coin, coin: info.coin,
); );
// spark
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final sparkAnonSetFuture = electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
);
final sparkUsedCoinTagsFuture =
electrumXCachedClient.getSparkUsedCoinsTags(
coin: info.coin,
);
// receiving addresses // receiving addresses
Logging.instance.log( Logging.instance.log(
"checking receiving addresses...", "checking receiving addresses...",
@ -595,16 +607,29 @@ class FiroWallet extends Bip39HDWallet
final futureResults = await Future.wait([ final futureResults = await Future.wait([
usedSerialNumbersFuture, usedSerialNumbersFuture,
setDataMapFuture, setDataMapFuture,
sparkAnonSetFuture,
sparkUsedCoinTagsFuture,
]); ]);
// lelantus
final usedSerialsSet = (futureResults[0] as List<String>).toSet(); final usedSerialsSet = (futureResults[0] as List<String>).toSet();
final setDataMap = futureResults[1] as Map<dynamic, dynamic>; final setDataMap = futureResults[1] as Map<dynamic, dynamic>;
await recoverLelantusWallet( // spark
latestSetId: latestSetId, final sparkAnonymitySet = futureResults[2] as Map<String, dynamic>;
usedSerialNumbers: usedSerialsSet, final sparkSpentCoinTags = futureResults[3] as Set<String>;
setDataMap: setDataMap,
); await Future.wait([
recoverLelantusWallet(
latestSetId: latestSetId,
usedSerialNumbers: usedSerialsSet,
setDataMap: setDataMap,
),
recoverSparkWallet(
anonymitySet: sparkAnonymitySet,
spentCoinTags: sparkSpentCoinTags,
),
]);
}); });
await refresh(); await refresh();

View file

@ -16,6 +16,8 @@ import 'package:stackwallet/wallets/models/tx_data.dart';
import 'package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart'; import 'package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart';
import 'package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart'; import 'package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart';
const kDefaultSparkIndex = 1;
mixin SparkInterface on Bip39HDWallet, ElectrumXInterface { mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
@override @override
Future<void> init() async { Future<void> init() async {
@ -68,21 +70,18 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// default to starting at 1 if none found // default to starting at 1 if none found
final int diversifier = (highestStoredDiversifier ?? 0) + 1; final int diversifier = (highestStoredDiversifier ?? 0) + 1;
// TODO: check that this stays constant and only the diversifier changes?
const index = 1;
final root = await getRootHDNode(); final root = await getRootHDNode();
final String derivationPath; final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) { if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$index"; derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else { } else {
derivationPath = "$kSparkBaseDerivationPath$index"; derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
} }
final keys = root.derivePath(derivationPath); final keys = root.derivePath(derivationPath);
final String addressString = await LibSpark.getAddress( final String addressString = await LibSpark.getAddress(
privateKey: keys.privateKey.data, privateKey: keys.privateKey.data,
index: index, index: kDefaultSparkIndex,
diversifier: diversifier, diversifier: diversifier,
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test, isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
); );
@ -138,14 +137,13 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit // https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit
// To generate a spark spend we need to call createSparkSpendTransaction, // To generate a spark spend we need to call createSparkSpendTransaction,
// first unlock the wallet and generate all 3 spark keys, // first unlock the wallet and generate all 3 spark keys,
const index = 1;
final root = await getRootHDNode(); final root = await getRootHDNode();
final String derivationPath; final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) { if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$index"; derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
} else { } else {
derivationPath = "$kSparkBaseDerivationPath$index"; derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
} }
final privateKey = root.derivePath(derivationPath).privateKey.data; final privateKey = root.derivePath(derivationPath).privateKey.data;
// //
@ -355,7 +353,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
final spend = LibSpark.createSparkSendTransaction( final spend = LibSpark.createSparkSendTransaction(
privateKeyHex: privateKey.toHex, privateKeyHex: privateKey.toHex,
index: index, index: kDefaultSparkIndex,
recipients: [], recipients: [],
privateRecipients: txData.sparkRecipients privateRecipients: txData.sparkRecipients
?.map((e) => ( ?.map((e) => (
@ -366,7 +364,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
)) ))
.toList() ?? .toList() ??
[], [],
serializedMintMetas: serializedMintMetas, serializedCoins: serializedCoins,
allAnonymitySets: allAnonymitySets, allAnonymitySets: allAnonymitySets,
); );
@ -421,152 +419,41 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
sparkAddresses.map((e) => e.derivationPath!.value).toSet(); sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try { try {
const index = 1;
final root = await getRootHDNode();
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId(); final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
final blockHash = await _getCachedSparkBlockHash();
final futureResults = await Future.wait([ final futureResults = await Future.wait([
electrumXCachedClient.getSparkAnonymitySet( blockHash == null
groupId: latestSparkCoinId.toString(), ? electrumXCachedClient.getSparkAnonymitySet(
coin: info.coin, groupId: latestSparkCoinId.toString(),
), coin: info.coin,
)
: electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
startBlockHash: blockHash,
),
electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin), electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin),
]); ]);
final anonymitySet = futureResults[0] as Map<String, dynamic>; final anonymitySet = futureResults[0] as Map<String, dynamic>;
final spentCoinTags = futureResults[1] as Set<String>; final spentCoinTags = futureResults[1] as Set<String>;
// find our coins final myCoins = await _identifyCoins(
final List<SparkCoin> myCoins = []; anonymitySet: anonymitySet,
spentCoinTags: spentCoinTags,
for (final path in paths) { sparkAddressDerivationPaths: paths,
final keys = root.derivePath(path); );
final privateKeyHex = keys.privateKey.data.toHex;
for (final dynData in anonymitySet["coins"] as List) {
final data = List<String>.from(dynData as List);
if (data.length != 3) {
throw Exception("Unexpected serialized coin info found");
}
final serializedCoin = data[0];
final txHash = base64ToReverseHex(data[1]);
final coin = LibSpark.identifyAndRecoverCoin(
serializedCoin,
privateKeyHex: privateKeyHex,
index: index,
context: base64Decode(data[2]),
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
// its ours
if (coin != null) {
final SparkCoinType coinType;
switch (coin.type.value) {
case 0:
coinType = SparkCoinType.mint;
case 1:
coinType = SparkCoinType.spend;
default:
throw Exception("Unknown spark coin type detected");
}
myCoins.add(
SparkCoin(
walletId: walletId,
type: coinType,
isUsed: spentCoinTags.contains(coin.lTagHash!),
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,
txHash: txHash,
valueIntString: coin.value!.toString(),
memo: coin.memo,
serialContext: coin.serialContext,
diversifierIntString: coin.diversifier!.toString(),
encryptedDiversifier: coin.encryptedDiversifier,
serial: coin.serial,
tag: coin.tag,
lTagHash: coin.lTagHash!,
height: coin.height,
),
);
}
}
}
// update wallet spark coins in isar // update wallet spark coins in isar
if (myCoins.isNotEmpty) { await _addOrUpdateSparkCoins(myCoins);
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(myCoins);
});
}
// update wallet spark coin height // update blockHash in cache
final coinsToCheck = await mainDB.isar.sparkCoins final String newBlockHash = anonymitySet["blockHash"] as String;
.where() await _setCachedSparkBlockHash(newBlockHash);
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
coin: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
// refresh spark balance // refresh spark balance
final currentHeight = await chainHeight; await refreshSparkBalance();
final unusedCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final total = Amount(
rawValue: unusedCoins
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final spendable = Amount(
rawValue: unusedCoins
.where((e) =>
e.height != null &&
e.height! + cryptoCurrency.minConfirms >= currentHeight)
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final sparkBalance = Balance(
total: total,
spendable: spendable,
blockedTotal: Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
),
pendingSpendable: total - spendable,
);
await info.updateBalanceTertiary(
newBalance: sparkBalance,
isar: mainDB.isar,
);
} catch (e, s) { } catch (e, s) {
// todo logging // todo logging
@ -574,35 +461,85 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
} }
} }
Future<void> refreshSparkBalance() async {
final currentHeight = await chainHeight;
final unusedCoins = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.isUsedEqualTo(false)
.findAll();
final total = Amount(
rawValue: unusedCoins
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final spendable = Amount(
rawValue: unusedCoins
.where((e) =>
e.height != null &&
e.height! + cryptoCurrency.minConfirms >= currentHeight)
.map((e) => e.value)
.fold(BigInt.zero, (prev, e) => prev + e),
fractionDigits: cryptoCurrency.fractionDigits,
);
final sparkBalance = Balance(
total: total,
spendable: spendable,
blockedTotal: Amount(
rawValue: BigInt.zero,
fractionDigits: cryptoCurrency.fractionDigits,
),
pendingSpendable: total - spendable,
);
await info.updateBalanceTertiary(
newBalance: sparkBalance,
isar: mainDB.isar,
);
}
/// Should only be called within the standard wallet [recover] function due to /// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined. /// mutex locking. Otherwise behaviour MAY be undefined.
Future<void> recoverSparkWallet( Future<void> recoverSparkWallet({
// { required Map<dynamic, dynamic> anonymitySet,
// required int latestSetId, required Set<String> spentCoinTags,
// required Map<dynamic, dynamic> setDataMap, }) async {
// required Set<String> usedSerialNumbers, // generate spark addresses if non existing
// } if (await getCurrentReceivingSparkAddress() == null) {
) async { final address = await generateNextSparkAddress();
await mainDB.putAddress(address);
}
final sparkAddresses = await mainDB.isar.addresses
.where()
.walletIdEqualTo(walletId)
.filter()
.typeEqualTo(AddressType.spark)
.findAll();
final Set<String> paths =
sparkAddresses.map((e) => e.derivationPath!.value).toSet();
try { try {
// do we need to generate any spark address(es) here? final myCoins = await _identifyCoins(
anonymitySet: anonymitySet,
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId(); spentCoinTags: spentCoinTags,
sparkAddressDerivationPaths: paths,
final anonymitySet = await electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
); );
// TODO loop over set and see which coins are ours using the FFI call `identifyCoin`
List myCoins = [];
// fetch metadata for myCoins
// create list of Spark Coin isar objects
// update wallet spark coins in isar // update wallet spark coins in isar
await _addOrUpdateSparkCoins(myCoins);
throw UnimplementedError(); // update blockHash in cache
final String newBlockHash = anonymitySet["blockHash"] as String;
await _setCachedSparkBlockHash(newBlockHash);
// refresh spark balance
await refreshSparkBalance();
} catch (e, s) { } catch (e, s) {
// todo logging // todo logging
@ -826,6 +763,124 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// wait for normalBalanceFuture to complete before returning // wait for normalBalanceFuture to complete before returning
await normalBalanceFuture; await normalBalanceFuture;
} }
// ====================== Private ============================================
final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";
Future<String?> _getCachedSparkBlockHash() async {
return info.otherData[_kSparkAnonSetCachedBlockHashKey] as String?;
}
Future<void> _setCachedSparkBlockHash(String blockHash) async {
await info.updateOtherData(
newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
isar: mainDB.isar,
);
}
Future<List<SparkCoin>> _identifyCoins({
required Map<dynamic, dynamic> anonymitySet,
required Set<String> spentCoinTags,
required Set<String> sparkAddressDerivationPaths,
}) async {
final root = await getRootHDNode();
final List<SparkCoin> myCoins = [];
for (final path in sparkAddressDerivationPaths) {
final keys = root.derivePath(path);
final privateKeyHex = keys.privateKey.data.toHex;
for (final dynData in anonymitySet["coins"] as List) {
final data = List<String>.from(dynData as List);
if (data.length != 3) {
throw Exception("Unexpected serialized coin info found");
}
final serializedCoinB64 = data[0];
final txHash = base64ToReverseHex(data[1]);
final contextB64 = data[2];
final coin = LibSpark.identifyAndRecoverCoin(
serializedCoinB64,
privateKeyHex: privateKeyHex,
index: kDefaultSparkIndex,
context: base64Decode(contextB64),
isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
);
// its ours
if (coin != null) {
final SparkCoinType coinType;
switch (coin.type.value) {
case 0:
coinType = SparkCoinType.mint;
case 1:
coinType = SparkCoinType.spend;
default:
throw Exception("Unknown spark coin type detected");
}
myCoins.add(
SparkCoin(
walletId: walletId,
type: coinType,
isUsed: spentCoinTags.contains(coin.lTagHash!),
nonce: coin.nonceHex?.toUint8ListFromHex,
address: coin.address!,
txHash: txHash,
valueIntString: coin.value!.toString(),
memo: coin.memo,
serialContext: coin.serialContext,
diversifierIntString: coin.diversifier!.toString(),
encryptedDiversifier: coin.encryptedDiversifier,
serial: coin.serial,
tag: coin.tag,
lTagHash: coin.lTagHash!,
height: coin.height,
serializedCoinB64: serializedCoinB64,
contextB64: contextB64,
),
);
}
}
}
return myCoins;
}
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
if (coins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(coins);
});
}
// update wallet spark coin height
final coinsToCheck = await mainDB.isar.sparkCoins
.where()
.walletIdEqualToAnyLTagHash(walletId)
.filter()
.heightIsNull()
.findAll();
final List<SparkCoin> updatedCoins = [];
for (final coin in coinsToCheck) {
final tx = await electrumXCachedClient.getTransaction(
txHash: coin.txHash,
coin: info.coin,
);
if (tx["height"] is int) {
updatedCoins.add(coin.copyWith(height: tx["height"] as int));
}
}
if (updatedCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
await mainDB.isar.sparkCoins.putAll(updatedCoins);
});
}
}
} }
String base64ToReverseHex(String source) => String base64ToReverseHex(String source) =>