Mirror of https://github.com/cypherstack/stack_wallet.git (synced 2024-12-23 11:59:30 +00:00)
add required data to spark coin schema and some other small fixes for spark spend script creation
parent 35fafb5c5d
commit c16c97d74d
3 changed files with 178 additions and 57 deletions
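For context, a minimal usage sketch (not part of this commit) of the Isar query helpers generated for the new groupId property. The Isar instance, the generated sparkCoins collection accessor, and the helper name unusedCoinsInGroup are illustrative assumptions:

import 'package:isar/isar.dart';
// Assumes the SparkCoin model and its generated part file are imported.

/// Hypothetical helper: fetch this wallet's unused Spark coins belonging to a
/// given anonymity set group, using the groupIdEqualTo filter added here.
Future<List<SparkCoin>> unusedCoinsInGroup(
  Isar isar,
  String walletId,
  int groupId,
) {
  return isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .and()
      .groupIdEqualTo(groupId)
      .findAll();
}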
@@ -28,6 +28,7 @@ class SparkCoin {
   final SparkCoinType type;
 
   final bool isUsed;
+  final int groupId;
 
   final List<int>? nonce;
 

@@ -62,6 +63,7 @@ class SparkCoin {
     required this.walletId,
     required this.type,
     required this.isUsed,
+    required this.groupId,
     this.nonce,
     required this.address,
     required this.txHash,

@@ -81,6 +83,7 @@ class SparkCoin {
   SparkCoin copyWith({
     SparkCoinType? type,
     bool? isUsed,
+    int? groupId,
     List<int>? nonce,
     String? address,
     String? txHash,

@@ -100,6 +103,7 @@ class SparkCoin {
       walletId: walletId,
       type: type ?? this.type,
       isUsed: isUsed ?? this.isUsed,
+      groupId: groupId ?? this.groupId,
       nonce: nonce ?? this.nonce,
       address: address ?? this.address,
       txHash: txHash ?? this.txHash,

@@ -124,6 +128,7 @@ class SparkCoin {
         'walletId: $walletId'
         ', type: $type'
         ', isUsed: $isUsed'
+        ', groupId: $groupId'
         ', k: $nonce'
         ', address: $address'
         ', txHash: $txHash'
@@ -37,69 +37,74 @@ const SparkCoinSchema = CollectionSchema(
       name: r'encryptedDiversifier',
       type: IsarType.longList,
     ),
-    r'height': PropertySchema(
+    r'groupId': PropertySchema(
       id: 4,
+      name: r'groupId',
+      type: IsarType.long,
+    ),
+    r'height': PropertySchema(
+      id: 5,
       name: r'height',
       type: IsarType.long,
     ),
     r'isUsed': PropertySchema(
-      id: 5,
+      id: 6,
       name: r'isUsed',
       type: IsarType.bool,
     ),
     r'lTagHash': PropertySchema(
-      id: 6,
+      id: 7,
       name: r'lTagHash',
       type: IsarType.string,
     ),
     r'memo': PropertySchema(
-      id: 7,
+      id: 8,
       name: r'memo',
       type: IsarType.string,
     ),
     r'nonce': PropertySchema(
-      id: 8,
+      id: 9,
       name: r'nonce',
       type: IsarType.longList,
     ),
     r'serial': PropertySchema(
-      id: 9,
+      id: 10,
       name: r'serial',
       type: IsarType.longList,
     ),
     r'serialContext': PropertySchema(
-      id: 10,
+      id: 11,
       name: r'serialContext',
       type: IsarType.longList,
     ),
     r'serializedCoinB64': PropertySchema(
-      id: 11,
+      id: 12,
       name: r'serializedCoinB64',
       type: IsarType.string,
     ),
     r'tag': PropertySchema(
-      id: 12,
+      id: 13,
       name: r'tag',
       type: IsarType.longList,
     ),
     r'txHash': PropertySchema(
-      id: 13,
+      id: 14,
       name: r'txHash',
       type: IsarType.string,
     ),
     r'type': PropertySchema(
-      id: 14,
+      id: 15,
       name: r'type',
       type: IsarType.byte,
       enumMap: _SparkCointypeEnumValueMap,
     ),
     r'valueIntString': PropertySchema(
-      id: 15,
+      id: 16,
       name: r'valueIntString',
       type: IsarType.string,
     ),
     r'walletId': PropertySchema(
-      id: 16,
+      id: 17,
       name: r'walletId',
       type: IsarType.string,
     )

@@ -210,19 +215,20 @@ void _sparkCoinSerialize(
   writer.writeString(offsets[1], object.contextB64);
   writer.writeString(offsets[2], object.diversifierIntString);
   writer.writeLongList(offsets[3], object.encryptedDiversifier);
-  writer.writeLong(offsets[4], object.height);
-  writer.writeBool(offsets[5], object.isUsed);
-  writer.writeString(offsets[6], object.lTagHash);
-  writer.writeString(offsets[7], object.memo);
-  writer.writeLongList(offsets[8], object.nonce);
-  writer.writeLongList(offsets[9], object.serial);
-  writer.writeLongList(offsets[10], object.serialContext);
-  writer.writeString(offsets[11], object.serializedCoinB64);
-  writer.writeLongList(offsets[12], object.tag);
-  writer.writeString(offsets[13], object.txHash);
-  writer.writeByte(offsets[14], object.type.index);
-  writer.writeString(offsets[15], object.valueIntString);
-  writer.writeString(offsets[16], object.walletId);
+  writer.writeLong(offsets[4], object.groupId);
+  writer.writeLong(offsets[5], object.height);
+  writer.writeBool(offsets[6], object.isUsed);
+  writer.writeString(offsets[7], object.lTagHash);
+  writer.writeString(offsets[8], object.memo);
+  writer.writeLongList(offsets[9], object.nonce);
+  writer.writeLongList(offsets[10], object.serial);
+  writer.writeLongList(offsets[11], object.serialContext);
+  writer.writeString(offsets[12], object.serializedCoinB64);
+  writer.writeLongList(offsets[13], object.tag);
+  writer.writeString(offsets[14], object.txHash);
+  writer.writeByte(offsets[15], object.type.index);
+  writer.writeString(offsets[16], object.valueIntString);
+  writer.writeString(offsets[17], object.walletId);
 }
 
 SparkCoin _sparkCoinDeserialize(

@@ -236,20 +242,21 @@ SparkCoin _sparkCoinDeserialize(
     contextB64: reader.readStringOrNull(offsets[1]),
     diversifierIntString: reader.readString(offsets[2]),
     encryptedDiversifier: reader.readLongList(offsets[3]),
-    height: reader.readLongOrNull(offsets[4]),
-    isUsed: reader.readBool(offsets[5]),
-    lTagHash: reader.readString(offsets[6]),
-    memo: reader.readStringOrNull(offsets[7]),
-    nonce: reader.readLongList(offsets[8]),
-    serial: reader.readLongList(offsets[9]),
-    serialContext: reader.readLongList(offsets[10]),
-    serializedCoinB64: reader.readStringOrNull(offsets[11]),
-    tag: reader.readLongList(offsets[12]),
-    txHash: reader.readString(offsets[13]),
-    type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[14])] ??
+    groupId: reader.readLong(offsets[4]),
+    height: reader.readLongOrNull(offsets[5]),
+    isUsed: reader.readBool(offsets[6]),
+    lTagHash: reader.readString(offsets[7]),
+    memo: reader.readStringOrNull(offsets[8]),
+    nonce: reader.readLongList(offsets[9]),
+    serial: reader.readLongList(offsets[10]),
+    serialContext: reader.readLongList(offsets[11]),
+    serializedCoinB64: reader.readStringOrNull(offsets[12]),
+    tag: reader.readLongList(offsets[13]),
+    txHash: reader.readString(offsets[14]),
+    type: _SparkCointypeValueEnumMap[reader.readByteOrNull(offsets[15])] ??
         SparkCoinType.mint,
-    valueIntString: reader.readString(offsets[15]),
-    walletId: reader.readString(offsets[16]),
+    valueIntString: reader.readString(offsets[16]),
+    walletId: reader.readString(offsets[17]),
   );
   object.id = id;
   return object;

@@ -271,32 +278,34 @@ P _sparkCoinDeserializeProp<P>(
     case 3:
       return (reader.readLongList(offset)) as P;
     case 4:
-      return (reader.readLongOrNull(offset)) as P;
+      return (reader.readLong(offset)) as P;
     case 5:
-      return (reader.readBool(offset)) as P;
+      return (reader.readLongOrNull(offset)) as P;
     case 6:
-      return (reader.readString(offset)) as P;
+      return (reader.readBool(offset)) as P;
     case 7:
-      return (reader.readStringOrNull(offset)) as P;
+      return (reader.readString(offset)) as P;
     case 8:
-      return (reader.readLongList(offset)) as P;
+      return (reader.readStringOrNull(offset)) as P;
     case 9:
       return (reader.readLongList(offset)) as P;
     case 10:
       return (reader.readLongList(offset)) as P;
     case 11:
-      return (reader.readStringOrNull(offset)) as P;
-    case 12:
       return (reader.readLongList(offset)) as P;
+    case 12:
+      return (reader.readStringOrNull(offset)) as P;
     case 13:
-      return (reader.readString(offset)) as P;
+      return (reader.readLongList(offset)) as P;
     case 14:
+      return (reader.readString(offset)) as P;
+    case 15:
       return (_SparkCointypeValueEnumMap[reader.readByteOrNull(offset)] ??
           SparkCoinType.mint) as P;
-    case 15:
-      return (reader.readString(offset)) as P;
     case 16:
       return (reader.readString(offset)) as P;
+    case 17:
+      return (reader.readString(offset)) as P;
     default:
       throw IsarError('Unknown property with id $propertyId');
   }

@@ -1161,6 +1170,59 @@ extension SparkCoinQueryFilter
     });
   }
 
+  QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> groupIdEqualTo(
+      int value) {
+    return QueryBuilder.apply(this, (query) {
+      return query.addFilterCondition(FilterCondition.equalTo(
+        property: r'groupId',
+        value: value,
+      ));
+    });
+  }
+
+  QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> groupIdGreaterThan(
+    int value, {
+    bool include = false,
+  }) {
+    return QueryBuilder.apply(this, (query) {
+      return query.addFilterCondition(FilterCondition.greaterThan(
+        include: include,
+        property: r'groupId',
+        value: value,
+      ));
+    });
+  }
+
+  QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> groupIdLessThan(
+    int value, {
+    bool include = false,
+  }) {
+    return QueryBuilder.apply(this, (query) {
+      return query.addFilterCondition(FilterCondition.lessThan(
+        include: include,
+        property: r'groupId',
+        value: value,
+      ));
+    });
+  }
+
+  QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> groupIdBetween(
+    int lower,
+    int upper, {
+    bool includeLower = true,
+    bool includeUpper = true,
+  }) {
+    return QueryBuilder.apply(this, (query) {
+      return query.addFilterCondition(FilterCondition.between(
+        property: r'groupId',
+        lower: lower,
+        includeLower: includeLower,
+        upper: upper,
+        includeUpper: includeUpper,
+      ));
+    });
+  }
+
   QueryBuilder<SparkCoin, SparkCoin, QAfterFilterCondition> heightIsNull() {
     return QueryBuilder.apply(this, (query) {
       return query.addFilterCondition(const FilterCondition.isNull(

@@ -2852,6 +2914,18 @@ extension SparkCoinQuerySortBy on QueryBuilder<SparkCoin, SparkCoin, QSortBy> {
     });
   }
 
+  QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByGroupId() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addSortBy(r'groupId', Sort.asc);
+    });
+  }
+
+  QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByGroupIdDesc() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addSortBy(r'groupId', Sort.desc);
+    });
+  }
+
   QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> sortByHeight() {
     return QueryBuilder.apply(this, (query) {
       return query.addSortBy(r'height', Sort.asc);

@@ -3002,6 +3076,18 @@ extension SparkCoinQuerySortThenBy
     });
   }
 
+  QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByGroupId() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addSortBy(r'groupId', Sort.asc);
+    });
+  }
+
+  QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByGroupIdDesc() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addSortBy(r'groupId', Sort.desc);
+    });
+  }
+
   QueryBuilder<SparkCoin, SparkCoin, QAfterSortBy> thenByHeight() {
     return QueryBuilder.apply(this, (query) {
       return query.addSortBy(r'height', Sort.asc);

@@ -3155,6 +3241,12 @@ extension SparkCoinQueryWhereDistinct
     });
   }
 
+  QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByGroupId() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addDistinctBy(r'groupId');
+    });
+  }
+
   QueryBuilder<SparkCoin, SparkCoin, QDistinct> distinctByHeight() {
     return QueryBuilder.apply(this, (query) {
       return query.addDistinctBy(r'height');

@@ -3276,6 +3368,12 @@ extension SparkCoinQueryProperty
     });
   }
 
+  QueryBuilder<SparkCoin, int, QQueryOperations> groupIdProperty() {
+    return QueryBuilder.apply(this, (query) {
+      return query.addPropertyName(r'groupId');
+    });
+  }
+
   QueryBuilder<SparkCoin, int?, QQueryOperations> heightProperty() {
     return QueryBuilder.apply(this, (query) {
       return query.addPropertyName(r'height');
@@ -123,21 +123,35 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
         .walletIdEqualToAnyLTagHash(walletId)
         .filter()
         .isUsedEqualTo(false)
+        .and()
+        .heightIsNotNull()
         .findAll();
 
-    final serializedCoins =
-        coins.map((e) => (e.serializedCoinB64!, e.contextB64!)).toList();
+    final serializedCoins = coins
+        .map((e) => (
+              serializedCoin: e.serializedCoinB64!,
+              serializedCoinContext: e.contextB64!,
+              groupId: e.groupId,
+              height: e.height!,
+            ))
+        .toList();
 
     final currentId = await electrumXClient.getSparkLatestCoinId();
     final List<Map<String, dynamic>> setMaps = [];
-    // for (int i = 0; i <= currentId; i++) {
-    for (int i = currentId; i <= currentId; i++) {
+    final List<({int groupId, String blockHash})> idAndBlockHashes = [];
+    for (int i = 1; i <= currentId; i++) {
       final set = await electrumXCachedClient.getSparkAnonymitySet(
         groupId: i.toString(),
         coin: info.coin,
       );
       set["coinGroupID"] = i;
       setMaps.add(set);
+      idAndBlockHashes.add(
+        (
+          groupId: i,
+          blockHash: set["blockHash"] as String,
+        ),
+      );
     }
 
     final allAnonymitySets = setMaps
@@ -385,6 +399,9 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
       [],
       serializedCoins: serializedCoins,
       allAnonymitySets: allAnonymitySets,
+      idAndBlockHashes: idAndBlockHashes
+          .map((e) => (setId: e.groupId, blockHash: base64Decode(e.blockHash)))
+          .toList(),
     );
 
     print("SPARK SPEND ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
@@ -399,10 +416,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
     }
 
     final extractedTx = txb.buildIncomplete();
-
-    // TODO: verify encoding
-    extractedTx.setPayload(spend.serializedSpendPayload.toUint8ListFromUtf8);
-
+    extractedTx.setPayload(spend.serializedSpendPayload);
     final rawTxHex = extractedTx.toHex();
 
     return txData.copyWith(
@@ -468,6 +482,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
       _identifyCoins,
       (
         anonymitySetCoins: anonymitySet["coins"] as List,
+        groupId: latestSparkCoinId,
         spentCoinTags: spentCoinTags,
         privateKeyHexSet: privateKeyHexSet,
         walletId: walletId,

@@ -565,6 +580,7 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
         _identifyCoins,
         (
           anonymitySetCoins: anonymitySet["coins"] as List,
+          groupId: anonymitySet["coinGroupID"] as int,
           spentCoinTags: spentCoinTags,
           privateKeyHexSet: privateKeyHexSet,
           walletId: walletId,

@@ -863,6 +879,7 @@ String base64ToReverseHex(String source) =>
 Future<List<SparkCoin>> _identifyCoins(
   ({
     List<dynamic> anonymitySetCoins,
+    int groupId,
     Set<String> spentCoinTags,
     Set<String> privateKeyHexSet,
     String walletId,

@@ -906,6 +923,7 @@ Future<List<SparkCoin>> _identifyCoins(
           walletId: args.walletId,
           type: coinType,
           isUsed: args.spentCoinTags.contains(coin.lTagHash!),
+          groupId: args.groupId,
           nonce: coin.nonceHex?.toUint8ListFromHex,
           address: coin.address!,
           txHash: txHash,