Merge remote-tracking branch 'origin/wallets_refactor_spark_integrationn' into spark_refactor

sneurlax 2023-12-13 18:37:14 -06:00
commit 597252f2be
6 changed files with 443 additions and 31 deletions

View file

@@ -53,8 +53,12 @@ class DB {
// firo only
String _boxNameSetCache({required Coin coin}) =>
"${coin.name}_anonymitySetCache";
String _boxNameSetSparkCache({required Coin coin}) =>
"${coin.name}_anonymitySetSparkCache";
String _boxNameUsedSerialsCache({required Coin coin}) =>
"${coin.name}_usedSerialsCache";
String _boxNameSparkUsedCoinsTagsCache({required Coin coin}) =>
"${coin.name}_sparkUsedCoinsTagsCache";
Box<NodeModel>? _boxNodeModels;
Box<NodeModel>? _boxPrimaryNodes;
@@ -75,7 +79,9 @@ class DB {
final Map<Coin, Box<dynamic>> _txCacheBoxes = {};
final Map<Coin, Box<dynamic>> _setCacheBoxes = {};
final Map<Coin, Box<dynamic>> _setSparkCacheBoxes = {};
final Map<Coin, Box<dynamic>> _usedSerialsCacheBoxes = {};
final Map<Coin, Box<dynamic>> _getSparkUsedCoinsTagsCacheBoxes = {};
// exposed for monero
Box<xmr.WalletInfo> get moneroWalletInfoBox => _walletInfoSource!;
@@ -197,6 +203,15 @@ class DB {
await Hive.openBox<dynamic>(_boxNameSetCache(coin: coin));
}
Future<Box<dynamic>> getSparkAnonymitySetCacheBox(
{required Coin coin}) async {
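// If a previously opened box for this coin was closed elsewhere, drop the
// stale handle so the `??=` below reopens the box rather than returning a
// closed one.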
if (_setSparkCacheBoxes[coin]?.isOpen != true) {
_setSparkCacheBoxes.remove(coin);
}
return _setSparkCacheBoxes[coin] ??=
await Hive.openBox<dynamic>(_boxNameSetSparkCache(coin: coin));
}
Future<void> closeAnonymitySetCacheBox({required Coin coin}) async {
await _setCacheBoxes[coin]?.close();
}
@@ -209,6 +224,16 @@ class DB {
await Hive.openBox<dynamic>(_boxNameUsedSerialsCache(coin: coin));
}
Future<Box<dynamic>> getSparkUsedCoinsTagsCacheBox(
{required Coin coin}) async {
if (_getSparkUsedCoinsTagsCacheBoxes[coin]?.isOpen != true) {
_getSparkUsedCoinsTagsCacheBoxes.remove(coin);
}
return _getSparkUsedCoinsTagsCacheBoxes[coin] ??=
await Hive.openBox<dynamic>(
_boxNameSparkUsedCoinsTagsCache(coin: coin));
}
Future<void> closeUsedSerialsCacheBox({required Coin coin}) async {
await _usedSerialsCacheBoxes[coin]?.close();
}
@@ -216,9 +241,12 @@ class DB {
/// Clear all cached transactions for the specified coin
Future<void> clearSharedTransactionCache({required Coin coin}) async {
await deleteAll<dynamic>(boxName: _boxNameTxCache(coin: coin));
if (coin == Coin.firo) {
if (coin == Coin.firo || coin == Coin.firoTestNet) {
await deleteAll<dynamic>(boxName: _boxNameSetCache(coin: coin));
await deleteAll<dynamic>(boxName: _boxNameSetSparkCache(coin: coin));
await deleteAll<dynamic>(boxName: _boxNameUsedSerialsCache(coin: coin));
await deleteAll<dynamic>(
boxName: _boxNameSparkUsedCoinsTagsCache(coin: coin));
}
}

View file

@@ -107,6 +107,59 @@ class CachedElectrumXClient {
}
}
Future<Map<String, dynamic>> getSparkAnonymitySet({
required String groupId,
String blockhash = "",
required Coin coin,
}) async {
try {
final box = await DB.instance.getSparkAnonymitySetCacheBox(coin: coin);
final cachedSet = box.get(groupId) as Map?;
Map<String, dynamic> set;
// null check to see if there is a cached set
if (cachedSet == null) {
set = {
"coinGroupID": int.parse(groupId),
"blockHash": blockhash,
"setHash": "",
"coins": <dynamic>[],
};
} else {
set = Map<String, dynamic>.from(cachedSet);
}
final newSet = await electrumXClient.getSparkAnonymitySet(
coinGroupId: groupId,
startBlockHash: set["blockHash"] as String,
);
// update set with new data
if (newSet["setHash"] != "" && set["setHash"] != newSet["setHash"]) {
set["setHash"] = newSet["setHash"];
set["blockHash"] = newSet["blockHash"];
for (int i = (newSet["coins"] as List).length - 1; i >= 0; i--) {
// TODO verify this is correct (or append?)
set["coins"].insert(0, newSet["coins"][i]);
}
// save set to db
await box.put(groupId, set);
Logging.instance.log(
"Updated current anonymity set for ${coin.name} with group ID $groupId",
level: LogLevel.Info,
);
}
return set;
} catch (e, s) {
Logging.instance.log(
"Failed to process CachedElectrumX.getSparkAnonymitySet(): $e\n$s",
level: LogLevel.Error);
rethrow;
}
}
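// Illustrative usage (a sketch, not part of this diff): callers pair this
// cache with the latest group id from the non-cached client, e.g.
//
//   final latestId = await electrumXClient.getSparkLatestCoinId();
//   final set = await electrumXCachedClient.getSparkAnonymitySet(
//     groupId: latestId.toString(),
//     coin: Coin.firo,
//   );
//
// Only coins added since the cached `blockHash` are fetched from the server
// and merged into the stored set.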
String base64ToHex(String source) =>
base64Decode(LineSplitter.split(source).join())
.map((e) => e.toRadixString(16).padLeft(2, '0'))
@@ -198,14 +251,62 @@ class CachedElectrumXClient {
return resultingList;
} catch (e, s) {
Logging.instance.log(
"Failed to process CachedElectrumX.getTransaction(): $e\n$s",
level: LogLevel.Error);
"Failed to process CachedElectrumX.getUsedCoinSerials(): $e\n$s",
level: LogLevel.Error,
);
rethrow;
}
}
Future<Set<String>> getSparkUsedCoinsTags({
required Coin coin,
}) async {
try {
final box = await DB.instance.getSparkUsedCoinsTagsCacheBox(coin: coin);
final _list = box.get("tags") as List?;
Set<String> cachedTags =
_list == null ? {} : List<String>.from(_list).toSet();
final startNumber = max(
0,
cachedTags.length - 100, // 100 being some arbitrary buffer
);
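// Re-requesting roughly the last 100 tags means the fresh result should
// share members with the cache; the intersection assert below relies on
// that overlap to catch gaps in the cached data.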
final tags = await electrumXClient.getSparkUsedCoinsTags(
startNumber: startNumber,
);
// final newSerials = List<String>.from(serials["serials"] as List)
// .map((e) => !isHexadecimal(e) ? base64ToHex(e) : e)
// .toSet();
// ensure we are getting some overlap so we know we are not missing any
if (cachedTags.isNotEmpty && tags.isNotEmpty) {
assert(cachedTags.intersection(tags).isNotEmpty);
}
cachedTags.addAll(tags);
await box.put(
"tags",
cachedTags.toList(),
);
return cachedTags;
} catch (e, s) {
Logging.instance.log(
"Failed to process CachedElectrumX.getSparkUsedCoinsTags(): $e\n$s",
level: LogLevel.Error,
);
rethrow;
}
}
/// Clear all cached transactions for the specified coin
Future<void> clearSharedTransactionCache({required Coin coin}) async {
await DB.instance.clearSharedTransactionCache(coin: coin);
await DB.instance.closeAnonymitySetCacheBox(coin: coin);
}
}

View file

@@ -881,7 +881,7 @@ class ElectrumXClient {
///
/// Returns blockHash (last block hash),
/// setHash (hash of current set)
/// and mints (the list of serialized coin and tx hash pairs)
/// and coins (the list of serialized coin and tx hash pairs)
Future<Map<String, dynamic>> getSparkAnonymitySet({
String coinGroupId = "1",
String startBlockHash = "",
@@ -908,7 +908,7 @@ class ElectrumXClient {
/// Takes [startNumber]; if it is 0 we get the full set,
/// otherwise only the used tags recorded after that number
Future<Map<String, dynamic>> getSparkUsedCoinsTags({
Future<Set<String>> getSparkUsedCoinsTags({
String? requestID,
required int startNumber,
}) async {
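// The raw response is a map of the form {"tags": [...]}; returning a Set
// dedupes the tags and keeps the overlap check in CachedElectrumXClient
// cheap.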
@@ -921,7 +921,8 @@ class ElectrumXClient {
],
requestTimeout: const Duration(minutes: 2),
);
return Map<String, dynamic>.from(response["result"] as Map);
final map = Map<String, dynamic>.from(response["result"] as Map);
return Set<String>.from(map["tags"] as List);
} catch (e) {
Logging.instance.log(e, level: LogLevel.Error);
rethrow;

View file

@@ -444,7 +444,7 @@ class HiddenSettings extends StatelessWidget {
.getSparkUsedCoinsTags(startNumber: 0);
print(
"usedCoinsTags['tags'].length: ${usedCoinsTags["tags"].length}");
"usedCoinsTags['tags'].length: ${usedCoinsTags.length}");
Util.printJson(
usedCoinsTags, "usedCoinsTags");
} catch (e, s) {

View file

@@ -55,6 +55,15 @@ class TxData {
// tezos specific
final tezart.OperationsList? tezosOperationsList;
// firo spark specific
final List<
({
String address,
Amount amount,
bool subtractFeeFromAmount,
String memo,
})>? sparkRecipients;
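// Example value (illustrative only; fractionDigits depends on the coin):
//   sparkRecipients: [
//     (
//       address: "<spark address>",
//       amount: Amount(rawValue: BigInt.from(100000), fractionDigits: 8),
//       subtractFeeFromAmount: false,
//       memo: "",
//     ),
//   ]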
TxData({
this.feeRateType,
this.feeRateAmount,
@@ -85,6 +94,7 @@ class TxData {
this.txSubType,
this.mintsMapLelantus,
this.tezosOperationsList,
this.sparkRecipients,
});
Amount? get amount => recipients != null && recipients!.isNotEmpty
@@ -127,6 +137,14 @@ class TxData {
TransactionSubType? txSubType,
List<Map<String, dynamic>>? mintsMapLelantus,
tezart.OperationsList? tezosOperationsList,
List<
({
String address,
Amount amount,
bool subtractFeeFromAmount,
String memo,
})>?
sparkRecipients,
}) {
return TxData(
feeRateType: feeRateType ?? this.feeRateType,
@@ -159,6 +177,7 @@ class TxData {
txSubType: txSubType ?? this.txSubType,
mintsMapLelantus: mintsMapLelantus ?? this.mintsMapLelantus,
tezosOperationsList: tezosOperationsList ?? this.tezosOperationsList,
sparkRecipients: sparkRecipients ?? this.sparkRecipients,
);
}
@@ -193,5 +212,6 @@ class TxData {
'txSubType: $txSubType, '
'mintsMapLelantus: $mintsMapLelantus, '
'tezosOperationsList: $tezosOperationsList, '
'sparkRecipients: $sparkRecipients, '
'}';
}

View file

@@ -1,6 +1,8 @@
import 'dart:convert';
import 'dart:typed_data';
import 'package:bitcoindart/bitcoindart.dart' as btc;
import 'package:bitcoindart/src/utils/script.dart' as bscript;
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';
import 'package:stackwallet/models/isar/models/blockchain_data/address.dart';
@@ -57,15 +59,6 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
.findFirst();
}
Future<Uint8List> _getSpendKey() async {
final mnemonic = await getMnemonic();
final mnemonicPassphrase = await getMnemonicPassphrase();
// TODO call ffi lib to generate spend key
throw UnimplementedError();
}
Future<Address> generateNextSparkAddress() async {
final highestStoredDiversifier =
(await getCurrentReceivingSparkAddress())?.derivationIndex;
@@ -111,11 +104,48 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
Future<TxData> prepareSendSpark({
required TxData txData,
}) async {
// todo fetch
final List<Uint8List> serializedMintMetas = [];
final List<LibSparkCoin> myCoins = [];
final currentId = await electrumXClient.getSparkLatestCoinId();
final List<Map<String, dynamic>> setMaps = [];
// for (int i = 0; i <= currentId; i++) {
for (int i = currentId; i <= currentId; i++) {
final set = await electrumXCachedClient.getSparkAnonymitySet(
groupId: i.toString(),
coin: info.coin,
);
set["coinGroupID"] = i;
setMaps.add(set);
}
final allAnonymitySets = setMaps
.map((e) => (
setId: e["coinGroupID"] as int,
setHash: e["setHash"] as String,
set: (e["coins"] as List)
.map((e) => (
serializedCoin: e[0] as String,
txHash: e[1] as String,
))
.toList(),
))
.toList();
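// Resulting shape, for reference:
//   [
//     (setId: <int>, setHash: <String>, set: [
//       (serializedCoin: <String>, txHash: <String>),
//       ...
//     ]),
//   ]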
// https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit
// To generate a spark spend we need to call createSparkSpendTransaction,
// first unlock the wallet and generate all 3 spark keys,
final spendKey = await _getSpendKey();
const index = 1;
final root = await getRootHDNode();
final String derivationPath;
if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
derivationPath = "$kSparkBaseDerivationPathTestnet$index";
} else {
derivationPath = "$kSparkBaseDerivationPath$index";
}
final privateKey = root.derivePath(derivationPath).privateKey.data;
//
// recipients is a list of pairs of amounts and bools for the transparent
// outputs: first how much to send, and second the subtractFeeFromAmount
// argument
@@ -144,7 +174,227 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// outputScripts is the output data: a list of scripts which we need to put
// in separate tx outputs, keeping their order,
throw UnimplementedError();
// Amount vOut = Amount(
// rawValue: BigInt.zero, fractionDigits: cryptoCurrency.fractionDigits);
// Amount mintVOut = Amount(
// rawValue: BigInt.zero, fractionDigits: cryptoCurrency.fractionDigits);
// int recipientsToSubtractFee = 0;
//
// for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
// vOut += txData.recipients![i].amount;
// }
//
// if (vOut.raw > BigInt.from(SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION)) {
// throw Exception(
// "Spend to transparent address limit exceeded (10,000 Firo per transaction).",
// );
// }
//
// for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
// mintVOut += txData.sparkRecipients![i].amount;
// if (txData.sparkRecipients![i].subtractFeeFromAmount) {
// recipientsToSubtractFee++;
// }
// }
//
// int fee;
final txb = btc.TransactionBuilder(
network: btc.NetworkType(
messagePrefix: cryptoCurrency.networkParams.messagePrefix,
bech32: cryptoCurrency.networkParams.bech32Hrp,
bip32: btc.Bip32Type(
public: cryptoCurrency.networkParams.pubHDPrefix,
private: cryptoCurrency.networkParams.privHDPrefix,
),
pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
scriptHash: cryptoCurrency.networkParams.p2shPrefix,
wif: cryptoCurrency.networkParams.wifPrefix,
),
);
txb.setLockTime(await chainHeight);
txb.setVersion(3 | (9 << 16));
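// Assumption based on Firo's extended transaction format: the low 16 bits
// hold the tx version (3) and the upper bits hold the tx type (9 = Spark
// spend), hence 3 | (9 << 16).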
// final estimated = LibSpark.selectSparkCoins(
// requiredAmount: mintVOut.raw.toInt(),
// subtractFeeFromAmount: recipientsToSubtractFee > 0,
// coins: myCoins,
// privateRecipientsCount: txData.sparkRecipients?.length ?? 0,
// );
//
// fee = estimated.fee;
// bool remainderSubtracted = false;
// for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
//
//
// if (recipient.fSubtractFeeFromAmount) {
// // Subtract fee equally from each selected recipient.
// recipient.nAmount -= fee / recipientsToSubtractFee;
//
// if (!remainderSubtracted) {
// // First receiver pays the remainder not divisible by output count.
// recipient.nAmount -= fee % recipientsToSubtractFee;
// remainderSubtracted = true;
// }
// }
// }
// outputs
// for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
// if (txData.sparkRecipients![i].subtractFeeFromAmount) {
// BigInt amount = txData.sparkRecipients![i].amount.raw;
//
// // Subtract fee equally from each selected recipient.
// amount -= BigInt.from(fee / recipientsToSubtractFee);
//
// if (!remainderSubtracted) {
// // First receiver pays the remainder not divisible by output count.
// amount -= BigInt.from(fee % recipientsToSubtractFee);
// remainderSubtracted = true;
// }
//
// txData.sparkRecipients![i] = (
// address: txData.sparkRecipients![i].address,
// amount: Amount(
// rawValue: amount,
// fractionDigits: cryptoCurrency.fractionDigits,
// ),
// subtractFeeFromAmount:
// txData.sparkRecipients![i].subtractFeeFromAmount,
// memo: txData.sparkRecipients![i].memo,
// );
// }
// }
//
// int spendInCurrentTx = 0;
// for (final spendCoin in estimated.coins) {
// spendInCurrentTx += spendCoin.value?.toInt() ?? 0;
// }
// spendInCurrentTx -= fee;
//
// int transparentOut = 0;
for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
if (txData.recipients![i].amount.raw == BigInt.zero) {
continue;
}
if (txData.recipients![i].amount < cryptoCurrency.dustLimit) {
throw Exception("Output below dust limit");
}
//
// transparentOut += txData.recipients![i].amount.raw.toInt();
txb.addOutput(
txData.recipients![i].address,
txData.recipients![i].amount.raw.toInt(),
);
}
// // spendInCurrentTx -= transparentOut;
// final List<({String address, int amount, String memo})> privOutputs = [];
//
// for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
// if (txData.sparkRecipients![i].amount.raw == BigInt.zero) {
// continue;
// }
//
// final recipientAmount = txData.sparkRecipients![i].amount.raw.toInt();
// // spendInCurrentTx -= recipientAmount;
//
// privOutputs.add(
// (
// address: txData.sparkRecipients![i].address,
// amount: recipientAmount,
// memo: txData.sparkRecipients![i].memo,
// ),
// );
// }
// if (spendInCurrentTx < 0) {
// throw Exception("Unable to create spend transaction.");
// }
//
// if (privOutputs.isEmpty || spendInCurrentTx > 0) {
// final changeAddress = await LibSpark.getAddress(
// privateKey: privateKey,
// index: index,
// diversifier: kSparkChange,
// );
//
// privOutputs.add(
// (
// address: changeAddress,
// amount: spendInCurrentTx > 0 ? spendInCurrentTx : 0,
// memo: "",
// ),
// );
// }
// inputs
final opReturnScript = bscript.compile([
0xd3, // OP_SPARKSPEND
Uint8List(0),
]);
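// A Spark spend consumes no transparent prevout: the single input below
// uses a null outpoint (all-zero hash, 0xffffffff index) and carries only
// the OP_SPARKSPEND marker script compiled above.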
txb.addInput(
'0000000000000000000000000000000000000000000000000000000000000000',
0xffffffff,
0xffffffff,
opReturnScript,
);
// final sig = extractedTx.getId();
// for (final coin in estimated.coins) {
// final groupId = coin.id!;
// }
final spend = LibSpark.createSparkSendTransaction(
privateKeyHex: privateKey.toHex,
index: index,
recipients: [],
privateRecipients: txData.sparkRecipients
?.map((e) => (
sparkAddress: e.address,
amount: e.amount.raw.toInt(),
subtractFeeFromAmount: e.subtractFeeFromAmount,
memo: e.memo,
))
.toList() ??
[],
serializedMintMetas: serializedMintMetas,
allAnonymitySets: allAnonymitySets,
);
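// Note: transparent `recipients` are passed as an empty list above; only
// the private (spark) recipients from txData are forwarded at this stage.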
print("SPARK SPEND ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
print("fee: ${spend.fee}");
print("spend: ${spend.serializedSpendPayload}");
print("scripts:");
spend.outputScripts.forEach(print);
print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
for (final outputScript in spend.outputScripts) {
txb.addOutput(outputScript, 0);
}
final extractedTx = txb.buildIncomplete();
// TODO: verify encoding
extractedTx.setPayload(spend.serializedSpendPayload.toUint8ListFromUtf8);
final rawTxHex = extractedTx.toHex();
return txData.copyWith(
raw: rawTxHex,
vSize: extractedTx.virtualSize(),
fee: Amount(
rawValue: BigInt.from(spend.fee),
fractionDigits: cryptoCurrency.fractionDigits,
),
// TODO used coins
);
}
// this may not be needed for either mints or spends or both
@@ -175,20 +425,16 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
// TODO improve performance by adding these calls to the cached client
final futureResults = await Future.wait([
electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
),
electrumXClient.getSparkUsedCoinsTags(
startNumber: 0,
electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
),
electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin),
]);
final anonymitySet = futureResults[0];
final spentCoinTags = List<String>.from(
futureResults[1]["tags"] as List,
).toSet();
final anonymitySet = futureResults[0] as Map<String, dynamic>;
final spentCoinTags = futureResults[1] as Set<String>;
// find our coins
final List<SparkCoin> myCoins = [];
@@ -247,6 +493,8 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
}
}
print("FOUND COINS: $myCoins");
// update wallet spark coins in isar
if (myCoins.isNotEmpty) {
await mainDB.isar.writeTxn(() async {
@@ -256,6 +504,20 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
// refresh spark balance?
await prepareSendSpark(
txData: TxData(
sparkRecipients: [
(
address: (await getCurrentReceivingSparkAddress())!.value,
amount: Amount(
rawValue: BigInt.from(100000000),
fractionDigits: cryptoCurrency.fractionDigits),
subtractFeeFromAmount: true,
memo: "LOL MEMO OPK",
),
],
));
throw UnimplementedError();
} catch (e, s) {
// todo logging
@@ -278,9 +540,9 @@ mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();
// TODO improve performance by adding this call to the cached client
final anonymitySet = await electrumXClient.getSparkAnonymitySet(
coinGroupId: latestSparkCoinId.toString(),
final anonymitySet = await electrumXCachedClient.getSparkAnonymitySet(
groupId: latestSparkCoinId.toString(),
coin: info.coin,
);
// TODO loop over set and see which coins are ours using the FFI call `identifyCoin`