import 'dart:convert';
import 'dart:typed_data';

import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';
import 'package:stackwallet/models/isar/models/blockchain_data/address.dart';
import 'package:stackwallet/utilities/amount/amount.dart';
import 'package:stackwallet/utilities/extensions/extensions.dart';
import 'package:stackwallet/wallets/crypto_currency/crypto_currency.dart';
import 'package:stackwallet/wallets/isar/models/spark_coin.dart';
import 'package:stackwallet/wallets/models/tx_data.dart';
import 'package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart';
import 'package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart';

/// Spark (Firo) specific wallet behavior layered onto a BIP39 HD wallet
/// backed by ElectrumX.
///
/// NOTE(review): several methods are still work-in-progress stubs that
/// throw [UnimplementedError].
mixin SparkInterface on Bip39HDWallet, ElectrumXInterface {
  @override
  Future<void> init() async {
    // Make sure the wallet has at least one spark receiving address before
    // normal initialization continues.
    Address? address = await getCurrentReceivingSparkAddress();
    if (address == null) {
      address = await generateNextSparkAddress();
      await mainDB.putAddress(address);
    } // TODO add other address types to wallet info?
    // await info.updateReceivingAddress(
    //   newAddress: address.value,
    //   isar: mainDB.isar,
    // );

    await super.init();
  }

  /// Returns all wallet addresses that should be scanned via ElectrumX,
  /// i.e. everything EXCEPT spark and non-wallet addresses.
  @override
  Future<List<Address>> fetchAddressesForElectrumXScan() async {
    final allAddresses = await mainDB
        .getAddresses(walletId)
        .filter()
        .not()
        .group(
          (q) => q
              .typeEqualTo(AddressType.spark)
              .or()
              .typeEqualTo(AddressType.nonWallet)
              .or()
              .subTypeEqualTo(AddressSubType.nonWallet),
        )
        .findAll();
    return allAddresses;
  }

  /// Returns the stored spark address with the highest derivation index for
  /// this wallet, or null if no spark address exists yet.
  Future<Address?> getCurrentReceivingSparkAddress() async {
    return await mainDB.isar.addresses
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .typeEqualTo(AddressType.spark)
        .sortByDerivationIndexDesc()
        .findFirst();
  }

  /// Derives the spark spend key from this wallet's mnemonic.
  ///
  /// NOTE(review): not implemented yet — the FFI call to generate the spend
  /// key is still a TODO.
  Future _getSpendKey() async {
    final mnemonic = await getMnemonic();
    final mnemonicPassphrase = await getMnemonicPassphrase();

    // TODO call ffi lib to generate spend key
    throw UnimplementedError();
  }

  /// Derives and returns the next spark receiving [Address].
  ///
  /// The diversifier is one greater than the highest stored derivation
  /// index (starting at 1 when none exist). The returned address is NOT
  /// persisted here; callers must save it (see [init]).
  Future<Address> generateNextSparkAddress() async {
    final highestStoredDiversifier =
        (await getCurrentReceivingSparkAddress())?.derivationIndex;

    // default to starting at 1 if none found
    final int diversifier = (highestStoredDiversifier ?? 0) + 1;

    // TODO: check that this stays constant and only the diversifier changes?
    const index = 1;

    final root = await getRootHDNode();

    final String derivationPath;
    if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
      derivationPath = "$kSparkBaseDerivationPathTestnet$index";
    } else {
      derivationPath = "$kSparkBaseDerivationPath$index";
    }
    final keys = root.derivePath(derivationPath);

    final String addressString = await LibSpark.getAddress(
      privateKey: keys.privateKey.data,
      index: index,
      diversifier: diversifier,
      isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
    );

    return Address(
      walletId: walletId,
      value: addressString,
      publicKey: keys.publicKey.data,
      derivationIndex: diversifier,
      derivationPath: DerivationPath()..value = derivationPath,
      type: AddressType.spark,
      subType: AddressSubType.receiving,
    );
  }

  /// Estimates the fee for a spark send of [amount].
  ///
  /// NOTE(review): not implemented yet.
  Future estimateFeeForSpark(Amount amount) async {
    throw UnimplementedError();
  }

  /// Spark to Spark/Transparent (spend) creation
  ///
  /// NOTE(review): not implemented yet — currently only derives the spend
  /// key before throwing. The comments below describe the intended
  /// createSparkSpendTransaction flow.
  Future<TxData> prepareSendSpark({
    required TxData txData,
  }) async {
    // https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit
    //
    // To generate a spark spend we need to call createSparkSpendTransaction,
    // first unlock the wallet and generate all 3 spark keys,
    final spendKey = await _getSpendKey();
    //
    // recipients is a list of pairs of amounts and bools, this is for
    // transparent outputs, first how much to send and second,
    // subtractFeeFromAmount argument for each receiver.
    //
    // privateRecipients is again the list of pairs, first the receiver data
    // which has following members, Address which is any spark address,
    // amount (v) how much we want to send, and memo which can be any string
    // with 32 length (any string we want to send to receiver), and the second
    // subtractFeeFromAmount,
    //
    // coins is the list of all our available spark coins
    //
    // cover_set_data_all is the list of all anonymity sets,
    //
    // idAndBlockHashes_all is the list of block hashes for each anonymity set
    //
    // txHashSig is the transaction hash only without spark data, tx version,
    // type, transparent outputs and everything else should be set before
    // generating it.
    //
    // fee is a output data
    //
    // serializedSpend is a output data, byte array with spark spend, we need
    // to put it into vExtraPayload (this naming can be different in your
    // codebase)
    //
    // outputScripts is a output data, it is a list of scripts, which we need
    // to put in separate tx outputs, and keep the order,

    throw UnimplementedError();
  }

  // this may not be needed for either mints or spends or both
  Future<TxData> confirmSendSpark({
    required TxData txData,
  }) async {
    throw UnimplementedError();
  }

  // TODO lots of room for performance improvements here. Should be similar to
  // recoverSparkWallet but only fetch and check anonymity set data that we
  // have not yet parsed.
Future refreshSparkData() async { final sparkAddresses = await mainDB.isar.addresses .where() .walletIdEqualTo(walletId) .filter() .typeEqualTo(AddressType.spark) .findAll(); final Set paths = sparkAddresses.map((e) => e.derivationPath!.value).toSet(); try { const index = 1; final root = await getRootHDNode(); final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId(); // TODO improve performance by adding these calls to the cached client final futureResults = await Future.wait([ electrumXClient.getSparkAnonymitySet( coinGroupId: latestSparkCoinId.toString(), ), electrumXClient.getSparkUsedCoinsTags( startNumber: 0, ), ]); final anonymitySet = futureResults[0]; final spentCoinTags = List.from( futureResults[1]["tags"] as List, ).toSet(); // find our coins final List myCoins = []; for (final path in paths) { final keys = root.derivePath(path); final privateKeyHex = keys.privateKey.data.toHex; for (final dynData in anonymitySet["coins"] as List) { final data = List.from(dynData as List); if (data.length != 2) { throw Exception("Unexpected serialized coin info found"); } final serializedCoin = data.first; final txHash = base64ToReverseHex(data.last); final coin = LibSpark.identifyAndRecoverCoin( serializedCoin, privateKeyHex: privateKeyHex, index: index, isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test, ); // its ours if (coin != null) { final SparkCoinType coinType; switch (coin.type.value) { case 0: coinType = SparkCoinType.mint; case 1: coinType = SparkCoinType.spend; default: throw Exception("Unknown spark coin type detected"); } myCoins.add( SparkCoin( walletId: walletId, type: coinType, isUsed: spentCoinTags.contains(coin.lTagHash!), address: coin.address!, txHash: txHash, valueIntString: coin.value!.toString(), lTagHash: coin.lTagHash!, tag: coin.tag, memo: coin.memo, serial: coin.serial, serialContext: coin.serialContext, diversifierIntString: coin.diversifier!.toString(), encryptedDiversifier: coin.encryptedDiversifier, ), ); } } } 
// update wallet spark coins in isar if (myCoins.isNotEmpty) { await mainDB.isar.writeTxn(() async { await mainDB.isar.sparkCoins.putAll(myCoins); }); } // refresh spark balance? throw UnimplementedError(); } catch (e, s) { // todo logging rethrow; } } /// Should only be called within the standard wallet [recover] function due to /// mutex locking. Otherwise behaviour MAY be undefined. Future recoverSparkWallet( // { // required int latestSetId, // required Map setDataMap, // required Set usedSerialNumbers, // } ) async { try { // do we need to generate any spark address(es) here? final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId(); // TODO improve performance by adding this call to the cached client final anonymitySet = await electrumXClient.getSparkAnonymitySet( coinGroupId: latestSparkCoinId.toString(), ); // TODO loop over set and see which coins are ours using the FFI call `identifyCoin` List myCoins = []; // fetch metadata for myCoins // create list of Spark Coin isar objects // update wallet spark coins in isar throw UnimplementedError(); } catch (e, s) { // todo logging rethrow; } } /// Transparent to Spark (mint) transaction creation Future prepareSparkMintTransaction({required TxData txData}) async { // https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o/edit // this kind of transaction is generated like a regular transaction, but in // place of regulart outputs we put spark outputs, so for that we call // createSparkMintRecipients function, we get spark related data, // everything else we do like for regular transaction, and we put CRecipient // object as a tx outputs, we need to keep the order.. 
// First we pass spark::MintedCoinData>, has following members, Address // which is any spark address, amount (v) how much we want to send, and // memo which can be any string with 32 length (any string we want to send // to receiver), serial_context is a byte array, which should be unique for // each transaction, and for that we serialize and put all inputs into // serial_context vector. So we construct the input part of the transaction // first then we generate spark related data. And we sign like regular // transactions at the end. throw UnimplementedError(); } @override Future updateBalance() async { // call to super to update transparent balance (and lelantus balance if // what ever class this mixin is used on uses LelantusInterface as well) final normalBalanceFuture = super.updateBalance(); // todo: spark balance aka update info.tertiaryBalance // wait for normalBalanceFuture to complete before returning await normalBalanceFuture; } } String base64ToReverseHex(String source) => base64Decode(LineSplitter.split(source).join()) .reversed .map((e) => e.toRadixString(16).padLeft(2, '0')) .join();