2023-12-05 20:44:50 +00:00
import ' dart:convert ' ;
2023-11-27 20:57:33 +00:00
2023-12-13 17:26:30 +00:00
import ' package:bitcoindart/bitcoindart.dart ' as btc ;
2023-12-19 18:06:05 +00:00
import ' package:flutter/foundation.dart ' ;
2023-11-28 16:13:10 +00:00
import ' package:flutter_libsparkmobile/flutter_libsparkmobile.dart ' ;
2023-11-27 20:57:33 +00:00
import ' package:isar/isar.dart ' ;
2023-12-15 14:47:46 +00:00
import ' package:stackwallet/models/balance.dart ' ;
2023-11-27 20:57:33 +00:00
import ' package:stackwallet/models/isar/models/blockchain_data/address.dart ' ;
import ' package:stackwallet/utilities/amount/amount.dart ' ;
2023-12-05 22:55:38 +00:00
import ' package:stackwallet/utilities/extensions/extensions.dart ' ;
2023-12-16 20:28:04 +00:00
import ' package:stackwallet/utilities/logger.dart ' ;
2023-11-28 16:13:10 +00:00
import ' package:stackwallet/wallets/crypto_currency/crypto_currency.dart ' ;
2023-12-04 15:35:59 +00:00
import ' package:stackwallet/wallets/isar/models/spark_coin.dart ' ;
2023-11-16 21:30:01 +00:00
import ' package:stackwallet/wallets/models/tx_data.dart ' ;
import ' package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart ' ;
2023-11-16 22:25:20 +00:00
import ' package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart ' ;
2023-11-16 21:30:01 +00:00
2023-12-18 20:05:22 +00:00
// Default Spark account/key index passed to every libspark call and used in
// all spark key derivation paths in this wallet.
const kDefaultSparkIndex = 1 ;
2023-11-16 22:25:20 +00:00
mixin SparkInterface on Bip39HDWallet , ElectrumXInterface {
2023-12-19 15:20:50 +00:00
/// Whether [address] is a structurally valid spark address for the given
/// network. Delegates to the native spark library.
static bool validateSparkAddress({
  required String address,
  required bool isTestNet,
}) {
  return LibSpark.validateAddress(address: address, isTestNet: isTestNet);
}
2023-11-29 15:53:30 +00:00
@override
Future<void> init() async {
  // Make sure at least one spark receiving address exists before handing
  // off to the base class initialization.
  final existing = await getCurrentReceivingSparkAddress();
  if (existing == null) {
    final generated = await generateNextSparkAddress();
    await mainDB.putAddress(generated);
  } // TODO add other address types to wallet info?

  // await info.updateReceivingAddress(
  //   newAddress: address.value,
  //   isar: mainDB.isar,
  // );

  await super.init();
}
@override
Future<List<Address>> fetchAddressesForElectrumXScan() async {
  // Electrumx cannot scan spark addresses, and non-wallet addresses are
  // not ours to scan, so exclude both categories.
  final scannable = await mainDB
      .getAddresses(walletId)
      .filter()
      .not()
      .group(
        (q) => q
            .typeEqualTo(AddressType.spark)
            .or()
            .typeEqualTo(AddressType.nonWallet)
            .or()
            .subTypeEqualTo(AddressSubType.nonWallet),
      )
      .findAll();

  return scannable;
}
2023-11-27 20:57:33 +00:00
/// Returns the wallet's spark address with the highest derivation index,
/// or null if no spark address has been generated yet.
Future<Address?> getCurrentReceivingSparkAddress() async {
  return mainDB.isar.addresses
      .where()
      .walletIdEqualTo(walletId)
      .filter()
      .typeEqualTo(AddressType.spark)
      .sortByDerivationIndexDesc()
      .findFirst();
}
2023-11-29 15:53:30 +00:00
/// Derives the next spark receiving address for this wallet.
///
/// Uses a diversifier one greater than the highest stored one; the first
/// address generated uses diversifier 1.
Future<Address> generateNextSparkAddress() async {
  final current = await getCurrentReceivingSparkAddress();
  // Default to starting at 1 if no spark address is stored yet.
  final int diversifier = (current?.derivationIndex ?? 0) + 1;

  final root = await getRootHDNode();

  final String derivationPath =
      cryptoCurrency.network == CryptoCurrencyNetwork.test
          ? "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex"
          : "$kSparkBaseDerivationPath$kDefaultSparkIndex";

  final keys = root.derivePath(derivationPath);

  final String addressString = await LibSpark.getAddress(
    privateKey: keys.privateKey.data,
    index: kDefaultSparkIndex,
    diversifier: diversifier,
    isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
  );

  return Address(
    walletId: walletId,
    value: addressString,
    publicKey: keys.publicKey.data,
    derivationIndex: diversifier,
    derivationPath: DerivationPath()..value = derivationPath,
    type: AddressType.spark,
    subType: AddressSubType.receiving,
  );
}
/// Estimates the fee for a spark spend of [amount].
///
/// TODO actual fee estimation — currently always reports a zero fee.
Future<Amount> estimateFeeForSpark(Amount amount) async {
  // int spendAmount = amount.raw.toInt();
  // if (spendAmount == 0) {
  return Amount(
    rawValue: BigInt.zero,
    fractionDigits: cryptoCurrency.fractionDigits,
  );
  // }
}
2023-11-27 21:18:20 +00:00
/// Spark to Spark/Transparent (spend) creation.
///
/// Selects all spendable spark coins, builds the transparent skeleton of the
/// transaction with bitcoindart, then has the native spark library produce
/// the spend payload and output scripts. Does NOT broadcast.
///
/// Throws if the requested amount exceeds the cached spendable spark
/// balance.
Future<TxData> prepareSendSpark({
  required TxData txData,
}) async {
  // fetch spendable spark coins
  final coins = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .and()
      .heightIsNotNull()
      .and()
      .not()
      .valueIntStringEqualTo("0")
      .findAll();

  final available = info.cachedBalanceTertiary.spendable;

  // Total requested amount across transparent and spark recipients.
  final txAmount = (txData.recipients ?? []).map((e) => e.amount).fold(
          Amount(
            rawValue: BigInt.zero,
            fractionDigits: cryptoCurrency.fractionDigits,
          ),
          (p, e) => p + e) +
      (txData.sparkRecipients ?? []).map((e) => e.amount).fold(
          Amount(
            rawValue: BigInt.zero,
            fractionDigits: cryptoCurrency.fractionDigits,
          ),
          (p, e) => p + e);

  if (txAmount > available) {
    throw Exception("Insufficient Spark balance");
  }

  final bool isSendAll = available == txAmount;

  // prepare coin data for ffi
  final serializedCoins = coins
      .map((e) => (
            serializedCoin: e.serializedCoinB64!,
            serializedCoinContext: e.contextB64!,
            groupId: e.groupId,
            height: e.height!,
          ))
      .toList();

  final currentId = await electrumXClient.getSparkLatestCoinId();
  final List<Map<String, dynamic>> setMaps = [];
  final List<({int groupId, String blockHash})> idAndBlockHashes = [];

  // Anonymity set group ids start at 1.
  for (int i = 1; i <= currentId; i++) {
    final set = await electrumXCachedClient.getSparkAnonymitySet(
      groupId: i.toString(),
      coin: info.coin,
    );
    set["coinGroupID"] = i;
    setMaps.add(set);
    idAndBlockHashes.add(
      (
        groupId: i,
        blockHash: set["blockHash"] as String,
      ),
    );
  }

  final allAnonymitySets = setMaps
      .map((e) => (
            setId: e["coinGroupID"] as int,
            setHash: e["setHash"] as String,
            set: (e["coins"] as List)
                .map((e) => (
                      serializedCoin: e[0] as String,
                      txHash: e[1] as String,
                    ))
                .toList(),
          ))
      .toList();

  final root = await getRootHDNode();
  final String derivationPath;
  if (cryptoCurrency.network == CryptoCurrencyNetwork.test) {
    derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
  } else {
    derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
  }
  final privateKey = root.derivePath(derivationPath).privateKey.data;

  final btcDartNetwork = btc.NetworkType(
    messagePrefix: cryptoCurrency.networkParams.messagePrefix,
    bech32: cryptoCurrency.networkParams.bech32Hrp,
    bip32: btc.Bip32Type(
      public: cryptoCurrency.networkParams.pubHDPrefix,
      private: cryptoCurrency.networkParams.privHDPrefix,
    ),
    pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
    scriptHash: cryptoCurrency.networkParams.p2shPrefix,
    wif: cryptoCurrency.networkParams.wifPrefix,
  );

  final txb = btc.TransactionBuilder(
    network: btcDartNetwork,
  );
  txb.setLockTime(await chainHeight);
  // Version 3 with the spark tx type (9) in the upper 16 bits.
  txb.setVersion(3 | (9 << 16));

  List<({String address, Amount amount})>? recipientsWithFeeSubtracted;
  List<({String address, Amount amount, String memo})>?
      sparkRecipientsWithFeeSubtracted;

  // Only recipients with a non-zero amount count.
  final recipientCount = (txData.recipients
          ?.where(
            (e) => e.amount.raw > BigInt.zero,
          )
          .length ??
      0);
  final totalRecipientCount =
      recipientCount + (txData.sparkRecipients?.length ?? 0);

  // The fee is only estimated (and later subtracted from the recipient
  // amounts) when sending the full spendable balance.
  final BigInt estimatedFee;
  if (isSendAll) {
    final estFee = LibSpark.estimateSparkFee(
      privateKeyHex: privateKey.toHex,
      index: kDefaultSparkIndex,
      sendAmount: txAmount.raw.toInt(),
      subtractFeeFromAmount: true,
      serializedCoins: serializedCoins,
      privateRecipientsCount: (txData.sparkRecipients?.length ?? 0),
    );
    estimatedFee = BigInt.from(estFee);
  } else {
    estimatedFee = BigInt.zero;
  }

  if ((txData.sparkRecipients?.length ?? 0) > 0) {
    sparkRecipientsWithFeeSubtracted = [];
  }
  if (recipientCount > 0) {
    recipientsWithFeeSubtracted = [];
  }

  // Subtract an equal share of the (possibly zero) fee from each spark
  // recipient.
  for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
    sparkRecipientsWithFeeSubtracted!.add(
      (
        address: txData.sparkRecipients![i].address,
        amount: Amount(
          rawValue: txData.sparkRecipients![i].amount.raw -
              (estimatedFee ~/ BigInt.from(totalRecipientCount)),
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
        memo: txData.sparkRecipients![i].memo,
      ),
    );
  }

  // Subtract an equal fee share from each transparent recipient and add the
  // corresponding transparent output.
  for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
    if (txData.recipients![i].amount.raw == BigInt.zero) {
      continue;
    }

    recipientsWithFeeSubtracted!.add(
      (
        address: txData.recipients![i].address,
        amount: Amount(
          rawValue: txData.recipients![i].amount.raw -
              (estimatedFee ~/ BigInt.from(totalRecipientCount)),
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
      ),
    );

    final scriptPubKey = btc.Address.addressToOutputScript(
      txData.recipients![i].address,
      btcDartNetwork,
    );
    txb.addOutput(
      scriptPubKey,
      // BUGFIX: use the entry just appended instead of indexing with [i].
      // The loop index runs over txData.recipients (which may contain
      // skipped zero-amount entries) while this list only holds non-zero
      // entries, so [i] could reference the wrong entry or throw a
      // RangeError once any recipient has been skipped.
      recipientsWithFeeSubtracted!.last.amount.raw.toInt(),
    );
  }

  final extractedTx = txb.buildIncomplete();
  // Spark spends consume a single dummy input spending the null outpoint
  // with an OP_SPARKSPEND script.
  extractedTx.addInput(
    '0000000000000000000000000000000000000000000000000000000000000000'
        .toUint8ListFromHex,
    0xffffffff,
    0xffffffff,
    "d3".toUint8ListFromHex, // OP_SPARKSPEND
  );
  extractedTx.setPayload(Uint8List(0));

  final spend = LibSpark.createSparkSendTransaction(
    privateKeyHex: privateKey.toHex,
    index: kDefaultSparkIndex,
    recipients: txData.recipients
            ?.map((e) => (
                  address: e.address,
                  amount: e.amount.raw.toInt(),
                  subtractFeeFromAmount: isSendAll,
                ))
            .toList() ??
        [],
    privateRecipients: txData.sparkRecipients
            ?.map((e) => (
                  sparkAddress: e.address,
                  amount: e.amount.raw.toInt(),
                  subtractFeeFromAmount: isSendAll,
                  memo: e.memo,
                ))
            .toList() ??
        [],
    serializedCoins: serializedCoins,
    allAnonymitySets: allAnonymitySets,
    idAndBlockHashes: idAndBlockHashes
        .map((e) => (setId: e.groupId, blockHash: base64Decode(e.blockHash)))
        .toList(),
    txHash: extractedTx.getHash(),
  );

  for (final outputScript in spend.outputScripts) {
    extractedTx.addOutput(outputScript, 0);
  }

  extractedTx.setPayload(spend.serializedSpendPayload);

  final rawTxHex = extractedTx.toHex();

  if (isSendAll) {
    txData = txData.copyWith(
      recipients: recipientsWithFeeSubtracted,
      sparkRecipients: sparkRecipientsWithFeeSubtracted,
    );
  }

  return txData.copyWith(
    raw: rawTxHex,
    vSize: extractedTx.virtualSize(),
    fee: Amount(
      rawValue: BigInt.from(spend.fee),
      fractionDigits: cryptoCurrency.fractionDigits,
    ),
    // TODO used coins
  );
}
2023-11-27 20:57:33 +00:00
2023-11-27 21:18:20 +00:00
// this may not be needed for either mints or spends or both
/// Broadcasts a prepared spark spend transaction via electrumx and returns
/// [txData] updated with the resulting transaction hash.
Future<TxData> confirmSendSpark({
  required TxData txData,
}) async {
  try {
    Logging.instance.log("confirmSend txData: $txData", level: LogLevel.Info);

    final hash = await electrumXClient.broadcastTransaction(
      rawTx: txData.raw!,
    );
    Logging.instance.log("Sent txHash: $hash", level: LogLevel.Info);

    // TODO mark spark coins as spent locally and update balance before waiting to check via electrumx?
    // usedUTXOs:
    //     txData.usedUTXOs!.map((e) => e.copyWith(used: true)).toList(),

    // // mark utxos as used
    // await mainDB.putUTXOs(txData.usedUTXOs!);

    // TODO revisit setting these both
    return txData.copyWith(
      txHash: hash,
      txid: hash,
    );
  } catch (e, s) {
    Logging.instance.log(
      "Exception rethrown from confirmSend(): $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
// TODO lots of room for performance improvements here. Should be similar to
// recoverSparkWallet but only fetch and check anonymity set data that we
// have not yet parsed.
/// Fetches the latest spark anonymity set data and spent coin tags,
/// identifies coins belonging to this wallet, marks locally known coins
/// spent where applicable, and refreshes the spark (tertiary) balance.
Future<void> refreshSparkData() async {
  final sparkAddresses = await mainDB.isar.addresses
      .where()
      .walletIdEqualTo(walletId)
      .filter()
      .typeEqualTo(AddressType.spark)
      .findAll();

  final Set<String> paths =
      sparkAddresses.map((e) => e.derivationPath!.value).toSet();

  try {
    final latestSparkCoinId = await electrumXClient.getSparkLatestCoinId();

    final blockHash = await _getCachedSparkBlockHash();

    // With no cached block hash fetch the full (cached) anonymity set,
    // otherwise only request data since the cached block.
    final anonymitySetFuture = blockHash == null
        ? electrumXCachedClient.getSparkAnonymitySet(
            groupId: latestSparkCoinId.toString(),
            coin: info.coin,
          )
        : electrumXClient.getSparkAnonymitySet(
            coinGroupId: latestSparkCoinId.toString(),
            startBlockHash: blockHash,
          );

    final spentCoinTagsFuture =
        electrumXClient.getSparkUsedCoinsTags(startNumber: 0);
    // electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);

    // Fetch both concurrently.
    final futureResults = await Future.wait([
      anonymitySetFuture,
      spentCoinTagsFuture,
    ]);

    final anonymitySet = futureResults[0] as Map<String, dynamic>;
    final spentCoinTags = futureResults[1] as Set<String>;

    final List<SparkCoin> myCoins = [];

    if (anonymitySet["coins"] is List &&
        (anonymitySet["coins"] as List).isNotEmpty) {
      final root = await getRootHDNode();
      final privateKeyHexSet = paths
          .map(
            (e) => root.derivePath(e).privateKey.data.toHex,
          )
          .toSet();

      // Coin identification is cpu intensive, so run it off the main
      // isolate.
      final identifiedCoins = await compute(
        _identifyCoins,
        (
          anonymitySetCoins: anonymitySet["coins"] as List,
          groupId: latestSparkCoinId,
          spentCoinTags: spentCoinTags,
          privateKeyHexSet: privateKeyHexSet,
          walletId: walletId,
          isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
        ),
      );

      myCoins.addAll(identifiedCoins);

      // update blockHash in cache
      final String newBlockHash =
          base64ToReverseHex(anonymitySet["blockHash"] as String);
      await _setCachedSparkBlockHash(newBlockHash);
    }

    // Mark any locally stored unused coins whose tag now appears in the
    // spent set.
    final currentCoins = await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .filter()
        .isUsedEqualTo(false)
        .findAll();
    for (final coin in currentCoins) {
      if (spentCoinTags.contains(coin.lTagHash)) {
        myCoins.add(coin.copyWith(isUsed: true));
      }
    }

    // update wallet spark coins in isar
    await _addOrUpdateSparkCoins(myCoins);

    // refresh spark balance
    await refreshSparkBalance();
  } catch (e, s) {
    // FIX: log before rethrowing instead of silently discarding e/s
    // (previously just "// todo logging").
    Logging.instance.log(
      "Exception rethrown from refreshSparkData(): $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
2023-12-18 20:05:22 +00:00
/// Recomputes the wallet's spark (tertiary) balance from the unused spark
/// coins in the database and persists it.
///
/// A coin counts as spendable once its height is known and it has at least
/// the minimum number of confirmations; everything else is pending.
Future<void> refreshSparkBalance() async {
  final currentHeight = await chainHeight;

  final unusedCoins = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .isUsedEqualTo(false)
      .findAll();

  BigInt totalRaw = BigInt.zero;
  BigInt spendableRaw = BigInt.zero;
  for (final coin in unusedCoins) {
    totalRaw += coin.value;
    if (coin.height != null &&
        coin.height! + cryptoCurrency.minConfirms <= currentHeight) {
      spendableRaw += coin.value;
    }
  }

  final total = Amount(
    rawValue: totalRaw,
    fractionDigits: cryptoCurrency.fractionDigits,
  );
  final spendable = Amount(
    rawValue: spendableRaw,
    fractionDigits: cryptoCurrency.fractionDigits,
  );

  final sparkBalance = Balance(
    total: total,
    spendable: spendable,
    blockedTotal: Amount(
      rawValue: BigInt.zero,
      fractionDigits: cryptoCurrency.fractionDigits,
    ),
    pendingSpendable: total - spendable,
  );

  await info.updateBalanceTertiary(
    newBalance: sparkBalance,
    isar: mainDB.isar,
  );
}
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
///
/// Identifies this wallet's coins in the provided [anonymitySet], persists
/// them, caches the set's block hash, and refreshes the spark balance.
Future<void> recoverSparkWallet({
  required Map<dynamic, dynamic> anonymitySet,
  required Set<String> spentCoinTags,
}) async {
  // generate spark addresses if non existing
  if (await getCurrentReceivingSparkAddress() == null) {
    final address = await generateNextSparkAddress();
    await mainDB.putAddress(address);
  }

  final sparkAddresses = await mainDB.isar.addresses
      .where()
      .walletIdEqualTo(walletId)
      .filter()
      .typeEqualTo(AddressType.spark)
      .findAll();

  final Set<String> paths =
      sparkAddresses.map((e) => e.derivationPath!.value).toSet();

  try {
    final root = await getRootHDNode();
    final privateKeyHexSet =
        paths.map((e) => root.derivePath(e).privateKey.data.toHex).toSet();

    // Coin identification is cpu intensive, so run it off the main isolate.
    final myCoins = await compute(
      _identifyCoins,
      (
        anonymitySetCoins: anonymitySet["coins"] as List,
        groupId: anonymitySet["coinGroupID"] as int,
        spentCoinTags: spentCoinTags,
        privateKeyHexSet: privateKeyHexSet,
        walletId: walletId,
        isTestNet: cryptoCurrency.network == CryptoCurrencyNetwork.test,
      ),
    );

    // update wallet spark coins in isar
    await _addOrUpdateSparkCoins(myCoins);

    // update blockHash in cache
    // NOTE(review): refreshSparkData() caches base64ToReverseHex(blockHash)
    // while this caches the raw value — confirm which format the electrumx
    // startBlockHash parameter expects.
    final String newBlockHash = anonymitySet["blockHash"] as String;
    await _setCachedSparkBlockHash(newBlockHash);

    // refresh spark balance
    await refreshSparkBalance();
  } catch (e, s) {
    // FIX: log before rethrowing instead of silently discarding e/s
    // (previously just "// todo logging").
    Logging.instance.log(
      "Exception rethrown from recoverSparkWallet(): $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }
}
2023-12-07 21:58:23 +00:00
/// Transparent to Spark (mint) transaction creation.
///
/// See https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o
Future<TxData> prepareSparkMintTransaction({required TxData txData}) async {
  // "this kind of transaction is generated like a regular transaction, but in
  // place of [regular] outputs we put spark outputs... we construct the input
  // part of the transaction first then we generate spark related data [and]
  // we sign like regular transactions at the end."

  // Validate inputs.

  // There should be at least one input.
  final utxos = txData.utxos;
  if (utxos == null || utxos.isEmpty) {
    throw Exception("No inputs provided.");
  }

  // Validate individual inputs.
  for (final utxo in utxos) {
    // Input amount must be greater than zero.
    if (utxo.value == 0) {
      throw Exception("Input value cannot be zero.");
    }

    // Input value must be greater than dust limit.
    if (BigInt.from(utxo.value) < cryptoCurrency.dustLimit.raw) {
      throw Exception("Input value below dust limit.");
    }
  }

  // Validate outputs.

  // There should be at least one output.
  final recipients = txData.recipients;
  if (recipients == null || recipients.isEmpty) {
    throw Exception("No recipients provided.");
  }

  // For now let's limit to one output.
  if (recipients.length > 1) {
    throw Exception("Only one recipient supported.");
    // TODO remove and test with multiple recipients.
  }

  // Limit outputs per tx to 16.
  //
  // See SPARK_OUT_LIMIT_PER_TX at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L16
  if (recipients.length > 16) {
    throw Exception("Too many recipients.");
  }

  // Limit spend value per tx to 1000000000000 satoshis.
  //
  // See SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L17
  // and COIN https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L17
  // Note that as MAX_MONEY is greater than this limit, we can ignore it. See https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L31
  //
  // This will be added to and checked as we validate outputs.
  Amount totalAmount = Amount(
    rawValue: BigInt.zero,
    fractionDigits: cryptoCurrency.fractionDigits,
  );

  // Validate individual outputs.
  for (final recipient in recipients) {
    // Output amount must be greater than zero.
    if (recipient.amount.raw == BigInt.zero) {
      throw Exception("Output amount cannot be zero.");
      // Could refactor this for loop to use an index and remove this output.
    }

    // Output amount must be greater than dust limit.
    if (recipient.amount < cryptoCurrency.dustLimit) {
      throw Exception("Output below dust limit.");
    }

    // Do not add outputs that would exceed the spend limit.
    totalAmount += recipient.amount;
    if (totalAmount.raw > BigInt.from(1000000000000)) {
      throw Exception(
        "Spend limit exceeded (10,000 FIRO per tx).",
      );
    }
  }

  // Create a transaction builder and set locktime and version.
  final txb = btc.TransactionBuilder(
    network: btc.NetworkType(
      messagePrefix: cryptoCurrency.networkParams.messagePrefix,
      bech32: cryptoCurrency.networkParams.bech32Hrp,
      bip32: btc.Bip32Type(
        public: cryptoCurrency.networkParams.pubHDPrefix,
        private: cryptoCurrency.networkParams.privHDPrefix,
      ),
      pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
      scriptHash: cryptoCurrency.networkParams.p2shPrefix,
      wif: cryptoCurrency.networkParams.wifPrefix,
    ),
  );
  txb.setLockTime(await chainHeight);
  txb.setVersion(1);

  final signingData = await fetchBuildTxData(utxos.toList());

  // Create the serial context.
  //
  // "...serial_context is a byte array, which should be unique for each
  // transaction, and for that we serialize and put all inputs into
  // serial_context vector."
  final serialContext = LibSpark.serializeMintContext(
    inputs: signingData
        .map((e) => (
              e.utxo.txid,
              e.utxo.vout,
            ))
        .toList(),
  );

  // Add inputs.
  for (final sd in signingData) {
    txb.addInput(
      sd.utxo.txid,
      sd.utxo.vout,
      // minus 1 is important. 0xffffffff on its own will burn funds
      0xffffffff - 1,
      sd.output,
    );
  }

  // Create mint recipients.
  final mintRecipients = LibSpark.createSparkMintRecipients(
    outputs: recipients
        .map((e) => (
              sparkAddress: e.address,
              value: e.amount.raw.toInt(),
              memo: "",
            ))
        .toList(),
    serialContext: Uint8List.fromList(serialContext),
    generate: true,
  );

  // Add mint output(s).
  for (final mint in mintRecipients) {
    txb.addOutput(
      mint.scriptPubKey,
      mint.amount,
    );
  }

  try {
    // Sign the transaction accordingly
    for (var i = 0; i < signingData.length; i++) {
      txb.sign(
        vin: i,
        keyPair: signingData[i].keyPair!,
        witnessValue: signingData[i].utxo.value,
        redeemScript: signingData[i].redeemScript,
      );
    }
  } catch (e, s) {
    Logging.instance.log(
      "Caught exception while signing spark mint transaction: $e\n$s",
      level: LogLevel.Error,
    );
    rethrow;
  }

  final builtTx = txb.build();

  // TODO any changes to this txData object required?
  return txData.copyWith(
    vSize: builtTx.virtualSize(),
    txid: builtTx.getId(),
    raw: builtTx.toHex(),
  );
}
2023-12-07 21:05:27 +00:00
/// Broadcast a tx and TODO update Spark balance.
///
/// Returns [txData] updated with the txid reported by electrumx.
Future<TxData> confirmSparkMintTransaction({required TxData txData}) async {
  // Broadcast tx.
  final txid = await electrumXClient.broadcastTransaction(
    rawTx: txData.raw!,
  );

  // Check txid.
  // FIX: use the app's Logging facility (as the rest of this file does)
  // instead of bare print calls which spam stdout in release builds.
  if (txid == txData.txid!) {
    Logging.instance.log(
      "SPARK TXIDS MATCH!!",
      level: LogLevel.Info,
    );
  } else {
    Logging.instance.log(
      "SUBMITTED SPARK TXID DOES NOT MATCH WHAT WE GENERATED",
      level: LogLevel.Warning,
    );
  }

  // TODO update spark balance.

  return txData.copyWith(
    txid: txid,
  );
}
2023-11-27 20:57:33 +00:00
@override
Future<void> updateBalance() async {
  // Call to super to update transparent balance (and lelantus balance if
  // whatever class this mixin is used on uses LelantusInterface as well).
  final normalBalanceFuture = super.updateBalance();

  // todo: spark balance aka update info.tertiaryBalance

  // Wait for the base class update to complete before returning.
  await normalBalanceFuture;
}
2023-12-18 20:05:22 +00:00
// ====================== Private ============================================

// Key under which the last-processed spark anonymity set block hash is
// cached in info.otherData.
final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";

/// Returns the cached spark anonymity set block hash, or null if nothing
/// has been cached yet.
Future<String?> _getCachedSparkBlockHash() async {
  return info.otherData[_kSparkAnonSetCachedBlockHashKey] as String?;
}

/// Caches [blockHash] as the most recently processed spark anonymity set
/// block hash.
Future<void> _setCachedSparkBlockHash(String blockHash) async {
  await info.updateOtherData(
    newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
    isar: mainDB.isar,
  );
}
/// Persists [coins] to isar, then backfills the height of any stored spark
/// coins that do not yet have one by looking up their transactions.
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
  if (coins.isNotEmpty) {
    await mainDB.isar.writeTxn(() async {
      await mainDB.isar.sparkCoins.putAll(coins);
    });
  }

  // Update wallet spark coin heights.
  final coinsToCheck = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .heightIsNull()
      .findAll();

  final List<SparkCoin> updatedCoins = [];
  for (final coin in coinsToCheck) {
    final tx = await electrumXCachedClient.getTransaction(
      txHash: coin.txHash,
      coin: info.coin,
    );
    // Unconfirmed transactions have no (int) height yet; skip them.
    if (tx["height"] is int) {
      updatedCoins.add(coin.copyWith(height: tx["height"] as int));
    }
  }

  if (updatedCoins.isNotEmpty) {
    await mainDB.isar.writeTxn(() async {
      await mainDB.isar.sparkCoins.putAll(updatedCoins);
    });
  }
}
2023-11-16 21:30:01 +00:00
}
2023-12-05 20:44:50 +00:00
/// Decodes base64 [source] (tolerating embedded newlines), reverses the byte
/// order, and returns the result as a lowercase hex string.
String base64ToReverseHex(String source) {
  final bytes = base64Decode(LineSplitter.split(source).join());
  final buffer = StringBuffer();
  for (final byte in bytes.reversed) {
    buffer.write(byte.toRadixString(16).padLeft(2, '0'));
  }
  return buffer.toString();
}
2023-12-19 18:06:05 +00:00
/// Top level function which should be called wrapped in [compute]
///
/// Attempts to recover every coin in [args.anonymitySetCoins] with each of
/// the supplied private keys, returning the coins that belong to this
/// wallet as [SparkCoin]s ready for persistence.
Future<List<SparkCoin>> _identifyCoins(
    ({
      List<dynamic> anonymitySetCoins,
      int groupId,
      Set<String> spentCoinTags,
      Set<String> privateKeyHexSet,
      String walletId,
      bool isTestNet,
    }) args) async {
  final List<SparkCoin> myCoins = [];

  for (final privateKeyHex in args.privateKeyHexSet) {
    for (final dynData in args.anonymitySetCoins) {
      // Each raw entry is expected as [serializedCoin, txHash, context].
      final data = List<String>.from(dynData as List);
      if (data.length != 3) {
        throw Exception("Unexpected serialized coin info found");
      }

      final serializedCoinB64 = data[0];
      final txHash = base64ToReverseHex(data[1]);
      final contextB64 = data[2];

      // Returns null when the coin does not belong to this key.
      final coin = LibSpark.identifyAndRecoverCoin(
        serializedCoinB64,
        privateKeyHex: privateKeyHex,
        index: kDefaultSparkIndex,
        context: base64Decode(contextB64),
        isTestNet: args.isTestNet,
      );

      // its ours
      if (coin != null) {
        final SparkCoinType coinType;
        switch (coin.type.value) {
          case 0:
            coinType = SparkCoinType.mint;
          case 1:
            coinType = SparkCoinType.spend;
          default:
            throw Exception("Unknown spark coin type detected");
        }

        myCoins.add(
          SparkCoin(
            walletId: args.walletId,
            type: coinType,
            isUsed: args.spentCoinTags.contains(coin.lTagHash!),
            groupId: args.groupId,
            nonce: coin.nonceHex?.toUint8ListFromHex,
            address: coin.address!,
            txHash: txHash,
            valueIntString: coin.value!.toString(),
            memo: coin.memo,
            serialContext: coin.serialContext,
            diversifierIntString: coin.diversifier!.toString(),
            encryptedDiversifier: coin.encryptedDiversifier,
            serial: coin.serial,
            tag: coin.tag,
            lTagHash: coin.lTagHash!,
            height: coin.height,
            serializedCoinB64: serializedCoinB64,
            contextB64: contextB64,
          ),
        );
      }
    }
  }

  return myCoins;
}