2023-12-05 20:44:50 +00:00
import ' dart:convert ' ;
2023-12-23 00:15:44 +00:00
import ' dart:math ' ;
2023-11-27 20:57:33 +00:00
2023-12-13 17:26:30 +00:00
import ' package:bitcoindart/bitcoindart.dart ' as btc ;
2023-12-27 16:01:13 +00:00
import ' package:decimal/decimal.dart ' ;
2023-12-19 18:06:05 +00:00
import ' package:flutter/foundation.dart ' ;
2023-11-28 16:13:10 +00:00
import ' package:flutter_libsparkmobile/flutter_libsparkmobile.dart ' ;
2023-11-27 20:57:33 +00:00
import ' package:isar/isar.dart ' ;
2023-12-15 14:47:46 +00:00
import ' package:stackwallet/models/balance.dart ' ;
2023-12-23 00:15:44 +00:00
import ' package:stackwallet/models/isar/models/isar_models.dart ' ;
import ' package:stackwallet/models/signing_data.dart ' ;
2023-11-27 20:57:33 +00:00
import ' package:stackwallet/utilities/amount/amount.dart ' ;
2023-12-05 22:55:38 +00:00
import ' package:stackwallet/utilities/extensions/extensions.dart ' ;
2023-12-16 20:28:04 +00:00
import ' package:stackwallet/utilities/logger.dart ' ;
2023-11-28 16:13:10 +00:00
import ' package:stackwallet/wallets/crypto_currency/crypto_currency.dart ' ;
2023-12-04 15:35:59 +00:00
import ' package:stackwallet/wallets/isar/models/spark_coin.dart ' ;
2023-11-16 21:30:01 +00:00
import ' package:stackwallet/wallets/models/tx_data.dart ' ;
import ' package:stackwallet/wallets/wallet/intermediate/bip39_hd_wallet.dart ' ;
2023-11-16 22:25:20 +00:00
import ' package:stackwallet/wallets/wallet/wallet_mixin_interfaces/electrumx_interface.dart ' ;
2023-11-16 21:30:01 +00:00
/// Account index used for spark key derivation: appended to the spark
/// derivation path and passed as `index:` to [LibSpark] calls in this file.
const kDefaultSparkIndex = 1;

// TODO dart style constants. Maybe move to spark lib?

/// Maximum standard transaction weight; mint transactions exceeding this are
/// rejected before broadcast.
const MAX_STANDARD_TX_WEIGHT = 400000;

/// Maximum number of spark (shielded) outputs in a single transaction.
//https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L16
const SPARK_OUT_LIMIT_PER_TX = 16;

// Spark-specific script opcodes (used to detect/construct spark outputs).
const OP_SPARKMINT = 0xd1;
const OP_SPARKSMINT = 0xd2;
const OP_SPARKSPEND = 0xd3;
2023-11-16 22:25:20 +00:00
mixin SparkInterface on Bip39HDWallet , ElectrumXInterface {
2023-12-29 15:26:32 +00:00
String ? _sparkChangeAddressCached ;
/// Spark change address. Should generally not be exposed to end users.
String get sparkChangeAddress {
if ( _sparkChangeAddressCached = = null ) {
throw Exception ( " _sparkChangeAddressCached was not initialized " ) ;
}
return _sparkChangeAddressCached ! ;
}
2023-12-19 15:20:50 +00:00
static bool validateSparkAddress ( {
required String address ,
required bool isTestNet ,
} ) = >
LibSpark . validateAddress ( address: address , isTestNet: isTestNet ) ;
2023-11-29 15:53:30 +00:00
@ override
Future < void > init ( ) async {
Address ? address = await getCurrentReceivingSparkAddress ( ) ;
if ( address = = null ) {
address = await generateNextSparkAddress ( ) ;
await mainDB . putAddress ( address ) ;
} // TODO add other address types to wallet info?
2023-12-29 15:26:32 +00:00
if ( _sparkChangeAddressCached = = null ) {
final root = await getRootHDNode ( ) ;
final String derivationPath ;
if ( cryptoCurrency . network = = CryptoCurrencyNetwork . test ) {
derivationPath = " $ kSparkBaseDerivationPathTestnet $ kDefaultSparkIndex " ;
} else {
derivationPath = " $ kSparkBaseDerivationPath $ kDefaultSparkIndex " ;
}
final keys = root . derivePath ( derivationPath ) ;
_sparkChangeAddressCached = await LibSpark . getAddress (
privateKey: keys . privateKey . data ,
index: kDefaultSparkIndex ,
2024-01-03 17:01:04 +00:00
diversifier: kSparkChange ,
2023-12-29 15:26:32 +00:00
isTestNet: cryptoCurrency . network = = CryptoCurrencyNetwork . test ,
) ;
}
2023-11-29 15:53:30 +00:00
// await info.updateReceivingAddress(
// newAddress: address.value,
// isar: mainDB.isar,
// );
await super . init ( ) ;
}
@ override
Future < List < Address > > fetchAddressesForElectrumXScan ( ) async {
final allAddresses = await mainDB
. getAddresses ( walletId )
. filter ( )
. not ( )
. group (
( q ) = > q
. typeEqualTo ( AddressType . spark )
. or ( )
. typeEqualTo ( AddressType . nonWallet )
. or ( )
. subTypeEqualTo ( AddressSubType . nonWallet ) ,
)
. findAll ( ) ;
return allAddresses ;
}
2023-11-27 20:57:33 +00:00
Future < Address ? > getCurrentReceivingSparkAddress ( ) async {
return await mainDB . isar . addresses
. where ( )
. walletIdEqualTo ( walletId )
. filter ( )
. typeEqualTo ( AddressType . spark )
. sortByDerivationIndexDesc ( )
. findFirst ( ) ;
}
2023-11-29 15:53:30 +00:00
Future < Address > generateNextSparkAddress ( ) async {
2023-11-27 20:57:33 +00:00
final highestStoredDiversifier =
( await getCurrentReceivingSparkAddress ( ) ) ? . derivationIndex ;
// default to starting at 1 if none found
2024-01-03 17:01:04 +00:00
int diversifier = ( highestStoredDiversifier ? ? 0 ) + 1 ;
// change address check
if ( diversifier = = kSparkChange ) {
diversifier + + ;
}
2023-11-27 20:57:33 +00:00
2023-11-28 16:13:10 +00:00
final root = await getRootHDNode ( ) ;
2023-12-05 22:55:38 +00:00
final String derivationPath ;
if ( cryptoCurrency . network = = CryptoCurrencyNetwork . test ) {
2023-12-18 20:05:22 +00:00
derivationPath = " $ kSparkBaseDerivationPathTestnet $ kDefaultSparkIndex " ;
2023-12-05 22:55:38 +00:00
} else {
2023-12-18 20:05:22 +00:00
derivationPath = " $ kSparkBaseDerivationPath $ kDefaultSparkIndex " ;
2023-12-05 22:55:38 +00:00
}
2023-11-28 16:13:10 +00:00
final keys = root . derivePath ( derivationPath ) ;
final String addressString = await LibSpark . getAddress (
privateKey: keys . privateKey . data ,
2023-12-18 20:05:22 +00:00
index: kDefaultSparkIndex ,
2023-11-28 16:13:10 +00:00
diversifier: diversifier ,
isTestNet: cryptoCurrency . network = = CryptoCurrencyNetwork . test ,
) ;
2023-11-27 20:57:33 +00:00
return Address (
walletId: walletId ,
value: addressString ,
2023-11-28 16:13:10 +00:00
publicKey: keys . publicKey . data ,
2023-11-27 20:57:33 +00:00
derivationIndex: diversifier ,
derivationPath: DerivationPath ( ) . . value = derivationPath ,
type: AddressType . spark ,
subType: AddressSubType . receiving ,
) ;
}
Future < Amount > estimateFeeForSpark ( Amount amount ) async {
2023-12-20 00:34:20 +00:00
// int spendAmount = amount.raw.toInt();
// if (spendAmount == 0) {
return Amount (
rawValue: BigInt . from ( 0 ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ;
// }
// TODO actual fee estimation
2023-11-27 20:57:33 +00:00
}
2023-11-27 21:18:20 +00:00
/// Spark to Spark/Transparent (spend) creation
2023-11-16 21:30:01 +00:00
Future < TxData > prepareSendSpark ( {
required TxData txData ,
} ) async {
2023-12-27 16:01:13 +00:00
// There should be at least one output.
if ( ! ( txData . recipients ? . isNotEmpty = = true | |
txData . sparkRecipients ? . isNotEmpty = = true ) ) {
throw Exception ( " No recipients provided. " ) ;
}
if ( txData . sparkRecipients ? . isNotEmpty = = true & &
txData . sparkRecipients ! . length > = SPARK_OUT_LIMIT_PER_TX - 1 ) {
throw Exception ( " Spark shielded output limit exceeded. " ) ;
}
final transparentSumOut =
( txData . recipients ? ? [ ] ) . map ( ( e ) = > e . amount ) . fold (
Amount (
rawValue: BigInt . zero ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
( p , e ) = > p + e ) ;
// See SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L17
// and COIN https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L17
// Note that as MAX_MONEY is greater than this limit, we can ignore it. See https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L31
if ( transparentSumOut >
Amount . fromDecimal (
Decimal . parse ( " 10000 " ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ) {
throw Exception (
" Spend to transparent address limit exceeded (10,000 Firo per transaction). " ) ;
}
final sparkSumOut =
( txData . sparkRecipients ? ? [ ] ) . map ( ( e ) = > e . amount ) . fold (
Amount (
rawValue: BigInt . zero ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
( p , e ) = > p + e ) ;
final txAmount = transparentSumOut + sparkSumOut ;
2023-12-21 00:00:02 +00:00
// fetch spendable spark coins
2023-12-18 21:12:16 +00:00
final coins = await mainDB . isar . sparkCoins
. where ( )
. walletIdEqualToAnyLTagHash ( walletId )
. filter ( )
. isUsedEqualTo ( false )
2023-12-20 19:45:46 +00:00
. and ( )
. heightIsNotNull ( )
2023-12-21 20:41:29 +00:00
. and ( )
. not ( )
. valueIntStringEqualTo ( " 0 " )
2023-12-18 21:12:16 +00:00
. findAll ( ) ;
2023-12-21 20:41:29 +00:00
final available = info . cachedBalanceTertiary . spendable ;
if ( txAmount > available ) {
throw Exception ( " Insufficient Spark balance " ) ;
}
final bool isSendAll = available = = txAmount ;
2023-12-21 00:00:02 +00:00
// prepare coin data for ffi
2023-12-20 19:45:46 +00:00
final serializedCoins = coins
. map ( ( e ) = > (
serializedCoin: e . serializedCoinB64 ! ,
serializedCoinContext: e . contextB64 ! ,
groupId: e . groupId ,
height: e . height ! ,
) )
. toList ( ) ;
2023-12-13 17:26:30 +00:00
final currentId = await electrumXClient . getSparkLatestCoinId ( ) ;
final List < Map < String , dynamic > > setMaps = [ ] ;
2023-12-20 19:45:46 +00:00
final List < ( { int groupId , String blockHash } ) > idAndBlockHashes = [ ] ;
for ( int i = 1 ; i < = currentId ; i + + ) {
2023-12-13 20:13:11 +00:00
final set = await electrumXCachedClient . getSparkAnonymitySet (
groupId: i . toString ( ) ,
coin: info . coin ,
2023-12-13 17:26:30 +00:00
) ;
set [ " coinGroupID " ] = i ;
setMaps . add ( set ) ;
2023-12-20 19:45:46 +00:00
idAndBlockHashes . add (
(
groupId: i ,
blockHash: set [ " blockHash " ] as String ,
) ,
) ;
2023-12-13 17:26:30 +00:00
}
final allAnonymitySets = setMaps
. map ( ( e ) = > (
setId: e [ " coinGroupID " ] as int ,
setHash: e [ " setHash " ] as String ,
set : ( e [ " coins " ] as List )
. map ( ( e ) = > (
serializedCoin: e [ 0 ] as String ,
txHash: e [ 1 ] as String ,
) )
. toList ( ) ,
) )
. toList ( ) ;
final root = await getRootHDNode ( ) ;
final String derivationPath ;
if ( cryptoCurrency . network = = CryptoCurrencyNetwork . test ) {
2023-12-18 20:05:22 +00:00
derivationPath = " $ kSparkBaseDerivationPathTestnet $ kDefaultSparkIndex " ;
2023-12-13 17:26:30 +00:00
} else {
2023-12-18 20:05:22 +00:00
derivationPath = " $ kSparkBaseDerivationPath $ kDefaultSparkIndex " ;
2023-12-13 17:26:30 +00:00
}
final privateKey = root . derivePath ( derivationPath ) . privateKey . data ;
2023-12-21 20:41:29 +00:00
final txb = btc . TransactionBuilder (
2023-12-23 00:15:44 +00:00
network: _bitcoinDartNetwork ,
2023-12-13 17:26:30 +00:00
) ;
txb . setLockTime ( await chainHeight ) ;
txb . setVersion ( 3 | ( 9 < < 16 ) ) ;
2023-12-21 22:04:49 +00:00
List < ( { String address , Amount amount } ) > ? recipientsWithFeeSubtracted ;
List <
2023-12-21 20:41:29 +00:00
( {
String address ,
Amount amount ,
String memo ,
2023-12-21 22:04:49 +00:00
} ) > ? sparkRecipientsWithFeeSubtracted ;
final recipientCount = ( txData . recipients
? . where (
( e ) = > e . amount . raw > BigInt . zero ,
)
. length ? ?
0 ) ;
final totalRecipientCount =
recipientCount + ( txData . sparkRecipients ? . length ? ? 0 ) ;
2023-12-21 20:41:29 +00:00
final BigInt estimatedFee ;
if ( isSendAll ) {
final estFee = LibSpark . estimateSparkFee (
privateKeyHex: privateKey . toHex ,
index: kDefaultSparkIndex ,
sendAmount: txAmount . raw . toInt ( ) ,
subtractFeeFromAmount: true ,
serializedCoins: serializedCoins ,
privateRecipientsCount: ( txData . sparkRecipients ? . length ? ? 0 ) ,
) ;
estimatedFee = BigInt . from ( estFee ) ;
} else {
estimatedFee = BigInt . zero ;
}
2023-12-21 22:04:49 +00:00
if ( ( txData . sparkRecipients ? . length ? ? 0 ) > 0 ) {
sparkRecipientsWithFeeSubtracted = [ ] ;
}
if ( recipientCount > 0 ) {
recipientsWithFeeSubtracted = [ ] ;
}
2023-12-21 20:41:29 +00:00
for ( int i = 0 ; i < ( txData . sparkRecipients ? . length ? ? 0 ) ; i + + ) {
2023-12-21 22:04:49 +00:00
sparkRecipientsWithFeeSubtracted ! . add (
2023-12-21 20:41:29 +00:00
(
address: txData . sparkRecipients ! [ i ] . address ,
amount: Amount (
rawValue: txData . sparkRecipients ! [ i ] . amount . raw -
2023-12-21 22:04:49 +00:00
( estimatedFee ~ / BigInt . from ( totalRecipientCount ) ) ,
2023-12-21 20:41:29 +00:00
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
memo: txData . sparkRecipients ! [ i ] . memo ,
) ,
) ;
}
2023-12-13 17:26:30 +00:00
for ( int i = 0 ; i < ( txData . recipients ? . length ? ? 0 ) ; i + + ) {
if ( txData . recipients ! [ i ] . amount . raw = = BigInt . zero ) {
continue ;
}
2023-12-21 22:04:49 +00:00
recipientsWithFeeSubtracted ! . add (
2023-12-21 20:41:29 +00:00
(
address: txData . recipients ! [ i ] . address ,
amount: Amount (
rawValue: txData . recipients ! [ i ] . amount . raw -
2023-12-21 22:04:49 +00:00
( estimatedFee ~ / BigInt . from ( totalRecipientCount ) ) ,
2023-12-21 20:41:29 +00:00
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
) ,
) ;
final scriptPubKey = btc . Address . addressToOutputScript (
2023-12-13 17:26:30 +00:00
txData . recipients ! [ i ] . address ,
2023-12-23 00:15:44 +00:00
_bitcoinDartNetwork ,
2023-12-21 20:41:29 +00:00
) ;
txb . addOutput (
scriptPubKey ,
recipientsWithFeeSubtracted [ i ] . amount . raw . toInt ( ) ,
2023-12-13 17:26:30 +00:00
) ;
}
2023-12-21 16:23:55 +00:00
final extractedTx = txb . buildIncomplete ( ) ;
extractedTx . addInput (
' 0000000000000000000000000000000000000000000000000000000000000000 '
. toUint8ListFromHex ,
0xffffffff ,
0xffffffff ,
" d3 " . toUint8ListFromHex , // OP_SPARKSPEND
) ;
extractedTx . setPayload ( Uint8List ( 0 ) ) ;
2023-12-13 17:26:30 +00:00
2023-12-21 22:18:12 +00:00
final spend = await compute (
_createSparkSend ,
(
privateKeyHex: privateKey . toHex ,
index: kDefaultSparkIndex ,
recipients: txData . recipients
? . map ( ( e ) = > (
address: e . address ,
amount: e . amount . raw . toInt ( ) ,
subtractFeeFromAmount: isSendAll ,
) )
. toList ( ) ? ?
[ ] ,
privateRecipients: txData . sparkRecipients
? . map ( ( e ) = > (
sparkAddress: e . address ,
amount: e . amount . raw . toInt ( ) ,
subtractFeeFromAmount: isSendAll ,
memo: e . memo ,
) )
. toList ( ) ? ?
[ ] ,
serializedCoins: serializedCoins ,
allAnonymitySets: allAnonymitySets ,
idAndBlockHashes: idAndBlockHashes
. map (
( e ) = > ( setId: e . groupId , blockHash: base64Decode ( e . blockHash ) ) )
. toList ( ) ,
txHash: extractedTx . getHash ( ) ,
) ,
2023-12-13 17:26:30 +00:00
) ;
for ( final outputScript in spend . outputScripts ) {
2023-12-21 16:23:55 +00:00
extractedTx . addOutput ( outputScript , 0 ) ;
2023-12-13 17:26:30 +00:00
}
2023-12-20 19:45:46 +00:00
extractedTx . setPayload ( spend . serializedSpendPayload ) ;
2023-12-13 17:26:30 +00:00
final rawTxHex = extractedTx . toHex ( ) ;
2023-12-21 20:41:29 +00:00
if ( isSendAll ) {
txData = txData . copyWith (
recipients: recipientsWithFeeSubtracted ,
sparkRecipients: sparkRecipientsWithFeeSubtracted ,
) ;
}
2023-12-13 17:26:30 +00:00
return txData . copyWith (
raw: rawTxHex ,
vSize: extractedTx . virtualSize ( ) ,
fee: Amount (
rawValue: BigInt . from ( spend . fee ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
// TODO used coins
) ;
2023-11-16 21:30:01 +00:00
}
2023-11-27 20:57:33 +00:00
2023-11-27 21:18:20 +00:00
// this may not be needed for either mints or spends or both
2023-11-27 20:57:33 +00:00
Future < TxData > confirmSendSpark ( {
required TxData txData ,
} ) async {
2023-12-21 16:23:55 +00:00
try {
Logging . instance . log ( " confirmSend txData: $ txData " , level: LogLevel . Info ) ;
final txHash = await electrumXClient . broadcastTransaction (
rawTx: txData . raw ! ,
) ;
Logging . instance . log ( " Sent txHash: $ txHash " , level: LogLevel . Info ) ;
txData = txData . copyWith (
// TODO mark spark coins as spent locally and update balance before waiting to check via electrumx?
// usedUTXOs:
// txData.usedUTXOs!.map((e) => e.copyWith(used: true)).toList(),
// TODO revisit setting these both
txHash: txHash ,
txid: txHash ,
) ;
2023-12-21 20:41:29 +00:00
// // mark utxos as used
// await mainDB.putUTXOs(txData.usedUTXOs!);
2023-12-21 16:23:55 +00:00
return txData ;
} catch ( e , s ) {
Logging . instance . log ( " Exception rethrown from confirmSend(): $ e \n $ s " ,
level: LogLevel . Error ) ;
rethrow ;
}
2023-11-27 20:57:33 +00:00
}
// TODO lots of room for performance improvements here. Should be similar to
// recoverSparkWallet but only fetch and check anonymity set data that we
// have not yet parsed.
Future < void > refreshSparkData ( ) async {
2023-12-05 22:55:38 +00:00
final sparkAddresses = await mainDB . isar . addresses
. where ( )
. walletIdEqualTo ( walletId )
. filter ( )
. typeEqualTo ( AddressType . spark )
. findAll ( ) ;
final Set < String > paths =
sparkAddresses . map ( ( e ) = > e . derivationPath ! . value ) . toSet ( ) ;
2023-11-27 20:57:33 +00:00
try {
2023-12-04 15:35:59 +00:00
final latestSparkCoinId = await electrumXClient . getSparkLatestCoinId ( ) ;
2023-11-27 21:18:20 +00:00
2023-12-18 20:05:22 +00:00
final blockHash = await _getCachedSparkBlockHash ( ) ;
2023-12-20 23:46:48 +00:00
final anonymitySetFuture = blockHash = = null
? electrumXCachedClient . getSparkAnonymitySet (
2023-12-19 18:06:05 +00:00
groupId: latestSparkCoinId . toString ( ) ,
coin: info . coin ,
)
2023-12-20 23:46:48 +00:00
: electrumXClient . getSparkAnonymitySet (
2023-12-19 18:06:05 +00:00
coinGroupId: latestSparkCoinId . toString ( ) ,
startBlockHash: blockHash ,
) ;
2023-12-20 23:46:48 +00:00
final spentCoinTagsFuture =
electrumXClient . getSparkUsedCoinsTags ( startNumber: 0 ) ;
// electrumXCachedClient.getSparkUsedCoinsTags(coin: info.coin);
final futureResults = await Future . wait ( [
anonymitySetFuture ,
spentCoinTagsFuture ,
] ) ;
final anonymitySet = futureResults [ 0 ] as Map < String , dynamic > ;
final spentCoinTags = futureResults [ 1 ] as Set < String > ;
final List < SparkCoin > myCoins = [ ] ;
2023-12-19 18:06:05 +00:00
if ( anonymitySet [ " coins " ] is List & &
( anonymitySet [ " coins " ] as List ) . isNotEmpty ) {
final root = await getRootHDNode ( ) ;
final privateKeyHexSet = paths
. map (
( e ) = > root . derivePath ( e ) . privateKey . data . toHex ,
)
. toSet ( ) ;
2023-12-20 23:46:48 +00:00
final identifiedCoins = await compute (
2023-12-19 18:06:05 +00:00
_identifyCoins ,
(
anonymitySetCoins: anonymitySet [ " coins " ] as List ,
2023-12-20 19:45:46 +00:00
groupId: latestSparkCoinId ,
2023-12-19 18:06:05 +00:00
spentCoinTags: spentCoinTags ,
privateKeyHexSet: privateKeyHexSet ,
walletId: walletId ,
isTestNet: cryptoCurrency . network = = CryptoCurrencyNetwork . test ,
) ,
) ;
2023-11-27 20:57:33 +00:00
2023-12-20 23:46:48 +00:00
myCoins . addAll ( identifiedCoins ) ;
2023-11-27 20:57:33 +00:00
2023-12-19 18:06:05 +00:00
// update blockHash in cache
final String newBlockHash =
base64ToReverseHex ( anonymitySet [ " blockHash " ] as String ) ;
await _setCachedSparkBlockHash ( newBlockHash ) ;
}
2023-12-15 14:47:46 +00:00
2023-12-20 23:46:48 +00:00
// check current coins
final currentCoins = await mainDB . isar . sparkCoins
. where ( )
. walletIdEqualToAnyLTagHash ( walletId )
. filter ( )
. isUsedEqualTo ( false )
. findAll ( ) ;
for ( final coin in currentCoins ) {
if ( spentCoinTags . contains ( coin . lTagHash ) ) {
myCoins . add ( coin . copyWith ( isUsed: true ) ) ;
}
}
// update wallet spark coins in isar
await _addOrUpdateSparkCoins ( myCoins ) ;
2023-12-15 14:47:46 +00:00
// refresh spark balance
2023-12-18 20:05:22 +00:00
await refreshSparkBalance ( ) ;
2023-11-27 20:57:33 +00:00
} catch ( e , s ) {
// todo logging
rethrow ;
}
}
2023-12-18 20:05:22 +00:00
Future < void > refreshSparkBalance ( ) async {
final currentHeight = await chainHeight ;
final unusedCoins = await mainDB . isar . sparkCoins
. where ( )
. walletIdEqualToAnyLTagHash ( walletId )
. filter ( )
. isUsedEqualTo ( false )
. findAll ( ) ;
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
final total = Amount (
rawValue: unusedCoins
. map ( ( e ) = > e . value )
. fold ( BigInt . zero , ( prev , e ) = > prev + e ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ;
final spendable = Amount (
rawValue: unusedCoins
. where ( ( e ) = >
e . height ! = null & &
2023-12-20 00:34:20 +00:00
e . height ! + cryptoCurrency . minConfirms < = currentHeight )
2023-12-18 20:05:22 +00:00
. map ( ( e ) = > e . value )
. fold ( BigInt . zero , ( prev , e ) = > prev + e ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ;
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
final sparkBalance = Balance (
total: total ,
spendable: spendable ,
blockedTotal: Amount (
rawValue: BigInt . zero ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
pendingSpendable: total - spendable ,
) ;
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
await info . updateBalanceTertiary (
newBalance: sparkBalance ,
isar: mainDB . isar ,
) ;
}
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
/// Should only be called within the standard wallet [recover] function due to
/// mutex locking. Otherwise behaviour MAY be undefined.
Future < void > recoverSparkWallet ( {
required Map < dynamic , dynamic > anonymitySet ,
required Set < String > spentCoinTags ,
} ) async {
// generate spark addresses if non existing
if ( await getCurrentReceivingSparkAddress ( ) = = null ) {
final address = await generateNextSparkAddress ( ) ;
await mainDB . putAddress ( address ) ;
}
final sparkAddresses = await mainDB . isar . addresses
. where ( )
. walletIdEqualTo ( walletId )
. filter ( )
. typeEqualTo ( AddressType . spark )
. findAll ( ) ;
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
final Set < String > paths =
sparkAddresses . map ( ( e ) = > e . derivationPath ! . value ) . toSet ( ) ;
try {
2023-12-19 18:06:05 +00:00
final root = await getRootHDNode ( ) ;
final privateKeyHexSet =
paths . map ( ( e ) = > root . derivePath ( e ) . privateKey . data . toHex ) . toSet ( ) ;
final myCoins = await compute (
_identifyCoins ,
(
anonymitySetCoins: anonymitySet [ " coins " ] as List ,
2023-12-20 19:45:46 +00:00
groupId: anonymitySet [ " coinGroupID " ] as int ,
2023-12-19 18:06:05 +00:00
spentCoinTags: spentCoinTags ,
privateKeyHexSet: privateKeyHexSet ,
walletId: walletId ,
isTestNet: cryptoCurrency . network = = CryptoCurrencyNetwork . test ,
) ,
2023-12-18 20:05:22 +00:00
) ;
2023-11-27 20:57:33 +00:00
// update wallet spark coins in isar
2023-12-18 20:05:22 +00:00
await _addOrUpdateSparkCoins ( myCoins ) ;
2023-11-27 20:57:33 +00:00
2023-12-18 20:05:22 +00:00
// update blockHash in cache
final String newBlockHash = anonymitySet [ " blockHash " ] as String ;
await _setCachedSparkBlockHash ( newBlockHash ) ;
// refresh spark balance
await refreshSparkBalance ( ) ;
2023-11-27 20:57:33 +00:00
} catch ( e , s ) {
// todo logging
rethrow ;
}
}
2023-12-27 16:01:13 +00:00
// modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
Future < List < TxData > > _createSparkMintTransactions ( {
2023-12-23 00:15:44 +00:00
required List < UTXO > availableUtxos ,
required List < MutableSparkRecipient > outputs ,
required bool subtractFeeFromAmount ,
required bool autoMintAll ,
} ) async {
// pre checks
if ( outputs . isEmpty ) {
throw Exception ( " Cannot mint without some recipients " ) ;
}
2023-12-27 16:01:13 +00:00
// TODO remove when multiple recipients gui is added. Will need to handle
// addresses when confirming the transactions later as well
assert ( outputs . length = = 1 ) ;
2023-12-23 00:15:44 +00:00
BigInt valueToMint =
outputs . map ( ( e ) = > e . value ) . reduce ( ( value , element ) = > value + element ) ;
if ( valueToMint < = BigInt . zero ) {
throw Exception ( " Cannot mint amount= $ valueToMint " ) ;
}
final totalUtxosValue = _sum ( availableUtxos ) ;
if ( valueToMint > totalUtxosValue ) {
throw Exception ( " Insufficient balance to create spark mint(s) " ) ;
}
// organise utxos
Map < String , List < UTXO > > utxosByAddress = { } ;
for ( final utxo in availableUtxos ) {
utxosByAddress [ utxo . address ! ] ? ? = [ ] ;
utxosByAddress [ utxo . address ! ] ! . add ( utxo ) ;
}
final valueAndUTXOs = utxosByAddress . values . toList ( ) ;
// setup some vars
int nChangePosInOut = - 1 ;
int nChangePosRequest = nChangePosInOut ;
2023-12-27 16:01:13 +00:00
List < MutableSparkRecipient > outputs_ = outputs
. map ( ( e ) = > MutableSparkRecipient ( e . address , e . value , e . memo ) )
. toList ( ) ; // deep copy
2023-12-24 16:51:08 +00:00
final feesObject = await fees ;
2023-12-23 00:15:44 +00:00
final currentHeight = await chainHeight ;
final random = Random . secure ( ) ;
final List < TxData > results = [ ] ;
valueAndUTXOs . shuffle ( random ) ;
while ( valueAndUTXOs . isNotEmpty ) {
final lockTime = random . nextInt ( 10 ) = = 0
? max ( 0 , currentHeight - random . nextInt ( 100 ) )
: currentHeight ;
const txVersion = 1 ;
final List < SigningData > vin = [ ] ;
final List < ( dynamic , int ) > vout = [ ] ;
BigInt nFeeRet = BigInt . zero ;
final itr = valueAndUTXOs . first ;
BigInt valueToMintInTx = _sum ( itr ) ;
if ( ! autoMintAll ) {
valueToMintInTx = _min ( valueToMintInTx , valueToMint ) ;
}
BigInt nValueToSelect , mintedValue ;
final List < SigningData > setCoins = [ ] ;
bool skipCoin = false ;
// Start with no fee and loop until there is enough fee
while ( true ) {
mintedValue = valueToMintInTx ;
if ( subtractFeeFromAmount ) {
nValueToSelect = mintedValue ;
} else {
nValueToSelect = mintedValue + nFeeRet ;
}
// if not enough coins in this group then subtract fee from mint
if ( nValueToSelect > _sum ( itr ) & & ! subtractFeeFromAmount ) {
nValueToSelect = mintedValue ;
mintedValue - = nFeeRet ;
}
// if (!MoneyRange(mintedValue) || mintedValue == 0) {
if ( mintedValue = = BigInt . zero ) {
valueAndUTXOs . remove ( itr ) ;
skipCoin = true ;
break ;
}
nChangePosInOut = nChangePosRequest ;
vin . clear ( ) ;
vout . clear ( ) ;
setCoins . clear ( ) ;
2023-12-27 16:01:13 +00:00
// deep copy
final remainingOutputs = outputs_
. map ( ( e ) = > MutableSparkRecipient ( e . address , e . value , e . memo ) )
. toList ( ) ;
2023-12-23 00:15:44 +00:00
final List < MutableSparkRecipient > singleTxOutputs = [ ] ;
2023-12-27 16:01:13 +00:00
2023-12-23 00:15:44 +00:00
if ( autoMintAll ) {
singleTxOutputs . add (
MutableSparkRecipient (
( await getCurrentReceivingSparkAddress ( ) ) ! . value ,
mintedValue ,
" " ,
) ,
) ;
} else {
2023-12-27 16:01:13 +00:00
BigInt remainingMintValue = BigInt . parse ( mintedValue . toString ( ) ) ;
2023-12-23 00:15:44 +00:00
while ( remainingMintValue > BigInt . zero ) {
final singleMintValue =
_min ( remainingMintValue , remainingOutputs . first . value ) ;
singleTxOutputs . add (
MutableSparkRecipient (
remainingOutputs . first . address ,
singleMintValue ,
remainingOutputs . first . memo ,
) ,
) ;
// subtract minted amount from remaining value
remainingMintValue - = singleMintValue ;
remainingOutputs . first . value - = singleMintValue ;
if ( remainingOutputs . first . value = = BigInt . zero ) {
remainingOutputs . remove ( remainingOutputs . first ) ;
}
}
}
if ( subtractFeeFromAmount ) {
final BigInt singleFee =
nFeeRet ~ / BigInt . from ( singleTxOutputs . length ) ;
BigInt remainder = nFeeRet % BigInt . from ( singleTxOutputs . length ) ;
for ( int i = 0 ; i < singleTxOutputs . length ; + + i ) {
if ( singleTxOutputs [ i ] . value < = singleFee ) {
singleTxOutputs . removeAt ( i ) ;
remainder + = singleTxOutputs [ i ] . value - singleFee ;
- - i ;
}
singleTxOutputs [ i ] . value - = singleFee ;
if ( remainder > BigInt . zero & &
singleTxOutputs [ i ] . value >
nFeeRet % BigInt . from ( singleTxOutputs . length ) ) {
// first receiver pays the remainder not divisible by output count
singleTxOutputs [ i ] . value - = remainder ;
remainder = BigInt . zero ;
}
}
}
// Generate dummy mint coins to save time
final dummyRecipients = LibSpark . createSparkMintRecipients (
outputs: singleTxOutputs
. map ( ( e ) = > (
sparkAddress: e . address ,
value: e . value . toInt ( ) ,
memo: " " ,
) )
. toList ( ) ,
serialContext: Uint8List ( 0 ) ,
generate: false ,
) ;
final dummyTxb = btc . TransactionBuilder ( network: _bitcoinDartNetwork ) ;
dummyTxb . setVersion ( txVersion ) ;
dummyTxb . setLockTime ( lockTime ) ;
for ( final recipient in dummyRecipients ) {
if ( recipient . amount < cryptoCurrency . dustLimit . raw . toInt ( ) ) {
throw Exception ( " Output amount too small " ) ;
}
vout . add ( (
recipient . scriptPubKey ,
recipient . amount ,
) ) ;
}
// Choose coins to use
BigInt nValueIn = BigInt . zero ;
for ( final utxo in itr ) {
if ( nValueToSelect > nValueIn ) {
setCoins . add ( ( await fetchBuildTxData ( [ utxo ] ) ) . first ) ;
nValueIn + = BigInt . from ( utxo . value ) ;
}
}
if ( nValueIn < nValueToSelect ) {
throw Exception ( " Insufficient funds " ) ;
}
// priority stuff???
BigInt nChange = nValueIn - nValueToSelect ;
if ( nChange > BigInt . zero ) {
if ( nChange < cryptoCurrency . dustLimit . raw ) {
nChangePosInOut = - 1 ;
nFeeRet + = nChange ;
} else {
if ( nChangePosInOut = = - 1 ) {
nChangePosInOut = random . nextInt ( vout . length + 1 ) ;
} else if ( nChangePosInOut > vout . length ) {
throw Exception ( " Change index out of range " ) ;
}
final changeAddress = await getCurrentChangeAddress ( ) ;
vout . insert (
nChangePosInOut ,
( changeAddress ! . value , nChange . toInt ( ) ) ,
) ;
}
}
// add outputs for dummy tx to check fees
for ( final out in vout ) {
dummyTxb . addOutput ( out . $1 , out . $2 ) ;
}
// fill vin
for ( final sd in setCoins ) {
vin . add ( sd ) ;
// add to dummy tx
dummyTxb . addInput (
sd . utxo . txid ,
sd . utxo . vout ,
0xffffffff -
1 , // minus 1 is important. 0xffffffff on its own will burn funds
sd . output ,
) ;
}
// sign dummy tx
for ( var i = 0 ; i < setCoins . length ; i + + ) {
dummyTxb . sign (
vin: i ,
keyPair: setCoins [ i ] . keyPair ! ,
witnessValue: setCoins [ i ] . utxo . value ,
redeemScript: setCoins [ i ] . redeemScript ,
) ;
}
final dummyTx = dummyTxb . build ( ) ;
final nBytes = dummyTx . virtualSize ( ) ;
if ( dummyTx . weight ( ) > MAX_STANDARD_TX_WEIGHT ) {
throw Exception ( " Transaction too large " ) ;
}
2023-12-24 16:51:08 +00:00
final nFeeNeeded = BigInt . from (
estimateTxFee (
vSize: nBytes ,
feeRatePerKB: feesObject . medium ,
) ,
) ; // One day we'll do this properly
2023-12-23 00:15:44 +00:00
if ( nFeeRet > = nFeeNeeded ) {
for ( final usedCoin in setCoins ) {
itr . removeWhere ( ( e ) = > e = = usedCoin . utxo ) ;
}
if ( itr . isEmpty ) {
final preLength = valueAndUTXOs . length ;
valueAndUTXOs . remove ( itr ) ;
assert ( preLength - 1 = = valueAndUTXOs . length ) ;
}
// Generate real mint coins
final serialContext = LibSpark . serializeMintContext (
inputs: setCoins
. map ( ( e ) = > (
e . utxo . txid ,
e . utxo . vout ,
) )
. toList ( ) ,
) ;
final recipients = LibSpark . createSparkMintRecipients (
outputs: singleTxOutputs
. map (
( e ) = > (
sparkAddress: e . address ,
memo: e . memo ,
value: e . value . toInt ( ) ,
) ,
)
. toList ( ) ,
serialContext: serialContext ,
generate: true ,
) ;
int i = 0 ;
for ( final recipient in recipients ) {
final out = ( recipient . scriptPubKey , recipient . amount ) ;
while ( i < vout . length ) {
if ( vout [ i ] . $1 is Uint8List & &
( vout [ i ] . $1 as Uint8List ) . isNotEmpty & &
( vout [ i ] . $1 as Uint8List ) [ 0 ] = = OP_SPARKMINT ) {
vout [ i ] = out ;
break ;
}
+ + i ;
}
+ + i ;
}
2023-12-27 16:01:13 +00:00
// deep copy
outputs_ = remainingOutputs
. map ( ( e ) = > MutableSparkRecipient ( e . address , e . value , e . memo ) )
. toList ( ) ;
2023-12-23 00:15:44 +00:00
break ; // Done, enough fee included.
}
// Include more fee and try again.
nFeeRet = nFeeNeeded ;
continue ;
}
if ( skipCoin ) {
continue ;
}
// sign
final txb = btc . TransactionBuilder ( network: _bitcoinDartNetwork ) ;
txb . setVersion ( txVersion ) ;
txb . setLockTime ( lockTime ) ;
for ( final input in vin ) {
txb . addInput (
input . utxo . txid ,
input . utxo . vout ,
0xffffffff -
1 , // minus 1 is important. 0xffffffff on its own will burn funds
input . output ,
) ;
}
for ( final output in vout ) {
txb . addOutput ( output . $1 , output . $2 ) ;
}
try {
for ( var i = 0 ; i < vin . length ; i + + ) {
txb . sign (
vin: i ,
keyPair: vin [ i ] . keyPair ! ,
witnessValue: vin [ i ] . utxo . value ,
redeemScript: vin [ i ] . redeemScript ,
) ;
}
} catch ( e , s ) {
Logging . instance . log (
" Caught exception while signing spark mint transaction: $ e \n $ s " ,
level: LogLevel . Error ,
) ;
rethrow ;
}
final builtTx = txb . build ( ) ;
2023-12-27 16:01:13 +00:00
// TODO: see todo at top of this function
assert ( outputs . length = = 1 ) ;
2023-12-23 00:15:44 +00:00
final data = TxData (
sparkRecipients: vout
2023-12-27 16:01:13 +00:00
. where ( ( e ) = > e . $1 is Uint8List ) // ignore change
2023-12-23 00:15:44 +00:00
. map (
( e ) = > (
2023-12-27 16:01:13 +00:00
address: outputs . first
. address , // for display purposes on confirm tx screen. See todos above
2023-12-23 00:15:44 +00:00
memo: " " ,
amount: Amount (
rawValue: BigInt . from ( e . $2 ) ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
) ,
)
. toList ( ) ,
vSize: builtTx . virtualSize ( ) ,
txid: builtTx . getId ( ) ,
raw: builtTx . toHex ( ) ,
fee: Amount (
rawValue: nFeeRet ,
fractionDigits: cryptoCurrency . fractionDigits ,
) ,
2023-12-27 16:01:13 +00:00
usedUTXOs: vin . map ( ( e ) = > e . utxo ) . toList ( ) ,
2023-12-23 00:15:44 +00:00
) ;
2023-12-24 16:51:08 +00:00
if ( nFeeRet . toInt ( ) < data . vSize ! ) {
throw Exception ( " fee is less than vSize " ) ;
}
2023-12-23 00:15:44 +00:00
results . add ( data ) ;
if ( nChangePosInOut > = 0 ) {
final vOut = vout [ nChangePosInOut ] ;
assert ( vOut . $1 is String ) ; // check to make sure is change address
final out = UTXO (
walletId: walletId ,
txid: data . txid ! ,
vout: nChangePosInOut ,
value: vOut . $2 ,
address: vOut . $1 as String ,
name: " Spark mint change " ,
isBlocked: false ,
blockedReason: null ,
isCoinbase: false ,
blockHash: null ,
blockHeight: null ,
blockTime: null ,
) ;
bool added = false ;
for ( final utxos in valueAndUTXOs ) {
if ( utxos . first . address = = out . address ) {
utxos . add ( out ) ;
added = true ;
}
}
if ( ! added ) {
valueAndUTXOs . add ( [ out ] ) ;
}
}
if ( ! autoMintAll ) {
valueToMint - = mintedValue ;
if ( valueToMint = = BigInt . zero ) {
break ;
}
}
}
if ( ! autoMintAll & & valueToMint > BigInt . zero ) {
// TODO: Is this a valid error message?
throw Exception ( " Failed to mint expected amounts " ) ;
}
return results ;
}
/// Mints the wallet's entire spendable transparent balance into Spark coins
/// and broadcasts the resulting mint transaction(s).
Future<void> anonymizeAllSpark() async {
  try {
    // Must be true when minting everything, since no other funds remain to
    // cover the fee.
    const subtractFeeFromAmount = true;

    final currentHeight = await chainHeight;

    // All unblocked, unused, non-zero-value UTXOs owned by this wallet.
    final candidates = await mainDB.isar.utxos
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .isBlockedEqualTo(false)
        .and()
        .group((q) => q.usedEqualTo(false).or().usedIsNull())
        .and()
        .valueGreaterThan(0)
        .findAll();

    // Only spend UTXOs with enough confirmations.
    final spendableUtxos = candidates
        .where(
          (utxo) => utxo.isConfirmed(
            currentHeight,
            cryptoCurrency.minConfirms,
          ),
        )
        .toList();

    if (spendableUtxos.isEmpty) {
      throw Exception("No available UTXOs found to anonymize");
    }

    // Send everything to our own current receiving spark address.
    final totalValue = spendableUtxos
        .map((utxo) => BigInt.from(utxo.value))
        .fold(BigInt.zero, (sum, value) => sum + value);

    final mints = await _createSparkMintTransactions(
      subtractFeeFromAmount: subtractFeeFromAmount,
      autoMintAll: true,
      availableUtxos: spendableUtxos,
      outputs: [
        MutableSparkRecipient(
          (await getCurrentReceivingSparkAddress())!.value,
          totalValue,
          "",
        ),
      ],
    );

    await confirmSparkMintTransactions(txData: TxData(sparkMints: mints));
  } catch (e, s) {
    Logging.instance.log(
      "Exception caught in anonymizeAllSpark(): $e\n$s",
      level: LogLevel.Warning,
    );
    rethrow;
  }
}
2023-12-07 21:58:23 +00:00
/// Transparent to Spark (mint) transaction creation.
///
/// See https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o
///
/// Expects [txData.sparkRecipients] to be non-empty. Returns a copy of
/// [txData] with the prepared (unsigned-for-broadcast) mint transactions in
/// `sparkMints`. Throws on missing recipients, zero/negative totals, or
/// insufficient spendable balance.
Future<TxData> prepareSparkMintTransaction({required TxData txData}) async {
  try {
    if (txData.sparkRecipients?.isNotEmpty != true) {
      throw Exception("Missing spark recipients.");
    }

    // Copy recipients into the mutable form used by tx construction, where
    // values may be adjusted for fees.
    final recipients = txData.sparkRecipients!
        .map(
          (e) => MutableSparkRecipient(
            e.address,
            e.amount.raw,
            e.memo,
          ),
        )
        .toList();

    // Total amount being minted (sum of all recipient values). Recipients is
    // known non-empty here. Use fold to avoid mutating the lambda parameter.
    final total =
        recipients.map((e) => e.value).fold(BigInt.zero, (p, e) => p + e);

    if (total < BigInt.zero) {
      throw Exception("Attempted send of negative amount");
    } else if (total == BigInt.zero) {
      throw Exception("Attempted send of zero amount");
    }

    final currentHeight = await chainHeight;

    // Coin control is not currently enabled for firo, so always select from
    // all unblocked, unused, non-zero-value UTXOs owned by this wallet.
    final spendableUtxos = await mainDB.isar.utxos
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .isBlockedEqualTo(false)
        .and()
        .group((q) => q.usedEqualTo(false).or().usedIsNull())
        .and()
        .valueGreaterThan(0)
        .findAll();

    // Only spend UTXOs with enough confirmations.
    spendableUtxos.removeWhere(
      (e) => !e.isConfirmed(
        currentHeight,
        cryptoCurrency.minConfirms,
      ),
    );

    if (spendableUtxos.isEmpty) {
      // Fixed copy/paste message: previously said "... to anonymize", which
      // belongs to anonymizeAllSpark(), not mint preparation.
      throw Exception("No available UTXOs found");
    }

    final available = spendableUtxos
        .map((e) => BigInt.from(e.value))
        .fold(BigInt.zero, (p, e) => p + e);

    // When the entire spendable balance is being minted the fee must come out
    // of the minted amount; otherwise it is paid on top, from change.
    final bool subtractFeeFromAmount;
    if (available < total) {
      throw Exception("Insufficient balance");
    } else if (available == total) {
      subtractFeeFromAmount = true;
    } else {
      subtractFeeFromAmount = false;
    }

    final mints = await _createSparkMintTransactions(
      subtractFeeFromAmount: subtractFeeFromAmount,
      autoMintAll: false,
      availableUtxos: spendableUtxos,
      outputs: recipients,
    );

    return txData.copyWith(sparkMints: mints);
  } catch (e, s) {
    Logging.instance.log(
      "Exception caught in prepareSparkMintTransaction(): $e\n$s",
      level: LogLevel.Warning,
    );
    rethrow;
  }
}
2023-12-27 16:01:13 +00:00
/// Broadcasts each prepared spark mint transaction in [txData]'s
/// `sparkMints` (concurrently) and returns a copy of [txData] containing
/// the broadcast results.
Future<TxData> confirmSparkMintTransactions({required TxData txData}) async {
  final broadcasted = await Future.wait(
    txData.sparkMints!.map((mintTx) => confirmSend(txData: mintTx)),
  );
  return txData.copyWith(sparkMints: broadcasted);
}
2023-11-27 20:57:33 +00:00
@override
Future<void> updateBalance() async {
  // Refresh the transparent balance via the superclass (this also refreshes
  // the lelantus balance if the concrete wallet class mixes in
  // LelantusInterface).
  //
  // NOTE: the spark balance (info.tertiaryBalance) is not updated here; it
  // currently happens as part of the spark coins update/refresh.
  await super.updateBalance();
}
2023-12-18 20:05:22 +00:00
// ====================== Private ============================================

// Key under which the cached spark anonymity set block hash is stored in the
// wallet's info.otherData map.
final _kSparkAnonSetCachedBlockHashKey = "SparkAnonSetCachedBlockHashKey";

/// Returns the cached spark anonymity set block hash, or null if none has
/// been stored yet.
Future<String?> _getCachedSparkBlockHash() async {
  final cached = info.otherData[_kSparkAnonSetCachedBlockHashKey];
  return cached as String?;
}
/// Persists [blockHash] as the cached spark anonymity set block hash in the
/// wallet's info.otherData map.
Future<void> _setCachedSparkBlockHash(String blockHash) async {
  await info.updateOtherData(
    isar: mainDB.isar,
    newEntries: {_kSparkAnonSetCachedBlockHashKey: blockHash},
  );
}
/// Upserts [coins] into the local database, then backfills the block height
/// of any stored spark coin for this wallet that does not have one yet.
Future<void> _addOrUpdateSparkCoins(List<SparkCoin> coins) async {
  if (coins.isNotEmpty) {
    await mainDB.isar.writeTxn(() async {
      await mainDB.isar.sparkCoins.putAll(coins);
    });
  }

  // Stored coins still missing a confirmation height.
  final missingHeight = await mainDB.isar.sparkCoins
      .where()
      .walletIdEqualToAnyLTagHash(walletId)
      .filter()
      .heightIsNull()
      .findAll();

  final updated = <SparkCoin>[];
  for (final coin in missingHeight) {
    // Fetch the coin's transaction to discover its height (null/absent while
    // unconfirmed).
    final tx = await electrumXCachedClient.getTransaction(
      txHash: coin.txHash,
      coin: info.coin,
    );

    final height = tx["height"];
    if (height is int) {
      updated.add(coin.copyWith(height: height));
    }
  }

  if (updated.isNotEmpty) {
    await mainDB.isar.writeTxn(() async {
      await mainDB.isar.sparkCoins.putAll(updated);
    });
  }
}
2023-12-23 00:15:44 +00:00
/// This wallet's network parameters expressed as a bitcoindart
/// [btc.NetworkType], for use with package:bitcoindart tx building.
btc.NetworkType get _bitcoinDartNetwork {
  final params = cryptoCurrency.networkParams;
  return btc.NetworkType(
    messagePrefix: params.messagePrefix,
    bech32: params.bech32Hrp,
    bip32: btc.Bip32Type(
      public: params.pubHDPrefix,
      private: params.privHDPrefix,
    ),
    pubKeyHash: params.p2pkhPrefix,
    scriptHash: params.p2shPrefix,
    wif: params.wifPrefix,
  );
}
2023-11-16 21:30:01 +00:00
}
2023-12-05 20:44:50 +00:00
/// Decodes [source] (base64, possibly split across multiple lines) and
/// returns the bytes in REVERSED order as a lowercase hex string.
///
/// Used to convert electrumx's base64 tx hashes into the conventional
/// reversed-byte-order hex display form.
String base64ToReverseHex(String source) {
  // Strip any embedded newlines before decoding.
  final bytes = base64Decode(LineSplitter.split(source).join());

  final buffer = StringBuffer();
  for (var i = bytes.length - 1; i >= 0; i--) {
    buffer.write(bytes[i].toRadixString(16).padLeft(2, '0'));
  }
  return buffer.toString();
}
2023-12-19 18:06:05 +00:00
2023-12-21 22:18:12 +00:00
/// Top level function which should be called wrapped in [compute]
///
/// Unpacks [args] and builds a spark spend via
/// [LibSpark.createSparkSendTransaction], returning the serialized spend
/// payload, the output scripts, and the fee. Kept as a top level function so
/// it can run on a separate isolate.
Future<
    ({
      Uint8List serializedSpendPayload,
      List<Uint8List> outputScripts,
      int fee,
    })> _createSparkSend(
    ({
      String privateKeyHex,
      int index,
      List<
          ({
            String address,
            int amount,
            bool subtractFeeFromAmount
          })> recipients,
      List<
          ({
            String sparkAddress,
            int amount,
            bool subtractFeeFromAmount,
            String memo
          })> privateRecipients,
      List<
          ({
            String serializedCoin,
            String serializedCoinContext,
            int groupId,
            int height,
          })> serializedCoins,
      List<
          ({
            int setId,
            String setHash,
            List<({String serializedCoin, String txHash})> set
          })> allAnonymitySets,
      List<
          ({
            int setId,
            Uint8List blockHash,
          })> idAndBlockHashes,
      Uint8List txHash,
    }) args) async {
  // Pure forwarding: all real work happens inside the spark library.
  return LibSpark.createSparkSendTransaction(
    privateKeyHex: args.privateKeyHex,
    index: args.index,
    recipients: args.recipients,
    privateRecipients: args.privateRecipients,
    serializedCoins: args.serializedCoins,
    allAnonymitySets: args.allAnonymitySets,
    idAndBlockHashes: args.idAndBlockHashes,
    txHash: args.txHash,
  );
}
2023-12-19 18:06:05 +00:00
/// Top level function which should be called wrapped in [compute]
///
/// Attempts to identify and recover each coin in [args.anonymitySetCoins]
/// with every private key in [args.privateKeyHexSet], returning the
/// [SparkCoin]s that belong to this wallet.
Future<List<SparkCoin>> _identifyCoins(
    ({
      List<dynamic> anonymitySetCoins,
      int groupId,
      Set<String> spentCoinTags,
      Set<String> privateKeyHexSet,
      String walletId,
      bool isTestNet,
    }) args) async {
  final List<SparkCoin> myCoins = [];

  for (final privateKeyHex in args.privateKeyHexSet) {
    for (final dynData in args.anonymitySetCoins) {
      // Each entry is expected to be a 3-element list:
      // [serializedCoin(b64), txHash(b64), context(b64)].
      final data = List<String>.from(dynData as List);
      if (data.length != 3) {
        throw Exception("Unexpected serialized coin info found");
      }

      final serializedCoinB64 = data[0];
      final txHash = base64ToReverseHex(data[1]);
      final contextB64 = data[2];

      // Returns null when the coin does not belong to this key.
      final coin = LibSpark.identifyAndRecoverCoin(
        serializedCoinB64,
        privateKeyHex: privateKeyHex,
        index: kDefaultSparkIndex,
        context: base64Decode(contextB64),
        isTestNet: args.isTestNet,
      );

      if (coin == null) {
        continue;
      }

      // Map the raw type value onto our enum; anything else is unexpected.
      final coinType = switch (coin.type.value) {
        0 => SparkCoinType.mint,
        1 => SparkCoinType.spend,
        _ => throw Exception("Unknown spark coin type detected"),
      };

      myCoins.add(
        SparkCoin(
          walletId: args.walletId,
          type: coinType,
          isUsed: args.spentCoinTags.contains(coin.lTagHash!),
          groupId: args.groupId,
          nonce: coin.nonceHex?.toUint8ListFromHex,
          address: coin.address!,
          txHash: txHash,
          valueIntString: coin.value!.toString(),
          memo: coin.memo,
          serialContext: coin.serialContext,
          diversifierIntString: coin.diversifier!.toString(),
          encryptedDiversifier: coin.encryptedDiversifier,
          serial: coin.serial,
          tag: coin.tag,
          lTagHash: coin.lTagHash!,
          height: coin.height,
          serializedCoinB64: serializedCoinB64,
          contextB64: contextB64,
        ),
      );
    }
  }

  return myCoins;
}
2023-12-23 00:15:44 +00:00
/// Returns the smaller of [a] and [b] (returns [a] when equal).
BigInt _min(BigInt a, BigInt b) => a <= b ? a : b;
/// Returns the total value of all [utxos] as a [BigInt] (zero for an empty
/// list).
BigInt _sum(List<UTXO> utxos) {
  var total = BigInt.zero;
  for (final utxo in utxos) {
    total += BigInt.from(utxo.value);
  }
  return total;
}
/// A mutable (address, value, memo) triple used while constructing spark
/// mint/spend transactions, where values may be adjusted (e.g. for fees)
/// during construction.
class MutableSparkRecipient {
  // Destination spark address.
  String address;

  // Amount in raw (smallest) units; mutable so fee adjustments can be
  // applied in place.
  BigInt value;

  // Memo attached to the spark output (may be empty).
  String memo;

  MutableSparkRecipient(this.address, this.value, this.memo);

  @override
  String toString() =>
      'MutableSparkRecipient{ address: $address, value: $value, memo: $memo }';
}