import 'dart:convert';
import 'dart:math';

import 'package:bitcoindart/bitcoindart.dart' as btc;
import 'package:decimal/decimal.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_libsparkmobile/flutter_libsparkmobile.dart';
import 'package:isar/isar.dart';

import '../../../db/sqlite/firo_cache.dart';
import '../../../models/balance.dart';
import '../../../models/isar/models/blockchain_data/v2/input_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/output_v2.dart';
import '../../../models/isar/models/blockchain_data/v2/transaction_v2.dart';
import '../../../models/isar/models/isar_models.dart';
import '../../../models/signing_data.dart';
import '../../../utilities/amount/amount.dart';
import '../../../utilities/enums/derive_path_type_enum.dart';
import '../../../utilities/extensions/extensions.dart';
import '../../../utilities/logger.dart';
import '../../crypto_currency/interfaces/electrumx_currency_interface.dart';
import '../../isar/models/spark_coin.dart';
import '../../isar/models/wallet_info.dart';
import '../../models/tx_data.dart';
import '../intermediate/bip39_hd_wallet.dart';
import 'electrumx_interface.dart';

const kDefaultSparkIndex = 1;

// TODO dart style constants. Maybe move to spark lib?
const MAX_STANDARD_TX_WEIGHT = 400000;

//https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L16
const SPARK_OUT_LIMIT_PER_TX = 16;

const OP_SPARKMINT = 0xd1;
const OP_SPARKSMINT = 0xd2;
const OP_SPARKSPEND = 0xd3;

/// Top level function for use with [compute].
///
/// Expects [tag] formatted as "(x,y)" and returns the hashed tag.
String _hashTag(String tag) {
  final components = tag.split(",");
  final x = components[0].substring(1);
  final y = components[1].substring(0, components[1].length - 1);
  final hash = LibSpark.hashTag(x, y);
  return hash;
}

mixin SparkInterface<T extends ElectrumXCurrencyInterface>
    on Bip39HDWallet<T>, ElectrumXInterface<T> {
  String? _sparkChangeAddressCached;

  /// Spark change address. Should generally not be exposed to end users.
  String get sparkChangeAddress {
    if (_sparkChangeAddressCached == null) {
      throw Exception("_sparkChangeAddressCached was not initialized");
    }
    return _sparkChangeAddressCached!;
  }

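  /// Validates a spark address string for the given network.
  ///
  /// A minimal usage sketch (the address value here is a placeholder, not a
  /// real spark address):
  ///
  /// ```dart
  /// final isValid = SparkInterface.validateSparkAddress(
  ///   address: someSparkAddressString,
  ///   isTestNet: false,
  /// );
  /// ```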
  static bool validateSparkAddress({
    required String address,
    required bool isTestNet,
  }) =>
      LibSpark.validateAddress(address: address, isTestNet: isTestNet);

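  /// Hashes a spark tag on a background isolate via [compute].
  ///
  /// The [tag] is expected to look like "(x,y)"; anything else results in an
  /// [ArgumentError].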
  Future<String> hashTag(String tag) async {
    try {
      return await compute(_hashTag, tag);
    } catch (_) {
      throw ArgumentError("Invalid tag string format", "tag");
    }
  }

  @override
  Future<void> init() async {
    try {
      Address? address = await getCurrentReceivingSparkAddress();
      if (address == null) {
        address = await generateNextSparkAddress();
        await mainDB.putAddress(address);
      } // TODO add other address types to wallet info?

      if (_sparkChangeAddressCached == null) {
        final root = await getRootHDNode();
        final String derivationPath;
        if (cryptoCurrency.network.isTestNet) {
          derivationPath =
              "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
        } else {
          derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
        }
        final keys = root.derivePath(derivationPath);

        _sparkChangeAddressCached = await LibSpark.getAddress(
          privateKey: keys.privateKey.data,
          index: kDefaultSparkIndex,
          diversifier: kSparkChange,
          isTestNet: cryptoCurrency.network.isTestNet,
        );
      }
    } catch (e, s) {
      // do nothing, still allow user into wallet
      Logging.instance.log(
        "$runtimeType init() failed: $e\n$s",
        level: LogLevel.Error,
      );
    }

    // await info.updateReceivingAddress(
    //   newAddress: address.value,
    //   isar: mainDB.isar,
    // );

    await super.init();
  }

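  /// Returns all stored addresses relevant to an ElectrumX scan, i.e.
  /// everything except spark and non-wallet addresses.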
  @override
  Future<List<Address>> fetchAddressesForElectrumXScan() async {
    final allAddresses = await mainDB
        .getAddresses(walletId)
        .filter()
        .not()
        .group(
          (q) => q
              .typeEqualTo(AddressType.spark)
              .or()
              .typeEqualTo(AddressType.nonWallet)
              .or()
              .subTypeEqualTo(AddressSubType.nonWallet),
        )
        .findAll();
    return allAddresses;
  }

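  /// Returns the stored spark address with the highest derivation index, or
  /// null if no spark addresses have been generated yet.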
  Future<Address?> getCurrentReceivingSparkAddress() async {
    return await mainDB.isar.addresses
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .typeEqualTo(AddressType.spark)
        .sortByDerivationIndexDesc()
        .findFirst();
  }

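  /// Derives the next spark receiving address by incrementing the highest
  /// stored diversifier, skipping the reserved change diversifier
  /// [kSparkChange].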
  Future<Address> generateNextSparkAddress() async {
    final highestStoredDiversifier =
        (await getCurrentReceivingSparkAddress())?.derivationIndex;

    // default to starting at 1 if none found
    int diversifier = (highestStoredDiversifier ?? 0) + 1;

    // change address check
    if (diversifier == kSparkChange) {
      diversifier++;
    }

    final root = await getRootHDNode();
    final String derivationPath;
    if (cryptoCurrency.network.isTestNet) {
      derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
    } else {
      derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
    }
    final keys = root.derivePath(derivationPath);

    final String addressString = await LibSpark.getAddress(
      privateKey: keys.privateKey.data,
      index: kDefaultSparkIndex,
      diversifier: diversifier,
      isTestNet: cryptoCurrency.network.isTestNet,
    );

    return Address(
      walletId: walletId,
      value: addressString,
      publicKey: keys.publicKey.data,
      derivationIndex: diversifier,
      derivationPath: DerivationPath()..value = derivationPath,
      type: AddressType.spark,
      subType: AddressSubType.receiving,
    );
  }

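  /// Fee estimation for spark spends. Currently a placeholder that always
  /// returns zero (see the TODO in the body).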
  Future<Amount> estimateFeeForSpark(Amount amount) async {
    // int spendAmount = amount.raw.toInt();
    // if (spendAmount == 0) {
    return Amount(
      rawValue: BigInt.from(0),
      fractionDigits: cryptoCurrency.fractionDigits,
    );
    // }
    // TODO actual fee estimation
  }

  /// Spark to Spark/Transparent (spend) creation
  Future<TxData> prepareSendSpark({
    required TxData txData,
  }) async {
    // There should be at least one output.
    if (!(txData.recipients?.isNotEmpty == true ||
        txData.sparkRecipients?.isNotEmpty == true)) {
      throw Exception("No recipients provided.");
    }

    if (txData.sparkRecipients?.isNotEmpty == true &&
        txData.sparkRecipients!.length >= SPARK_OUT_LIMIT_PER_TX - 1) {
      throw Exception("Spark shielded output limit exceeded.");
    }

    final transparentSumOut =
        (txData.recipients ?? []).map((e) => e.amount).fold(
              Amount(
                rawValue: BigInt.zero,
                fractionDigits: cryptoCurrency.fractionDigits,
              ),
              (p, e) => p + e,
            );

    // See SPARK_VALUE_SPEND_LIMIT_PER_TRANSACTION at https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/include/spark.h#L17
    // and COIN https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L17
    // Note that as MAX_MONEY is greater than this limit, we can ignore it. See https://github.com/firoorg/sparkmobile/blob/ef2e39aae18ecc49e0ddc63a3183e9764b96012e/bitcoin/amount.h#L31
    if (transparentSumOut >
        Amount.fromDecimal(
          Decimal.parse("10000"),
          fractionDigits: cryptoCurrency.fractionDigits,
        )) {
      throw Exception(
        "Spend to transparent address limit exceeded (10,000 Firo per transaction).",
      );
    }

    final sparkSumOut =
        (txData.sparkRecipients ?? []).map((e) => e.amount).fold(
              Amount(
                rawValue: BigInt.zero,
                fractionDigits: cryptoCurrency.fractionDigits,
              ),
              (p, e) => p + e,
            );

    final txAmount = transparentSumOut + sparkSumOut;

    // fetch spendable spark coins
    final coins = await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .filter()
        .isUsedEqualTo(false)
        .and()
        .heightIsNotNull()
        .and()
        .not()
        .valueIntStringEqualTo("0")
        .findAll();

    final available = info.cachedBalanceTertiary.spendable;

    if (txAmount > available) {
      throw Exception("Insufficient Spark balance");
    }

    final bool isSendAll = available == txAmount;

    // prepare coin data for ffi
    final serializedCoins = coins
        .map(
          (e) => (
            serializedCoin: e.serializedCoinB64!,
            serializedCoinContext: e.contextB64!,
            groupId: e.groupId,
            height: e.height!,
          ),
        )
        .toList();

    final currentId = await electrumXClient.getSparkLatestCoinId();
    final List<Map<String, dynamic>> setMaps = [];
    final List<({int groupId, String blockHash})> idAndBlockHashes = [];
    for (int i = 1; i <= currentId; i++) {
      final resultSet = await FiroCacheCoordinator.getSetCoinsForGroupId(
        i,
        network: cryptoCurrency.network,
      );
      if (resultSet.isEmpty) {
        continue;
      }

      final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
        i,
        cryptoCurrency.network,
      );
      if (info == null) {
        throw Exception("The `info` should never be null here");
      }

      final Map<String, dynamic> setData = {
        "blockHash": info.blockHash,
        "setHash": info.setHash,
        "coinGroupID": i,
        "coins": resultSet
            .map(
              (e) => [
                e.serialized,
                e.txHash,
                e.context,
              ],
            )
            .toList(),
      };

      setMaps.add(setData);
      idAndBlockHashes.add(
        (
          groupId: i,
          blockHash: setData["blockHash"] as String,
        ),
      );
    }

    final allAnonymitySets = setMaps
        .map(
          (e) => (
            setId: e["coinGroupID"] as int,
            setHash: e["setHash"] as String,
            set: (e["coins"] as List)
                .map(
                  (e) => (
                    serializedCoin: e[0] as String,
                    txHash: e[1] as String,
                  ),
                )
                .toList(),
          ),
        )
        .toList();

    final root = await getRootHDNode();
    final String derivationPath;
    if (cryptoCurrency.network.isTestNet) {
      derivationPath = "$kSparkBaseDerivationPathTestnet$kDefaultSparkIndex";
    } else {
      derivationPath = "$kSparkBaseDerivationPath$kDefaultSparkIndex";
    }
    final privateKey = root.derivePath(derivationPath).privateKey.data;

    final txb = btc.TransactionBuilder(
      network: _bitcoinDartNetwork,
    );
    txb.setLockTime(await chainHeight);
    // tx version 3 with the extra payload tx type (9, spark spend) packed
    // into the upper 16 bits
    txb.setVersion(3 | (9 << 16));

    List<
        ({
          String address,
          Amount amount,
          bool isChange,
        })>? recipientsWithFeeSubtracted;
    List<
        ({
          String address,
          Amount amount,
          String memo,
          bool isChange,
        })>? sparkRecipientsWithFeeSubtracted;

    final recipientCount = (txData.recipients
            ?.where(
              (e) => e.amount.raw > BigInt.zero,
            )
            .length ??
        0);
    final totalRecipientCount =
        recipientCount + (txData.sparkRecipients?.length ?? 0);

    final BigInt estimatedFee;
    if (isSendAll) {
      final estFee = LibSpark.estimateSparkFee(
        privateKeyHex: privateKey.toHex,
        index: kDefaultSparkIndex,
        sendAmount: txAmount.raw.toInt(),
        subtractFeeFromAmount: true,
        serializedCoins: serializedCoins,
        privateRecipientsCount: (txData.sparkRecipients?.length ?? 0),
      );
      estimatedFee = BigInt.from(estFee);
    } else {
      estimatedFee = BigInt.zero;
    }

    if ((txData.sparkRecipients?.length ?? 0) > 0) {
      sparkRecipientsWithFeeSubtracted = [];
    }
    if (recipientCount > 0) {
      recipientsWithFeeSubtracted = [];
    }

    for (int i = 0; i < (txData.sparkRecipients?.length ?? 0); i++) {
      sparkRecipientsWithFeeSubtracted!.add(
        (
          address: txData.sparkRecipients![i].address,
          amount: Amount(
            rawValue: txData.sparkRecipients![i].amount.raw -
                (estimatedFee ~/ BigInt.from(totalRecipientCount)),
            fractionDigits: cryptoCurrency.fractionDigits,
          ),
          memo: txData.sparkRecipients![i].memo,
          isChange: sparkChangeAddress == txData.sparkRecipients![i].address,
        ),
      );
    }

    // temp tx data to show in gui while waiting for real data from server
    final List<InputV2> tempInputs = [];
    final List<OutputV2> tempOutputs = [];

    for (int i = 0; i < (txData.recipients?.length ?? 0); i++) {
      if (txData.recipients![i].amount.raw == BigInt.zero) {
        continue;
      }
      recipientsWithFeeSubtracted!.add(
        (
          address: txData.recipients![i].address,
          amount: Amount(
            rawValue: txData.recipients![i].amount.raw -
                (estimatedFee ~/ BigInt.from(totalRecipientCount)),
            fractionDigits: cryptoCurrency.fractionDigits,
          ),
          isChange: txData.recipients![i].isChange,
        ),
      );

      // use the entry just added rather than indexing with `i`, which can
      // drift when zero-amount recipients are skipped above
      final recipient = recipientsWithFeeSubtracted.last;

      final scriptPubKey = btc.Address.addressToOutputScript(
        txData.recipients![i].address,
        _bitcoinDartNetwork,
      );
      txb.addOutput(
        scriptPubKey,
        recipient.amount.raw.toInt(),
      );

      tempOutputs.add(
        OutputV2.isarCantDoRequiredInDefaultConstructor(
          scriptPubKeyHex: scriptPubKey.toHex,
          valueStringSats: recipient.amount.raw.toString(),
          addresses: [
            recipient.address,
          ],
          walletOwns: (await mainDB.isar.addresses
                  .where()
                  .walletIdEqualTo(walletId)
                  .filter()
                  .valueEqualTo(recipient.address)
                  .valueProperty()
                  .findFirst()) !=
              null,
        ),
      );
    }

    if (sparkRecipientsWithFeeSubtracted != null) {
      for (final recip in sparkRecipientsWithFeeSubtracted) {
        tempOutputs.add(
          OutputV2.isarCantDoRequiredInDefaultConstructor(
            scriptPubKeyHex: Uint8List.fromList([OP_SPARKSMINT]).toHex,
            valueStringSats: recip.amount.raw.toString(),
            addresses: [
              recip.address.toString(),
            ],
            walletOwns: (await mainDB.isar.addresses
                    .where()
                    .walletIdEqualTo(walletId)
                    .filter()
                    .valueEqualTo(recip.address)
                    .valueProperty()
                    .findFirst()) !=
                null,
          ),
        );
      }
    }

    final extractedTx = txb.buildIncomplete();
    extractedTx.addInput(
      '0000000000000000000000000000000000000000000000000000000000000000'
          .toUint8ListFromHex,
      0xffffffff,
      0xffffffff,
      "d3".toUint8ListFromHex, // OP_SPARKSPEND
    );
    extractedTx.setPayload(Uint8List(0));

2023-12-13 17:26:30 +00:00
2023-12-21 22:18:12 +00:00
final spend = await compute (
_createSparkSend ,
(
privateKeyHex: privateKey . toHex ,
index: kDefaultSparkIndex ,
recipients: txData . recipients
2024-05-09 17:56:42 +00:00
? . map (
( e ) = > (
address: e . address ,
amount: e . amount . raw . toInt ( ) ,
subtractFeeFromAmount: isSendAll ,
) ,
)
2023-12-21 22:18:12 +00:00
. toList ( ) ? ?
[ ] ,
privateRecipients: txData . sparkRecipients
2024-05-09 17:56:42 +00:00
? . map (
( e ) = > (
sparkAddress: e . address ,
amount: e . amount . raw . toInt ( ) ,
subtractFeeFromAmount: isSendAll ,
memo: e . memo ,
) ,
)
2023-12-21 22:18:12 +00:00
. toList ( ) ? ?
[ ] ,
serializedCoins: serializedCoins ,
allAnonymitySets: allAnonymitySets ,
idAndBlockHashes: idAndBlockHashes
. map (
2024-05-09 17:56:42 +00:00
( e ) = > ( setId: e . groupId , blockHash: base64Decode ( e . blockHash ) ) ,
)
2023-12-21 22:18:12 +00:00
. toList ( ) ,
txHash: extractedTx . getHash ( ) ,
) ,
2023-12-13 17:26:30 +00:00
) ;
for ( final outputScript in spend . outputScripts ) {
2023-12-21 16:23:55 +00:00
extractedTx . addOutput ( outputScript , 0 ) ;
2023-12-13 17:26:30 +00:00
}
2023-12-20 19:45:46 +00:00
extractedTx . setPayload ( spend . serializedSpendPayload ) ;
2023-12-13 17:26:30 +00:00
final rawTxHex = extractedTx . toHex ( ) ;
2023-12-21 20:41:29 +00:00
if ( isSendAll ) {
txData = txData . copyWith (
recipients: recipientsWithFeeSubtracted ,
sparkRecipients: sparkRecipientsWithFeeSubtracted ,
) ;
}
    final fee = Amount(
      rawValue: BigInt.from(spend.fee),
      fractionDigits: cryptoCurrency.fractionDigits,
    );

    tempInputs.add(
      InputV2.isarCantDoRequiredInDefaultConstructor(
        scriptSigHex: "d3",
        scriptSigAsm: null,
        sequence: 0xffffffff,
        outpoint: null,
        addresses: [],
        valueStringSats: tempOutputs
            .map((e) => e.value)
            .fold(fee.raw, (p, e) => p + e)
            .toString(),
        witness: null,
        innerRedeemScriptAsm: null,
        coinbase: null,
        walletOwns: true,
      ),
    );

    final List<SparkCoin> usedSparkCoins = [];
    for (final usedCoin in spend.usedCoins) {
      try {
        usedSparkCoins.add(
          coins
              .firstWhere(
                (e) =>
                    usedCoin.height == e.height &&
                    usedCoin.groupId == e.groupId &&
                    base64Decode(e.serializedCoinB64!).toHex.startsWith(
                          base64Decode(usedCoin.serializedCoin).toHex,
                        ),
              )
              .copyWith(
                isUsed: true,
              ),
        );
      } catch (_) {
        throw Exception(
          "Unexpectedly did not find used spark coin. This should never happen.",
        );
      }
    }

    return txData.copyWith(
      raw: rawTxHex,
      vSize: extractedTx.virtualSize(),
      fee: fee,
      tempTx: TransactionV2(
        walletId: walletId,
        blockHash: null,
        hash: extractedTx.getId(),
        txid: extractedTx.getId(),
        timestamp: DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
        inputs: List.unmodifiable(tempInputs),
        outputs: List.unmodifiable(tempOutputs),
        type: tempOutputs.map((e) => e.walletOwns).fold(true, (p, e) => p &= e)
            ? TransactionType.sentToSelf
            : TransactionType.outgoing,
        subType: TransactionSubType.sparkSpend,
        otherData: jsonEncode(
          {
            "overrideFee": fee.toJsonString(),
          },
        ),
        height: null,
        version: 3,
      ),
      usedSparkCoins: usedSparkCoins,
    );
  }

  // this may not be needed for either mints or spends or both
  Future<TxData> confirmSendSpark({
    required TxData txData,
  }) async {
    try {
      Logging.instance.log("confirmSend txData: $txData", level: LogLevel.Info);

      final txHash = await electrumXClient.broadcastTransaction(
        rawTx: txData.raw!,
      );
      Logging.instance.log("Sent txHash: $txHash", level: LogLevel.Info);

      txData = txData.copyWith(
        // TODO revisit setting these both
        txHash: txHash,
        txid: txHash,
      );

      // Update used spark coins as used in database. They should already have
      // been marked as isUsed.
      // TODO: [prio=med] Could (probably should) throw an exception here if txData.usedSparkCoins is null or empty
      if (txData.usedSparkCoins != null && txData.usedSparkCoins!.isNotEmpty) {
        await mainDB.isar.writeTxn(() async {
          await mainDB.isar.sparkCoins.putAll(txData.usedSparkCoins!);
        });
      }

      return await updateSentCachedTxData(txData: txData);
    } catch (e, s) {
      Logging.instance.log(
        "Exception rethrown from confirmSend(): $e\n$s",
        level: LogLevel.Error,
      );
      rethrow;
    }
  }

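  /// In-memory mempool scan caches: [_mempoolTxids] mirrors the txids
  /// currently in the node's mempool and [_mempoolTxidsChecked] tracks which
  /// of those have already been scanned for owned spark coins.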
  Set<String> _mempoolTxids = {};
  Set<String> _mempoolTxidsChecked = {};

  Future<List<SparkCoin>> _refreshSparkCoinsMempoolCheck({
    required Set<String> privateKeyHexSet,
    required int groupId,
  }) async {
    final start = DateTime.now();
    try {
      // update cache
      _mempoolTxids = await electrumXClient.getMempoolTxids();

      // remove any checked txids that are no longer in the mempool
      _mempoolTxidsChecked = _mempoolTxidsChecked.intersection(_mempoolTxids);

      // get all unchecked txids currently in the mempool
      final txidsToCheck = _mempoolTxids.difference(_mempoolTxidsChecked);
      if (txidsToCheck.isEmpty) {
        return [];
      }

      // fetch spark data to scan if we own any unconfirmed spark coins
      final sparkDataToCheck = await electrumXClient.getMempoolSparkData(
        txids: txidsToCheck.toList(),
      );

      final Set<String> checkedTxids = {};
      final List<List<String>> rawCoins = [];
      for (final data in sparkDataToCheck) {
        for (int i = 0; i < data.coins.length; i++) {
          rawCoins.add([
            data.coins[i],
            data.txid,
            data.serialContext.first,
          ]);
        }
        checkedTxids.add(data.txid);
      }

      final result = <SparkCoin>[];

      // if there is new data we try and identify the coins
      if (rawCoins.isNotEmpty) {
        // run identify off main isolate
        final myCoins = await compute(
          _identifyCoins,
          (
            anonymitySetCoins: rawCoins,
            groupId: groupId,
            privateKeyHexSet: privateKeyHexSet,
            walletId: walletId,
            isTestNet: cryptoCurrency.network.isTestNet,
          ),
        );

        // only add checked txids after identification completes
        _mempoolTxidsChecked.addAll(checkedTxids);

        result.addAll(myCoins);
      }

      return result;
    } catch (e) {
      Logging.instance.log(
        "_refreshSparkCoinsMempoolCheck() failed: $e",
        level: LogLevel.Error,
      );
      return [];
    } finally {
      Logging.instance.log(
        "$walletId ${info.name} _refreshSparkCoinsMempoolCheck() run "
        "duration: ${DateTime.now().difference(start)}",
        level: LogLevel.Debug,
      );
    }
  }

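  /// Refreshes the local spark anonymity set and used coin tag caches, scans
  /// new set data and the mempool for coins owned by this wallet, updates
  /// coin heights and used flags, and recalculates the wallet's spark
  /// (tertiary) balance.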
  Future<void> refreshSparkData() async {
    final start = DateTime.now();
    try {
      // start by checking if any previous sets are missing from db and add the
      // missing groupIds to the list of sets to check and update
      final latestGroupId = await electrumXClient.getSparkLatestCoinId();
      final List<int> groupIds = [];
      if (latestGroupId > 1) {
        for (int id = 1; id < latestGroupId; id++) {
          final setExists =
              await FiroCacheCoordinator.checkSetInfoForGroupIdExists(
            id,
            cryptoCurrency.network,
          );
          if (!setExists) {
            groupIds.add(id);
          }
        }
      }
      groupIds.add(latestGroupId);

      // start fetch and update process for each set groupId as required
      final possibleFutures = groupIds.map(
        (e) =>
            FiroCacheCoordinator.runFetchAndUpdateSparkAnonSetCacheForGroupId(
          e,
          electrumXClient,
          cryptoCurrency.network,
        ),
      );

      // wait for each fetch and update to complete
      await Future.wait([
        ...possibleFutures,
        FiroCacheCoordinator.runFetchAndUpdateSparkUsedCoinTags(
          electrumXClient,
          cryptoCurrency.network,
        ),
      ]);

      // Get cached timestamps per groupId. These timestamps are used to check
      // and try to id coins that were added to the spark anon set cache
      // after that timestamp.
      final groupIdTimestampUTCMap =
          info.otherData[WalletInfoKeys.firoSparkCacheSetTimestampCache]
                  as Map? ??
              {};

      // iterate through the cache, fetching spark coin data that hasn't been
      // processed by this wallet yet
      final Map<int, List<List<String>>> rawCoinsBySetId = {};
      for (int i = 1; i <= latestGroupId; i++) {
        final lastCheckedTimeStampUTC =
            groupIdTimestampUTCMap[i.toString()] as int? ?? 0;
        final info = await FiroCacheCoordinator.getLatestSetInfoForGroupId(
          i,
          cryptoCurrency.network,
        );
        final anonymitySetResult =
            await FiroCacheCoordinator.getSetCoinsForGroupId(
          i,
          newerThanTimeStamp: lastCheckedTimeStampUTC,
          network: cryptoCurrency.network,
        );
        final coinsRaw = anonymitySetResult
            .map(
              (e) => [
                e.serialized,
                e.txHash,
                e.context,
              ],
            )
            .toList();

        if (coinsRaw.isNotEmpty) {
          rawCoinsBySetId[i] = coinsRaw;
        }

        // update last checked timestamp data
        groupIdTimestampUTCMap[i.toString()] = max(
          lastCheckedTimeStampUTC,
          info?.timestampUTC ?? lastCheckedTimeStampUTC,
        );
      }

      // get address(es) to get the private key hex strings required for
      // identifying spark coins
      final sparkAddresses = await mainDB.isar.addresses
          .where()
          .walletIdEqualTo(walletId)
          .filter()
          .typeEqualTo(AddressType.spark)
          .findAll();
      final root = await getRootHDNode();
      final Set<String> privateKeyHexSet = sparkAddresses
          .map(
            (e) =>
                root.derivePath(e.derivationPath!.value).privateKey.data.toHex,
          )
          .toSet();

      // try to identify any coins in the unchecked set data
      final List<SparkCoin> newlyIdCoins = [];
      for (final groupId in rawCoinsBySetId.keys) {
        final myCoins = await compute(
          _identifyCoins,
          (
            anonymitySetCoins: rawCoinsBySetId[groupId]!,
            groupId: groupId,
            privateKeyHexSet: privateKeyHexSet,
            walletId: walletId,
            isTestNet: cryptoCurrency.network.isTestNet,
          ),
        );
        newlyIdCoins.addAll(myCoins);
      }
      // if any were found, add to database
      if (newlyIdCoins.isNotEmpty) {
        await mainDB.isar.writeTxn(() async {
          await mainDB.isar.sparkCoins.putAll(newlyIdCoins);
        });
      }

      // finally update the cached timestamps in the database
      await info.updateOtherData(
        newEntries: {
          WalletInfoKeys.firoSparkCacheSetTimestampCache:
              groupIdTimestampUTCMap,
        },
        isar: mainDB.isar,
      );

      // check for spark coins in mempool
      final mempoolMyCoins = await _refreshSparkCoinsMempoolCheck(
        privateKeyHexSet: privateKeyHexSet,
        groupId: latestGroupId,
      );
      // if any were found, add to database
      if (mempoolMyCoins.isNotEmpty) {
        await mainDB.isar.writeTxn(() async {
          await mainDB.isar.sparkCoins.putAll(mempoolMyCoins);
        });
      }

      // get unused and or unconfirmed coins from db
      final coinsToCheck = await mainDB.isar.sparkCoins
          .where()
          .walletIdEqualToAnyLTagHash(walletId)
          .filter()
          .heightIsNull()
          .or()
          .isUsedEqualTo(false)
          .findAll();

      Set<String>? spentCoinTags;
      // only fetch tags from db if we need them to compare against any items
      // in coinsToCheck
      if (coinsToCheck.isNotEmpty) {
        spentCoinTags = await FiroCacheCoordinator.getUsedCoinTags(
          0,
          cryptoCurrency.network,
        );
      }

      // check and update coins if required
      final List<SparkCoin> updatedCoins = [];
      for (final coin in coinsToCheck) {
        SparkCoin updated = coin;

        if (updated.height == null) {
          final tx = await electrumXCachedClient.getTransaction(
            txHash: updated.txHash,
            cryptoCurrency: info.coin,
          );
          if (tx["height"] is int) {
            updated = updated.copyWith(height: tx["height"] as int);
          }
        }

        if (updated.height != null &&
            spentCoinTags!.contains(updated.lTagHash)) {
          updated = coin.copyWith(isUsed: true);
        }

        updatedCoins.add(updated);
      }
      // update in db if any have changed
      if (updatedCoins.isNotEmpty) {
        await mainDB.isar.writeTxn(() async {
          await mainDB.isar.sparkCoins.putAll(updatedCoins);
        });
      }

      // used to check if balance is spendable or total
      final currentHeight = await chainHeight;

      // get all unused coins to update wallet spark balance
      final unusedCoins = await mainDB.isar.sparkCoins
          .where()
          .walletIdEqualToAnyLTagHash(walletId)
          .filter()
          .isUsedEqualTo(false)
          .findAll();

      final total = Amount(
        rawValue: unusedCoins
            .map((e) => e.value)
            .fold(BigInt.zero, (prev, e) => prev + e),
        fractionDigits: cryptoCurrency.fractionDigits,
      );
      final spendable = Amount(
        rawValue: unusedCoins
            .where(
              (e) =>
                  e.height != null &&
                  e.height! + cryptoCurrency.minConfirms <= currentHeight,
            )
            .map((e) => e.value)
            .fold(BigInt.zero, (prev, e) => prev + e),
        fractionDigits: cryptoCurrency.fractionDigits,
      );

      final sparkBalance = Balance(
        total: total,
        spendable: spendable,
        blockedTotal: Amount(
          rawValue: BigInt.zero,
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
        pendingSpendable: total - spendable,
      );

      // finally update balance in db
      await info.updateBalanceTertiary(
        newBalance: sparkBalance,
        isar: mainDB.isar,
      );
    } catch (e, s) {
      Logging.instance.log(
        "$runtimeType $walletId ${info.name}: $e\n$s",
        level: LogLevel.Error,
      );
      rethrow;
    } finally {
      Logging.instance.log(
        "${info.name} refreshSparkData() duration: "
        "${DateTime.now().difference(start)}",
        level: LogLevel.Debug,
      );
    }
  }

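  /// Returns the (lTagHash, txid) pairs for this wallet's used spark coins
  /// whose spend transactions are not yet stored locally.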
  Future<Set<LTagPair>> getMissingSparkSpendTransactionIds() async {
    final tags = await mainDB.isar.sparkCoins
        .where()
        .walletIdEqualToAnyLTagHash(walletId)
        .filter()
        .isUsedEqualTo(true)
        .lTagHashProperty()
        .findAll();

    final usedCoinTxidsFoundLocally = await mainDB.isar.transactionV2s
        .where()
        .walletIdEqualTo(walletId)
        .filter()
        .subTypeEqualTo(TransactionSubType.sparkSpend)
        .txidProperty()
        .findAll();

    final pairs = await FiroCacheCoordinator.getUsedCoinTxidsFor(
      tags: tags,
      network: cryptoCurrency.network,
    );

    pairs.removeWhere((e) => usedCoinTxidsFoundLocally.contains(e.txid));

    return pairs.toSet();
  }

  /// Should only be called within the standard wallet [recover] function due to
  /// mutex locking. Otherwise behaviour MAY be undefined.
  Future<void> recoverSparkWallet({
    required int latestSparkCoinId,
  }) async {
    // generate spark addresses if non existing
    if (await getCurrentReceivingSparkAddress() == null) {
      final address = await generateNextSparkAddress();
      await mainDB.putAddress(address);
    }

    try {
      await refreshSparkData();
    } catch (e, s) {
      Logging.instance.log(
        "$runtimeType $walletId ${info.name}: $e\n$s",
        level: LogLevel.Error,
      );
      rethrow;
    }
  }

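  /// Builds one or more transparent-to-spark mint transactions from
  /// [availableUtxos], grouping UTXOs by address and looping until each
  /// transaction includes enough fee.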
  // modelled on CSparkWallet::CreateSparkMintTransactions https://github.com/firoorg/firo/blob/39c41e5e7ec634ced3700fe3f4f5509dc2e480d0/src/spark/sparkwallet.cpp#L752
  Future<List<TxData>> _createSparkMintTransactions({
    required List<UTXO> availableUtxos,
    required List<MutableSparkRecipient> outputs,
    required bool subtractFeeFromAmount,
    required bool autoMintAll,
  }) async {
    // pre checks
    if (outputs.isEmpty) {
      throw Exception("Cannot mint without some recipients");
    }

    // TODO remove when multiple recipients gui is added. Will need to handle
    // addresses when confirming the transactions later as well
    assert(outputs.length == 1);

    BigInt valueToMint =
        outputs.map((e) => e.value).reduce((value, element) => value + element);

    if (valueToMint <= BigInt.zero) {
      throw Exception("Cannot mint amount=$valueToMint");
    }
    final totalUtxosValue = _sum(availableUtxos);
    if (valueToMint > totalUtxosValue) {
      throw Exception("Insufficient balance to create spark mint(s)");
    }

    // organise utxos
    final Map<String, List<UTXO>> utxosByAddress = {};
    for (final utxo in availableUtxos) {
      utxosByAddress[utxo.address!] ??= [];
      utxosByAddress[utxo.address!]!.add(utxo);
    }
    final valueAndUTXOs = utxosByAddress.values.toList();

    // setup some vars
    int nChangePosInOut = -1;
    final int nChangePosRequest = nChangePosInOut;
    List<MutableSparkRecipient> outputs_ = outputs
        .map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
        .toList(); // deep copy
    final feesObject = await fees;
    final currentHeight = await chainHeight;
    final random = Random.secure();
    final List<TxData> results = [];

    valueAndUTXOs.shuffle(random);

    while (valueAndUTXOs.isNotEmpty) {
      final lockTime = random.nextInt(10) == 0
          ? max(0, currentHeight - random.nextInt(100))
          : currentHeight;
      const txVersion = 1;
      final List<SigningData> vin = [];
      final List<(dynamic, int, String?)> vout = [];

      BigInt nFeeRet = BigInt.zero;

      final itr = valueAndUTXOs.first;
      BigInt valueToMintInTx = _sum(itr);

      if (!autoMintAll) {
        valueToMintInTx = _min(valueToMintInTx, valueToMint);
      }

      BigInt nValueToSelect, mintedValue;
      final List<SigningData> setCoins = [];
      bool skipCoin = false;

      // Start with no fee and loop until there is enough fee
      while (true) {
        mintedValue = valueToMintInTx;

        if (subtractFeeFromAmount) {
          nValueToSelect = mintedValue;
        } else {
          nValueToSelect = mintedValue + nFeeRet;
        }

        // if not enough coins in this group then subtract fee from mint
        if (nValueToSelect > _sum(itr) && !subtractFeeFromAmount) {
          nValueToSelect = mintedValue;
          mintedValue -= nFeeRet;
        }

        // if (!MoneyRange(mintedValue) || mintedValue == 0) {
        if (mintedValue == BigInt.zero) {
          valueAndUTXOs.remove(itr);
          skipCoin = true;
          break;
        }

        nChangePosInOut = nChangePosRequest;
        vin.clear();
        vout.clear();
        setCoins.clear();

        // deep copy
        final remainingOutputs = outputs_
            .map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
            .toList();
        final List<MutableSparkRecipient> singleTxOutputs = [];

        if (autoMintAll) {
          singleTxOutputs.add(
            MutableSparkRecipient(
              (await getCurrentReceivingSparkAddress())!.value,
              mintedValue,
              "",
            ),
          );
        } else {
          BigInt remainingMintValue = BigInt.parse(mintedValue.toString());

          while (remainingMintValue > BigInt.zero) {
            final singleMintValue =
                _min(remainingMintValue, remainingOutputs.first.value);
            singleTxOutputs.add(
              MutableSparkRecipient(
                remainingOutputs.first.address,
                singleMintValue,
                remainingOutputs.first.memo,
              ),
            );

            // subtract minted amount from remaining value
            remainingMintValue -= singleMintValue;
            remainingOutputs.first.value -= singleMintValue;

            if (remainingOutputs.first.value == BigInt.zero) {
              remainingOutputs.remove(remainingOutputs.first);
            }
          }
        }

        if (subtractFeeFromAmount) {
          final BigInt singleFee =
              nFeeRet ~/ BigInt.from(singleTxOutputs.length);
          BigInt remainder = nFeeRet % BigInt.from(singleTxOutputs.length);

          for (int i = 0; i < singleTxOutputs.length; ++i) {
            if (singleTxOutputs[i].value <= singleFee) {
              // This output cannot cover its share of the fee. Read its value
              // before removal (the old code read the element *after*
              // removeAt, which could grab the wrong output or throw a
              // RangeError), roll the shortfall into the remainder, and skip
              // the rest of the loop body for it.
              remainder += singleTxOutputs[i].value - singleFee;
              singleTxOutputs.removeAt(i);
              --i;
              continue;
            }
            singleTxOutputs[i].value -= singleFee;
            if (remainder > BigInt.zero &&
                singleTxOutputs[i].value >
                    nFeeRet % BigInt.from(singleTxOutputs.length)) {
              // first receiver pays the remainder not divisible by output count
              singleTxOutputs[i].value -= remainder;
              remainder = BigInt.zero;
            }
          }
        }

        // Generate dummy mint coins to save time
        final dummyRecipients = LibSpark.createSparkMintRecipients(
          outputs: singleTxOutputs
              .map(
                (e) => (
                  sparkAddress: e.address,
                  value: e.value.toInt(),
                  memo: "",
                ),
              )
              .toList(),
          serialContext: Uint8List(0),
          generate: false,
        );

        final dummyTxb = btc.TransactionBuilder(network: _bitcoinDartNetwork);
        dummyTxb.setVersion(txVersion);
        dummyTxb.setLockTime(lockTime);
        for (int i = 0; i < dummyRecipients.length; i++) {
          final recipient = dummyRecipients[i];
          if (recipient.amount < cryptoCurrency.dustLimit.raw.toInt()) {
            throw Exception("Output amount too small");
          }
          vout.add(
            (
              recipient.scriptPubKey,
              recipient.amount,
              singleTxOutputs[i].address,
            ),
          );
        }

        // Choose coins to use
        BigInt nValueIn = BigInt.zero;
        for (final utxo in itr) {
          if (nValueToSelect > nValueIn) {
            setCoins.add((await fetchBuildTxData([utxo])).first);
            nValueIn += BigInt.from(utxo.value);
          }
        }
        if (nValueIn < nValueToSelect) {
          throw Exception("Insufficient funds");
        }

        // priority stuff???

        final BigInt nChange = nValueIn - nValueToSelect;
        if (nChange > BigInt.zero) {
          if (nChange < cryptoCurrency.dustLimit.raw) {
            nChangePosInOut = -1;
            nFeeRet += nChange;
          } else {
            if (nChangePosInOut == -1) {
              nChangePosInOut = random.nextInt(vout.length + 1);
            } else if (nChangePosInOut > vout.length) {
              throw Exception("Change index out of range");
            }

            final changeAddress = await getCurrentChangeAddress();
            vout.insert(
              nChangePosInOut,
              (changeAddress!.value, nChange.toInt(), null),
            );
          }
        }

        // add outputs for dummy tx to check fees
        for (final out in vout) {
          dummyTxb.addOutput(out.$1, out.$2);
        }

        // fill vin
        for (final sd in setCoins) {
          vin.add(sd);

          final pubKey = sd.keyPair!.publicKey.data;
          final btc.PaymentData? data;
          switch (sd.derivePathType) {
            case DerivePathType.bip44:
              data = btc
                  .P2PKH(
                    data: btc.PaymentData(
                      pubkey: pubKey,
                    ),
                    network: _bitcoinDartNetwork,
                  )
                  .data;
              break;

            case DerivePathType.bip49:
              final p2wpkh = btc
                  .P2WPKH(
                    data: btc.PaymentData(
                      pubkey: pubKey,
                    ),
                    network: _bitcoinDartNetwork,
                  )
                  .data;
              data = btc
                  .P2SH(
                    data: btc.PaymentData(redeem: p2wpkh),
                    network: _bitcoinDartNetwork,
                  )
                  .data;
              break;

            case DerivePathType.bip84:
              data = btc
                  .P2WPKH(
                    data: btc.PaymentData(
                      pubkey: pubKey,
                    ),
                    network: _bitcoinDartNetwork,
                  )
                  .data;
              break;

            case DerivePathType.bip86:
              data = null;
              break;

            default:
              throw Exception("DerivePathType unsupported");
          }

          // add to dummy tx
          dummyTxb.addInput(
            sd.utxo.txid,
            sd.utxo.vout,
            0xffffffff -
                1, // minus 1 is important. 0xffffffff on its own will burn funds
            data!.output!,
          );
        }

        // sign dummy tx
        for (var i = 0; i < setCoins.length; i++) {
          dummyTxb.sign(
            vin: i,
            keyPair: btc.ECPair.fromPrivateKey(
              setCoins[i].keyPair!.privateKey.data,
              network: _bitcoinDartNetwork,
              compressed: setCoins[i].keyPair!.privateKey.compressed,
            ),
            witnessValue: setCoins[i].utxo.value,
            // maybe not needed here as this was originally copied from btc? We'll find out...
            // redeemScript: setCoins[i].redeemScript,
          );
        }

        final dummyTx = dummyTxb.build();
        final nBytes = dummyTx.virtualSize();

        if (dummyTx.weight() > MAX_STANDARD_TX_WEIGHT) {
          throw Exception("Transaction too large");
        }

        final nFeeNeeded = BigInt.from(
          estimateTxFee(
            vSize: nBytes,
            feeRatePerKB: feesObject.medium,
          ),
        ); // One day we'll do this properly

        if (nFeeRet >= nFeeNeeded) {
          for (final usedCoin in setCoins) {
            itr.removeWhere((e) => e == usedCoin.utxo);
          }
          if (itr.isEmpty) {
            final preLength = valueAndUTXOs.length;
            valueAndUTXOs.remove(itr);
            assert(preLength - 1 == valueAndUTXOs.length);
          }

          // Generate real mint coins
          final serialContext = LibSpark.serializeMintContext(
            inputs: setCoins
                .map(
                  (e) => (
                    e.utxo.txid,
                    e.utxo.vout,
                  ),
                )
                .toList(),
          );
          final recipients = LibSpark.createSparkMintRecipients(
            outputs: singleTxOutputs
                .map(
                  (e) => (
                    sparkAddress: e.address,
                    memo: e.memo,
                    value: e.value.toInt(),
                  ),
                )
                .toList(),
            serialContext: serialContext,
            generate: true,
          );

          // Replace each dummy spark mint output with the real one. Track the
          // vout position separately from the recipient index so a change
          // output inserted among the mints does not desync the two.
          int voutIndex = 0;
          for (int i = 0; i < recipients.length; i++) {
            final recipient = recipients[i];
            final out = (
              recipient.scriptPubKey,
              recipient.amount,
              singleTxOutputs[i].address,
            );

            while (voutIndex < vout.length) {
              if (vout[voutIndex].$1 is Uint8List &&
                  (vout[voutIndex].$1 as Uint8List).isNotEmpty &&
                  (vout[voutIndex].$1 as Uint8List)[0] == OP_SPARKMINT) {
                vout[voutIndex] = out;
                break;
              }
              ++voutIndex;
            }
            ++voutIndex;
          }

          // deep copy
          outputs_ = remainingOutputs
              .map((e) => MutableSparkRecipient(e.address, e.value, e.memo))
              .toList();

          break; // Done, enough fee included.
        }

        // Include more fee and try again.
        nFeeRet = nFeeNeeded;
        continue;
      }

      if (skipCoin) {
        continue;
      }

      // temp tx data to show in gui while waiting for real data from server
      final List<InputV2> tempInputs = [];
      final List<OutputV2> tempOutputs = [];

      // sign
      final txb = btc.TransactionBuilder(network: _bitcoinDartNetwork);
      txb.setVersion(txVersion);
      txb.setLockTime(lockTime);
      for (final input in vin) {
        final pubKey = input.keyPair!.publicKey.data;
        final btc.PaymentData? data;
        switch (input.derivePathType) {
          case DerivePathType.bip44:
            data = btc
                .P2PKH(
                  data: btc.PaymentData(
                    pubkey: pubKey,
                  ),
                  network: _bitcoinDartNetwork,
                )
                .data;
            break;

          case DerivePathType.bip49:
            final p2wpkh = btc
                .P2WPKH(
                  data: btc.PaymentData(
                    pubkey: pubKey,
                  ),
                  network: _bitcoinDartNetwork,
                )
                .data;
            data = btc
                .P2SH(
                  data: btc.PaymentData(redeem: p2wpkh),
                  network: _bitcoinDartNetwork,
                )
                .data;
            break;

          case DerivePathType.bip84:
            data = btc
                .P2WPKH(
                  data: btc.PaymentData(
                    pubkey: pubKey,
                  ),
                  network: _bitcoinDartNetwork,
                )
                .data;
            break;

          case DerivePathType.bip86:
            data = null;
            break;

          default:
            throw Exception("DerivePathType unsupported");
        }

        txb.addInput(
          input.utxo.txid,
          input.utxo.vout,
          0xffffffff -
              1, // minus 1 is important. 0xffffffff on its own will burn funds
          data!.output!,
        );

        tempInputs.add(
          InputV2.isarCantDoRequiredInDefaultConstructor(
            scriptSigHex: txb.inputs.first.script?.toHex,
            scriptSigAsm: null,
            sequence: 0xffffffff - 1,
            outpoint: OutpointV2.isarCantDoRequiredInDefaultConstructor(
              txid: input.utxo.txid,
              vout: input.utxo.vout,
            ),
            addresses: input.utxo.address == null ? [] : [input.utxo.address!],
            valueStringSats: input.utxo.value.toString(),
            witness: null,
            innerRedeemScriptAsm: null,
            coinbase: null,
            walletOwns: true,
          ),
        );
      }

      for (final output in vout) {
        final addressOrScript = output.$1;
        final value = output.$2;
        txb.addOutput(addressOrScript, value);

        tempOutputs.add(
          OutputV2.isarCantDoRequiredInDefaultConstructor(
            scriptPubKeyHex:
                addressOrScript is Uint8List ? addressOrScript.toHex : "000000",
            valueStringSats: value.toString(),
            addresses: [
              if (addressOrScript is String) addressOrScript.toString(),
            ],
            walletOwns: (await mainDB.isar.addresses
                    .where()
                    .walletIdEqualTo(walletId)
                    .filter()
                    .valueEqualTo(
                      addressOrScript is Uint8List
                          ? output.$3!
                          : addressOrScript as String,
                    )
                    .valueProperty()
                    .findFirst()) !=
                null,
          ),
        );
      }

      try {
        for (var i = 0; i < vin.length; i++) {
          txb.sign(
            vin: i,
            keyPair: btc.ECPair.fromPrivateKey(
              vin[i].keyPair!.privateKey.data,
              network: _bitcoinDartNetwork,
              compressed: vin[i].keyPair!.privateKey.compressed,
            ),
            witnessValue: vin[i].utxo.value,
            // maybe not needed here as this was originally copied from btc? We'll find out...
            // redeemScript: setCoins[i].redeemScript,
          );
        }
      } catch (e, s) {
        Logging.instance.log(
          "Caught exception while signing spark mint transaction: $e\n$s",
          level: LogLevel.Error,
        );
        rethrow;
      }
      final builtTx = txb.build();

      // TODO: see todo at top of this function
      assert(outputs.length == 1);

      final data = TxData(
        sparkRecipients: vout
            .where((e) => e.$1 is Uint8List) // ignore change
            .map(
              (e) => (
                address: outputs.first
                    .address, // for display purposes on confirm tx screen. See todos above
                memo: "",
                amount: Amount(
                  rawValue: BigInt.from(e.$2),
                  fractionDigits: cryptoCurrency.fractionDigits,
                ),
                isChange: false, // ok?
              ),
            )
            .toList(),
        vSize: builtTx.virtualSize(),
        txid: builtTx.getId(),
        raw: builtTx.toHex(),
        fee: Amount(
          rawValue: nFeeRet,
          fractionDigits: cryptoCurrency.fractionDigits,
        ),
        usedUTXOs: vin.map((e) => e.utxo).toList(),
        tempTx: TransactionV2(
          walletId: walletId,
          blockHash: null,
          hash: builtTx.getId(),
          txid: builtTx.getId(),
          timestamp: DateTime.timestamp().millisecondsSinceEpoch ~/ 1000,
          inputs: List.unmodifiable(tempInputs),
          outputs: List.unmodifiable(tempOutputs),
          type:
              tempOutputs.map((e) => e.walletOwns).fold(true, (p, e) => p &= e)
                  ? TransactionType.sentToSelf
                  : TransactionType.outgoing,
          subType: TransactionSubType.sparkMint,
          otherData: null,
          height: null,
          version: 3,
        ),
      );

      if (nFeeRet.toInt() < data.vSize!) {
        throw Exception("fee is less than vSize");
      }

      results.add(data);

      if (nChangePosInOut >= 0) {
        final vOut = vout[nChangePosInOut];
        assert(vOut.$1 is String); // check to make sure is change address

        final out = UTXO(
          walletId: walletId,
          txid: data.txid!,
          vout: nChangePosInOut,
          value: vOut.$2,
          address: vOut.$1 as String,
          name: "Spark mint change",
          isBlocked: false,
          blockedReason: null,
          isCoinbase: false,
          blockHash: null,
          blockHeight: null,
          blockTime: null,
        );

        bool added = false;
        for (final utxos in valueAndUTXOs) {
          if (utxos.first.address == out.address) {
            utxos.add(out);
            added = true;
          }
        }

        if (!added) {
          valueAndUTXOs.add([out]);
        }
      }

      if (!autoMintAll) {
        valueToMint -= mintedValue;
        if (valueToMint == BigInt.zero) {
          break;
        }
      }
    }

    if (!autoMintAll && valueToMint > BigInt.zero) {
      // TODO: Is this a valid error message?
      throw Exception("Failed to mint expected amounts");
    }

    return results;
  }

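  /// Mints the wallet's entire spendable transparent balance to the current
  /// spark receiving address, then broadcasts the resulting transactions.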
  Future<void> anonymizeAllSpark() async {
    try {
      const subtractFeeFromAmount = true; // must be true for mint all
      final currentHeight = await chainHeight;

      final spendableUtxos = await mainDB.isar.utxos
          .where()
          .walletIdEqualTo(walletId)
          .filter()
          .isBlockedEqualTo(false)
          .and()
          .group((q) => q.usedEqualTo(false).or().usedIsNull())
          .and()
          .valueGreaterThan(0)
          .findAll();

      spendableUtxos.removeWhere(
        (e) => !e.isConfirmed(
          currentHeight,
          cryptoCurrency.minConfirms,
        ),
      );

      if (spendableUtxos.isEmpty) {
        throw Exception("No available UTXOs found to anonymize");
      }

      final mints = await _createSparkMintTransactions(
        subtractFeeFromAmount: subtractFeeFromAmount,
        autoMintAll: true,
        availableUtxos: spendableUtxos,
        outputs: [
          MutableSparkRecipient(
            (await getCurrentReceivingSparkAddress())!.value,
            spendableUtxos
                .map((e) => BigInt.from(e.value))
                .fold(BigInt.zero, (p, e) => p + e),
            "",
          ),
        ],
      );

      await confirmSparkMintTransactions(txData: TxData(sparkMints: mints));
    } catch (e, s) {
      Logging.instance.log(
        "Exception caught in anonymizeAllSpark(): $e\n$s",
        level: LogLevel.Warning,
      );
      rethrow;
    }
  }

  /// Transparent to Spark (mint) transaction creation.
  ///
  /// See https://docs.google.com/document/d/1RG52GoYTZDvKlZz_3G4sQu-PpT6JWSZGHLNswWcrE3o
  Future<TxData> prepareSparkMintTransaction({required TxData txData}) async {
    try {
      if (txData.sparkRecipients?.isNotEmpty != true) {
        throw Exception("Missing spark recipients.");
      }

      final recipients = txData.sparkRecipients!
          .map(
            (e) => MutableSparkRecipient(
              e.address,
              e.amount.raw,
              e.memo,
            ),
          )
          .toList();

      final total = recipients
          .map((e) => e.value)
          .reduce((value, element) => value += element);

      if (total < BigInt.zero) {
        throw Exception("Attempted send of negative amount");
      } else if (total == BigInt.zero) {
        throw Exception("Attempted send of zero amount");
      }

      final currentHeight = await chainHeight;

      // coin control not enabled for firo currently so we can ignore this
      // final utxosToUse = txData.utxos?.toList() ?? await mainDB.isar.utxos
      //     .where()
      //     .walletIdEqualTo(walletId)
      //     .filter()
      //     .isBlockedEqualTo(false)
      //     .and()
      //     .group((q) => q.usedEqualTo(false).or().usedIsNull())
      //     .and()
      //     .valueGreaterThan(0)
      //     .findAll();
      final spendableUtxos = await mainDB.isar.utxos
          .where()
          .walletIdEqualTo(walletId)
          .filter()
          .isBlockedEqualTo(false)
          .and()
          .group((q) => q.usedEqualTo(false).or().usedIsNull())
          .and()
          .valueGreaterThan(0)
          .findAll();

      spendableUtxos.removeWhere(
        (e) => !e.isConfirmed(
          currentHeight,
          cryptoCurrency.minConfirms,
        ),
      );

      if (spendableUtxos.isEmpty) {
        throw Exception("No available UTXOs found to anonymize");
      }

      final available = spendableUtxos
          .map((e) => BigInt.from(e.value))
          .reduce((value, element) => value += element);

      final bool subtractFeeFromAmount;
      if (available < total) {
        throw Exception("Insufficient balance");
      } else if (available == total) {
        subtractFeeFromAmount = true;
      } else {
        subtractFeeFromAmount = false;
      }

      final mints = await _createSparkMintTransactions(
        subtractFeeFromAmount: subtractFeeFromAmount,
        autoMintAll: false,
        availableUtxos: spendableUtxos,
        outputs: recipients,
      );

      return txData.copyWith(sparkMints: mints);
    } catch (e, s) {
      Logging.instance.log(
        "Exception caught in prepareSparkMintTransaction(): $e\n$s",
        level: LogLevel.Warning,
      );
      rethrow;
    }
  }

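  /// Broadcasts each prepared mint transaction in [txData]'s sparkMints and
  /// returns a copy of [txData] containing the confirmed results.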
  Future<TxData> confirmSparkMintTransactions({required TxData txData}) async {
    final futures = txData.sparkMints!.map((e) => confirmSend(txData: e));
    return txData.copyWith(sparkMints: await Future.wait(futures));
  }

  @override
  Future<void> updateBalance() async {
    // call to super to update transparent balance (and lelantus balance if
    // whatever class this mixin is used on uses LelantusInterface as well)
    final normalBalanceFuture = super.updateBalance();

    // todo: spark balance aka update info.tertiaryBalance here?
    // currently happens on spark coins update/refresh

    // wait for normalBalanceFuture to complete before returning
    await normalBalanceFuture;
  }

// ====================== Private ============================================
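
  /// bitcoindart [btc.NetworkType] built from this currency's network
  /// parameters, used by the transaction builders in this mixin.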
  btc.NetworkType get _bitcoinDartNetwork => btc.NetworkType(
        messagePrefix: cryptoCurrency.networkParams.messagePrefix,
        bech32: cryptoCurrency.networkParams.bech32Hrp,
        bip32: btc.Bip32Type(
          public: cryptoCurrency.networkParams.pubHDPrefix,
          private: cryptoCurrency.networkParams.privHDPrefix,
        ),
        pubKeyHash: cryptoCurrency.networkParams.p2pkhPrefix,
        scriptHash: cryptoCurrency.networkParams.p2shPrefix,
        wif: cryptoCurrency.networkParams.wifPrefix,
      );
}

/// Top level function which should be called wrapped in [compute]
Future<
    ({
      Uint8List serializedSpendPayload,
      List<Uint8List> outputScripts,
      int fee,
      List<
          ({
            int groupId,
            int height,
            String serializedCoin,
            String serializedCoinContext
          })> usedCoins,
    })> _createSparkSend(
  ({
    String privateKeyHex,
    int index,
    List<({String address, int amount, bool subtractFeeFromAmount})> recipients,
    List<
        ({
          String sparkAddress,
          int amount,
          bool subtractFeeFromAmount,
          String memo
        })> privateRecipients,
    List<
        ({
          String serializedCoin,
          String serializedCoinContext,
          int groupId,
          int height,
        })> serializedCoins,
    List<
        ({
          int setId,
          String setHash,
          List<({String serializedCoin, String txHash})> set
        })> allAnonymitySets,
    List<
        ({
          int setId,
          Uint8List blockHash,
        })> idAndBlockHashes,
    Uint8List txHash,
  }) args,
) async {
  final spend = LibSpark.createSparkSendTransaction(
    privateKeyHex: args.privateKeyHex,
    index: args.index,
    recipients: args.recipients,
    privateRecipients: args.privateRecipients,
    serializedCoins: args.serializedCoins,
    allAnonymitySets: args.allAnonymitySets,
    idAndBlockHashes: args.idAndBlockHashes,
    txHash: args.txHash,
  );

  return spend;
}

/// Top level function which should be called wrapped in [compute]
Future<List<SparkCoin>> _identifyCoins(
  ({
    List<dynamic> anonymitySetCoins,
    int groupId,
    Set<String> privateKeyHexSet,
    String walletId,
    bool isTestNet,
  }) args,
) async {
  final List<SparkCoin> myCoins = [];

  for (final privateKeyHex in args.privateKeyHexSet) {
    for (final dynData in args.anonymitySetCoins) {
      final data = List<String>.from(dynData as List);

      if (data.length != 3) {
        throw Exception("Unexpected serialized coin info found");
      }

      final serializedCoinB64 = data[0];
      final txHash = data[1].toHexReversedFromBase64;
      final contextB64 = data[2];

      final coin = LibSpark.identifyAndRecoverCoin(
        serializedCoinB64,
        privateKeyHex: privateKeyHex,
        index: kDefaultSparkIndex,
        context: base64Decode(contextB64),
        isTestNet: args.isTestNet,
      );

      // it's ours
      if (coin != null) {
        final SparkCoinType coinType;
        switch (coin.type.value) {
          case 0:
            coinType = SparkCoinType.mint;
          case 1:
            coinType = SparkCoinType.spend;
          default:
            throw Exception("Unknown spark coin type detected");
        }
        myCoins.add(
          SparkCoin(
            walletId: args.walletId,
            type: coinType,
            isUsed: false,
            groupId: args.groupId,
            nonce: coin.nonceHex?.toUint8ListFromHex,
            address: coin.address!,
            txHash: txHash,
            valueIntString: coin.value!.toString(),
            memo: coin.memo,
            serialContext: coin.serialContext,
            diversifierIntString: coin.diversifier!.toString(),
            encryptedDiversifier: coin.encryptedDiversifier,
            serial: coin.serial,
            tag: coin.tag,
            lTagHash: coin.lTagHash!,
            height: coin.height,
            serializedCoinB64: serializedCoinB64,
            contextB64: contextB64,
          ),
        );
      }
    }
  }

  return myCoins;
}

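/// Returns the smaller of [a] and [b].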
BigInt _min(BigInt a, BigInt b) {
  if (a <= b) {
    return a;
  } else {
    return b;
  }
}
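
/// Sums the values of [utxos] as a [BigInt].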
BigInt _sum(List<UTXO> utxos) => utxos
    .map((e) => BigInt.from(e.value))
    .fold(BigInt.zero, (previousValue, element) => previousValue + element);
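
/// Mutable spark mint recipient used while values are split and fees are
/// subtracted during mint transaction construction.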
class MutableSparkRecipient {
  String address;
  BigInt value;
  String memo;

  MutableSparkRecipient(this.address, this.value, this.memo);

  @override
  String toString() {
    return 'MutableSparkRecipient{ address: $address, value: $value, memo: $memo }';
  }
}