2022-11-29 19:11:30 +00:00
import ' dart:async ' ;
import ' dart:convert ' ;
import ' dart:io ' ;
import ' package:bech32/bech32.dart ' ;
import ' package:bip32/bip32.dart ' as bip32 ;
import ' package:bip39/bip39.dart ' as bip39 ;
import ' package:bitcoindart/bitcoindart.dart ' ;
import ' package:bs58check/bs58check.dart ' as bs58check ;
import ' package:crypto/crypto.dart ' ;
import ' package:decimal/decimal.dart ' ;
import ' package:flutter/foundation.dart ' ;
2023-01-12 02:09:08 +00:00
import ' package:isar/isar.dart ' ;
2023-03-01 21:52:13 +00:00
import ' package:stackwallet/db/isar/main_db.dart ' ;
2022-11-29 19:11:30 +00:00
import ' package:stackwallet/electrumx_rpc/cached_electrumx.dart ' ;
import ' package:stackwallet/electrumx_rpc/electrumx.dart ' ;
2023-01-12 02:09:08 +00:00
import ' package:stackwallet/models/balance.dart ' ;
import ' package:stackwallet/models/isar/models/isar_models.dart ' as isar_models ;
2022-11-29 19:11:30 +00:00
import ' package:stackwallet/models/paymint/fee_object_model.dart ' ;
import ' package:stackwallet/services/coins/coin_service.dart ' ;
import ' package:stackwallet/services/event_bus/events/global/node_connection_status_changed_event.dart ' ;
import ' package:stackwallet/services/event_bus/events/global/refresh_percent_changed_event.dart ' ;
import ' package:stackwallet/services/event_bus/events/global/updated_in_background_event.dart ' ;
import ' package:stackwallet/services/event_bus/events/global/wallet_sync_status_changed_event.dart ' ;
import ' package:stackwallet/services/event_bus/global_event_bus.dart ' ;
2023-01-12 18:46:01 +00:00
import ' package:stackwallet/services/mixins/wallet_cache.dart ' ;
import ' package:stackwallet/services/mixins/wallet_db.dart ' ;
2022-11-29 19:11:30 +00:00
import ' package:stackwallet/services/node_service.dart ' ;
import ' package:stackwallet/services/notifications_api.dart ' ;
import ' package:stackwallet/services/transaction_notification_tracker.dart ' ;
import ' package:stackwallet/utilities/assets.dart ' ;
2023-02-03 22:34:06 +00:00
import ' package:stackwallet/utilities/bip32_utils.dart ' ;
2022-11-29 19:11:30 +00:00
import ' package:stackwallet/utilities/constants.dart ' ;
import ' package:stackwallet/utilities/default_nodes.dart ' ;
import ' package:stackwallet/utilities/enums/coin_enum.dart ' ;
2023-01-25 18:08:48 +00:00
import ' package:stackwallet/utilities/enums/derive_path_type_enum.dart ' ;
2022-11-29 19:11:30 +00:00
import ' package:stackwallet/utilities/enums/fee_rate_type_enum.dart ' ;
import ' package:stackwallet/utilities/flutter_secure_storage_interface.dart ' ;
import ' package:stackwallet/utilities/format.dart ' ;
import ' package:stackwallet/utilities/logger.dart ' ;
import ' package:stackwallet/utilities/prefs.dart ' ;
import ' package:tuple/tuple.dart ' ;
import ' package:uuid/uuid.dart ' ;
/// Minimum number of confirmations before a transaction is treated as final.
const int MINIMUM_CONFIRMATIONS = 1;

/// Outputs below this value (in satoshis) are considered dust.
const int DUST_LIMIT = 294;

/// Expected genesis block hash for the Particl main network; used to verify
/// the ElectrumX server is actually serving Particl mainnet.
const String GENESIS_HASH_MAINNET =
    "0000ee0784c195317ac95623e22fddb8c7b8825dc3998e0bb924d66866eccf4c";

/// Expected genesis block hash for the Particl test network.
const String GENESIS_HASH_TESTNET =
    "0000594ada5310b367443ee0afd4fa3d0bbd5850ea4e33cdc7d6a904a7ec7c90";
2023-02-03 22:34:06 +00:00
/// Builds a BIP32 derivation path string of the form
/// `m/purpose'/coinType'/account'/chain/index` for a Particl wallet.
///
/// [networkWIF] must be the Particl mainnet WIF prefix (0x6c); any other
/// value is rejected. Only [DerivePathType.bip44] and [DerivePathType.bip84]
/// are supported.
String constructDerivePath({
  required DerivePathType derivePathType,
  required int networkWIF,
  int account = 0,
  required int chain,
  required int index,
}) {
  // Only the Particl mainnet WIF byte is recognized here.
  if (networkWIF != 0x6c) {
    throw Exception("Invalid Particl network wif used!");
  }
  const coinType = "44"; // PART mainnet coin type.

  final int purpose;
  switch (derivePathType) {
    case DerivePathType.bip44:
      purpose = 44;
      break;
    case DerivePathType.bip84:
      purpose = 84;
      break;
    default:
      throw Exception("DerivePathType $derivePathType not supported");
  }

  return "m/$purpose'/$coinType'/$account'/$chain/$index";
}
2023-01-12 18:46:01 +00:00
class ParticlWallet extends CoinServiceAPI with WalletCache , WalletDB {
2023-02-03 22:48:16 +00:00
  /// Creates a Particl wallet backed by an ElectrumX server.
  ///
  /// [mockableOverride] allows injecting a mock [MainDB] in tests; when null
  /// the shared database instance is used.
  ParticlWallet({
    required String walletId,
    required String walletName,
    required Coin coin,
    required ElectrumX client,
    required CachedElectrumX cachedClient,
    required TransactionNotificationTracker tracker,
    required SecureStorageInterface secureStore,
    MainDB? mockableOverride,
  }) {
    txTracker = tracker;
    _walletId = walletId;
    _walletName = walletName;
    _coin = coin;
    _electrumXClient = client;
    _cachedElectrumXClient = cachedClient;
    _secureStore = secureStore;
    initCache(walletId, coin);
    initWalletDB(mockableOverride: mockableOverride);
  }
2022-11-29 19:11:30 +00:00
  /// Whether the app was launched with the IS_INTEGRATION_TEST define; when
  /// set, server feature checks are skipped during recovery.
  static const integrationTestFlag =
      bool.fromEnvironment("IS_INTEGRATION_TEST");

  final _prefs = Prefs.instance;

  /// Periodic refresh timer; non-null only while auto-sync is active.
  Timer? timer;

  late final Coin _coin;

  /// Tracks which txids have already triggered pending/confirmed
  /// notifications, preventing duplicates.
  late final TransactionNotificationTracker txTracker;
NetworkType get _network {
switch ( coin ) {
case Coin . particl:
return particl ;
default :
2022-12-05 14:53:21 +00:00
throw Exception ( " Invalid network type! " ) ;
2022-11-29 19:11:30 +00:00
}
}
2022-12-05 14:53:21 +00:00
  @override
  set isFavorite(bool markFavorite) {
    _isFavorite = markFavorite;
    // Persist so the flag survives app restarts.
    updateCachedIsFavorite(markFavorite);
  }

  @override
  bool get isFavorite => _isFavorite ??= getCachedIsFavorite();

  // Lazily loaded from the cache on first read.
  bool? _isFavorite;
2022-12-05 14:53:21 +00:00
2022-11-29 19:11:30 +00:00
  @override
  Coin get coin => _coin;

  /// All UTXOs stored locally for this wallet.
  @override
  Future<List<isar_models.UTXO>> get utxos => db.getUTXOs(walletId).findAll();

  /// All stored transactions for this wallet, newest first.
  @override
  Future<List<isar_models.Transaction>> get transactions =>
      db.getTransactions(walletId).sortByTimestampDesc().findAll();
2022-11-29 19:11:30 +00:00
  @override
  Future<String> get currentReceivingAddress async =>
      (await _currentReceivingAddress).value;

  /// The highest-derivation-index p2wpkh receiving address, generating the
  /// first one (chain 0, index 0) if none exists yet.
  Future<isar_models.Address> get _currentReceivingAddress async =>
      (await db
          .getAddresses(walletId)
          .filter()
          .typeEqualTo(isar_models.AddressType.p2wpkh)
          .subTypeEqualTo(isar_models.AddressSubType.receiving)
          .sortByDerivationIndexDesc()
          .findFirst()) ??
      await _generateAddressForChain(0, 0, DerivePathTypeExt.primaryFor(coin));

  Future<String> get currentChangeAddress async =>
      (await _currentChangeAddress).value;

  /// The highest-derivation-index p2wpkh change address, generating the
  /// first one (chain 1, index 0) if none exists yet.
  Future<isar_models.Address> get _currentChangeAddress async =>
      (await db
          .getAddresses(walletId)
          .filter()
          .typeEqualTo(isar_models.AddressType.p2wpkh)
          .subTypeEqualTo(isar_models.AddressSubType.change)
          .sortByDerivationIndexDesc()
          .findFirst()) ??
      await _generateAddressForChain(1, 0, DerivePathTypeExt.primaryFor(coin));
2022-11-29 19:11:30 +00:00
  /// Stops background work (periodic timer and network pings) before the
  /// wallet is disposed. Safe to call more than once.
  @override
  Future<void> exit() async {
    _hasCalledExit = true;
    timer?.cancel();
    timer = null;
    stopNetworkAlivePinging();
  }

  bool _hasCalledExit = false;

  @override
  bool get hasCalledExit => _hasCalledExit;

  /// Fee estimates, fetched once and cached for the wallet's lifetime.
  @override
  Future<FeeObject> get fees => _feeObject ??= _getFees();
  Future<FeeObject>? _feeObject;
@ override
Future < int > get maxFee async {
2022-12-05 14:53:21 +00:00
final fee = ( await fees ) . fast as String ;
final satsFee =
Decimal . parse ( fee ) * Decimal . fromInt ( Constants . satsPerCoin ( coin ) ) ;
2022-11-29 19:11:30 +00:00
return satsFee . floor ( ) . toBigInt ( ) . toInt ( ) ;
}
  @override
  Future<List<String>> get mnemonic => _getMnemonicList();

  /// The raw mnemonic phrase from secure storage, or null if never set.
  @override
  Future<String?> get mnemonicString =>
      _secureStore.read(key: '${_walletId}_mnemonic');

  /// The optional BIP39 passphrase from secure storage, or null if never set.
  @override
  Future<String?> get mnemonicPassphrase => _secureStore.read(
        key: '${_walletId}_mnemonicPassphrase',
      );
2022-11-29 19:11:30 +00:00
Future < int > get chainHeight async {
try {
final result = await _electrumXClient . getBlockHeadTip ( ) ;
2023-01-12 21:32:25 +00:00
final height = result [ " height " ] as int ;
await updateCachedChainHeight ( height ) ;
2023-01-30 17:06:28 +00:00
if ( height > storedChainHeight ) {
GlobalEventBus . instance . fire (
UpdatedInBackgroundEvent (
" Updated current chain height in $ walletId $ walletName ! " ,
walletId ,
) ,
) ;
}
2023-01-12 21:32:25 +00:00
return height ;
2022-11-29 19:11:30 +00:00
} catch ( e , s ) {
Logging . instance . log ( " Exception caught in chainHeight: $ e \n $ s " ,
level: LogLevel . Error ) ;
2023-01-12 21:32:25 +00:00
return storedChainHeight ;
2022-11-29 19:11:30 +00:00
}
}
2023-01-12 02:09:08 +00:00
  /// Last known chain height from the local cache (may lag the network).
  @override
  int get storedChainHeight => getCachedChainHeight();
2022-11-29 19:11:30 +00:00
DerivePathType addressType ( { required String address } ) {
Uint8List ? decodeBase58 ;
Segwit ? decodeBech32 ;
try {
decodeBase58 = bs58check . decode ( address ) ;
} catch ( err ) {
// Base58check decode fail
}
2022-12-06 16:02:46 +00:00
2022-12-07 12:46:53 +00:00
// return DerivePathType.bip84;
if ( decodeBase58 ! = null ) {
if ( decodeBase58 [ 0 ] = = _network . pubKeyHash ) {
// P2PKH
return DerivePathType . bip44 ;
}
throw ArgumentError ( ' Invalid version or Network mismatch ' ) ;
} else {
try {
decodeBech32 = segwit . decode ( address , particl . bech32 ! ) ;
} catch ( err ) {
// Bech32 decode fail
}
if ( _network . bech32 ! = decodeBech32 ! . hrp ) {
throw ArgumentError ( ' Invalid prefix or Network mismatch ' ) ;
}
if ( decodeBech32 . version ! = 0 ) {
throw ArgumentError ( ' Invalid address version ' ) ;
}
// P2WPKH
return DerivePathType . bip84 ;
}
2022-11-29 19:11:30 +00:00
}
  // Guards recovery/rescan so only one runs at a time; refresh checks are
  // suppressed while it is held.
  bool longMutex = false;

  /// Restores the wallet from a BIP39 [mnemonic] (and optional
  /// [mnemonicPassphrase]), scanning up to [maxNumberOfIndexesToCheck]
  /// addresses per chain and stopping after [maxUnusedAddressGap]
  /// consecutive unused addresses.
  ///
  /// Throws if a mnemonic is already stored (restore must never overwrite)
  /// or if the server's genesis hash does not match Particl mainnet.
  /// NOTE(review): the [height] parameter is not used in this visible body.
  @override
  Future<void> recoverFromMnemonic({
    required String mnemonic,
    String? mnemonicPassphrase,
    required int maxUnusedAddressGap,
    required int maxNumberOfIndexesToCheck,
    required int height,
  }) async {
    longMutex = true;
    final start = DateTime.now();
    try {
      Logging.instance.log("IS_INTEGRATION_TEST: $integrationTestFlag",
          level: LogLevel.Info);
      if (!integrationTestFlag) {
        // Verify the server is actually serving Particl mainnet before
        // writing anything.
        final features = await electrumXClient.getServerFeatures();
        Logging.instance.log("features: $features", level: LogLevel.Info);
        switch (coin) {
          case Coin.particl:
            if (features['genesis_hash'] != GENESIS_HASH_MAINNET) {
              throw Exception("genesis hash does not match main net!");
            }
            break;
          default:
            throw Exception(
                "Attempted to generate a ParticlWallet using a non particl coin type: ${coin.name}");
        }
      }
      // check to make sure we aren't overwriting a mnemonic
      // this should never fail
      if ((await mnemonicString) != null ||
          (await this.mnemonicPassphrase) != null) {
        longMutex = false;
        throw Exception("Attempted to overwrite mnemonic on restore!");
      }
      await _secureStore.write(
          key: '${_walletId}_mnemonic', value: mnemonic.trim());
      await _secureStore.write(
        key: '${_walletId}_mnemonicPassphrase',
        value: mnemonicPassphrase ?? "",
      );
      await _recoverWalletFromBIP32SeedPhrase(
        mnemonic: mnemonic.trim(),
        mnemonicPassphrase: mnemonicPassphrase ?? "",
        maxUnusedAddressGap: maxUnusedAddressGap,
        maxNumberOfIndexesToCheck: maxNumberOfIndexesToCheck,
      );
    } catch (e, s) {
      Logging.instance.log(
          "Exception rethrown from recoverFromMnemonic(): $e\n$s",
          level: LogLevel.Error);
      longMutex = false;
      rethrow;
    }
    longMutex = false;
    final end = DateTime.now();
    Logging.instance.log(
        "$walletName recovery time: ${end.difference(start).inMilliseconds} millis",
        level: LogLevel.Info);
  }
2022-12-05 14:53:21 +00:00
  /// Scans one derivation chain (receive = 0 / change = 1) for used
  /// addresses of the given [type].
  ///
  /// Derives addresses from [root] in batches of [txCountBatchSize],
  /// querying transaction counts in bulk, until [maxUnusedAddressGap]
  /// consecutive unused addresses are seen or [maxNumberOfIndexesToCheck]
  /// indexes have been scanned.
  ///
  /// Returns a map with:
  /// - "addressArray": the used [isar_models.Address]es found,
  /// - "index": the highest used derivation index (-1 if none were used),
  /// - "derivations": address value -> {pubKey, wif} for later spending.
  Future<Map<String, dynamic>> _checkGaps(
      int maxNumberOfIndexesToCheck,
      int maxUnusedAddressGap,
      int txCountBatchSize,
      bip32.BIP32 root,
      DerivePathType type,
      int chain) async {
    List<isar_models.Address> addressArray = [];
    int returningIndex = -1;
    Map<String, Map<String, String>> derivations = {};
    int gapCounter = 0;
    for (int index = 0;
        index < maxNumberOfIndexesToCheck && gapCounter < maxUnusedAddressGap;
        index += txCountBatchSize) {
      List<String> iterationsAddressArray = [];
      Logging.instance.log(
          "index: $index, \tGapCounter $chain ${type.name}: $gapCounter",
          level: LogLevel.Info);
      final _id = "k_$index";
      Map<String, String> txCountCallArgs = {};
      final Map<String, dynamic> receivingNodes = {};
      // Derive a batch of addresses and queue their tx-count lookups.
      for (int j = 0; j < txCountBatchSize; j++) {
        final derivePath = constructDerivePath(
          derivePathType: type,
          networkWIF: root.network.wif,
          chain: chain,
          index: index + j,
        );
        final node = await Bip32Utils.getBip32NodeFromRoot(root, derivePath);
        String addressString;
        isar_models.AddressType addrType;
        switch (type) {
          case DerivePathType.bip44:
            addressString = P2PKH(
                    data: PaymentData(pubkey: node.publicKey),
                    network: _network)
                .data
                .address!;
            addrType = isar_models.AddressType.p2pkh;
            break;
          case DerivePathType.bip84:
            addressString = P2WPKH(
                    network: _network,
                    data: PaymentData(pubkey: node.publicKey))
                .data
                .address!;
            addrType = isar_models.AddressType.p2wpkh;
            break;
          default:
            throw Exception("DerivePathType $type not supported");
        }

        final address = isar_models.Address(
          walletId: walletId,
          subType: chain == 0
              ? isar_models.AddressSubType.receiving
              : isar_models.AddressSubType.change,
          type: addrType,
          publicKey: node.publicKey,
          value: addressString,
          derivationIndex: index + j,
          derivationPath: isar_models.DerivationPath()..value = derivePath,
        );

        receivingNodes.addAll({
          "${_id}_$j": {
            "node": node,
            "address": address,
          }
        });
        txCountCallArgs.addAll({
          "${_id}_$j": addressString,
        });
      }
      // get address tx counts
      final counts = await _getBatchTxCount(addresses: txCountCallArgs);
      // check and add appropriate addresses
      for (int k = 0; k < txCountBatchSize; k++) {
        int count = counts["${_id}_$k"]!;
        if (count > 0) {
          final node = receivingNodes["${_id}_$k"];
          final address = node["address"] as isar_models.Address;
          // add address to array
          addressArray.add(address);
          iterationsAddressArray.add(address.value);
          // set current index
          returningIndex = index + k;
          // reset counter
          gapCounter = 0;
          // add info to derivations
          derivations[address.value] = {
            "pubKey": Format.uint8listToString(
                (node["node"] as bip32.BIP32).publicKey),
            "wif": (node["node"] as bip32.BIP32).toWIF(),
          };
        }
        // increase counter when no tx history found
        if (count == 0) {
          gapCounter++;
        }
      }
      // cache all the transactions while waiting for the current function to finish.
      unawaited(getTransactionCacheEarly(iterationsAddressArray));
    }
    return {
      "addressArray": addressArray,
      "index": returningIndex,
      "derivations": derivations
    };
  }
Future < void > getTransactionCacheEarly ( List < String > allAddresses ) async {
try {
final List < Map < String , dynamic > > allTxHashes =
await _fetchHistory ( allAddresses ) ;
for ( final txHash in allTxHashes ) {
try {
unawaited ( cachedElectrumXClient . getTransaction (
txHash: txHash [ " tx_hash " ] as String ,
verbose: true ,
coin: coin ,
) ) ;
} catch ( e ) {
continue ;
}
}
} catch ( e ) {
//
}
}
2022-11-29 19:11:30 +00:00
  /// Rebuilds the wallet's address and derivation state from the seed phrase.
  ///
  /// Scans the bip44 (p2pkh) and bip84 (p2wpkh) receive and change chains
  /// concurrently via [_checkGaps], persists discovered derivations and
  /// addresses, then refreshes UTXOs. When [isRescan] is true existing
  /// address records are updated in place instead of inserted.
  Future<void> _recoverWalletFromBIP32SeedPhrase({
    required String mnemonic,
    required String mnemonicPassphrase,
    int maxUnusedAddressGap = 20,
    int maxNumberOfIndexesToCheck = 1000,
    bool isRescan = false,
  }) async {
    longMutex = true;

    Map<String, Map<String, String>> p2pkhReceiveDerivations = {};
    Map<String, Map<String, String>> p2wpkhReceiveDerivations = {};
    Map<String, Map<String, String>> p2pkhChangeDerivations = {};
    Map<String, Map<String, String>> p2wpkhChangeDerivations = {};

    final root = await Bip32Utils.getBip32Root(
      mnemonic,
      mnemonicPassphrase,
      _network,
    );

    List<isar_models.Address> p2pkhReceiveAddressArray = [];
    List<isar_models.Address> p2wpkhReceiveAddressArray = [];
    int p2pkhReceiveIndex = -1;
    int p2wpkhReceiveIndex = -1;

    List<isar_models.Address> p2pkhChangeAddressArray = [];
    List<isar_models.Address> p2wpkhChangeAddressArray = [];
    int p2pkhChangeIndex = -1;
    int p2wpkhChangeIndex = -1;

    // actual size is 24 due to p2pkh, and p2wpkh so 12x2
    const txCountBatchSize = 12;
    try {
      // receiving addresses
      Logging.instance
          .log("checking receiving addresses...", level: LogLevel.Info);
      final resultReceive44 = _checkGaps(maxNumberOfIndexesToCheck,
          maxUnusedAddressGap, txCountBatchSize, root, DerivePathType.bip44, 0);

      final resultReceive84 = _checkGaps(maxNumberOfIndexesToCheck,
          maxUnusedAddressGap, txCountBatchSize, root, DerivePathType.bip84, 0);
      Logging.instance
          .log("checking change addresses...", level: LogLevel.Info);
      // change addresses
      final resultChange44 = _checkGaps(maxNumberOfIndexesToCheck,
          maxUnusedAddressGap, txCountBatchSize, root, DerivePathType.bip44, 1);

      final resultChange84 = _checkGaps(maxNumberOfIndexesToCheck,
          maxUnusedAddressGap, txCountBatchSize, root, DerivePathType.bip84, 1);

      // All four scans run concurrently; awaiting each below is then cheap.
      await Future.wait(
          [resultReceive44, resultReceive84, resultChange44, resultChange84]);

      p2pkhReceiveAddressArray =
          (await resultReceive44)['addressArray'] as List<isar_models.Address>;
      p2pkhReceiveIndex = (await resultReceive44)['index'] as int;
      p2pkhReceiveDerivations = (await resultReceive44)['derivations']
          as Map<String, Map<String, String>>;

      p2wpkhReceiveAddressArray =
          (await resultReceive84)['addressArray'] as List<isar_models.Address>;
      p2wpkhReceiveIndex = (await resultReceive84)['index'] as int;
      p2wpkhReceiveDerivations = (await resultReceive84)['derivations']
          as Map<String, Map<String, String>>;

      p2pkhChangeAddressArray =
          (await resultChange44)['addressArray'] as List<isar_models.Address>;
      p2pkhChangeIndex = (await resultChange44)['index'] as int;
      p2pkhChangeDerivations = (await resultChange44)['derivations']
          as Map<String, Map<String, String>>;

      p2wpkhChangeAddressArray =
          (await resultChange84)['addressArray'] as List<isar_models.Address>;
      p2wpkhChangeIndex = (await resultChange84)['index'] as int;
      p2wpkhChangeDerivations = (await resultChange84)['derivations']
          as Map<String, Map<String, String>>;

      // save the derivations (if any)
      if (p2pkhReceiveDerivations.isNotEmpty) {
        await addDerivations(
            chain: 0,
            derivePathType: DerivePathType.bip44,
            derivationsToAdd: p2pkhReceiveDerivations);
      }

      if (p2wpkhReceiveDerivations.isNotEmpty) {
        await addDerivations(
            chain: 0,
            derivePathType: DerivePathType.bip84,
            derivationsToAdd: p2wpkhReceiveDerivations);
      }

      if (p2pkhChangeDerivations.isNotEmpty) {
        await addDerivations(
            chain: 1,
            derivePathType: DerivePathType.bip44,
            derivationsToAdd: p2pkhChangeDerivations);
      }

      if (p2wpkhChangeDerivations.isNotEmpty) {
        await addDerivations(
            chain: 1,
            derivePathType: DerivePathType.bip84,
            derivationsToAdd: p2wpkhChangeDerivations);
      }

      // If restoring a wallet that never received any funds, then set receivingArray manually
      // If we didn't do this, it'd store an empty array
      if (p2pkhReceiveIndex == -1) {
        final address =
            await _generateAddressForChain(0, 0, DerivePathType.bip44);
        p2pkhReceiveAddressArray.add(address);
      }

      if (p2wpkhReceiveIndex == -1) {
        final address =
            await _generateAddressForChain(0, 0, DerivePathType.bip84);
        p2wpkhReceiveAddressArray.add(address);
      }

      // If restoring a wallet that never sent any funds with change, then set changeArray
      // manually. If we didn't do this, it'd store an empty array.
      if (p2pkhChangeIndex == -1) {
        final address =
            await _generateAddressForChain(1, 0, DerivePathType.bip44);
        p2pkhChangeAddressArray.add(address);
      }

      if (p2wpkhChangeIndex == -1) {
        final address =
            await _generateAddressForChain(1, 0, DerivePathType.bip84);
        p2wpkhChangeAddressArray.add(address);
      }

      if (isRescan) {
        await db.updateOrPutAddresses([
          ...p2wpkhReceiveAddressArray,
          ...p2wpkhChangeAddressArray,
          ...p2pkhReceiveAddressArray,
          ...p2pkhChangeAddressArray,
        ]);
      } else {
        await db.putAddresses([
          ...p2wpkhReceiveAddressArray,
          ...p2wpkhChangeAddressArray,
          ...p2pkhReceiveAddressArray,
          ...p2pkhChangeAddressArray,
        ]);
      }

      await _updateUTXOs();

      await Future.wait([
        updateCachedId(walletId),
        updateCachedIsFavorite(false),
      ]);

      longMutex = false;
    } catch (e, s) {
      Logging.instance.log(
          "Exception rethrown from _recoverWalletFromBIP32SeedPhrase(): $e\n$s",
          level: LogLevel.Error);

      longMutex = false;
      rethrow;
    }
  }
  /// Returns true when a refresh is warranted: either a previously-pending
  /// transaction has reached [MINIMUM_CONFIRMATIONS], or the server reports
  /// a transaction we do not have locally yet.
  ///
  /// Returns false immediately while recovery is running or after [exit].
  Future<bool> refreshIfThereIsNewData() async {
    if (longMutex) return false;
    if (_hasCalledExit) return false;
    Logging.instance.log("refreshIfThereIsNewData", level: LogLevel.Info);
    try {
      bool needsRefresh = false;
      Set<String> txnsToCheck = {};
      // Pending txs that were never notified as confirmed.
      for (final String txid in txTracker.pendings) {
        if (!txTracker.wasNotifiedConfirmed(txid)) {
          txnsToCheck.add(txid);
        }
      }
      for (String txid in txnsToCheck) {
        final txn = await electrumXClient.getTransaction(txHash: txid);
        int confirmations = txn["confirmations"] as int? ?? 0;
        bool isUnconfirmed = confirmations < MINIMUM_CONFIRMATIONS;
        if (!isUnconfirmed) {
          // unconfirmedTxs = {};
          needsRefresh = true;
          break;
        }
      }
      // Otherwise look for server-side txs missing from the local db.
      if (!needsRefresh) {
        var allOwnAddresses = await _fetchAllOwnAddresses();
        List<Map<String, dynamic>> allTxs = await _fetchHistory(
            allOwnAddresses.map((e) => e.value).toList(growable: false));
        for (Map<String, dynamic> transaction in allTxs) {
          final txid = transaction['tx_hash'] as String;
          if ((await db
                  .getTransactions(walletId)
                  .filter()
                  .txidMatches(txid)
                  .findFirst()) ==
              null) {
            Logging.instance.log(
                "txid not found in address history already ${transaction['tx_hash']}",
                level: LogLevel.Info);
            needsRefresh = true;
            break;
          }
        }
      }
      return needsRefresh;
    } catch (e, s) {
      Logging.instance.log(
          "Exception caught in refreshIfThereIsNewData: $e\n$s",
          level: LogLevel.Error);
      rethrow;
    }
  }
2023-01-12 02:09:08 +00:00
  /// Fires local notifications for transaction state changes.
  ///
  /// Pages through all stored transactions (50 at a time), notifying once
  /// when a transaction first appears unconfirmed and once again when it
  /// reaches [MINIMUM_CONFIRMATIONS]. [txTracker] records what has already
  /// been notified to prevent duplicates.
  Future<void> getAllTxsToWatch() async {
    if (_hasCalledExit) return;
    List<isar_models.Transaction> unconfirmedTxnsToNotifyPending = [];
    List<isar_models.Transaction> unconfirmedTxnsToNotifyConfirmed = [];
    final currentChainHeight = await chainHeight;

    final txCount = await db.getTransactions(walletId).count();

    const paginateLimit = 50;
    for (int i = 0; i < txCount; i += paginateLimit) {
      final transactions = await db
          .getTransactions(walletId)
          .offset(i)
          .limit(paginateLimit)
          .findAll();
      for (final tx in transactions) {
        if (tx.isConfirmed(currentChainHeight, MINIMUM_CONFIRMATIONS)) {
          // get all transactions that were notified as pending but not as confirmed
          if (txTracker.wasNotifiedPending(tx.txid) &&
              !txTracker.wasNotifiedConfirmed(tx.txid)) {
            unconfirmedTxnsToNotifyConfirmed.add(tx);
          }
        } else {
          // get all transactions that were not notified as pending yet
          if (!txTracker.wasNotifiedPending(tx.txid)) {
            unconfirmedTxnsToNotifyPending.add(tx);
          }
        }
      }
    }

    // notify on unconfirmed transactions
    for (final tx in unconfirmedTxnsToNotifyPending) {
      final confirmations = tx.getConfirmations(currentChainHeight);
      if (tx.type == isar_models.TransactionType.incoming) {
        unawaited(NotificationApi.showNotification(
          title: "Incoming transaction",
          body: walletName,
          walletId: walletId,
          iconAssetName: Assets.svg.iconFor(coin: coin),
          date: DateTime.fromMillisecondsSinceEpoch(tx.timestamp * 1000),
          shouldWatchForUpdates: confirmations < MINIMUM_CONFIRMATIONS,
          coinName: coin.name,
          txid: tx.txid,
          confirmations: confirmations,
          requiredConfirmations: MINIMUM_CONFIRMATIONS,
        ));
        await txTracker.addNotifiedPending(tx.txid);
      } else if (tx.type == isar_models.TransactionType.outgoing) {
        unawaited(NotificationApi.showNotification(
          title: "Sending transaction",
          body: walletName,
          walletId: walletId,
          iconAssetName: Assets.svg.iconFor(coin: coin),
          date: DateTime.fromMillisecondsSinceEpoch(tx.timestamp * 1000),
          shouldWatchForUpdates: confirmations < MINIMUM_CONFIRMATIONS,
          coinName: coin.name,
          txid: tx.txid,
          confirmations: confirmations,
          requiredConfirmations: MINIMUM_CONFIRMATIONS,
        ));
        await txTracker.addNotifiedPending(tx.txid);
      }
    }
    // notify on confirmed
    for (final tx in unconfirmedTxnsToNotifyConfirmed) {
      if (tx.type == isar_models.TransactionType.incoming) {
        unawaited(NotificationApi.showNotification(
          title: "Incoming transaction confirmed",
          body: walletName,
          walletId: walletId,
          iconAssetName: Assets.svg.iconFor(coin: coin),
          date: DateTime.fromMillisecondsSinceEpoch(tx.timestamp * 1000),
          shouldWatchForUpdates: false,
          coinName: coin.name,
        ));
        await txTracker.addNotifiedConfirmed(tx.txid);
      } else if (tx.type == isar_models.TransactionType.outgoing) {
        unawaited(NotificationApi.showNotification(
          title: "Outgoing transaction confirmed",
          body: walletName,
          walletId: walletId,
          iconAssetName: Assets.svg.iconFor(coin: coin),
          date: DateTime.fromMillisecondsSinceEpoch(tx.timestamp * 1000),
          shouldWatchForUpdates: false,
          coinName: coin.name,
        ));
        await txTracker.addNotifiedConfirmed(tx.txid);
      }
    }
  }
bool _shouldAutoSync = false ;
@ override
bool get shouldAutoSync = > _shouldAutoSync ;
@ override
set shouldAutoSync ( bool shouldAutoSync ) {
if ( _shouldAutoSync ! = shouldAutoSync ) {
_shouldAutoSync = shouldAutoSync ;
if ( ! shouldAutoSync ) {
timer ? . cancel ( ) ;
timer = null ;
stopNetworkAlivePinging ( ) ;
} else {
startNetworkAlivePinging ( ) ;
refresh ( ) ;
}
}
}
2022-12-05 14:53:21 +00:00
  /// Whether a [refresh] pass is currently in progress.
  @override
  bool get isRefreshing => refreshMutex;

  // Simple re-entrancy guard for refresh(); not thread-safe, relies on the
  // single-isolate event loop.
  bool refreshMutex = false;
2022-11-29 19:11:30 +00:00
  //TODO Show percentages properly/more consistently
  /// Refreshes display data for the wallet
  ///
  /// Fires [WalletSyncStatusChangedEvent]s and [RefreshPercentChangedEvent]s
  /// on the global event bus as it progresses. Guarded by [refreshMutex] so
  /// a concurrent call is dropped rather than queued. On failure the wallet
  /// is reported as [WalletSyncStatus.unableToSync] and the error is logged
  /// and swallowed — this method never throws to callers.
  @override
  Future<void> refresh() async {
    // Drop this call if another refresh is already in flight.
    if (refreshMutex) {
      Logging.instance.log("$walletId $walletName refreshMutex denied",
          level: LogLevel.Info);
      return;
    } else {
      refreshMutex = true;
    }

    try {
      GlobalEventBus.instance.fire(
        WalletSyncStatusChangedEvent(
          WalletSyncStatus.syncing,
          walletId,
          coin,
        ),
      );

      GlobalEventBus.instance.fire(RefreshPercentChangedEvent(0.0, walletId));

      GlobalEventBus.instance.fire(RefreshPercentChangedEvent(0.1, walletId));

      final currentHeight = await chainHeight;
      // NOTE(review): stored height is hard-coded to 1, so the height
      // comparison below always triggers a full refresh — the cached-height
      // short-circuit is effectively disabled.
      const storedHeight = 1; //await storedChainHeight;

      Logging.instance
          .log("chain height: $currentHeight", level: LogLevel.Info);
      Logging.instance
          .log("cached height: $storedHeight", level: LogLevel.Info);

      if (currentHeight != storedHeight) {
        GlobalEventBus.instance.fire(RefreshPercentChangedEvent(0.2, walletId));
        await _checkChangeAddressForTransactions();

        GlobalEventBus.instance.fire(RefreshPercentChangedEvent(0.3, walletId));
        await _checkCurrentReceivingAddressesForTransactions();

        // Start tx-history and UTXO refreshes concurrently; awaited below.
        final fetchFuture = _refreshTransactions();
        final utxosRefreshFuture = _updateUTXOs();
        GlobalEventBus.instance
            .fire(RefreshPercentChangedEvent(0.50, walletId));

        final feeObj = _getFees();
        GlobalEventBus.instance
            .fire(RefreshPercentChangedEvent(0.60, walletId));

        GlobalEventBus.instance
            .fire(RefreshPercentChangedEvent(0.70, walletId));
        _feeObject = Future(() => feeObj);

        await utxosRefreshFuture;
        GlobalEventBus.instance
            .fire(RefreshPercentChangedEvent(0.80, walletId));

        await fetchFuture;
        await getAllTxsToWatch();
        GlobalEventBus.instance
            .fire(RefreshPercentChangedEvent(0.90, walletId));
      }

      refreshMutex = false;
      GlobalEventBus.instance.fire(RefreshPercentChangedEvent(1.0, walletId));
      GlobalEventBus.instance.fire(
        WalletSyncStatusChangedEvent(
          WalletSyncStatus.synced,
          walletId,
          coin,
        ),
      );

      if (shouldAutoSync) {
        // ??= ensures only one periodic timer ever exists per instance.
        timer ??= Timer.periodic(const Duration(seconds: 30), (timer) async {
          Logging.instance.log(
              "Periodic refresh check for $walletId $walletName in object instance: $hashCode",
              level: LogLevel.Info);
          if (await refreshIfThereIsNewData()) {
            await refresh();
            GlobalEventBus.instance.fire(UpdatedInBackgroundEvent(
                "New data found in $walletId $walletName in background!",
                walletId));
          }
        });
      }
    } catch (error, strace) {
      // Release the mutex and report a disconnected/unable-to-sync state;
      // the exception is logged, not rethrown.
      refreshMutex = false;
      GlobalEventBus.instance.fire(
        NodeConnectionStatusChangedEvent(
          NodeConnectionStatus.disconnected,
          walletId,
          coin,
        ),
      );
      GlobalEventBus.instance.fire(
        WalletSyncStatusChangedEvent(
          WalletSyncStatus.unableToSync,
          walletId,
          coin,
        ),
      );
      Logging.instance.log(
          "Caught exception in refreshWalletData(): $error\n$strace",
          level: LogLevel.Error);
    }
  }
  /// Builds (but does not broadcast) a transaction paying [satoshiAmount]
  /// to [address].
  ///
  /// [args] must supply either "feeRate" (a [FeeRateType]) or
  /// "feeRateAmount" (an int); otherwise an [ArgumentError] is thrown.
  /// When [satoshiAmount] equals the spendable balance the transaction is
  /// built as a send-all. Returns the prepared tx data map (containing at
  /// least "hex", "fee" and "vSize") suitable for [confirmSend]; throws on
  /// insufficient balance or fee errors.
  @override
  Future<Map<String, dynamic>> prepareSend({
    required String address,
    required int satoshiAmount,
    Map<String, dynamic>? args,
  }) async {
    try {
      final feeRateType = args?["feeRate"];
      final feeRateAmount = args?["feeRateAmount"];
      if (feeRateType is FeeRateType || feeRateAmount is int) {
        late final int rate;
        if (feeRateType is FeeRateType) {
          // Map the named speed onto the currently cached fee estimates.
          int fee = 0;
          final feeObject = await fees;
          switch (feeRateType) {
            case FeeRateType.fast:
              fee = feeObject.fast;
              break;
            case FeeRateType.average:
              fee = feeObject.medium;
              break;
            case FeeRateType.slow:
              fee = feeObject.slow;
              break;
          }
          rate = fee;
        } else {
          // Caller provided an explicit fee rate.
          rate = feeRateAmount as int;
        }

        // check for send all
        bool isSendAll = false;
        if (satoshiAmount == balance.spendable) {
          isSendAll = true;
        }

        final txData =
            await coinSelection(satoshiAmount, rate, address, isSendAll);

        Logging.instance.log("prepare send: $txData", level: LogLevel.Info);
        try {
          // coinSelection returns an int error code on failure and the tx
          // data map on success.
          if (txData is int) {
            switch (txData) {
              case 1:
                throw Exception("Insufficient balance!");
              case 2:
                throw Exception(
                    "Insufficient funds to pay for transaction fee!");
              default:
                throw Exception("Transaction failed with error code $txData");
            }
          } else {
            final hex = txData["hex"];
            if (hex is String) {
              final fee = txData["fee"] as int;
              final vSize = txData["vSize"] as int;

              Logging.instance
                  .log("prepared txHex: $hex", level: LogLevel.Info);
              Logging.instance.log("prepared fee: $fee", level: LogLevel.Info);
              Logging.instance
                  .log("prepared vSize: $vSize", level: LogLevel.Info);

              // fee should never be less than vSize sanity check
              if (fee < vSize) {
                throw Exception(
                    "Error in fee calculation: Transaction fee cannot be less than vSize");
              }

              return txData as Map<String, dynamic>;
            } else {
              throw Exception("prepared hex is not a String!!!");
            }
          }
        } catch (e, s) {
          Logging.instance.log("Exception rethrown from prepareSend(): $e\n$s",
              level: LogLevel.Error);
          rethrow;
        }
      } else {
        throw ArgumentError("Invalid fee rate argument provided!");
      }
    } catch (e, s) {
      Logging.instance.log("Exception rethrown from prepareSend(): $e\n$s",
          level: LogLevel.Error);
      rethrow;
    }
  }
@ override
2022-12-05 14:53:21 +00:00
Future < String > confirmSend ( { required Map < String , dynamic > txData } ) async {
2022-11-29 19:11:30 +00:00
try {
Logging . instance . log ( " confirmSend txData: $ txData " , level: LogLevel . Info ) ;
2022-12-05 14:53:21 +00:00
final hex = txData [ " hex " ] as String ;
final txHash = await _electrumXClient . broadcastTransaction ( rawTx: hex ) ;
2022-11-29 19:11:30 +00:00
Logging . instance . log ( " Sent txHash: $ txHash " , level: LogLevel . Info ) ;
2022-12-05 14:53:21 +00:00
2022-11-29 19:11:30 +00:00
return txHash ;
} catch ( e , s ) {
Logging . instance . log ( " Exception rethrown from confirmSend(): $ e \n $ s " ,
level: LogLevel . Error ) ;
rethrow ;
}
}
@ override
Future < bool > testNetworkConnection ( ) async {
try {
final result = await _electrumXClient . ping ( ) ;
return result ;
} catch ( _ ) {
return false ;
}
}
Timer ? _networkAliveTimer ;
void startNetworkAlivePinging ( ) {
// call once on start right away
_periodicPingCheck ( ) ;
// then periodically check
_networkAliveTimer = Timer . periodic (
Constants . networkAliveTimerDuration ,
( _ ) async {
_periodicPingCheck ( ) ;
} ,
) ;
}
void _periodicPingCheck ( ) async {
bool hasNetwork = await testNetworkConnection ( ) ;
_isConnected = hasNetwork ;
if ( _isConnected ! = hasNetwork ) {
NodeConnectionStatus status = hasNetwork
? NodeConnectionStatus . connected
: NodeConnectionStatus . disconnected ;
GlobalEventBus . instance
. fire ( NodeConnectionStatusChangedEvent ( status , walletId , coin ) ) ;
}
}
  /// Cancels the periodic network-alive ping, if one is running.
  void stopNetworkAlivePinging() {
    _networkAliveTimer?.cancel();
    _networkAliveTimer = null;
  }

  // Result of the most recent connectivity check.
  bool _isConnected = false;

  /// Whether the last connectivity check succeeded.
  @override
  bool get isConnected => _isConnected;
  /// Creates a brand-new wallet: generates a mnemonic, derives the initial
  /// addresses, then caches the wallet id and favorite flag.
  ///
  /// Throws if a cached wallet id already exists (would overwrite a
  /// wallet). Rethrows any failure from wallet generation.
  @override
  Future<void> initializeNew() async {
    Logging.instance
        .log("Generating new ${coin.prettyName} wallet.", level: LogLevel.Info);

    if (getCachedId() != null) {
      throw Exception(
          "Attempted to initialize a new wallet using an existing wallet ID!");
    }

    await _prefs.init();
    try {
      await _generateNewWallet();
    } catch (e, s) {
      Logging.instance.log("Exception rethrown from initializeNew(): $e\n$s",
          level: LogLevel.Fatal);
      rethrow;
    }

    await Future.wait([
      updateCachedId(walletId),
      updateCachedIsFavorite(false),
    ]);
  }
  /// Initializes an already-created wallet identified by the cached id.
  ///
  /// Throws if no cached wallet id is found.
  @override
  Future<void> initializeExisting() async {
    Logging.instance.log("initializeExisting() ${coin.prettyName} wallet.",
        level: LogLevel.Info);

    if (getCachedId() == null) {
      throw Exception(
          "Attempted to initialize an existing wallet using an unknown wallet ID!");
    }
    await _prefs.init();
    // await _checkCurrentChangeAddressesForTransactions();
    // await _checkCurrentReceivingAddressesForTransactions();
  }
2022-12-05 14:53:21 +00:00
  // TODO make sure this copied implementation from bitcoin_wallet.dart applies for particl just as well--or import it
  // hack to add tx to txData before refresh completes
  // required based on current app architecture where we don't properly store
  // transactions locally in a good way
  /// Inserts a just-broadcast outgoing transaction into the local DB so the
  /// UI can show it immediately, before the next full refresh picks it up.
  ///
  /// [txData] must contain "txid", "recipientAmt" and "fee"; "address" is
  /// optional and, when present and known, links the tx to that address.
  @override
  Future<void> updateSentCachedTxData(Map<String, dynamic> txData) async {
    final transaction = isar_models.Transaction(
      walletId: walletId,
      txid: txData["txid"] as String,
      // Unix seconds, not milliseconds.
      timestamp: DateTime.now().millisecondsSinceEpoch ~/ 1000,
      type: isar_models.TransactionType.outgoing,
      subType: isar_models.TransactionSubType.none,
      amount: txData["recipientAmt"] as int,
      fee: txData["fee"] as int,
      // Unconfirmed: no height/block data yet.
      height: null,
      isCancelled: false,
      isLelantus: false,
      otherData: null,
      slateId: null,
      inputs: [],
      outputs: [],
    );

    final address = txData["address"] is String
        ? await db.getAddress(walletId, txData["address"] as String)
        : null;

    await db.addNewTransactionData(
      [
        Tuple2(transaction, address),
      ],
      walletId,
    );
  }
2022-11-29 19:11:30 +00:00
@ override
bool validateAddress ( String address ) {
2022-11-30 09:02:52 +00:00
return Address . validateAddress ( address , _network , particl . bech32 ! ) ;
2022-11-29 19:11:30 +00:00
}
  /// Unique identifier for this wallet.
  @override
  String get walletId => _walletId;
  late final String _walletId;

  /// User-facing wallet name.
  @override
  String get walletName => _walletName;
  late String _walletName;

  // setter for updating on rename
  @override
  set walletName(String newName) => _walletName = newName;

  // Electrumx clients used for all server communication; rebuilt by
  // updateNode when the selected node changes.
  late ElectrumX _electrumXClient;
  ElectrumX get electrumXClient => _electrumXClient;

  late CachedElectrumX _cachedElectrumXClient;
  CachedElectrumX get cachedElectrumXClient => _cachedElectrumXClient;

  // Secure storage used for the mnemonic and derivation data.
  late SecureStorageInterface _secureStore;
2022-11-29 19:11:30 +00:00
@ override
Future < void > updateNode ( bool shouldRefresh ) async {
2022-11-29 19:41:47 +00:00
final failovers = NodeService ( secureStorageInterface: _secureStore )
2022-11-29 19:11:30 +00:00
. failoverNodesFor ( coin: coin )
. map ( ( e ) = > ElectrumXNode (
address: e . host ,
port: e . port ,
name: e . name ,
id: e . id ,
useSSL: e . useSSL ,
) )
. toList ( ) ;
final newNode = await getCurrentNode ( ) ;
_cachedElectrumXClient = CachedElectrumX . from (
node: newNode ,
prefs: _prefs ,
failovers: failovers ,
) ;
_electrumXClient = ElectrumX . from (
node: newNode ,
prefs: _prefs ,
failovers: failovers ,
) ;
if ( shouldRefresh ) {
unawaited ( refresh ( ) ) ;
}
}
Future < List < String > > _getMnemonicList ( ) async {
2023-02-03 22:34:06 +00:00
final _mnemonicString = await mnemonicString ;
if ( _mnemonicString = = null ) {
2022-11-29 19:11:30 +00:00
return [ ] ;
}
2023-02-03 22:34:06 +00:00
final List < String > data = _mnemonicString . split ( ' ' ) ;
2022-11-29 19:11:30 +00:00
return data ;
}
Future < ElectrumXNode > getCurrentNode ( ) async {
2022-11-30 09:02:52 +00:00
final node = NodeService ( secureStorageInterface: _secureStore )
. getPrimaryNodeFor ( coin: coin ) ? ?
2022-11-29 19:11:30 +00:00
DefaultNodes . getNodeFor ( coin ) ;
return ElectrumXNode (
address: node . host ,
port: node . port ,
name: node . name ,
useSSL: node . useSSL ,
id: node . id ,
) ;
}
2023-01-12 02:09:08 +00:00
  /// Fetches every wallet-owned address (receiving and change subtypes)
  /// from Isar, excluding any of type nonWallet.
  Future<List<isar_models.Address>> _fetchAllOwnAddresses() async {
    final allAddresses = await db
        .getAddresses(walletId)
        .filter()
        // exclude externally-tracked / non-wallet addresses
        .not()
        .typeEqualTo(isar_models.AddressType.nonWallet)
        .and()
        // keep only receiving + change addresses
        .group((q) => q
            .subTypeEqualTo(isar_models.AddressSubType.receiving)
            .or()
            .subTypeEqualTo(isar_models.AddressSubType.change))
        .findAll();

    // Legacy Hive-based implementation kept for reference:
    // final List<String> allAddresses = [];
    // final receivingAddresses = DB.instance.get<dynamic>(
    //     boxName: walletId, key: 'receivingAddressesP2WPKH') as List<dynamic>;
    // final changeAddresses = DB.instance.get<dynamic>(
    //     boxName: walletId, key: 'changeAddressesP2WPKH') as List<dynamic>;
    // final receivingAddressesP2PKH = DB.instance.get<dynamic>(
    //     boxName: walletId, key: 'receivingAddressesP2PKH') as List<dynamic>;
    // final changeAddressesP2PKH =
    //     DB.instance.get<dynamic>(boxName: walletId, key: 'changeAddressesP2PKH')
    //         as List<dynamic>;
    //
    // for (var i = 0; i < receivingAddresses.length; i++) {
    //   if (!allAddresses.contains(receivingAddresses[i])) {
    //     allAddresses.add(receivingAddresses[i] as String);
    //   }
    // }
    // for (var i = 0; i < changeAddresses.length; i++) {
    //   if (!allAddresses.contains(changeAddresses[i])) {
    //     allAddresses.add(changeAddresses[i] as String);
    //   }
    // }
    // for (var i = 0; i < receivingAddressesP2PKH.length; i++) {
    //   if (!allAddresses.contains(receivingAddressesP2PKH[i])) {
    //     allAddresses.add(receivingAddressesP2PKH[i] as String);
    //   }
    // }
    // for (var i = 0; i < changeAddressesP2PKH.length; i++) {
    //   if (!allAddresses.contains(changeAddressesP2PKH[i])) {
    //     allAddresses.add(changeAddressesP2PKH[i] as String);
    //   }
    // }

    return allAddresses;
  }
Future < FeeObject > _getFees ( ) async {
try {
//TODO adjust numbers for different speeds?
const int f = 1 , m = 5 , s = 20 ;
final fast = await electrumXClient . estimateFee ( blocks: f ) ;
final medium = await electrumXClient . estimateFee ( blocks: m ) ;
final slow = await electrumXClient . estimateFee ( blocks: s ) ;
final feeObject = FeeObject (
numberOfBlocksFast: f ,
numberOfBlocksAverage: m ,
numberOfBlocksSlow: s ,
2022-11-29 19:41:47 +00:00
fast: Format . decimalAmountToSatoshis ( fast , coin ) ,
medium: Format . decimalAmountToSatoshis ( medium , coin ) ,
slow: Format . decimalAmountToSatoshis ( slow , coin ) ,
2022-11-29 19:11:30 +00:00
) ;
Logging . instance . log ( " fetched fees: $ feeObject " , level: LogLevel . Info ) ;
return feeObject ;
} catch ( e ) {
Logging . instance
. log ( " Exception rethrown from _getFees(): $ e " , level: LogLevel . Error ) ;
rethrow ;
}
}
  /// Generates and persists a brand-new 256-bit (24-word) mnemonic plus the
  /// wallet's initial receiving/change addresses (both P2WPKH and P2PKH).
  ///
  /// First performs a best-effort genesis-hash check against the connected
  /// server (failures are only logged, so offline creation still works).
  /// Throws if a mnemonic or passphrase is already stored, to avoid
  /// overwriting an existing wallet.
  Future<void> _generateNewWallet() async {
    Logging.instance
        .log("IS_INTEGRATION_TEST: $integrationTestFlag", level: LogLevel.Info);
    if (!integrationTestFlag) {
      try {
        final features = await electrumXClient.getServerFeatures();
        Logging.instance.log("features: $features", level: LogLevel.Info);
        switch (coin) {
          case Coin.particl:
            if (features['genesis_hash'] != GENESIS_HASH_MAINNET) {
              throw Exception("genesis hash does not match main net!");
            }
            break;
          default:
            throw Exception(
                "Attempted to generate a ParticlWallet using a non particl coin type: ${coin.name}");
        }
      } catch (e, s) {
        // Best-effort check only; log and continue.
        // NOTE(review): "/n" in this log string looks like a typo for "\n".
        Logging.instance.log("$e/n$s", level: LogLevel.Info);
      }
    }

    // this should never fail
    if ((await mnemonicString) != null || (await mnemonicPassphrase) != null) {
      throw Exception(
          "Attempted to overwrite mnemonic on generate new wallet!");
    }
    await _secureStore.write(
        key: '${_walletId}_mnemonic',
        value: bip39.generateMnemonic(strength: 256));
    // Empty passphrase stored explicitly so later reads distinguish
    // "no passphrase" from "not yet migrated".
    await _secureStore.write(
      key: '${_walletId}_mnemonicPassphrase',
      value: "",
    );

    // Generate and add addresses to relevant arrays
    final initialAddresses = await Future.wait([
      // P2WPKH
      _generateAddressForChain(0, 0, DerivePathType.bip84),
      _generateAddressForChain(1, 0, DerivePathType.bip84),

      // P2PKH
      _generateAddressForChain(0, 0, DerivePathType.bip44),
      _generateAddressForChain(1, 0, DerivePathType.bip44),
    ]);

    await db.putAddresses(initialAddresses);

    Logging.instance.log("_generateNewWalletFinished", level: LogLevel.Info);
  }
2022-12-05 14:53:21 +00:00
  /// Generates a new internal or external chain address for the wallet using a BIP84, BIP44, or BIP49 derivation path.
  /// [chain] - Use 0 for receiving (external), 1 for change (internal). Should not be any other value!
  /// [index] - This can be any integer >= 0
  ///
  /// Returns the new address model after persisting its pubkey/WIF via
  /// [addDerivation]. Throws for unsupported [derivePathType]s.
  Future<isar_models.Address> _generateAddressForChain(
    int chain,
    int index,
    DerivePathType derivePathType,
  ) async {
    final _mnemonic = await mnemonicString;
    final _mnemonicPassphrase = await mnemonicPassphrase;
    if (_mnemonicPassphrase == null) {
      // NOTE(review): this only logs — the null-assertion on
      // _mnemonicPassphrase below will still throw immediately afterwards.
      Logging.instance.log(
          "Exception in _generateAddressForChain: mnemonic passphrase null, possible migration issue; if using internal builds, delete wallet and restore from seed, if using a release build, please file bug report",
          level: LogLevel.Error);
    }

    final derivePath = constructDerivePath(
      derivePathType: derivePathType,
      networkWIF: _network.wif,
      chain: chain,
      index: index,
    );
    final node = await Bip32Utils.getBip32Node(
      _mnemonic!,
      _mnemonicPassphrase!,
      _network,
      derivePath,
    );

    final data = PaymentData(pubkey: node.publicKey);
    String address;
    isar_models.AddressType addrType;
    switch (derivePathType) {
      case DerivePathType.bip44:
        address = P2PKH(data: data, network: _network).data.address!;
        addrType = isar_models.AddressType.p2pkh;
        break;
      case DerivePathType.bip84:
        address = P2WPKH(network: _network, data: data).data.address!;
        addrType = isar_models.AddressType.p2wpkh;
        break;
      default:
        throw Exception("DerivePathType $derivePathType not supported");
    }

    // add generated address & info to derivations
    await addDerivation(
      chain: chain,
      address: address,
      pubKey: Format.uint8listToString(node.publicKey),
      wif: node.toWIF(),
      derivePathType: derivePathType,
    );

    return isar_models.Address(
      walletId: walletId,
      derivationIndex: index,
      derivationPath: isar_models.DerivationPath()..value = derivePath,
      value: address,
      publicKey: node.publicKey,
      type: addrType,
      subType: chain == 0
          ? isar_models.AddressSubType.receiving
          : isar_models.AddressSubType.change,
    );
  }
  /// Returns the latest receiving/change (external/internal) address for the wallet depending on [chain]
  /// and [derivePathType].
  /// [chain] - Use 0 for receiving (external), 1 for change (internal). Should not be any other value!
  Future<String> _getCurrentAddressForChain(
    int chain,
    DerivePathType derivePathType,
  ) async {
    final subType = chain == 0 // Here, we assume that chain == 1 if it isn't 0
        ? isar_models.AddressSubType.receiving
        : isar_models.AddressSubType.change;

    isar_models.AddressType type;
    isar_models.Address? address;
    switch (derivePathType) {
      case DerivePathType.bip44:
        type = isar_models.AddressType.p2pkh;
        break;
      case DerivePathType.bip84:
        type = isar_models.AddressType.p2wpkh;
        break;
      default:
        throw Exception("DerivePathType $derivePathType not supported");
    }

    // The "current" address is the one with the highest derivation index
    // among matching type/subType.
    address = await db
        .getAddresses(walletId)
        .filter()
        .typeEqualTo(type)
        .subTypeEqualTo(subType)
        .sortByDerivationIndexDesc()
        .findFirst();
    // NOTE(review): the null-assertion throws if no matching address exists
    // yet — presumably callers only invoke this after initial address
    // generation; confirm.
    return address!.value;
  }
2022-12-05 14:53:21 +00:00
String _buildDerivationStorageKey ( {
required int chain ,
required DerivePathType derivePathType ,
} ) {
2022-11-29 19:11:30 +00:00
String key ;
String chainId = chain = = 0 ? " receive " : " change " ;
2022-12-07 12:46:53 +00:00
switch ( derivePathType ) {
case DerivePathType . bip44:
key = " ${ walletId } _ ${ chainId } DerivationsP2PKH " ;
break ;
case DerivePathType . bip84:
key = " ${ walletId } _ ${ chainId } DerivationsP2WPKH " ;
break ;
2023-01-25 18:08:48 +00:00
default :
2023-02-02 15:24:26 +00:00
throw Exception ( " DerivePathType $ derivePathType not supported " ) ;
2022-12-07 12:46:53 +00:00
}
2022-11-29 19:11:30 +00:00
return key ;
}
2022-12-05 14:53:21 +00:00
Future < Map < String , dynamic > > _fetchDerivations ( {
required int chain ,
required DerivePathType derivePathType ,
} ) async {
2022-11-29 19:11:30 +00:00
// build lookup key
final key = _buildDerivationStorageKey (
chain: chain , derivePathType: derivePathType ) ;
// fetch current derivations
final derivationsString = await _secureStore . read ( key: key ) ;
return Map < String , dynamic > . from (
jsonDecode ( derivationsString ? ? " {} " ) as Map ) ;
}
/// Add a single derivation to the local secure storage for [chain] and
/// [derivePathType] where [chain] must either be 1 for change or 0 for receive.
/// This will overwrite a previous entry where the address of the new derivation
/// matches a derivation currently stored.
Future < void > addDerivation ( {
required int chain ,
required String address ,
required String pubKey ,
required String wif ,
required DerivePathType derivePathType ,
} ) async {
// build lookup key
final key = _buildDerivationStorageKey (
chain: chain , derivePathType: derivePathType ) ;
// fetch current derivations
final derivationsString = await _secureStore . read ( key: key ) ;
final derivations =
Map < String , dynamic > . from ( jsonDecode ( derivationsString ? ? " {} " ) as Map ) ;
// add derivation
derivations [ address ] = {
" pubKey " : pubKey ,
" wif " : wif ,
} ;
// save derivations
final newReceiveDerivationsString = jsonEncode ( derivations ) ;
await _secureStore . write ( key: key , value: newReceiveDerivationsString ) ;
}
/// Add multiple derivations to the local secure storage for [chain] and
/// [derivePathType] where [chain] must either be 1 for change or 0 for receive.
/// This will overwrite any previous entries where the address of the new derivation
/// matches a derivation currently stored.
/// The [derivationsToAdd] must be in the format of:
/// {
/// addressA : {
/// "pubKey": <the pubKey string>,
/// "wif": <the wif string>,
/// },
/// addressB : {
/// "pubKey": <the pubKey string>,
/// "wif": <the wif string>,
/// },
/// }
Future < void > addDerivations ( {
required int chain ,
required DerivePathType derivePathType ,
required Map < String , dynamic > derivationsToAdd ,
} ) async {
// build lookup key
final key = _buildDerivationStorageKey (
chain: chain , derivePathType: derivePathType ) ;
// fetch current derivations
final derivationsString = await _secureStore . read ( key: key ) ;
final derivations =
Map < String , dynamic > . from ( jsonDecode ( derivationsString ? ? " {} " ) as Map ) ;
// add derivation
derivations . addAll ( derivationsToAdd ) ;
// save derivations
final newReceiveDerivationsString = jsonEncode ( derivations ) ;
await _secureStore . write ( key: key , value: newReceiveDerivationsString ) ;
}
2023-01-12 02:09:08 +00:00
  /// Fetches all UTXOs for every wallet address (batched up to 100
  /// scripthashes per request), rewrites the wallet's UTXO table in Isar,
  /// and recomputes + caches the wallet [Balance].
  ///
  /// Errors are logged and swallowed, leaving previous state in place.
  Future<void> _updateUTXOs() async {
    final allAddresses = await _fetchAllOwnAddresses();

    try {
      final fetchedUtxoList = <List<Map<String, dynamic>>>[];

      // Group scripthash requests into numbered batches of batchSizeMax.
      final Map<int, Map<String, List<dynamic>>> batches = {};
      const batchSizeMax = 100;
      int batchNumber = 0;
      for (int i = 0; i < allAddresses.length; i++) {
        if (batches[batchNumber] == null) {
          batches[batchNumber] = {};
        }
        final scripthash =
            _convertToScriptHash(allAddresses[i].value, _network);
        // The scripthash itself is used as the per-request key within the
        // batch (duplicate scripthashes would collapse into one entry).
        batches[batchNumber]!.addAll({
          scripthash: [scripthash]
        });
        if (i % batchSizeMax == batchSizeMax - 1) {
          batchNumber++;
        }
      }

      for (int i = 0; i < batches.length; i++) {
        final response =
            await _electrumXClient.getBatchUTXOs(args: batches[i]!);
        for (final entry in response.entries) {
          if (entry.value.isNotEmpty) {
            fetchedUtxoList.add(entry.value);
          }
        }
      }

      final currentChainHeight = await chainHeight;

      final List<isar_models.UTXO> outputArray = [];
      // Satoshi tallies feeding the new Balance below.
      int satoshiBalanceTotal = 0;
      int satoshiBalancePending = 0;
      int satoshiBalanceSpendable = 0;
      int satoshiBalanceBlocked = 0;

      for (int i = 0; i < fetchedUtxoList.length; i++) {
        for (int j = 0; j < fetchedUtxoList[i].length; j++) {
          // Verbose tx lookup for coinbase/blockhash/blocktime metadata
          // (served from cache where possible).
          final txn = await cachedElectrumXClient.getTransaction(
            txHash: fetchedUtxoList[i][j]["tx_hash"] as String,
            verbose: true,
            coin: coin,
          );

          // todo check here if we should mark as blocked
          final utxo = isar_models.UTXO(
            walletId: walletId,
            txid: txn["txid"] as String,
            vout: fetchedUtxoList[i][j]["tx_pos"] as int,
            value: fetchedUtxoList[i][j]["value"] as int,
            name: "",
            isBlocked: false,
            blockedReason: null,
            isCoinbase: txn["is_coinbase"] as bool? ?? false,
            blockHash: txn["blockhash"] as String?,
            blockHeight: fetchedUtxoList[i][j]["height"] as int?,
            blockTime: txn["blocktime"] as int?,
          );

          satoshiBalanceTotal += utxo.value;

          if (utxo.isBlocked) {
            satoshiBalanceBlocked += utxo.value;
          } else {
            if (utxo.isConfirmed(currentChainHeight, MINIMUM_CONFIRMATIONS)) {
              satoshiBalanceSpendable += utxo.value;
            } else {
              satoshiBalancePending += utxo.value;
            }
          }

          outputArray.add(utxo);
        }
      }

      Logging.instance
          .log('Outputs fetched: $outputArray', level: LogLevel.Info);

      // TODO move this out of here and into IDB
      // Replace this wallet's UTXO set wholesale inside one Isar txn.
      await db.isar.writeTxn(() async {
        await db.isar.utxos.where().walletIdEqualTo(walletId).deleteAll();
        await db.isar.utxos.putAll(outputArray);
      });

      // finally update balance
      _balance = Balance(
        coin: coin,
        total: satoshiBalanceTotal,
        spendable: satoshiBalanceSpendable,
        blockedTotal: satoshiBalanceBlocked,
        pendingSpendable: satoshiBalancePending,
      );
      await updateCachedBalance(_balance!);
    } catch (e, s) {
      Logging.instance
          .log("Output fetch unsuccessful: $e\n$s", level: LogLevel.Error);
    }
  }
2023-01-12 02:09:08 +00:00
  /// Current wallet balance; falls back to the persisted cached balance
  /// until [_updateUTXOs] computes a fresh one.
  @override
  Balance get balance => _balance ??= getCachedBalance();

  // In-memory balance; null until first read or refresh.
  Balance? _balance;
// /// Takes in a list of UtxoObjects and adds a name (dependent on object index within list)
// /// and checks for the txid associated with the utxo being blocked and marks it accordingly.
// /// Now also checks for output labeling.
// Future<void> _sortOutputs(List<UtxoObject> utxos) async {
// final blockedHashArray =
// DB.instance.get<dynamic>(boxName: walletId, key: 'blocked_tx_hashes')
// as List<dynamic>?;
// final List<String> lst = [];
// if (blockedHashArray != null) {
// for (var hash in blockedHashArray) {
// lst.add(hash as String);
// }
// }
// final labels =
// DB.instance.get<dynamic>(boxName: walletId, key: 'labels') as Map? ??
// {};
//
// outputsList = [];
//
// for (var i = 0; i < utxos.length; i++) {
// if (labels[utxos[i].txid] != null) {
// utxos[i].txName = labels[utxos[i].txid] as String? ?? "";
// } else {
// utxos[i].txName = 'Output #$i';
// }
//
// if (utxos[i].status.confirmed == false) {
// outputsList.add(utxos[i]);
// } else {
// if (lst.contains(utxos[i].txid)) {
// utxos[i].blocked = true;
// outputsList.add(utxos[i]);
// } else if (!lst.contains(utxos[i].txid)) {
// outputsList.add(utxos[i]);
// }
// }
// }
// }
2022-11-29 19:11:30 +00:00
Future < int > getTxCount ( { required String address } ) async {
String ? scripthash ;
try {
scripthash = _convertToScriptHash ( address , _network ) ;
final transactions =
await electrumXClient . getHistory ( scripthash: scripthash ) ;
return transactions . length ;
} catch ( e ) {
Logging . instance . log (
" Exception rethrown in _getTxCount(address: $ address , scripthash: $ scripthash ): $ e " ,
level: LogLevel . Error ) ;
rethrow ;
}
}
Future < Map < String , int > > _getBatchTxCount ( {
required Map < String , String > addresses ,
} ) async {
try {
final Map < String , List < dynamic > > args = { } ;
for ( final entry in addresses . entries ) {
args [ entry . key ] = [ _convertToScriptHash ( entry . value , _network ) ] ;
}
2022-12-04 14:17:41 +00:00
final response = await electrumXClient . getBatchHistory ( args: args ) ;
2022-12-05 14:53:21 +00:00
2022-11-29 19:11:30 +00:00
final Map < String , int > result = { } ;
for ( final entry in response . entries ) {
result [ entry . key ] = entry . value . length ;
}
return result ;
} catch ( e , s ) {
Logging . instance . log (
" Exception rethrown in _getBatchTxCount(address: $ addresses : $ e \n $ s " ,
level: LogLevel . Error ) ;
rethrow ;
}
}
2023-01-12 02:09:08 +00:00
  /// Ensures the current receiving address is unused: if it has any tx
  /// history (or an invalid negative derivation index), derives the next
  /// receiving address, stores it, and recurses until an unused address is
  /// current. Logs and rethrows on failure.
  Future<void> _checkReceivingAddressForTransactions() async {
    try {
      final currentReceiving = await _currentReceivingAddress;

      final int txCount = await getTxCount(address: currentReceiving.value);
      Logging.instance.log(
          'Number of txs for current receiving address $currentReceiving: $txCount',
          level: LogLevel.Info);

      if (txCount >= 1 || currentReceiving.derivationIndex < 0) {
        // First increment the receiving index
        final newReceivingIndex = currentReceiving.derivationIndex + 1;

        // Use new index to derive a new receiving address
        final newReceivingAddress = await _generateAddressForChain(
            0, newReceivingIndex, DerivePathTypeExt.primaryFor(coin));

        // Avoid duplicate rows if the derived address already exists in DB.
        final existing = await db
            .getAddresses(walletId)
            .filter()
            .valueEqualTo(newReceivingAddress.value)
            .findFirst();
        if (existing == null) {
          // Add that new change address
          await db.putAddress(newReceivingAddress);
        } else {
          // we need to update the address
          await db.updateAddress(existing, newReceivingAddress);
        }
        // keep checking until address with no tx history is set as current
        await _checkReceivingAddressForTransactions();
      }
    } catch (e, s) {
      Logging.instance.log(
          "Exception rethrown from _checkReceivingAddressForTransactions(${DerivePathTypeExt.primaryFor(coin)}): $e\n$s",
          level: LogLevel.Error);
      rethrow;
    }
  }
2023-01-12 02:09:08 +00:00
Future < void > _checkChangeAddressForTransactions ( ) async {
2022-11-29 19:11:30 +00:00
try {
2023-01-12 02:09:08 +00:00
final currentChange = await _currentChangeAddress ;
final int txCount = await getTxCount ( address: currentChange . value ) ;
2022-11-29 19:11:30 +00:00
Logging . instance . log (
2023-01-12 02:09:08 +00:00
' Number of txs for current change address $ currentChange : $ txCount ' ,
2022-11-29 19:11:30 +00:00
level: LogLevel . Info ) ;
2023-01-23 16:32:53 +00:00
if ( txCount > = 1 | | currentChange . derivationIndex < 0 ) {
2022-11-29 19:11:30 +00:00
// First increment the change index
2023-01-12 02:09:08 +00:00
final newChangeIndex = currentChange . derivationIndex + 1 ;
2022-11-29 19:11:30 +00:00
// Use new index to derive a new change address
2023-01-12 02:09:08 +00:00
final newChangeAddress = await _generateAddressForChain (
2023-01-25 19:49:14 +00:00
1 , newChangeIndex , DerivePathTypeExt . primaryFor ( coin ) ) ;
2022-11-29 19:11:30 +00:00
2023-01-18 22:55:59 +00:00
final existing = await db
. getAddresses ( walletId )
. filter ( )
. valueEqualTo ( newChangeAddress . value )
. findFirst ( ) ;
if ( existing = = null ) {
// Add that new change address
await db . putAddress ( newChangeAddress ) ;
} else {
// we need to update the address
await db . updateAddress ( existing , newChangeAddress ) ;
}
2023-01-23 16:32:53 +00:00
// keep checking until address with no tx history is set as current
await _checkChangeAddressForTransactions ( ) ;
2022-11-29 19:11:30 +00:00
}
2022-12-05 14:53:21 +00:00
} on SocketException catch ( se , s ) {
Logging . instance . log (
2023-01-25 19:49:14 +00:00
" SocketException caught in _checkReceivingAddressForTransactions( ${ DerivePathTypeExt . primaryFor ( coin ) } ): $ se \n $ s " ,
2022-12-05 14:53:21 +00:00
level: LogLevel . Error ) ;
return ;
2022-11-29 19:11:30 +00:00
} catch ( e , s ) {
Logging . instance . log (
2023-01-25 19:49:14 +00:00
" Exception rethrown from _checkReceivingAddressForTransactions( ${ DerivePathTypeExt . primaryFor ( coin ) } ): $ e \n $ s " ,
2022-11-29 19:11:30 +00:00
level: LogLevel . Error ) ;
rethrow ;
}
}
Future < void > _checkCurrentReceivingAddressesForTransactions ( ) async {
try {
2023-01-12 02:09:08 +00:00
// for (final type in DerivePathType.values) {
await _checkReceivingAddressForTransactions ( ) ;
// }
2022-11-29 19:11:30 +00:00
} catch ( e , s ) {
Logging . instance . log (
" Exception rethrown from _checkCurrentReceivingAddressesForTransactions(): $ e \n $ s " ,
2022-12-05 14:53:21 +00:00
level: LogLevel . Error ) ;
2022-11-29 19:11:30 +00:00
rethrow ;
}
}
/// public wrapper because dart can't test private...
Future < void > checkCurrentReceivingAddressesForTransactions ( ) async {
if ( Platform . environment [ " FLUTTER_TEST " ] = = " true " ) {
try {
return _checkCurrentReceivingAddressesForTransactions ( ) ;
} catch ( _ ) {
rethrow ;
}
}
}
Future < void > _checkCurrentChangeAddressesForTransactions ( ) async {
try {
2023-01-12 02:09:08 +00:00
// for (final type in DerivePathType.values) {
await _checkChangeAddressForTransactions ( ) ;
// }
2022-11-29 19:11:30 +00:00
} catch ( e , s ) {
Logging . instance . log (
" Exception rethrown from _checkCurrentChangeAddressesForTransactions(): $ e \n $ s " ,
level: LogLevel . Error ) ;
rethrow ;
}
}
/// public wrapper because dart can't test private...
Future < void > checkCurrentChangeAddressesForTransactions ( ) async {
if ( Platform . environment [ " FLUTTER_TEST " ] = = " true " ) {
try {
return _checkCurrentChangeAddressesForTransactions ( ) ;
} catch ( _ ) {
rethrow ;
}
}
}
/// attempts to convert a string to a valid scripthash
///
2022-12-05 14:53:21 +00:00
/// Returns the scripthash or throws an exception on invalid particl address
2022-11-29 19:11:30 +00:00
String _convertToScriptHash ( String particlAddress , NetworkType network ) {
try {
2022-11-30 09:02:52 +00:00
final output = Address . addressToOutputScript (
particlAddress , network , particl . bech32 ! ) ;
2022-11-29 19:11:30 +00:00
final hash = sha256 . convert ( output . toList ( growable: false ) ) . toString ( ) ;
final chars = hash . split ( " " ) ;
final reversedPairs = < String > [ ] ;
var i = chars . length - 1 ;
while ( i > 0 ) {
reversedPairs . add ( chars [ i - 1 ] ) ;
reversedPairs . add ( chars [ i ] ) ;
i - = 2 ;
}
return reversedPairs . join ( " " ) ;
} catch ( e ) {
rethrow ;
}
}
Future < List < Map < String , dynamic > > > _fetchHistory (
List < String > allAddresses ) async {
try {
List < Map < String , dynamic > > allTxHashes = [ ] ;
final Map < int , Map < String , List < dynamic > > > batches = { } ;
final Map < String , String > requestIdToAddressMap = { } ;
2022-12-05 14:53:21 +00:00
const batchSizeMax = 100 ;
2022-11-29 19:11:30 +00:00
int batchNumber = 0 ;
for ( int i = 0 ; i < allAddresses . length ; i + + ) {
if ( batches [ batchNumber ] = = null ) {
batches [ batchNumber ] = { } ;
}
final scripthash = _convertToScriptHash ( allAddresses [ i ] , _network ) ;
final id = Logger . isTestEnv ? " $ i " : const Uuid ( ) . v1 ( ) ;
requestIdToAddressMap [ id ] = allAddresses [ i ] ;
batches [ batchNumber ] ! . addAll ( {
id: [ scripthash ]
} ) ;
if ( i % batchSizeMax = = batchSizeMax - 1 ) {
batchNumber + + ;
}
}
for ( int i = 0 ; i < batches . length ; i + + ) {
final response =
await _electrumXClient . getBatchHistory ( args: batches [ i ] ! ) ;
for ( final entry in response . entries ) {
for ( int j = 0 ; j < entry . value . length ; j + + ) {
entry . value [ j ] [ " address " ] = requestIdToAddressMap [ entry . key ] ;
if ( ! allTxHashes . contains ( entry . value [ j ] ) ) {
allTxHashes . add ( entry . value [ j ] ) ;
}
}
}
}
return allTxHashes ;
} catch ( e , s ) {
Logging . instance . log ( " _fetchHistory: $ e \n $ s " , level: LogLevel . Error ) ;
rethrow ;
}
}
bool _duplicateTxCheck (
List < Map < String , dynamic > > allTransactions , String txid ) {
for ( int i = 0 ; i < allTransactions . length ; i + + ) {
if ( allTransactions [ i ] [ " txid " ] = = txid ) {
return true ;
}
}
return false ;
}
Future < List < Map < String , dynamic > > > fastFetch ( List < String > allTxHashes ) async {
List < Map < String , dynamic > > allTransactions = [ ] ;
const futureLimit = 30 ;
List < Future < Map < String , dynamic > > > transactionFutures = [ ] ;
int currentFutureCount = 0 ;
for ( final txHash in allTxHashes ) {
Future < Map < String , dynamic > > transactionFuture =
cachedElectrumXClient . getTransaction (
txHash: txHash ,
verbose: true ,
coin: coin ,
) ;
transactionFutures . add ( transactionFuture ) ;
currentFutureCount + + ;
if ( currentFutureCount > futureLimit ) {
currentFutureCount = 0 ;
await Future . wait ( transactionFutures ) ;
for ( final fTx in transactionFutures ) {
final tx = await fTx ;
allTransactions . add ( tx ) ;
}
}
}
if ( currentFutureCount ! = 0 ) {
currentFutureCount = 0 ;
await Future . wait ( transactionFutures ) ;
for ( final fTx in transactionFutures ) {
final tx = await fTx ;
allTransactions . add ( tx ) ;
}
}
return allTransactions ;
}
2023-01-12 02:09:08 +00:00
  /// Fetches the wallet's transaction history from electrumx and syncs it
  /// into the local Isar database.
  ///
  /// Overview:
  ///   1. Gather all wallet addresses and fetch their electrumx histories.
  ///   2. Pre-warm the tx cache ([fastFetch]) for every referenced tx hash.
  ///   3. For each tx not yet stored (or not yet fully confirmed), fetch the
  ///      verbose transaction, classify it as incoming/outgoing by checking
  ///      whether any input was spent from a wallet address, and compute the
  ///      amount and fee.
  ///   4. Persist parsed transactions and fire an update event.
  Future<void> _refreshTransactions() async {
    final allAddresses = await _fetchAllOwnAddresses();

    // Change addresses are used below to exclude change outputs from the
    // "sent" amount of outgoing transactions.
    List<String> changeAddresses = allAddresses
        .where((e) => e.subType == isar_models.AddressSubType.change)
        .map((e) => e.value)
        .toList();

    final List<Map<String, dynamic>> allTxHashes = await _fetchHistory(
        allAddresses.map((e) => e.value).toList(growable: false));

    // De-duplicate the tx hashes and warm the electrumx tx cache so the
    // per-tx fetches below hit cache. The return value is ignored.
    Set<String> hashes = {};
    for (var element in allTxHashes) {
      hashes.add(element['tx_hash'] as String);
    }

    await fastFetch(hashes.toList());
    List<Map<String, dynamic>> allTransactions = [];
    final currentHeight = await chainHeight;

    for (final txHash in allTxHashes) {
      // Skip txs already stored with enough confirmations; anything newer
      // is re-fetched so its confirmation state can be updated.
      final storedTx = await db
          .getTransactions(walletId)
          .filter()
          .txidEqualTo(txHash["tx_hash"] as String)
          .findFirst();

      if (storedTx == null ||
          !storedTx.isConfirmed(currentHeight, MINIMUM_CONFIRMATIONS)) {
        final tx = await cachedElectrumXClient.getTransaction(
          txHash: txHash["tx_hash"] as String,
          verbose: true,
          coin: coin,
        );

        if (!_duplicateTxCheck(allTransactions, tx["txid"] as String)) {
          // Attach the wallet address record this history entry belongs to.
          // NOTE(review): the `!` assumes the address always exists in the
          // db — _fetchHistory only queries wallet addresses, so this should
          // hold, but a db/address-table mismatch would throw here.
          tx["address"] = (await db
              .getAddresses(walletId)
              .filter()
              .valueEqualTo(txHash["address"] as String)
              .findFirst())!;
          tx["height"] = txHash["height"];
          allTransactions.add(tx);
        }
      }
    }

    Logging.instance.log("addAddresses: $allAddresses",
        level: LogLevel.Info, printFullLength: true);
    Logging.instance.log("allTxHashes: $allTxHashes",
        level: LogLevel.Info, printFullLength: true);

    Logging.instance.log("allTransactions length: ${allTransactions.length}",
        level: LogLevel.Info);

    // final List<Map<String, dynamic>> midSortedArray = [];

    // Warm the cache for every previous tx referenced by an input, so the
    // input-value lookups in the loop below are cache hits.
    Set<String> vHashes = {};
    for (final txObject in allTransactions) {
      for (int i = 0; i < (txObject["vin"] as List).length; i++) {
        final input = txObject["vin"]![i] as Map;
        final prevTxid = input["txid"] as String;
        vHashes.add(prevTxid);
      }
    }
    await fastFetch(vHashes.toList());

    final List<Tuple2<isar_models.Transaction, isar_models.Address?>> txns = [];

    for (final txObject in allTransactions) {
      List<String> sendersArray = [];
      List<String> recipientsArray = [];

      // Usually only has value when txType = 'Send'
      int inputAmtSentFromWallet = 0;
      // Usually has value regardless of txType due to change addresses
      int outputAmtAddressedToWallet = 0;
      int fee = 0;

      Map<String, dynamic> midSortedTx = {};

      // Resolve each input's source address by looking up the previous tx
      // and matching the spent output index.
      for (int i = 0; i < (txObject["vin"] as List).length; i++) {
        final input = txObject["vin"]![i] as Map;
        final prevTxid = input["txid"] as String;
        final prevOut = input["vout"] as int;
        final tx = await _cachedElectrumXClient.getTransaction(
          txHash: prevTxid,
          coin: coin,
        );
        for (final out in tx["vout"] as List) {
          if (prevOut == out["n"]) {
            // Newer daemons report "address"; older ones "addresses"[0].
            final address = out["scriptPubKey"]?["address"] as String? ??
                out["scriptPubKey"]?["addresses"]?[0] as String?;
            if (address != null) {
              sendersArray.add(address);
            }
          }
        }
      }

      Logging.instance.log("sendersArray: $sendersArray", level: LogLevel.Info);

      for (final output in txObject["vout"] as List) {
        // Particl has different tx types that need to be detected and handled here
        if (output.containsKey('scriptPubKey') as bool) {
          // Logging.instance.log("output is transparent", level: LogLevel.Info);
          final address = output["scriptPubKey"]?["address"] as String? ??
              output["scriptPubKey"]?["addresses"]?[0] as String?;
          if (address != null) {
            recipientsArray.add(address);
          }
        } else if (output.containsKey('ct_fee') as bool) {
          // or type: data
          Logging.instance.log("output is blinded (CT)", level: LogLevel.Info);
        } else if (output.containsKey('rangeproof') as bool) {
          // or valueCommitment or type: anon
          Logging.instance
              .log("output is private (RingCT)", level: LogLevel.Info);
        } else {
          // TODO detect staking
          Logging.instance.log("output type not detected; output: $output",
              level: LogLevel.Info);
        }
      }

      Logging.instance
          .log("recipientsArray: $recipientsArray", level: LogLevel.Info);

      // Outgoing if any input was spent from one of our own addresses.
      final foundInSenders =
          allAddresses.any((element) => sendersArray.contains(element.value));
      Logging.instance
          .log("foundInSenders: $foundInSenders", level: LogLevel.Info);

      // If txType = Sent, then calculate inputAmtSentFromWallet
      if (foundInSenders) {
        int totalInput = 0;
        // Sum all input values (re-resolved from the previous txs).
        for (int i = 0; i < (txObject["vin"] as List).length; i++) {
          final input = txObject["vin"]![i] as Map;
          final prevTxid = input["txid"] as String;
          final prevOut = input["vout"] as int;
          final tx = await _cachedElectrumXClient.getTransaction(
            txHash: prevTxid,
            coin: coin,
          );
          for (final out in tx["vout"] as List) {
            if (prevOut == out["n"]) {
              inputAmtSentFromWallet +=
                  (Decimal.parse(out["value"]!.toString()) *
                          Decimal.fromInt(Constants.satsPerCoin(coin)))
                      .toBigInt()
                      .toInt();
            }
          }
        }
        totalInput = inputAmtSentFromWallet;
        int totalOutput = 0;

        Logging.instance.log("txObject: $txObject", level: LogLevel.Info);

        for (final output in txObject["vout"] as List) {
          // Particl has different tx types that need to be detected and handled here
          if (output.containsKey('scriptPubKey') as bool) {
            try {
              // NOTE(review): uses `!["addresses"][0]` here (unlike the
              // null-aware lookups elsewhere); a missing key throws and is
              // swallowed by the catch below, skipping this output.
              final String address =
                  output["scriptPubKey"]!["addresses"][0] as String;
              final value = output["value"]!;
              final _value = (Decimal.parse(value.toString()) *
                      Decimal.fromInt(Constants.satsPerCoin(coin)))
                  .toBigInt()
                  .toInt();
              totalOutput += _value;
              if (changeAddresses.contains(address)) {
                // Change back to ourselves doesn't count as "sent".
                inputAmtSentFromWallet -= _value;
              } else {
                // change address from 'sent from' to the 'sent to' address
                txObject["address"] = await db
                        .getAddresses(walletId)
                        .filter()
                        .valueEqualTo(address)
                        .findFirst() ??
                    isar_models.Address(
                      walletId: walletId,
                      type: isar_models.AddressType.nonWallet,
                      subType: isar_models.AddressSubType.nonWallet,
                      value: address,
                      publicKey: [],
                      derivationIndex: -1,
                      derivationPath: null,
                    );
              }
            } catch (s) {
              Logging.instance.log(s.toString(), level: LogLevel.Warning);
            }
            // Logging.instance.log("output is transparent", level: LogLevel.Info);
          } else if (output.containsKey('ct_fee') as bool) {
            // or type: data
            // TODO handle CT tx
            Logging.instance.log(
                "output is blinded (CT); cannot parse output values",
                level: LogLevel.Info);
            final ctFee = output["ct_fee"]!;
            final feeValue = (Decimal.parse(ctFee.toString()) *
                    Decimal.fromInt(Constants.satsPerCoin(coin)))
                .toBigInt()
                .toInt();
            Logging.instance.log(
                "ct_fee $ctFee subtracted from inputAmtSentFromWallet $inputAmtSentFromWallet",
                level: LogLevel.Info);
            // NOTE(review): the log message says "subtracted" but the code
            // ADDS the CT fee here (it is then removed again via the final
            // `inputAmtSentFromWallet -= fee` below) — confirm intent.
            inputAmtSentFromWallet += feeValue;
          } else if (output.containsKey('rangeproof') as bool) {
            // or valueCommitment or type: anon
            // TODO handle RingCT tx
            Logging.instance.log(
                "output is private (RingCT); cannot parse output values",
                level: LogLevel.Info);
          } else {
            // TODO detect staking
            Logging.instance.log("output type not detected; output: $output",
                level: LogLevel.Info);
          }
        }
        // calculate transaction fee
        fee = totalInput - totalOutput;
        // subtract fee from sent to calculate correct value of sent tx
        inputAmtSentFromWallet -= fee;
      } else {
        // counters for fee calculation
        int totalOut = 0;
        int totalIn = 0;

        // add up received tx value
        for (final output in txObject["vout"] as List) {
          try {
            final address = output["scriptPubKey"]?["address"] as String? ??
                output["scriptPubKey"]?["addresses"]?[0] as String?;
            if (address != null) {
              final value = (Decimal.parse((output["value"] ?? 0).toString()) *
                      Decimal.fromInt(Constants.satsPerCoin(coin)))
                  .toBigInt()
                  .toInt();
              totalOut += value;
              // Only outputs paying one of our own addresses count as
              // received.
              if (allAddresses.where((e) => e.value == address).isNotEmpty) {
                outputAmtAddressedToWallet += value;
              }
            }
          } catch (s) {
            Logging.instance.log(s.toString(), level: LogLevel.Info);
          }
        }

        // calculate fee for received tx
        for (int i = 0; i < (txObject["vin"] as List).length; i++) {
          final input = txObject["vin"][i] as Map;
          final prevTxid = input["txid"] as String;
          final prevOut = input["vout"] as int;
          final tx = await _cachedElectrumXClient.getTransaction(
            txHash: prevTxid,
            coin: coin,
          );
          for (final out in tx["vout"] as List) {
            if (prevOut == out["n"]) {
              totalIn += (Decimal.parse((out["value"] ?? 0).toString()) *
                      Decimal.fromInt(Constants.satsPerCoin(coin)))
                  .toBigInt()
                  .toInt();
            }
          }
        }
        fee = totalIn - totalOut;
      }

      // create final tx map
      midSortedTx["txid"] = txObject["txid"];
      // Fall back to "now" for unconfirmed txs with no blocktime yet.
      midSortedTx["timestamp"] = txObject["blocktime"] ??
          (DateTime.now().millisecondsSinceEpoch ~/ 1000);
      midSortedTx["address"] = txObject["address"];
      midSortedTx["inputs"] = txObject["vin"];
      midSortedTx["outputs"] = txObject["vout"];
      // midSortedArray.add(midSortedTx);

      isar_models.TransactionType type;
      int amount;
      if (foundInSenders) {
        type = isar_models.TransactionType.outgoing;
        amount = inputAmtSentFromWallet;
      } else {
        type = isar_models.TransactionType.incoming;
        amount = outputAmtAddressedToWallet;
      }

      // For outgoing txs this may have been swapped above to the recipient's
      // (possibly non-wallet) address record.
      isar_models.Address transactionAddress =
          midSortedTx["address"] as isar_models.Address;

      List<isar_models.Input> inputs = [];
      List<isar_models.Output> outputs = [];

      for (final json in txObject["vin"] as List) {
        bool isCoinBase = json['coinbase'] != null;
        final input = isar_models.Input(
          txid: json['txid'] as String,
          vout: json['vout'] as int? ?? -1,
          scriptSig: json['scriptSig']?['hex'] as String?,
          scriptSigAsm: json['scriptSig']?['asm'] as String?,
          isCoinbase: isCoinBase ? isCoinBase : json['is_coinbase'] as bool?,
          sequence: json['sequence'] as int?,
          innerRedeemScriptAsm: json['innerRedeemscriptAsm'] as String?,
        );
        inputs.add(input);
      }

      for (final json in txObject["vout"] as List) {
        final output = isar_models.Output(
          scriptPubKey: json['scriptPubKey']?['hex'] as String?,
          scriptPubKeyAsm: json['scriptPubKey']?['asm'] as String?,
          scriptPubKeyType: json['scriptPubKey']?['type'] as String?,
          // Fall back to the script type (then "") for non-standard /
          // blinded outputs that carry no address.
          scriptPubKeyAddress:
              json["scriptPubKey"]?["addresses"]?[0] as String? ??
                  json['scriptPubKey']?['type'] as String? ??
                  "",
          value: Format.decimalAmountToSatoshis(
            Decimal.parse((json["value"] ?? 0).toString()),
            coin,
          ),
        );
        outputs.add(output);
      }

      final tx = isar_models.Transaction(
        walletId: walletId,
        txid: midSortedTx["txid"] as String,
        timestamp: midSortedTx["timestamp"] as int,
        type: type,
        subType: isar_models.TransactionSubType.none,
        amount: amount,
        fee: fee,
        height: txObject["height"] as int,
        inputs: inputs,
        outputs: outputs,
        isCancelled: false,
        isLelantus: false,
        slateId: null,
        otherData: null,
      );
      txns.add(Tuple2(tx, transactionAddress));
    }

    await db.addNewTransactionData(txns, walletId);

    // quick hack to notify manager to call notifyListeners if
    // transactions changed
    if (txns.isNotEmpty) {
      GlobalEventBus.instance.fire(
        UpdatedInBackgroundEvent(
          "Transactions updated/added for: $walletId $walletName",
          walletId,
        ),
      );
    }
  }
int estimateTxFee ( { required int vSize , required int feeRatePerKB } ) {
return vSize * ( feeRatePerKB / 1000 ) . ceil ( ) ;
}
/// The coinselection algorithm decides whether or not the user is eligible to make the transaction
/// with [satoshiAmountToSend] and [selectedTxFeeRate]. If so, it will call buildTrasaction() and return
/// a map containing the tx hex along with other important information. If not, then it will return
/// an integer (1 or 2)
2022-12-05 14:53:21 +00:00
dynamic coinSelection (
int satoshiAmountToSend ,
int selectedTxFeeRate ,
String _recipientAddress ,
bool isSendAll , {
int additionalOutputs = 0 ,
2023-01-12 02:09:08 +00:00
List < isar_models . UTXO > ? utxos ,
2022-12-05 14:53:21 +00:00
} ) async {
2022-11-29 19:11:30 +00:00
Logging . instance
. log ( " Starting coinSelection ---------- " , level: LogLevel . Info ) ;
2023-01-12 02:09:08 +00:00
final List < isar_models . UTXO > availableOutputs = utxos ? ? await this . utxos ;
final currentChainHeight = await chainHeight ;
final List < isar_models . UTXO > spendableOutputs = [ ] ;
2022-11-29 19:11:30 +00:00
int spendableSatoshiValue = 0 ;
// Build list of spendable outputs and totaling their satoshi amount
for ( var i = 0 ; i < availableOutputs . length ; i + + ) {
2023-01-12 02:09:08 +00:00
if ( availableOutputs [ i ] . isBlocked = = false & &
availableOutputs [ i ]
. isConfirmed ( currentChainHeight , MINIMUM_CONFIRMATIONS ) = =
true ) {
2022-11-29 19:11:30 +00:00
spendableOutputs . add ( availableOutputs [ i ] ) ;
spendableSatoshiValue + = availableOutputs [ i ] . value ;
}
}
// sort spendable by age (oldest first)
2023-01-12 02:09:08 +00:00
spendableOutputs . sort ( ( a , b ) = > b . blockTime ! . compareTo ( a . blockTime ! ) ) ;
2022-11-29 19:11:30 +00:00
Logging . instance . log ( " spendableOutputs.length: ${ spendableOutputs . length } " ,
level: LogLevel . Info ) ;
Logging . instance
. log ( " spendableOutputs: $ spendableOutputs " , level: LogLevel . Info ) ;
Logging . instance . log ( " spendableSatoshiValue: $ spendableSatoshiValue " ,
level: LogLevel . Info ) ;
Logging . instance
. log ( " satoshiAmountToSend: $ satoshiAmountToSend " , level: LogLevel . Info ) ;
// If the amount the user is trying to send is smaller than the amount that they have spendable,
// then return 1, which indicates that they have an insufficient balance.
if ( spendableSatoshiValue < satoshiAmountToSend ) {
return 1 ;
// If the amount the user wants to send is exactly equal to the amount they can spend, then return
// 2, which indicates that they are not leaving enough over to pay the transaction fee
} else if ( spendableSatoshiValue = = satoshiAmountToSend & & ! isSendAll ) {
return 2 ;
}
// If neither of these statements pass, we assume that the user has a spendable balance greater
// than the amount they're attempting to send. Note that this value still does not account for
// the added transaction fee, which may require an extra input and will need to be checked for
// later on.
// Possible situation right here
int satoshisBeingUsed = 0 ;
int inputsBeingConsumed = 0 ;
2023-01-12 02:09:08 +00:00
List < isar_models . UTXO > utxoObjectsToUse = [ ] ;
2022-11-29 19:11:30 +00:00
for ( var i = 0 ;
satoshisBeingUsed < satoshiAmountToSend & & i < spendableOutputs . length ;
i + + ) {
utxoObjectsToUse . add ( spendableOutputs [ i ] ) ;
satoshisBeingUsed + = spendableOutputs [ i ] . value ;
inputsBeingConsumed + = 1 ;
}
for ( int i = 0 ;
i < additionalOutputs & & inputsBeingConsumed < spendableOutputs . length ;
i + + ) {
utxoObjectsToUse . add ( spendableOutputs [ inputsBeingConsumed ] ) ;
satoshisBeingUsed + = spendableOutputs [ inputsBeingConsumed ] . value ;
inputsBeingConsumed + = 1 ;
}
Logging . instance
. log ( " satoshisBeingUsed: $ satoshisBeingUsed " , level: LogLevel . Info ) ;
Logging . instance
. log ( " inputsBeingConsumed: $ inputsBeingConsumed " , level: LogLevel . Info ) ;
Logging . instance
. log ( ' utxoObjectsToUse: $ utxoObjectsToUse ' , level: LogLevel . Info ) ;
// numberOfOutputs' length must always be equal to that of recipientsArray and recipientsAmtArray
List < String > recipientsArray = [ _recipientAddress ] ;
List < int > recipientsAmtArray = [ satoshiAmountToSend ] ;
// gather required signing data
final utxoSigningData = await fetchBuildTxData ( utxoObjectsToUse ) ;
if ( isSendAll ) {
Logging . instance
. log ( " Attempting to send all $ coin " , level: LogLevel . Info ) ;
final int vSizeForOneOutput = ( await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: [ _recipientAddress ] ,
satoshiAmounts: [ satoshisBeingUsed - 1 ] ,
) ) [ " vSize " ] as int ;
int feeForOneOutput = estimateTxFee (
vSize: vSizeForOneOutput ,
feeRatePerKB: selectedTxFeeRate ,
) ;
2022-12-05 14:53:21 +00:00
final int roughEstimate =
roughFeeEstimate ( spendableOutputs . length , 1 , selectedTxFeeRate ) ;
if ( feeForOneOutput < roughEstimate ) {
feeForOneOutput = roughEstimate ;
2022-11-29 19:11:30 +00:00
}
final int amount = satoshiAmountToSend - feeForOneOutput ;
dynamic txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: [ amount ] ,
) ;
Map < String , dynamic > transactionObject = {
" hex " : txn [ " hex " ] ,
" recipient " : recipientsArray [ 0 ] ,
" recipientAmt " : amount ,
" fee " : feeForOneOutput ,
" vSize " : txn [ " vSize " ] ,
} ;
return transactionObject ;
}
final int vSizeForOneOutput = ( await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: [ _recipientAddress ] ,
satoshiAmounts: [ satoshisBeingUsed - 1 ] ,
) ) [ " vSize " ] as int ;
final int vSizeForTwoOutPuts = ( await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: [
_recipientAddress ,
2023-01-25 19:49:14 +00:00
await _getCurrentAddressForChain ( 1 , DerivePathTypeExt . primaryFor ( coin ) ) ,
2022-11-29 19:11:30 +00:00
] ,
satoshiAmounts: [
satoshiAmountToSend ,
2022-12-05 14:53:21 +00:00
satoshisBeingUsed - satoshiAmountToSend - 1
2022-11-29 19:11:30 +00:00
] , // dust limit is the minimum amount a change output should be
) ) [ " vSize " ] as int ;
// Assume 1 output, only for recipient and no change
2022-12-05 14:53:21 +00:00
final feeForOneOutput = estimateTxFee (
2022-11-29 19:11:30 +00:00
vSize: vSizeForOneOutput ,
feeRatePerKB: selectedTxFeeRate ,
) ;
// Assume 2 outputs, one for recipient and one for change
2022-12-05 14:53:21 +00:00
final feeForTwoOutputs = estimateTxFee (
2022-11-29 19:11:30 +00:00
vSize: vSizeForTwoOutPuts ,
feeRatePerKB: selectedTxFeeRate ,
) ;
Logging . instance
. log ( " feeForTwoOutputs: $ feeForTwoOutputs " , level: LogLevel . Info ) ;
Logging . instance
. log ( " feeForOneOutput: $ feeForOneOutput " , level: LogLevel . Info ) ;
if ( satoshisBeingUsed - satoshiAmountToSend > feeForOneOutput ) {
if ( satoshisBeingUsed - satoshiAmountToSend >
feeForOneOutput + DUST_LIMIT ) {
// Here, we know that theoretically, we may be able to include another output(change) but we first need to
// factor in the value of this output in satoshis.
int changeOutputSize =
satoshisBeingUsed - satoshiAmountToSend - feeForTwoOutputs ;
// We check to see if the user can pay for the new transaction with 2 outputs instead of one. If they can and
2022-12-05 14:53:21 +00:00
// the second output's size > DUST_LIMIT satoshis, we perform the mechanics required to properly generate and use a new
2022-11-29 19:11:30 +00:00
// change address.
if ( changeOutputSize > DUST_LIMIT & &
satoshisBeingUsed - satoshiAmountToSend - changeOutputSize = =
feeForTwoOutputs ) {
// generate new change address if current change address has been used
2023-01-12 02:09:08 +00:00
await _checkChangeAddressForTransactions ( ) ;
2023-01-25 19:49:14 +00:00
final String newChangeAddress = await _getCurrentAddressForChain (
1 , DerivePathTypeExt . primaryFor ( coin ) ) ;
2022-11-29 19:11:30 +00:00
int feeBeingPaid =
satoshisBeingUsed - satoshiAmountToSend - changeOutputSize ;
recipientsArray . add ( newChangeAddress ) ;
recipientsAmtArray . add ( changeOutputSize ) ;
// At this point, we have the outputs we're going to use, the amounts to send along with which addresses
// we intend to send these amounts to. We have enough to send instructions to build the transaction.
Logging . instance . log ( ' 2 outputs in tx ' , level: LogLevel . Info ) ;
Logging . instance
. log ( ' Input size: $ satoshisBeingUsed ' , level: LogLevel . Info ) ;
Logging . instance . log ( ' Recipient output size: $ satoshiAmountToSend ' ,
level: LogLevel . Info ) ;
Logging . instance . log ( ' Change Output Size: $ changeOutputSize ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Difference (fee being paid): $ feeBeingPaid sats ' ,
level: LogLevel . Info ) ;
Logging . instance
. log ( ' Estimated fee: $ feeForTwoOutputs ' , level: LogLevel . Info ) ;
dynamic txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: recipientsAmtArray ,
) ;
// make sure minimum fee is accurate if that is being used
if ( txn [ " vSize " ] - feeBeingPaid = = 1 ) {
int changeOutputSize =
satoshisBeingUsed - satoshiAmountToSend - ( txn [ " vSize " ] as int ) ;
feeBeingPaid =
satoshisBeingUsed - satoshiAmountToSend - changeOutputSize ;
recipientsAmtArray . removeLast ( ) ;
recipientsAmtArray . add ( changeOutputSize ) ;
Logging . instance . log ( ' Adjusted Input size: $ satoshisBeingUsed ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Adjusted Recipient output size: $ satoshiAmountToSend ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Adjusted Change Output Size: $ changeOutputSize ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Adjusted Difference (fee being paid): $ feeBeingPaid sats ' ,
level: LogLevel . Info ) ;
Logging . instance . log ( ' Adjusted Estimated fee: $ feeForTwoOutputs ' ,
level: LogLevel . Info ) ;
txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: recipientsAmtArray ,
) ;
}
Map < String , dynamic > transactionObject = {
" hex " : txn [ " hex " ] ,
" recipient " : recipientsArray [ 0 ] ,
" recipientAmt " : recipientsAmtArray [ 0 ] ,
" fee " : feeBeingPaid ,
" vSize " : txn [ " vSize " ] ,
} ;
return transactionObject ;
} else {
// Something went wrong here. It either overshot or undershot the estimated fee amount or the changeOutputSize
2022-12-05 14:53:21 +00:00
// is smaller than or equal to DUST_LIMIT. Revert to single output transaction.
2022-11-29 19:11:30 +00:00
Logging . instance . log ( ' 1 output in tx ' , level: LogLevel . Info ) ;
Logging . instance
. log ( ' Input size: $ satoshisBeingUsed ' , level: LogLevel . Info ) ;
Logging . instance . log ( ' Recipient output size: $ satoshiAmountToSend ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Difference (fee being paid): ${ satoshisBeingUsed - satoshiAmountToSend } sats ' ,
level: LogLevel . Info ) ;
Logging . instance
. log ( ' Estimated fee: $ feeForOneOutput ' , level: LogLevel . Info ) ;
dynamic txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: recipientsAmtArray ,
) ;
Map < String , dynamic > transactionObject = {
" hex " : txn [ " hex " ] ,
" recipient " : recipientsArray [ 0 ] ,
" recipientAmt " : recipientsAmtArray [ 0 ] ,
" fee " : satoshisBeingUsed - satoshiAmountToSend ,
" vSize " : txn [ " vSize " ] ,
} ;
return transactionObject ;
}
} else {
2022-12-05 14:53:21 +00:00
// No additional outputs needed since adding one would mean that it'd be smaller than DUST_LIMIT sats
2022-11-29 19:11:30 +00:00
// which makes it uneconomical to add to the transaction. Here, we pass data directly to instruct
// the wallet to begin crafting the transaction that the user requested.
Logging . instance . log ( ' 1 output in tx ' , level: LogLevel . Info ) ;
Logging . instance
. log ( ' Input size: $ satoshisBeingUsed ' , level: LogLevel . Info ) ;
Logging . instance . log ( ' Recipient output size: $ satoshiAmountToSend ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Difference (fee being paid): ${ satoshisBeingUsed - satoshiAmountToSend } sats ' ,
level: LogLevel . Info ) ;
Logging . instance
. log ( ' Estimated fee: $ feeForOneOutput ' , level: LogLevel . Info ) ;
dynamic txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: recipientsAmtArray ,
) ;
Map < String , dynamic > transactionObject = {
" hex " : txn [ " hex " ] ,
" recipient " : recipientsArray [ 0 ] ,
" recipientAmt " : recipientsAmtArray [ 0 ] ,
" fee " : satoshisBeingUsed - satoshiAmountToSend ,
" vSize " : txn [ " vSize " ] ,
} ;
return transactionObject ;
}
} else if ( satoshisBeingUsed - satoshiAmountToSend = = feeForOneOutput ) {
// In this scenario, no additional change output is needed since inputs - outputs equal exactly
// what we need to pay for fees. Here, we pass data directly to instruct the wallet to begin
// crafting the transaction that the user requested.
Logging . instance . log ( ' 1 output in tx ' , level: LogLevel . Info ) ;
Logging . instance
. log ( ' Input size: $ satoshisBeingUsed ' , level: LogLevel . Info ) ;
Logging . instance . log ( ' Recipient output size: $ satoshiAmountToSend ' ,
level: LogLevel . Info ) ;
Logging . instance . log (
' Fee being paid: ${ satoshisBeingUsed - satoshiAmountToSend } sats ' ,
level: LogLevel . Info ) ;
Logging . instance
. log ( ' Estimated fee: $ feeForOneOutput ' , level: LogLevel . Info ) ;
dynamic txn = await buildTransaction (
utxosToUse: utxoObjectsToUse ,
utxoSigningData: utxoSigningData ,
recipients: recipientsArray ,
satoshiAmounts: recipientsAmtArray ,
) ;
Map < String , dynamic > transactionObject = {
" hex " : txn [ " hex " ] ,
" recipient " : recipientsArray [ 0 ] ,
" recipientAmt " : recipientsAmtArray [ 0 ] ,
" fee " : feeForOneOutput ,
" vSize " : txn [ " vSize " ] ,
} ;
return transactionObject ;
} else {
// Remember that returning 2 indicates that the user does not have a sufficient balance to
// pay for the transaction fee. Ideally, at this stage, we should check if the user has any
// additional outputs they're able to spend and then recalculate fees.
Logging . instance . log (
' Cannot pay tx fee - checking for more outputs and trying again ' ,
level: LogLevel . Warning ) ;
// try adding more outputs
if ( spendableOutputs . length > inputsBeingConsumed ) {
return coinSelection ( satoshiAmountToSend , selectedTxFeeRate ,
_recipientAddress , isSendAll ,
additionalOutputs: additionalOutputs + 1 , utxos: utxos ) ;
}
return 2 ;
}
}
/// Gathers the signing data (scriptPubKey bytes and key pair) for each utxo
/// that will be spent, keyed by txid.
///
/// For every utxo the originating transaction is fetched (via the cached
/// electrumx client) to resolve the address being spent, the address is
/// classified as bip44 (P2PKH) or bip84 (P2WPKH), and the matching
/// receive/change derivation supplies the pubkey and WIF used to build the
/// output script and signing key. Throws for unsupported address types;
/// rethrows any lookup/derivation failure after logging.
Future<Map<String, dynamic>> fetchBuildTxData(
  List<isar_models.UTXO> utxosToUse,
) async {
  // txid -> { "output": script bytes, "keyPair": signing key }
  Map<String, dynamic> results = {};
  // address -> txids of the utxos paying to that address
  Map<String, List<String>> addressTxid = {};

  // Addresses seen on the utxos, bucketed by script type.
  List<String> addressesP2PKH = [];
  List<String> addressesP2WPKH = [];

  try {
    // Resolve each utxo's address by inspecting its originating transaction.
    for (final utxo in utxosToUse) {
      final txid = utxo.txid;
      final tx = await _cachedElectrumXClient.getTransaction(
        txHash: txid,
        coin: coin,
      );

      for (final output in tx["vout"] as List) {
        final n = output["n"];
        if (n == null || n != utxo.vout) {
          continue;
        }

        final address = output["scriptPubKey"]["addresses"][0] as String;
        addressTxid.putIfAbsent(address, () => <String>[]).add(txid);

        switch (addressType(address: address)) {
          case DerivePathType.bip44:
            addressesP2PKH.add(address);
            break;
          case DerivePathType.bip84:
            addressesP2WPKH.add(address);
            break;
          default:
            throw Exception(
                "DerivePathType ${addressType(address: address)} not supported");
        }
      }
    }

    // p2pkh / bip44
    if (addressesP2PKH.isNotEmpty) {
      final receiveDerivations = await _fetchDerivations(
        chain: 0,
        derivePathType: DerivePathType.bip44,
      );
      final changeDerivations = await _fetchDerivations(
        chain: 1,
        derivePathType: DerivePathType.bip44,
      );

      for (final address in addressesP2PKH) {
        // Prefer a receive-chain match; fall back to the change chain.
        final derivation =
            receiveDerivations[address] ?? changeDerivations[address];
        if (derivation == null) {
          continue;
        }

        final data = P2PKH(
          data: PaymentData(
            pubkey: Format.stringToUint8List(derivation["pubKey"] as String),
          ),
          network: _network,
        ).data;

        for (final txid in addressTxid[address]!) {
          results[txid] = {
            "output": data.output,
            "keyPair": ECPair.fromWIF(
              derivation["wif"] as String,
              network: _network,
            ),
          };
        }
      }
    }

    // p2wpkh / bip84
    if (addressesP2WPKH.isNotEmpty) {
      final receiveDerivations = await _fetchDerivations(
        chain: 0,
        derivePathType: DerivePathType.bip84,
      );
      final changeDerivations = await _fetchDerivations(
        chain: 1,
        derivePathType: DerivePathType.bip84,
      );

      for (final address in addressesP2WPKH) {
        // Prefer a receive-chain match; fall back to the change chain.
        final derivation =
            receiveDerivations[address] ?? changeDerivations[address];
        if (derivation == null) {
          continue;
        }

        final data = P2WPKH(
          data: PaymentData(
            pubkey: Format.stringToUint8List(derivation["pubKey"] as String),
          ),
          network: _network,
        ).data;

        for (final txid in addressTxid[address]!) {
          results[txid] = {
            "output": data.output,
            "keyPair": ECPair.fromWIF(
              derivation["wif"] as String,
              network: _network,
            ),
          };
        }
      }
    }

    Logging.instance.log("FETCHED TX BUILD DATA IS ----- $results",
        level: LogLevel.Info, printFullLength: true);

    return results;
  } catch (e, s) {
    Logging.instance
        .log("fetchBuildTxData() threw: $e,\n$s", level: LogLevel.Error);
    rethrow;
  }
}
/// Builds and signs a transaction spending [utxosToUse] to [recipients]
/// (amounts in [satoshiAmounts], parallel lists).
///
/// Signing material comes from [utxoSigningData] (see fetchBuildTxData).
/// Returns {"hex": raw transaction hex, "vSize": virtual size}. Rethrows
/// signing failures after logging.
Future<Map<String, dynamic>> buildTransaction({
  required List<isar_models.UTXO> utxosToUse,
  required Map<String, dynamic> utxoSigningData,
  required List<String> recipients,
  required List<int> satoshiAmounts,
}) async {
  Logging.instance
      .log("Starting buildTransaction ----------", level: LogLevel.Info);

  Logging.instance.log("UTXOs SIGNING DATA IS ----- $utxoSigningData",
      level: LogLevel.Info, printFullLength: true);

  final txb = TransactionBuilder(network: _network);
  // Particl-specific transaction version.
  txb.setVersion(160);

  // Add transaction inputs.
  for (final utxo in utxosToUse) {
    txb.addInput(utxo.txid, utxo.vout, null,
        utxoSigningData[utxo.txid]["output"] as Uint8List, '');
  }

  // Add transaction outputs (bech32-encoded for Particl).
  for (var i = 0; i < recipients.length; i++) {
    txb.addOutput(recipients[i], satoshiAmounts[i], particl.bech32!);
  }

  try {
    // Sign each input with its matching key pair.
    for (var i = 0; i < utxosToUse.length; i++) {
      final txid = utxosToUse[i].txid;
      txb.sign(
          vin: i,
          keyPair: utxoSigningData[txid]["keyPair"] as ECPair,
          witnessValue: utxosToUse[i].value,
          redeemScript: utxoSigningData[txid]["redeemScript"] as Uint8List?);
    }
  } catch (e, s) {
    Logging.instance.log("Caught exception while signing transaction: $e\n$s",
        level: LogLevel.Error);
    rethrow;
  }

  final builtTx = txb.build();
  final vSize = builtTx.virtualSize();

  // NOTE(review): the serialized hex may carry up to three trailing zero
  // bytes which must be removed before broadcast; this strips "00" pairs
  // (at most three), equivalent to the original 6/4/2-character suffix
  // checks. Presumably an artifact of bitcoindart's Particl serialization —
  // confirm against particld's accepted raw tx format.
  var hexString = builtTx.toHex(isParticl: true).toString();
  var pairsStripped = 0;
  while (pairsStripped < 3 && hexString.endsWith('00')) {
    hexString = hexString.substring(0, hexString.length - 2);
    pairsStripped++;
  }
  return {"hex": hexString, "vSize": vSize};
}
/// Wipes local blockchain data and cached derivations for this wallet and
/// re-derives everything from the seed phrase.
///
/// Fires WalletSyncStatusChangedEvent(syncing) up front, then synced on
/// success or unableToSync on failure (the error is rethrown after logging).
/// [longMutex] is held for the duration of the recovery and released on both
/// paths.
@override
Future<void> fullRescan(
  int maxUnusedAddressGap,
  int maxNumberOfIndexesToCheck,
) async {
  Logging.instance.log("Starting full rescan!", level: LogLevel.Info);
  longMutex = true;
  GlobalEventBus.instance.fire(
    WalletSyncStatusChangedEvent(
      WalletSyncStatus.syncing,
      walletId,
      coin,
    ),
  );

  // Clear the shared electrumx transaction cache so everything is re-fetched.
  await _cachedElectrumXClient.clearSharedTransactionCache(coin: coin);

  // back up data
  // await _rescanBackup();

  await db.deleteWalletBlockchainData(walletId);
  await _deleteDerivations();

  try {
    final mnemonic = await mnemonicString;
    final passphrase = await mnemonicPassphrase;

    if (passphrase == null) {
      Logging.instance.log(
          "Exception in fullRescan: mnemonic passphrase null, possible migration issue; if using internal builds, delete wallet and restore from seed, if using a release build, please file bug report",
          level: LogLevel.Error);
    }

    await _recoverWalletFromBIP32SeedPhrase(
      mnemonic: mnemonic!,
      mnemonicPassphrase: passphrase!,
      maxUnusedAddressGap: maxUnusedAddressGap,
      maxNumberOfIndexesToCheck: maxNumberOfIndexesToCheck,
      isRescan: true,
    );

    longMutex = false;

    await refresh();
    Logging.instance.log("Full rescan complete!", level: LogLevel.Info);
    GlobalEventBus.instance.fire(
      WalletSyncStatusChangedEvent(
        WalletSyncStatus.synced,
        walletId,
        coin,
      ),
    );
  } catch (e, s) {
    GlobalEventBus.instance.fire(
      WalletSyncStatusChangedEvent(
        WalletSyncStatus.unableToSync,
        walletId,
        coin,
      ),
    );

    // restore from backup
    // await _rescanRestore();

    longMutex = false;
    Logging.instance.log("Exception rethrown from fullRescan(): $e\n$s",
        level: LogLevel.Error);
    rethrow;
  }
}
2023-01-17 14:19:30 +00:00
/// Deletes the cached P2PKH (bip44) and P2WPKH (bip84) receive/change
/// derivations for this wallet from secure storage.
Future<void> _deleteDerivations() async {
  for (final keySuffix in const [
    "_receiveDerivationsP2PKH",
    "_changeDerivationsP2PKH",
    "_receiveDerivationsP2WPKH",
    "_changeDerivationsP2WPKH",
  ]) {
    await _secureStore.delete(key: "$walletId$keySuffix");
  }
}
2023-01-12 21:32:25 +00:00
// Future<void> _rescanRestore() async {
// Logging.instance.log("starting rescan restore", level: LogLevel.Info);
//
// // restore from backup
// // p2pkh
// final tempReceivingAddressesP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingAddressesP2PKH_BACKUP');
// final tempChangeAddressesP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeAddressesP2PKH_BACKUP');
// final tempReceivingIndexP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingIndexP2PKH_BACKUP');
// final tempChangeIndexP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeIndexP2PKH_BACKUP');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingAddressesP2PKH',
// value: tempReceivingAddressesP2PKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeAddressesP2PKH',
// value: tempChangeAddressesP2PKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingIndexP2PKH',
// value: tempReceivingIndexP2PKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeIndexP2PKH',
// value: tempChangeIndexP2PKH);
// await DB.instance.delete<dynamic>(
// key: 'receivingAddressesP2PKH_BACKUP', boxName: walletId);
// await DB.instance
// .delete<dynamic>(key: 'changeAddressesP2PKH_BACKUP', boxName: walletId);
// await DB.instance
// .delete<dynamic>(key: 'receivingIndexP2PKH_BACKUP', boxName: walletId);
// await DB.instance
// .delete<dynamic>(key: 'changeIndexP2PKH_BACKUP', boxName: walletId);
//
// // p2wpkh
// final tempReceivingAddressesP2WPKH = DB.instance.get<dynamic>(
// boxName: walletId, key: 'receivingAddressesP2WPKH_BACKUP');
// final tempChangeAddressesP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeAddressesP2WPKH_BACKUP');
// final tempReceivingIndexP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingIndexP2WPKH_BACKUP');
// final tempChangeIndexP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeIndexP2WPKH_BACKUP');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingAddressesP2WPKH',
// value: tempReceivingAddressesP2WPKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeAddressesP2WPKH',
// value: tempChangeAddressesP2WPKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingIndexP2WPKH',
// value: tempReceivingIndexP2WPKH);
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeIndexP2WPKH',
// value: tempChangeIndexP2WPKH);
// await DB.instance.delete<dynamic>(
// key: 'receivingAddressesP2WPKH_BACKUP', boxName: walletId);
// await DB.instance.delete<dynamic>(
// key: 'changeAddressesP2WPKH_BACKUP', boxName: walletId);
// await DB.instance
// .delete<dynamic>(key: 'receivingIndexP2WPKH_BACKUP', boxName: walletId);
// await DB.instance
// .delete<dynamic>(key: 'changeIndexP2WPKH_BACKUP', boxName: walletId);
//
// // P2PKH derivations
// final p2pkhReceiveDerivationsString = await _secureStore.read(
// key: "${walletId}_receiveDerivationsP2PKH_BACKUP");
// final p2pkhChangeDerivationsString = await _secureStore.read(
// key: "${walletId}_changeDerivationsP2PKH_BACKUP");
//
// await _secureStore.write(
// key: "${walletId}_receiveDerivationsP2PKH",
// value: p2pkhReceiveDerivationsString);
// await _secureStore.write(
// key: "${walletId}_changeDerivationsP2PKH",
// value: p2pkhChangeDerivationsString);
//
// await _secureStore.delete(
// key: "${walletId}_receiveDerivationsP2PKH_BACKUP");
// await _secureStore.delete(key: "${walletId}_changeDerivationsP2PKH_BACKUP");
//
// // P2WPKH derivations
// final p2wpkhReceiveDerivationsString = await _secureStore.read(
// key: "${walletId}_receiveDerivationsP2WPKH_BACKUP");
// final p2wpkhChangeDerivationsString = await _secureStore.read(
// key: "${walletId}_changeDerivationsP2WPKH_BACKUP");
//
// await _secureStore.write(
// key: "${walletId}_receiveDerivationsP2WPKH",
// value: p2wpkhReceiveDerivationsString);
// await _secureStore.write(
// key: "${walletId}_changeDerivationsP2WPKH",
// value: p2wpkhChangeDerivationsString);
//
// await _secureStore.delete(
// key: "${walletId}_receiveDerivationsP2WPKH_BACKUP");
// await _secureStore.delete(
// key: "${walletId}_changeDerivationsP2WPKH_BACKUP");
//
// // UTXOs
// final utxoData = DB.instance
// .get<dynamic>(boxName: walletId, key: 'latest_utxo_model_BACKUP');
// await DB.instance.put<dynamic>(
// boxName: walletId, key: 'latest_utxo_model', value: utxoData);
// await DB.instance
// .delete<dynamic>(key: 'latest_utxo_model_BACKUP', boxName: walletId);
//
// Logging.instance.log("rescan restore complete", level: LogLevel.Info);
// }
//
// Future<void> _rescanBackup() async {
// Logging.instance.log("starting rescan backup", level: LogLevel.Info);
//
// // backup current and clear data
// // p2pkh
// final tempReceivingAddressesP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingAddressesP2PKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingAddressesP2PKH_BACKUP',
// value: tempReceivingAddressesP2PKH);
// await DB.instance
// .delete<dynamic>(key: 'receivingAddressesP2PKH', boxName: walletId);
//
// final tempChangeAddressesP2PKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeAddressesP2PKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeAddressesP2PKH_BACKUP',
// value: tempChangeAddressesP2PKH);
// await DB.instance
// .delete<dynamic>(key: 'changeAddressesP2PKH', boxName: walletId);
//
// final tempReceivingIndexP2PKH =
// DB.instance.get<dynamic>(boxName: walletId, key: 'receivingIndexP2PKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingIndexP2PKH_BACKUP',
// value: tempReceivingIndexP2PKH);
// await DB.instance
// .delete<dynamic>(key: 'receivingIndexP2PKH', boxName: walletId);
//
// final tempChangeIndexP2PKH =
// DB.instance.get<dynamic>(boxName: walletId, key: 'changeIndexP2PKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeIndexP2PKH_BACKUP',
// value: tempChangeIndexP2PKH);
// await DB.instance
// .delete<dynamic>(key: 'changeIndexP2PKH', boxName: walletId);
//
// // p2wpkh
// final tempReceivingAddressesP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingAddressesP2WPKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingAddressesP2WPKH_BACKUP',
// value: tempReceivingAddressesP2WPKH);
// await DB.instance
// .delete<dynamic>(key: 'receivingAddressesP2WPKH', boxName: walletId);
//
// final tempChangeAddressesP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'changeAddressesP2WPKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeAddressesP2WPKH_BACKUP',
// value: tempChangeAddressesP2WPKH);
// await DB.instance
// .delete<dynamic>(key: 'changeAddressesP2WPKH', boxName: walletId);
//
// final tempReceivingIndexP2WPKH = DB.instance
// .get<dynamic>(boxName: walletId, key: 'receivingIndexP2WPKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'receivingIndexP2WPKH_BACKUP',
// value: tempReceivingIndexP2WPKH);
// await DB.instance
// .delete<dynamic>(key: 'receivingIndexP2WPKH', boxName: walletId);
//
// final tempChangeIndexP2WPKH =
// DB.instance.get<dynamic>(boxName: walletId, key: 'changeIndexP2WPKH');
// await DB.instance.put<dynamic>(
// boxName: walletId,
// key: 'changeIndexP2WPKH_BACKUP',
// value: tempChangeIndexP2WPKH);
// await DB.instance
// .delete<dynamic>(key: 'changeIndexP2WPKH', boxName: walletId);
//
// // P2PKH derivations
// final p2pkhReceiveDerivationsString =
// await _secureStore.read(key: "${walletId}_receiveDerivationsP2PKH");
// final p2pkhChangeDerivationsString =
// await _secureStore.read(key: "${walletId}_changeDerivationsP2PKH");
//
// await _secureStore.write(
// key: "${walletId}_receiveDerivationsP2PKH_BACKUP",
// value: p2pkhReceiveDerivationsString);
// await _secureStore.write(
// key: "${walletId}_changeDerivationsP2PKH_BACKUP",
// value: p2pkhChangeDerivationsString);
//
// await _secureStore.delete(key: "${walletId}_receiveDerivationsP2PKH");
// await _secureStore.delete(key: "${walletId}_changeDerivationsP2PKH");
//
// // P2WPKH derivations
// final p2wpkhReceiveDerivationsString =
// await _secureStore.read(key: "${walletId}_receiveDerivationsP2WPKH");
// final p2wpkhChangeDerivationsString =
// await _secureStore.read(key: "${walletId}_changeDerivationsP2WPKH");
//
// await _secureStore.write(
// key: "${walletId}_receiveDerivationsP2WPKH_BACKUP",
// value: p2wpkhReceiveDerivationsString);
// await _secureStore.write(
// key: "${walletId}_changeDerivationsP2WPKH_BACKUP",
// value: p2wpkhChangeDerivationsString);
//
// await _secureStore.delete(key: "${walletId}_receiveDerivationsP2WPKH");
// await _secureStore.delete(key: "${walletId}_changeDerivationsP2WPKH");
//
// // UTXOs
// final utxoData =
// DB.instance.get<dynamic>(boxName: walletId, key: 'latest_utxo_model');
// await DB.instance.put<dynamic>(
// boxName: walletId, key: 'latest_utxo_model_BACKUP', value: utxoData);
// await DB.instance
// .delete<dynamic>(key: 'latest_utxo_model', boxName: walletId);
//
// Logging.instance.log("rescan backup complete", level: LogLevel.Info);
// }
2022-11-29 19:11:30 +00:00
// Whether this wallet is the currently active one in the UI.
bool isActive = false;

/// Callback used by the wallet manager to flip [isActive] when the active
/// wallet changes.
@override
void Function(bool)? get onIsActiveWalletChanged =>
    (active) => isActive = active;
/// Estimates the fee (in satoshis) to send [satoshiAmount] at [feeRate]
/// (sats per kilobyte).
///
/// A send of the entire spendable balance is treated as a sweep (fee comes
/// out of the amount). Out-of-range amounts fall back to a generic
/// 1-input/2-output estimate. Otherwise utxos are gathered greedily and the
/// fee is derived from the surplus over the target amount.
@override
Future<int> estimateFeeFor(int satoshiAmount, int feeRate) async {
  final available = balance.spendable;

  if (available == satoshiAmount) {
    // Sweep: the fee is subtracted from the amount itself.
    return satoshiAmount - (await sweepAllEstimate(feeRate));
  }
  if (satoshiAmount <= 0 || satoshiAmount > available) {
    return roughFeeEstimate(1, 2, feeRate);
  }

  // Greedily gather unblocked utxos until the target amount is exceeded.
  var gathered = 0;
  var inputCount = 0;
  for (final utxo in (await utxos)) {
    if (utxo.isBlocked) {
      continue;
    }
    gathered += utxo.value;
    inputCount++;
    if (gathered > satoshiAmount) {
      break;
    }
  }

  final oneOutputFee = roughFeeEstimate(inputCount, 1, feeRate);
  final twoOutputFee = roughFeeEstimate(inputCount, 2, feeRate);
  final surplus = gathered - satoshiAmount;

  if (surplus > oneOutputFee) {
    if (surplus > oneOutputFee + DUST_LIMIT) {
      // Enough surplus for a change output above the dust limit.
      final change = surplus - twoOutputFee;
      if (change > DUST_LIMIT && surplus - change == twoOutputFee) {
        return surplus - change;
      } else {
        return surplus;
      }
    } else {
      return surplus;
    }
  } else if (surplus == oneOutputFee) {
    return oneOutputFee;
  } else {
    return twoOutputFee;
  }
}
/// Rough fee estimate in satoshis for a transaction with [inputCount]
/// inputs and [outputCount] outputs at [feeRatePerKB] sats per kilobyte.
///
/// Weight model: 42 overhead + 272 per input + 128 per output weight units;
/// virtual size is ceil(weight / 4), multiplied by the per-vbyte rate
/// ceil(feeRatePerKB / 1000).
int roughFeeEstimate(int inputCount, int outputCount, int feeRatePerKB) {
  final weightUnits = 42 + (272 * inputCount) + (128 * outputCount);
  final vBytes = (weightUnits / 4).ceil();
  final satsPerVByte = (feeRatePerKB / 1000).ceil();
  return vBytes * satsPerVByte;
}
2023-01-12 02:09:08 +00:00
/// Estimates the maximum sendable amount (in satoshis) when sweeping the
/// whole wallet at [feeRate]: the sum of unblocked, confirmed utxos minus a
/// rough single-output fee.
Future<int> sweepAllEstimate(int feeRate) async {
  var available = 0;
  var inputCount = 0;
  for (final utxo in (await utxos)) {
    if (!utxo.isBlocked &&
        utxo.isConfirmed(storedChainHeight, MINIMUM_CONFIRMATIONS)) {
      available += utxo.value;
      inputCount++;
    }
  }
  // A sweep has exactly one output; fee is deducted from the total.
  return available - roughFeeEstimate(inputCount, 1, feeRate);
}
/// Derives and persists the next receiving address on the primary derivation
/// path.
///
/// Returns true on success, false (after logging) on any failure.
@override
Future<bool> generateNewAddress() async {
  try {
    // Next index = current receiving address index + 1.
    final currentReceiving = await _currentReceivingAddress;
    final nextIndex = currentReceiving.derivationIndex + 1;

    final newReceivingAddress = await _generateAddressForChain(
        0, nextIndex, DerivePathTypeExt.primaryFor(coin));

    // Persist the freshly derived address.
    await db.putAddress(newReceivingAddress);

    return true;
  } catch (e, s) {
    Logging.instance.log(
        "Exception rethrown from generateNewAddress(): $e\n$s",
        level: LogLevel.Error);
    return false;
  }
}
}
// Particl Network
//
// Particl mainnet parameters for bitcoindart's NetworkType.
// NOTE(review): values (base58 prefixes 0x38/0x3c/0x6c, bech32 HRP 'pw',
// bip32 version bytes) are assumed to match Particl mainnet chainparams —
// confirm against the Particl reference client. The message prefix reuses
// the Bitcoin signed-message string as written here.
final particl = NetworkType(
    messagePrefix: '\x18Bitcoin Signed Message:\n',
    bech32: 'pw',
    bip32: Bip32Type(public: 0x696e82d1, private: 0x8f1daeb8),
    pubKeyHash: 0x38,
    scriptHash: 0x3c,
    wif: 0x6c);