diff --git a/basicswap/basicswap.py b/basicswap/basicswap.py index 24ecd5a..787e920 100644 --- a/basicswap/basicswap.py +++ b/basicswap/basicswap.py @@ -146,7 +146,7 @@ from basicswap.db_util import ( remove_expired_data, ) -PROTOCOL_VERSION_SECRET_HASH = 2 +PROTOCOL_VERSION_SECRET_HASH = 3 MINPROTO_VERSION_SECRET_HASH = 2 PROTOCOL_VERSION_ADAPTOR_SIG = 3 @@ -470,6 +470,9 @@ class BasicSwap(BaseApp): 'chain_median_time': None, } + if coin == Coins.FIRO: + self.coin_clients[coin]['use_csv'] = False + if coin == Coins.PART: self.coin_clients[coin]['anon_tx_ring_size'] = chain_client_settings.get('anon_tx_ring_size', 12) self.coin_clients[Coins.PART_ANON] = self.coin_clients[coin] @@ -1503,7 +1506,7 @@ class BasicSwap(BaseApp): ci_from.ensureFunds(msg_buf.amount_from) else: proof_of_funds_hash = getOfferProofOfFundsHash(msg_buf, offer_addr) - proof_addr, proof_sig = self.getProofOfFunds(coin_from_t, int(amount), proof_of_funds_hash) + proof_addr, proof_sig, proof_utxos = self.getProofOfFunds(coin_from_t, int(amount), proof_of_funds_hash) # TODO: For now proof_of_funds is just a client side check, may need to be sent with offers in future however. offer_bytes = msg_buf.SerializeToString() @@ -1964,7 +1967,7 @@ class BasicSwap(BaseApp): self.log.debug('getProofOfFunds %s %s', ci.coin_name(), ci.format_amount(amount_for)) if self.coin_clients[coin_type]['connection_type'] != 'rpc': - return (None, None) + return (None, None, None) return ci.getProofOfFunds(amount_for, extra_commit_bytes) @@ -2136,10 +2139,15 @@ class BasicSwap(BaseApp): now: int = self.getTime() if offer.swap_type == SwapTypes.SELLER_FIRST: - proof_addr, proof_sig = self.getProofOfFunds(coin_to, amount_to, offer_id) + proof_addr, proof_sig, proof_utxos = self.getProofOfFunds(coin_to, amount_to, offer_id) msg_buf.proof_address = proof_addr msg_buf.proof_signature = proof_sig + if len(proof_utxos) > 0: + msg_buf.proof_utxos = bytes() + for utxo in proof_utxos: + msg_buf.proof_utxos += utxo[0] + utxo[1].to_bytes(2, 'big') + contract_count = self.getNewContractId() msg_buf.pkhash_buyer = getKeyID(self.getContractPubkey(dt.datetime.fromtimestamp(now).date(), contract_count)) else: @@ -2161,6 +2169,7 @@ class BasicSwap(BaseApp): rate=msg_buf.rate, pkhash_buyer=msg_buf.pkhash_buyer, proof_address=msg_buf.proof_address, + proof_utxos=msg_buf.proof_utxos, created_at=now, contract_count=contract_count, @@ -3579,11 +3588,8 @@ class BasicSwap(BaseApp): # TODO: Timeout waiting for transactions bid_changed = False - if offer.coin_from == Coins.FIRO: - lock_tx_chain_info = ci_from.getLockTxHeightFiro(bid.xmr_a_lock_tx.txid, xmr_swap.a_lock_tx_script, bid.amount, bid.chain_a_height_start) - else: - a_lock_tx_addr = ci_from.getSCLockScriptAddress(xmr_swap.a_lock_tx_script) - lock_tx_chain_info = ci_from.getLockTxHeight(bid.xmr_a_lock_tx.txid, a_lock_tx_addr, bid.amount, bid.chain_a_height_start) + a_lock_tx_addr = ci_from.getSCLockScriptAddress(xmr_swap.a_lock_tx_script) + lock_tx_chain_info = ci_from.getLockTxHeight(bid.xmr_a_lock_tx.txid, a_lock_tx_addr, bid.amount, bid.chain_a_height_start) if lock_tx_chain_info is None: return rv @@ -3669,11 +3675,8 @@ class BasicSwap(BaseApp): if TxTypes.XMR_SWAP_A_LOCK_REFUND in bid.txns: refund_tx = bid.txns[TxTypes.XMR_SWAP_A_LOCK_REFUND] if refund_tx.block_time is None: - if offer.coin_from == Coins.FIRO: - lock_refund_tx_chain_info = ci_from.getLockTxHeightFiro(refund_tx.txid, xmr_swap.a_lock_refund_tx_script, 0, bid.chain_a_height_start) - else: - refund_tx_addr = 
ci_from.getSCLockScriptAddress(xmr_swap.a_lock_refund_tx_script) - lock_refund_tx_chain_info = ci_from.getLockTxHeight(refund_tx.txid, refund_tx_addr, 0, bid.chain_a_height_start) + refund_tx_addr = ci_from.getSCLockScriptAddress(xmr_swap.a_lock_refund_tx_script) + lock_refund_tx_chain_info = ci_from.getLockTxHeight(refund_tx.txid, refund_tx_addr, 0, bid.chain_a_height_start) if lock_refund_tx_chain_info is not None and lock_refund_tx_chain_info.get('height', 0) > 0: self.setTxBlockInfoFromHeight(ci_from, refund_tx, lock_refund_tx_chain_info['height']) @@ -4637,7 +4640,8 @@ class BasicSwap(BaseApp): if swap_type == SwapTypes.SELLER_FIRST: ensure(len(bid_data.pkhash_buyer) == 20, 'Bad pkhash_buyer length') - sum_unspent = ci_to.verifyProofOfFunds(bid_data.proof_address, bid_data.proof_signature, offer_id) + proof_utxos = ci_to.decodeProofUtxos(bid_data.proof_utxos) + sum_unspent = ci_to.verifyProofOfFunds(bid_data.proof_address, bid_data.proof_signature, proof_utxos, offer_id) self.log.debug('Proof of funds %s %s', bid_data.proof_address, self.ci(coin_to).format_amount(sum_unspent)) ensure(sum_unspent >= amount_to, 'Proof of funds failed') @@ -4658,6 +4662,8 @@ class BasicSwap(BaseApp): amount=bid_data.amount, rate=bid_data.rate, pkhash_buyer=bid_data.pkhash_buyer, + proof_address=bid_data.proof_address, + proof_utxos=bid_data.proof_utxos, created_at=msg['sent'], amount_to=amount_to, diff --git a/basicswap/db.py b/basicswap/db.py index 0f610f4..56f3927 100644 --- a/basicswap/db.py +++ b/basicswap/db.py @@ -12,7 +12,7 @@ from enum import IntEnum, auto from sqlalchemy.ext.declarative import declarative_base -CURRENT_DB_VERSION = 21 +CURRENT_DB_VERSION = 22 CURRENT_DB_DATA_VERSION = 4 Base = declarative_base() @@ -67,6 +67,7 @@ class Offer(Base): proof_address = sa.Column(sa.String) proof_signature = sa.Column(sa.LargeBinary) + proof_utxos = sa.Column(sa.LargeBinary) pkhash_seller = sa.Column(sa.LargeBinary) secret_hash = sa.Column(sa.LargeBinary) @@ -115,6 +116,7 @@ class Bid(Base): expire_at = sa.Column(sa.BigInteger) bid_addr = sa.Column(sa.String) proof_address = sa.Column(sa.String) + proof_utxos = sa.Column(sa.LargeBinary) withdraw_to_addr = sa.Column(sa.String) # Address to spend lock tx to - address from wallet if empty TODO recovered_secret = sa.Column(sa.LargeBinary) diff --git a/basicswap/db_upgrades.py b/basicswap/db_upgrades.py index e1abe4d..951d60c 100644 --- a/basicswap/db_upgrades.py +++ b/basicswap/db_upgrades.py @@ -293,6 +293,9 @@ def upgradeDatabase(self, db_version): msg_id BLOB, PRIMARY KEY (record_id))''') session.execute('ALTER TABLE offers ADD COLUMN bid_reversed INTEGER') + elif current_version == 21: + session.execute('ALTER TABLE offers ADD COLUMN proof_utxos BLOB') + session.execute('ALTER TABLE bids ADD COLUMN proof_utxos BLOB') if current_version != db_version: self.db_version = db_version diff --git a/basicswap/interface/btc.py b/basicswap/interface/btc.py index aad1cdf..7ac78b7 100644 --- a/basicswap/interface/btc.py +++ b/basicswap/interface/btc.py @@ -1379,9 +1379,20 @@ class BTCInterface(CoinInterface): signature = self.rpc_callback('signmessage', [sign_for_addr, sign_for_addr + '_swap_proof_' + extra_commit_bytes.hex()]) - return (sign_for_addr, signature) + prove_utxos = [] # TODO: Send specific utxos + return (sign_for_addr, signature, prove_utxos) - def verifyProofOfFunds(self, address, signature, extra_commit_bytes): + def decodeProofUtxos(self, msg_utxos): + proof_utxos = [] + if len(msg_utxos) > 0: + num_utxos = len(msg_utxos) // 34 + p: int = 0 + 
for i in range(num_utxos): + proof_utxos.append((msg_utxos[p: p + 32], int.from_bytes(msg_utxos[p + 32: p + 34]))) + p += 34 + return proof_utxos + + def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes): passed = self.verifyMessage(address, address + '_swap_proof_' + extra_commit_bytes.hex(), signature) ensure(passed is True, 'Proof of funds signature invalid') diff --git a/basicswap/interface/contrib/firo_test_framework/__init__.py b/basicswap/interface/contrib/firo_test_framework/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/basicswap/interface/contrib/firo_test_framework/authproxy.py b/basicswap/interface/contrib/firo_test_framework/authproxy.py new file mode 100644 index 0000000..09ed611 --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/authproxy.py @@ -0,0 +1,191 @@ + +""" + Copyright (c) 2011 Jeff Garzik + + AuthServiceProxy has the following improvements over python-jsonrpc's + ServiceProxy class: + + - HTTP connections persist for the life of the AuthServiceProxy object + (if server supports HTTP/1.1) + - sends protocol 'version', per JSON-RPC 1.1 + - sends proper, incrementing 'id' + - sends Basic HTTP authentication headers + - parses all JSON numbers that look like floats as Decimal + - uses standard Python json lib + + Previous copyright, from python-jsonrpc/jsonrpc/proxy.py: + + Copyright (c) 2007 Jan-Klaas Kollhof + + This file is part of jsonrpc. + + jsonrpc is free software; you can redistribute it and/or modify + it under the terms of the GNU Lesser General Public License as published by + the Free Software Foundation; either version 2.1 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public License + along with this software; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +""" + +try: + import http.client as httplib +except ImportError: + import httplib +import base64 +import decimal +import json +import logging +import socket +try: + import urllib.parse as urlparse +except ImportError: + import urlparse + +USER_AGENT = "AuthServiceProxy/0.1" + +HTTP_TIMEOUT = 30 + +log = logging.getLogger("BitcoinRPC") + +class JSONRPCException(Exception): + def __init__(self, rpc_error): + try: + errmsg = '%(message)s (%(code)i)' % rpc_error + except (KeyError, TypeError): + errmsg = '' + Exception.__init__(self, errmsg) + self.error = rpc_error + + +def EncodeDecimal(o): + if isinstance(o, decimal.Decimal): + return str(o) + raise TypeError(repr(o) + " is not JSON serializable") + +class AuthServiceProxy(object): + __id_count = 0 + + # ensure_ascii: escape unicode as \uXXXX, passed to json.dumps + def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True): + self.__service_url = service_url + self._service_name = service_name + self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests + self.__url = urlparse.urlparse(service_url) + if self.__url.port is None: + port = 80 + else: + port = self.__url.port + (user, passwd) = (self.__url.username, self.__url.password) + try: + user = user.encode('utf8') + except AttributeError: + pass + try: + passwd = passwd.encode('utf8') + except AttributeError: + pass + authpair = user + b':' + passwd + self.__auth_header = b'Basic ' + base64.b64encode(authpair) + + if connection: + # Callables re-use the connection of the original proxy + self.__conn = connection + elif self.__url.scheme == 'https': + self.__conn = httplib.HTTPSConnection(self.__url.hostname, port, + timeout=timeout) + else: + self.__conn = httplib.HTTPConnection(self.__url.hostname, port, + timeout=timeout) + + def __getattr__(self, name): + if name.startswith('__') and name.endswith('__'): + # Python internal stuff + raise AttributeError + if self._service_name is not None: + name = "%s.%s" % (self._service_name, name) + return AuthServiceProxy(self.__service_url, name, connection=self.__conn) + + def _request(self, method, path, postdata): + ''' + Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout). + This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5. 
+ ''' + headers = {'Host': self.__url.hostname, + 'User-Agent': USER_AGENT, + 'Authorization': self.__auth_header, + 'Content-type': 'application/json'} + try: + self.__conn.request(method, path, postdata, headers) + return self._get_response() + except httplib.BadStatusLine as e: + if e.line == "''": # if connection was closed, try again + self.__conn.close() + self.__conn.request(method, path, postdata, headers) + return self._get_response() + else: + raise + except (BrokenPipeError,ConnectionResetError): + # Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset + # ConnectionResetError happens on FreeBSD with Python 3.4 + self.__conn.close() + self.__conn.request(method, path, postdata, headers) + return self._get_response() + + def __call__(self, *args, **argsn): + AuthServiceProxy.__id_count += 1 + + log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name, + json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) + if args and argsn: + raise ValueError('Cannot handle both named and positional arguments') + postdata = json.dumps({'version': '1.1', + 'method': self._service_name, + 'params': args or argsn, + 'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii) + response = self._request('POST', self.__url.path, postdata.encode('utf-8')) + if response['error'] is not None: + raise JSONRPCException(response['error']) + elif 'result' not in response: + raise JSONRPCException({ + 'code': -343, 'message': 'missing JSON-RPC result'}) + else: + return response['result'] + + def _batch(self, rpc_call_list): + postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii) + log.debug("--> "+postdata) + return self._request('POST', self.__url.path, postdata.encode('utf-8')) + + def _get_response(self): + try: + http_response = self.__conn.getresponse() + except socket.timeout as e: + raise JSONRPCException({ + 'code': -344, + 'message': '%r RPC took longer than %f seconds. Consider ' + 'using larger timeout for calls that take ' + 'longer to return.' % (self._service_name, + self.__conn.timeout)}) + if http_response is None: + raise JSONRPCException({ + 'code': -342, 'message': 'missing HTTP response from server'}) + + content_type = http_response.getheader('Content-Type') + if content_type != 'application/json': + raise JSONRPCException({ + 'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)}) + + responsedata = http_response.read().decode('utf8') + response = json.loads(responsedata, parse_float=decimal.Decimal) + if "error" in response and response["error"] is None: + log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) + else: + log.debug("<-- "+responsedata) + return response diff --git a/basicswap/interface/contrib/firo_test_framework/bignum.py b/basicswap/interface/contrib/firo_test_framework/bignum.py new file mode 100644 index 0000000..ef800e4 --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/bignum.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +# +# bignum.py +# +# This file is copied from python-bitcoinlib. +# +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+# + +"""Bignum routines""" + + +import struct + + +# generic big endian MPI format + +def bn_bytes(v, have_ext=False): + ext = 0 + if have_ext: + ext = 1 + return ((v.bit_length()+7)//8) + ext + +def bn2bin(v): + s = bytearray() + i = bn_bytes(v) + while i > 0: + s.append((v >> ((i-1) * 8)) & 0xff) + i -= 1 + return s + +def bin2bn(s): + l = 0 + for ch in s: + l = (l << 8) | ch + return l + +def bn2mpi(v): + have_ext = False + if v.bit_length() > 0: + have_ext = (v.bit_length() & 0x07) == 0 + + neg = False + if v < 0: + neg = True + v = -v + + s = struct.pack(b">I", bn_bytes(v, have_ext)) + ext = bytearray() + if have_ext: + ext.append(0) + v_bin = bn2bin(v) + if neg: + if have_ext: + ext[0] |= 0x80 + else: + v_bin[0] |= 0x80 + return s + ext + v_bin + +def mpi2bn(s): + if len(s) < 4: + return None + s_size = bytes(s[:4]) + v_len = struct.unpack(b">I", s_size)[0] + if len(s) != (v_len + 4): + return None + if v_len == 0: + return 0 + + v_str = bytearray(s[4:]) + neg = False + i = v_str[0] + if i & 0x80: + neg = True + i &= ~0x80 + v_str[0] = i + + v = bin2bn(v_str) + + if neg: + return -v + return v + +# bitcoin-specific little endian format, with implicit size +def mpi2vch(s): + r = s[4:] # strip size + r = r[::-1] # reverse string, converting BE->LE + return r + +def bn2vch(v): + return bytes(mpi2vch(bn2mpi(v))) + +def vch2mpi(s): + r = struct.pack(b">I", len(s)) # size + r += s[::-1] # reverse string, converting LE->BE + return r + +def vch2bn(s): + return mpi2bn(vch2mpi(s)) + diff --git a/basicswap/interface/contrib/firo_test_framework/coverage.py b/basicswap/interface/contrib/firo_test_framework/coverage.py new file mode 100644 index 0000000..13b3386 --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/coverage.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2016 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +""" +This module contains utilities for doing coverage analysis on the RPC +interface. + +It provides a way to track which RPC commands are exercised during +testing. + +""" +import os + + +REFERENCE_FILENAME = 'rpc_interface.txt' + + +class AuthServiceProxyWrapper(object): + """ + An object that wraps AuthServiceProxy to record specific RPC calls. + + """ + def __init__(self, auth_service_proxy_instance, coverage_logfile=None): + """ + Kwargs: + auth_service_proxy_instance (AuthServiceProxy): the instance + being wrapped. + coverage_logfile (str): if specified, write each service_name + out to a file when called. + + """ + self.auth_service_proxy_instance = auth_service_proxy_instance + self.coverage_logfile = coverage_logfile + + def __getattr__(self, *args, **kwargs): + return_val = self.auth_service_proxy_instance.__getattr__( + *args, **kwargs) + + return AuthServiceProxyWrapper(return_val, self.coverage_logfile) + + def __call__(self, *args, **kwargs): + """ + Delegates to AuthServiceProxy, then writes the particular RPC method + called to a file. + + """ + return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs) + rpc_method = self.auth_service_proxy_instance._service_name + + if self.coverage_logfile: + with open(self.coverage_logfile, 'a+', encoding='utf8') as f: + f.write("%s\n" % rpc_method) + + return return_val + + @property + def url(self): + return self.auth_service_proxy_instance.url + + +def get_filename(dirname, n_node): + """ + Get a filename unique to the test process ID and node. 
+ + This file will contain a list of RPC commands covered. + """ + pid = str(os.getpid()) + return os.path.join( + dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node))) + + +def write_all_rpc_commands(dirname, node): + """ + Write out a list of all RPC functions available in `bitcoin-cli` for + coverage comparison. This will only happen once per coverage + directory. + + Args: + dirname (str): temporary test dir + node (AuthServiceProxy): client + + Returns: + bool. if the RPC interface file was written. + + """ + filename = os.path.join(dirname, REFERENCE_FILENAME) + + if os.path.isfile(filename): + return False + + help_output = node.help().split('\n') + commands = set() + + for line in help_output: + line = line.strip() + + # Ignore blanks and headers + if line and not line.startswith('='): + commands.add("%s\n" % line.split()[0]) + + with open(filename, 'w', encoding='utf8') as f: + f.writelines(list(commands)) + + return True diff --git a/basicswap/interface/contrib/firo_test_framework/mininode.py b/basicswap/interface/contrib/firo_test_framework/mininode.py new file mode 100644 index 0000000..be514cf --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/mininode.py @@ -0,0 +1,2168 @@ +#!/usr/bin/env python3 +# Copyright (c) 2010 ArtForz -- public domain half-a-node +# Copyright (c) 2012 Jeff Garzik +# Copyright (c) 2010-2016 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +# +# mininode.py - Bitcoin P2P network half-a-node +# +# This python code was modified from ArtForz' public domain half-a-node, as +# found in the mini-node branch of http://github.com/jgarzik/pynode. +# +# NodeConn: an object which manages p2p connectivity to a bitcoin node +# NodeConnCB: a base class that describes the interface for receiving +# callbacks with network messages from a NodeConn +# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: +# data structures that should map to corresponding structures in +# bitcoin/primitives +# msg_block, msg_tx, msg_headers, etc.: +# data structures that represent network messages +# ser_*, deser_*: functions that handle serialization/deserialization + + +import struct +import socket +import asyncore +from collections import namedtuple +import time +import sys +import random +from .util import hex_str_to_bytes, bytes_to_hex_str +from io import BytesIO +from codecs import encode +import hashlib +from threading import RLock +from threading import Thread +import logging +import copy +from .siphash import siphash256 + +BIP0031_VERSION = 60000 +MY_VERSION = 90030 # past bip-31 for ping/pong +MY_SUBVERSION = b"/python-mininode-tester:0.0.3/" +MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37) + +MAX_INV_SZ = 50000 +MAX_BLOCK_BASE_SIZE = 1000000 + +COIN = 100000000 # 1 btc in satoshis + +NODE_NETWORK = (1 << 0) +NODE_GETUTXO = (1 << 1) +NODE_BLOOM = (1 << 2) +NODE_WITNESS = (1 << 3) +logger = logging.getLogger("TestFramework.mininode") + +# Keep our own socket map for asyncore, so that we can track disconnects +# ourselves (to workaround an issue with closing an asyncore socket when +# using select) +mininode_socket_map = dict() + +# One lock for synchronizing all data access between the networking thread (see +# NetworkThread below) and the thread running the test logic. 
For simplicity, +# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB, +# and whenever adding anything to the send buffer (in send_message()). This +# lock should be acquired in the thread running the test logic to synchronize +# access to any data shared with the NodeConnCB or NodeConn. +mininode_lock = RLock() + +# Serialization/deserialization tools +def sha256(s): + return hashlib.new('sha256', s).digest() + +def ripemd160(s): + return hashlib.new('ripemd160', s).digest() + +def hash256(s): + return sha256(sha256(s)) + +def ser_compact_size(l): + r = b"" + if l < 253: + r = struct.pack("B", l) + elif l < 0x10000: + r = struct.pack(">= 32 + return rs + + +def uint256_from_str(s): + r = 0 + t = struct.unpack("> 24) & 0xFF + v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) + return v + + +def deser_vector(f, c): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = c() + t.deserialize(f) + r.append(t) + return r + + +# ser_function_name: Allow for an alternate serialization function on the +# entries in the vector (we use this for serializing the vector of transactions +# for a witness block). +def ser_vector(l, ser_function_name=None): + r = ser_compact_size(len(l)) + for i in l: + if ser_function_name: + r += getattr(i, ser_function_name)() + else: + r += i.serialize() + return r + + +def deser_uint256_vector(f): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = deser_uint256(f) + r.append(t) + return r + + +def ser_uint256_vector(l): + r = ser_compact_size(len(l)) + for i in l: + r += ser_uint256(i) + return r + + +def deser_string_vector(f): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = deser_string(f) + r.append(t) + return r + + +def ser_string_vector(l): + r = ser_compact_size(len(l)) + for sv in l: + r += ser_string(sv) + return r + + +def deser_int_vector(f): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = struct.unpack("H", f.read(2))[0] + + def serialize(self): + r = b"" + r += socket.inet_pton(socket.AF_INET6, self.ip) + r += struct.pack(">H", self.port) + return r + + def __repr__(self): + return "CService(ip=%s port=%i)" % (self.ip, self.port) + +# Objects that map to bitcoind objects, which can be serialized/deserialized + +class CAddress(object): + def __init__(self): + self.time = 0; + self.nServices = 1 + self.pchReserved = b"\x00" * 10 + b"\xff" * 2 + self.ip = "0.0.0.0" + self.port = 0 + + def deserialize(self, f, *, with_time = True): + if with_time: + self.time = struct.unpack("H", f.read(2))[0] + + def serialize(self): + r = b"" + r += struct.pack("H", self.port) + return r + + def __repr__(self): + return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, + self.ip, self.port) + +MSG_WITNESS_FLAG = 1<<30 + +class CInv(object): + typemap = { + 0: "Error", + 1: "TX", + 2: "Block", + 1|MSG_WITNESS_FLAG: "WitnessTx", + 2|MSG_WITNESS_FLAG : "WitnessBlock", + 4: "CompactBlock", + 5: "DandelionTx", + 20: "CompactBlock" + } + + def __init__(self, t=0, h=0): + self.type = t + self.hash = h + + def deserialize(self, f): + self.type = struct.unpack("> 16) & 0xffff + self.vin = deser_vector(f, CTxIn) + self.vout = deser_vector(f, CTxOut) + self.nLockTime = struct.unpack(" 21000000 * COIN: + return False + return True + + def __repr__(self): + return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ + % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) + + +class msg_dandeliontx(): + command = b"dandeliontx" + + def __init__(self, 
tx=CTransaction()): + self.tx = tx + + def deserialize(self, f): + self.tx.deserialize(f) + + def serialize(self): + return self.tx.serialize_without_witness() + + def __repr__(self): + return "msg_dandeliontx(tx=%s)" % (repr(self.tx)) + + +class CBlockHeader(object): + def __init__(self, header=None): + if header is None: + self.set_null() + else: + self.nVersion = header.nVersion + self.hashPrevBlock = header.hashPrevBlock + self.hashMerkleRoot = header.hashMerkleRoot + self.nTime = header.nTime + self.nBits = header.nBits + self.nNonce = header.nNonce + self.sha256 = header.sha256 + self.hash = header.hash + self.calc_sha256() + + def set_null(self): + self.nVersion = 1 + self.hashPrevBlock = 0 + self.hashMerkleRoot = 0 + self.nTime = 0 + self.nBits = 0 + self.nNonce = 0 + self.sha256 = None + self.hash = None + + def deserialize(self, f): + self.nVersion = struct.unpack(" 1: + newhashes = [] + for i in range(0, len(hashes), 2): + i2 = min(i+1, len(hashes)-1) + newhashes.append(hash256(hashes[i] + hashes[i2])) + hashes = newhashes + return uint256_from_str(hashes[0]) + + def calc_merkle_root(self): + hashes = [] + for tx in self.vtx: + tx.calc_sha256() + hashes.append(ser_uint256(tx.sha256)) + return self.get_merkle_root(hashes) + + def calc_witness_merkle_root(self): + # For witness root purposes, the hash of the + # coinbase, with witness, is defined to be 0...0 + hashes = [ser_uint256(0)] + + for tx in self.vtx[1:]: + # Calculate the hashes with witness data + hashes.append(ser_uint256(tx.calc_sha256(True))) + + return self.get_merkle_root(hashes) + + def is_valid(self): + self.calc_sha256() + target = uint256_from_compact(self.nBits) + if self.sha256 > target: + return False + for tx in self.vtx: + if not tx.is_valid(): + return False + if self.calc_merkle_root() != self.hashMerkleRoot: + return False + return True + + def solve(self): + self.rehash() + target = uint256_from_compact(self.nBits) + while self.sha256 > target: + self.nNonce += 1 + self.rehash() + + def __repr__(self): + return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ + % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, + time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) + + +class CUnsignedAlert(object): + def __init__(self): + self.nVersion = 1 + self.nRelayUntil = 0 + self.nExpiration = 0 + self.nID = 0 + self.nCancel = 0 + self.setCancel = [] + self.nMinVer = 0 + self.nMaxVer = 0 + self.setSubVer = [] + self.nPriority = 0 + self.strComment = b"" + self.strStatusBar = b"" + self.strReserved = b"" + + def deserialize(self, f): + self.nVersion = struct.unpack("= 2: + self.merkleRootQuorums = deser_uint256(f) + + def serialize(self): + r = b"" + r += struct.pack("= 2: + r += ser_uint256(self.merkleRootQuorums) + return r + + +class CSimplifiedMNListEntry(object): + def __init__(self): + self.set_null() + + def set_null(self): + self.proRegTxHash = 0 + self.confirmedHash = 0 + self.service = CService() + self.pubKeyOperator = b'\\x0' * 48 + self.keyIDVoting = 0 + self.isValid = False + + def deserialize(self, f): + self.proRegTxHash = deser_uint256(f) + self.confirmedHash = deser_uint256(f) + self.service.deserialize(f) + self.pubKeyOperator = f.read(48) + self.keyIDVoting = f.read(20) + self.isValid = struct.unpack("= 106: + self.addrFrom = CAddress() + self.addrFrom.deserialize(f) + self.nNonce = struct.unpack("= 209: + self.nStartingHeight = struct.unpack("= 70001: + # Relay field is optional for version 70001 onwards + try: + 
self.nRelay = struct.unpack(" +class msg_headers(object): + command = b"headers" + + def __init__(self): + self.headers = [] + + def deserialize(self, f): + # comment in bitcoind indicates these should be deserialized as blocks + blocks = deser_vector(f, CBlock) + for x in blocks: + self.headers.append(CBlockHeader(x)) + + def serialize(self): + blocks = [CBlock(x) for x in self.headers] + return ser_vector(blocks) + + def __repr__(self): + return "msg_headers(headers=%s)" % repr(self.headers) + + +class msg_reject(object): + command = b"reject" + REJECT_MALFORMED = 1 + + def __init__(self): + self.message = b"" + self.code = 0 + self.reason = b"" + self.data = 0 + + def deserialize(self, f): + self.message = deser_string(f) + self.code = struct.unpack("= 209: + conn.send_message(msg_verack()) + conn.ver_send = min(MY_VERSION, message.nVersion) + if message.nVersion < 209: + conn.ver_recv = conn.ver_send + conn.nServices = message.nServices + + def on_verack(self, conn, message): + conn.ver_recv = conn.ver_send + self.verack_received = True + + def on_inv(self, conn, message): + want = msg_getdata() + for i in message.inv: + if i.type != 0: + want.inv.append(i) + if len(want.inv): + conn.send_message(want) + + def on_addr(self, conn, message): pass + def on_alert(self, conn, message): pass + def on_getdata(self, conn, message): pass + def on_getblocks(self, conn, message): pass + def on_tx(self, conn, message): pass + def on_block(self, conn, message): pass + def on_getaddr(self, conn, message): pass + def on_headers(self, conn, message): pass + def on_getheaders(self, conn, message): pass + def on_ping(self, conn, message): + if conn.ver_send > BIP0031_VERSION: + conn.send_message(msg_pong(message.nonce)) + def on_reject(self, conn, message): pass + def on_open(self, conn): pass + def on_close(self, conn): pass + def on_mempool(self, conn): pass + def on_notfound(self, message): pass + def on_pong(self, conn, message): pass + def on_feefilter(self, conn, message): pass + def on_sendheaders(self, conn, message): pass + def on_sendcmpct(self, conn, message): pass + def on_cmpctblock(self, conn, message): pass + def on_dandeliontx(self, conn, message): pass + def on_getblocktxn(self, conn, message): pass + def on_blocktxn(self, conn, message): pass + def on_mnlistdiff(self, conn, message): pass + def on_clsig(self, conn, message): pass + def on_islock(self, conn, message): pass + +class msg_witness_blocktxn(msg_blocktxn): + def serialize(self): + r = b"" + r += self.block_transactions.serialize(with_witness=True) + return r + + +# More useful callbacks and functions for NodeConnCB's which have a single NodeConn +class SingleNodeConnCB(NodeConnCB): + def __init__(self): + NodeConnCB.__init__(self) + self.connection = None + self.ping_counter = 1 + self.last_pong = msg_pong() + + def add_connection(self, conn): + self.connection = conn + + # Wrapper for the NodeConn's send_message function + def send_message(self, message): + self.connection.send_message(message) + + def send_and_ping(self, message): + self.send_message(message) + self.sync_with_ping() + + def on_pong(self, conn, message): + self.last_pong = message + + # Sync up with the node + def sync_with_ping(self, timeout=30): + def received_pong(): + return (self.last_pong.nonce == self.ping_counter) + self.send_message(msg_ping(nonce=self.ping_counter)) + success = wait_until(received_pong, timeout=timeout) + self.ping_counter += 1 + return success + +# The actual NodeConn class +# This class provides an interface for a p2p connection 
to a specified node +class NodeConn(asyncore.dispatcher): + messagemap = { + b"version": msg_version, + b"verack": msg_verack, + b"addr": msg_addr, + b"alert": msg_alert, + b"inv": msg_inv, + b"getdata": msg_getdata, + b"getblocks": msg_getblocks, + b"tx": msg_tx, + b"block": msg_block, + b"getaddr": msg_getaddr, + b"ping": msg_ping, + b"pong": msg_pong, + b"headers": msg_headers, + b"getheaders": msg_getheaders, + b"reject": msg_reject, + b"mempool": msg_mempool, + b"notfound": msg_notfound, + b"feefilter": msg_feefilter, + b"sendheaders": msg_sendheaders, + b"sendcmpct": msg_sendcmpct, + b"cmpctblock": msg_cmpctblock, + b"dandeliontx": msg_dandeliontx, + b"getblocktxn": msg_getblocktxn, + b"blocktxn": msg_blocktxn, + b"mnlistdiff": msg_mnlistdiff, + b"clsig": msg_clsig, + b"islock": msg_islock + } + MAGIC_BYTES = { + "mainnet": b"\xe3\xd9\xfe\xf1", # mainnet + "testnet3": b"\xcf\xfc\xbe\xea", # testnet3 + "regtest": b"\xfa\xbf\xb5\xda", # regtest + } + + def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK, send_version=True): + asyncore.dispatcher.__init__(self, map=mininode_socket_map) + self.dstaddr = dstaddr + self.dstport = dstport + self.create_socket(socket.AF_INET, socket.SOCK_STREAM) + self.sendbuf = b"" + self.recvbuf = b"" + self.ver_send = 209 + self.ver_recv = 209 + self.last_sent = 0 + self.state = "connecting" + self.network = net + self.cb = callback + self.disconnect = False + self.nServices = 0 + + if send_version: + # stuff version msg into sendbuf + vt = msg_version() + vt.nServices = services + vt.addrTo.ip = self.dstaddr + vt.addrTo.port = self.dstport + vt.addrFrom.ip = "0.0.0.0" + vt.addrFrom.port = 0 + self.send_message(vt, True) + + print('MiniNode: Connecting to Bitcoin Node IP # ' + dstaddr + ':' \ + + str(dstport)) + + try: + self.connect((dstaddr, dstport)) + print("Connection to " + dstaddr + ':' + \ + str(dstport) + " successful. State is " + self.state) + except: + print("Connection to " + dstaddr + ':' + str(dstport) + " failed.") + self.handle_close() + self.rpc = rpc + + def handle_connect(self): + if self.state != "connected": + logger.debug("MiniNode: Connected & Listening: \n") + self.state = "connected" + self.cb.on_open(self) + + def handle_close(self): + print("MiniNode: Closing Connection to %s:%d... " + % (self.dstaddr, self.dstport)) + logger.debug("MiniNode: Closing Connection to %s:%d... 
" + % (self.dstaddr, self.dstport)) + self.state = "closed" + self.recvbuf = b"" + self.sendbuf = b"" + try: + self.close() + except: + pass + self.cb.on_close(self) + + def handle_read(self): + try: + t = self.recv(8192) + if len(t) > 0: + self.recvbuf += t + self.got_data() + except: + pass + + def readable(self): + return True + + def writable(self): + with mininode_lock: + pre_connection = self.state == "connecting" + length = len(self.sendbuf) + return (length > 0 or pre_connection) + + def handle_write(self): + with mininode_lock: + # asyncore does not expose socket connection, only the first read/write + # event, thus we must check connection manually here to know when we + # actually connect + if self.state == "connecting": + self.handle_connect() + if not self.writable(): + return + + try: + sent = self.send(self.sendbuf) + except: + print("Closing connection in handle_write") + self.handle_close() + return + self.sendbuf = self.sendbuf[sent:] + + def got_data(self): + try: + while True: + if len(self.recvbuf) < 4: + return + if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]: + raise ValueError("got garbage %s" % repr(self.recvbuf)) + if self.ver_recv < 209: + if len(self.recvbuf) < 4 + 12 + 4: + return + command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] + msglen = struct.unpack("= 209: + th = sha256(data) + h = sha256(th) + tmsg += h[:4] + tmsg += data + with mininode_lock: + self.sendbuf += tmsg + self.last_sent = time.time() + + def got_message(self, message): + if message.command == b"version": + if message.nVersion <= BIP0031_VERSION: + self.messagemap[b'ping'] = msg_ping_prebip31 + if self.last_sent + 30 * 60 < time.time(): + self.send_message(self.messagemap[b'ping']()) + logger.debug("Recv %s" % repr(message)) + self.cb.deliver(self, message) + + def disconnect_node(self): + self.disconnect = True + + +class NetworkThread(Thread): + def run(self): + while mininode_socket_map: + # We check for whether to disconnect outside of the asyncore + # loop to workaround the behavior of asyncore when using + # select + disconnected = [] + for fd, obj in mininode_socket_map.items(): + if obj.disconnect: + disconnected.append(obj) + print("NetworkThread:run disconnecting %s %s" % (fd, obj.disconnect)) + [ obj.handle_close() for obj in disconnected ] + asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1) + + +# An exception we can raise if we detect a potential disconnect +# (p2p or rpc) before the test is complete +class EarlyDisconnectError(Exception): + def __init__(self, value): + self.value = value + + def __str__(self): + return repr(self.value) diff --git a/basicswap/interface/contrib/firo_test_framework/script.py b/basicswap/interface/contrib/firo_test_framework/script.py new file mode 100644 index 0000000..83bbf20 --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/script.py @@ -0,0 +1,947 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2016 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +# +# script.py +# +# This file is modified from python-bitcoinlib. +# + +"""Scripts + +Functionality to build scripts, as well as SignatureHash(). 
+""" + + +from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string +from binascii import hexlify +import hashlib + +import sys +bchr = chr +bord = ord +if sys.version > '3': + long = int + bchr = lambda x: bytes([x]) + bord = lambda x: x + +import struct + +from .bignum import bn2vch + +MAX_SCRIPT_SIZE = 10000 +MAX_SCRIPT_ELEMENT_SIZE = 520 +MAX_SCRIPT_OPCODES = 201 + +OPCODE_NAMES = {} + +def hash160(s): + return hashlib.new('ripemd160', sha256(s)).digest() + + +_opcode_instances = [] +class CScriptOp(int): + """A single script opcode""" + __slots__ = [] + + @staticmethod + def encode_op_pushdata(d): + """Encode a PUSHDATA op, returning bytes""" + if len(d) < 0x4c: + return b'' + bchr(len(d)) + d # OP_PUSHDATA + elif len(d) <= 0xff: + return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1 + elif len(d) <= 0xffff: + return b'\x4d' + struct.pack(b'>= 8 + if r[-1] & 0x80: + r.append(0x80 if neg else 0) + elif neg: + r[-1] |= 0x80 + return bytes(bchr(len(r)) + r) + + +class CScript(bytes): + """Serialized script + + A bytes subclass, so you can use this directly whenever bytes are accepted. + Note that this means that indexing does *not* work - you'll get an index by + byte rather than opcode. This format was chosen for efficiency so that the + general case would not require creating a lot of little CScriptOP objects. + + iter(script) however does iterate by opcode. + """ + @classmethod + def __coerce_instance(cls, other): + # Coerce other into bytes + if isinstance(other, CScriptOp): + other = bchr(other) + elif isinstance(other, CScriptNum): + if (other.value == 0): + other = bchr(CScriptOp(OP_0)) + else: + other = CScriptNum.encode(other) + elif isinstance(other, int): + if 0 <= other <= 16: + other = bytes(bchr(CScriptOp.encode_op_n(other))) + elif other == -1: + other = bytes(bchr(OP_1NEGATE)) + else: + other = CScriptOp.encode_op_pushdata(bn2vch(other)) + elif isinstance(other, (bytes, bytearray)): + other = CScriptOp.encode_op_pushdata(other) + return other + + def __add__(self, other): + # Do the coercion outside of the try block so that errors in it are + # noticed. + other = self.__coerce_instance(other) + + try: + # bytes.__add__ always returns bytes instances unfortunately + return CScript(super(CScript, self).__add__(other)) + except TypeError: + raise TypeError('Can not add a %r instance to a CScript' % other.__class__) + + def join(self, iterable): + # join makes no sense for a CScript() + raise NotImplementedError + + def __new__(cls, value=b''): + if isinstance(value, bytes) or isinstance(value, bytearray): + return super(CScript, cls).__new__(cls, value) + else: + def coerce_iterable(iterable): + for instance in iterable: + yield cls.__coerce_instance(instance) + # Annoyingly on both python2 and python3 bytes.join() always + # returns a bytes instance even when subclassed. + return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value))) + + def raw_iter(self): + """Raw iteration + + Yields tuples of (opcode, data, sop_idx) so that the different possible + PUSHDATA encodings can be accurately distinguished, as well as + determining the exact opcode byte indexes. 
(sop_idx) + """ + i = 0 + while i < len(self): + sop_idx = i + opcode = bord(self[i]) + i += 1 + + if opcode > OP_PUSHDATA4: + yield (opcode, None, sop_idx) + else: + datasize = None + pushdata_type = None + if opcode < OP_PUSHDATA1: + pushdata_type = 'PUSHDATA(%d)' % opcode + datasize = opcode + + elif opcode == OP_PUSHDATA1: + pushdata_type = 'PUSHDATA1' + if i >= len(self): + raise CScriptInvalidError('PUSHDATA1: missing data length') + datasize = bord(self[i]) + i += 1 + + elif opcode == OP_PUSHDATA2: + pushdata_type = 'PUSHDATA2' + if i + 1 >= len(self): + raise CScriptInvalidError('PUSHDATA2: missing data length') + datasize = bord(self[i]) + (bord(self[i+1]) << 8) + i += 2 + + elif opcode == OP_PUSHDATA4: + pushdata_type = 'PUSHDATA4' + if i + 3 >= len(self): + raise CScriptInvalidError('PUSHDATA4: missing data length') + datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24) + i += 4 + + else: + assert False # shouldn't happen + + + data = bytes(self[i:i+datasize]) + + # Check for truncation + if len(data) < datasize: + raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data) + + i += datasize + + yield (opcode, data, sop_idx) + + def __iter__(self): + """'Cooked' iteration + + Returns either a CScriptOP instance, an integer, or bytes, as + appropriate. + + See raw_iter() if you need to distinguish the different possible + PUSHDATA encodings. + """ + for (opcode, data, sop_idx) in self.raw_iter(): + if data is not None: + yield data + else: + opcode = CScriptOp(opcode) + + if opcode.is_small_int(): + yield opcode.decode_op_n() + else: + yield CScriptOp(opcode) + + def __repr__(self): + # For Python3 compatibility add b before strings so testcases don't + # need to change + def _repr(o): + if isinstance(o, bytes): + return b"x('%s')" % hexlify(o).decode('ascii') + else: + return repr(o) + + ops = [] + i = iter(self) + while True: + op = None + try: + op = _repr(next(i)) + except CScriptTruncatedPushDataError as err: + op = '%s...' % (_repr(err.data), err) + break + except CScriptInvalidError as err: + op = '' % err + break + except StopIteration: + break + finally: + if op is not None: + ops.append(op) + + return "CScript([%s])" % ', '.join(ops) + + def GetSigOpCount(self, fAccurate): + """Get the SigOp count. + + fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details. + + Note that this is consensus-critical. + """ + n = 0 + lastOpcode = OP_INVALIDOPCODE + for (opcode, data, sop_idx) in self.raw_iter(): + if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY): + n += 1 + elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY): + if fAccurate and (OP_1 <= lastOpcode <= OP_16): + n += opcode.decode_op_n() + else: + n += 20 + lastOpcode = opcode + return n + + +SIGHASH_ALL = 1 +SIGHASH_NONE = 2 +SIGHASH_SINGLE = 3 +SIGHASH_ANYONECANPAY = 0x80 + +def FindAndDelete(script, sig): + """Consensus critical, see FindAndDelete() in Satoshi codebase""" + r = b'' + last_sop_idx = sop_idx = 0 + skip = True + for (opcode, data, sop_idx) in script.raw_iter(): + if not skip: + r += script[last_sop_idx:sop_idx] + last_sop_idx = sop_idx + if script[sop_idx:sop_idx + len(sig)] == sig: + skip = True + else: + skip = False + if not skip: + r += script[last_sop_idx:] + return CScript(r) + + +def SignatureHash(script, txTo, inIdx, hashtype): + """Consensus-correct SignatureHash + + Returns (hash, err) to precisely match the consensus-critical behavior of + the SIGHASH_SINGLE bug. 
(inIdx is *not* checked for validity) + """ + HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + + if inIdx >= len(txTo.vin): + return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin))) + txtmp = CTransaction(txTo) + + for txin in txtmp.vin: + txin.scriptSig = b'' + txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR])) + + if (hashtype & 0x1f) == SIGHASH_NONE: + txtmp.vout = [] + + for i in range(len(txtmp.vin)): + if i != inIdx: + txtmp.vin[i].nSequence = 0 + + elif (hashtype & 0x1f) == SIGHASH_SINGLE: + outIdx = inIdx + if outIdx >= len(txtmp.vout): + return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout))) + + tmp = txtmp.vout[outIdx] + txtmp.vout = [] + for i in range(outIdx): + txtmp.vout.append(CTxOut(-1)) + txtmp.vout.append(tmp) + + for i in range(len(txtmp.vin)): + if i != inIdx: + txtmp.vin[i].nSequence = 0 + + if hashtype & SIGHASH_ANYONECANPAY: + tmp = txtmp.vin[inIdx] + txtmp.vin = [] + txtmp.vin.append(tmp) + + s = txtmp.serialize() + s += struct.pack(b"> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b + +def siphash_round(v0, v1, v2, v3): + v0 = (v0 + v1) & ((1 << 64) - 1) + v1 = rotl64(v1, 13) + v1 ^= v0 + v0 = rotl64(v0, 32) + v2 = (v2 + v3) & ((1 << 64) - 1) + v3 = rotl64(v3, 16) + v3 ^= v2 + v0 = (v0 + v3) & ((1 << 64) - 1) + v3 = rotl64(v3, 21) + v3 ^= v0 + v2 = (v2 + v1) & ((1 << 64) - 1) + v1 = rotl64(v1, 17) + v1 ^= v2 + v2 = rotl64(v2, 32) + return (v0, v1, v2, v3) + +def siphash256(k0, k1, h): + n0 = h & ((1 << 64) - 1) + n1 = (h >> 64) & ((1 << 64) - 1) + n2 = (h >> 128) & ((1 << 64) - 1) + n3 = (h >> 192) & ((1 << 64) - 1) + v0 = 0x736f6d6570736575 ^ k0 + v1 = 0x646f72616e646f6d ^ k1 + v2 = 0x6c7967656e657261 ^ k0 + v3 = 0x7465646279746573 ^ k1 ^ n0 + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0 ^= n0 + v3 ^= n1 + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0 ^= n1 + v3 ^= n2 + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0 ^= n2 + v3 ^= n3 + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0 ^= n3 + v3 ^= 0x2000000000000000 + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0 ^= 0x2000000000000000 + v2 ^= 0xFF + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3) + return v0 ^ v1 ^ v2 ^ v3 diff --git a/basicswap/interface/contrib/firo_test_framework/util.py b/basicswap/interface/contrib/firo_test_framework/util.py new file mode 100644 index 0000000..3e320b5 --- /dev/null +++ b/basicswap/interface/contrib/firo_test_framework/util.py @@ -0,0 +1,841 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2017 The Dash Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ + +# +# Helpful routines for regression testing +# + +import os +import sys + +from binascii import hexlify, unhexlify +from base64 import b64encode +from decimal import Decimal, ROUND_DOWN +import json +import http.client +import random +import shutil +import subprocess +import tempfile +import time +import re +import errno +import logging + +from . import coverage +from .authproxy import AuthServiceProxy, JSONRPCException + +COVERAGE_DIR = None + +logger = logging.getLogger("TestFramework.utils") +# The maximum number of nodes a single test can spawn +MAX_NODES = 15 +# Don't assign rpc or p2p ports lower than this +PORT_MIN = 11000 +# The number of ports to "reserve" for p2p and rpc, each +PORT_RANGE = 5000 + +BITCOIND_PROC_WAIT_TIMEOUT = 60 + + +class PortSeed: + # Must be initialized with a unique integer for each process + n = None + +#Set Mocktime default to OFF. +#MOCKTIME is only needed for scripts that use the +#cached version of the blockchain. If the cached +#version of the blockchain is used without MOCKTIME +#then the mempools will not sync due to IBD. +MOCKTIME = 0 + +def enable_mocktime(): + #For backwared compatibility of the python scripts + #with previous versions of the cache, set MOCKTIME + #to Jan 1, 2014 + (201 * 10 * 60) + global MOCKTIME + MOCKTIME = 1414776313 + (201 * 10 * 60) + +def set_mocktime(t): + global MOCKTIME + MOCKTIME = t + +def disable_mocktime(): + global MOCKTIME + MOCKTIME = 0 + +def get_mocktime(): + return MOCKTIME + +def enable_coverage(dirname): + """Maintain a log of which RPC calls are made during testing.""" + global COVERAGE_DIR + COVERAGE_DIR = dirname + + +def get_rpc_proxy(url, node_number, timeout=None): + """ + Args: + url (str): URL of the RPC server to call + node_number (int): the node number (or id) that this calls to + + Kwargs: + timeout (int): HTTP timeout in seconds + + Returns: + AuthServiceProxy. convenience object for making RPC calls. 
+ + """ + proxy_kwargs = {} + if timeout is not None: + proxy_kwargs['timeout'] = timeout + + proxy = AuthServiceProxy(url, **proxy_kwargs) + proxy.url = url # store URL on proxy for info + + coverage_logfile = coverage.get_filename( + COVERAGE_DIR, node_number) if COVERAGE_DIR else None + + return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) + +def get_evoznsync_status(node): + result = node.evoznsync("status") + return result['IsSynced'] + +def wait_to_sync(node, fast_znsync=False): + tm = 0 + synced = False + while tm < 30: + synced = get_evoznsync_status(node) + if synced: + return + time.sleep(0.2) + if fast_znsync: + # skip mnsync states + node.evoznsync("next") + tm += 0.2 + assert(synced) + +def p2p_port(n): + assert(n <= MAX_NODES) + return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + +def rpc_port(n): + return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES) + +def check_json_precision(): + """Make sure json library being used does not lose precision converting BTC values""" + n = Decimal("20000000.00000003") + satoshis = int(json.loads(json.dumps(float(n)))*1.0e8) + if satoshis != 2000000000000003: + raise RuntimeError("JSON encode/decode loses precision") + +def count_bytes(hex_string): + return len(bytearray.fromhex(hex_string)) + +def bytes_to_hex_str(byte_str): + return hexlify(byte_str).decode('ascii') + +def hex_str_to_bytes(hex_str): + return unhexlify(hex_str.encode('ascii')) + +def str_to_b64str(string): + return b64encode(string.encode('utf-8')).decode('ascii') + +def sync_blocks(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same tip. + + sync_blocks needs to be called with an rpc_connections set that has least + one node already synced to the latest, stable tip, otherwise there's a + chance it might return before all nodes are stably synced. + """ + # Use getblockcount() instead of waitforblockheight() to determine the + # initial max height because the two RPCs look at different internal global + # variables (chainActive vs latestBlock) and the former gets updated + # earlier. + maxheight = max(x.getblockcount() for x in rpc_connections) + start_time = cur_time = time.time() + while cur_time <= start_time + timeout: + tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] + if all(t["height"] == maxheight for t in tips): + if all(t["hash"] == tips[0]["hash"] for t in tips): + return + raise AssertionError("Block sync failed, mismatched block hashes:{}".format( + "".join("\n {!r}".format(tip) for tip in tips))) + + time.sleep(wait) + cur_time = time.time() + raise AssertionError("Block sync to height {} timed out:{}".format( + maxheight, "".join("\n {!r}".format(tip) for tip in tips))) + +def sync_znodes(rpc_connections, *, timeout=60): + """ + Waits until every node has their znsync status is synced. 
+ """ + start_time = cur_time = time.time() + while cur_time <= start_time + timeout: + statuses = [r.znsync("status") for r in rpc_connections] + if all(stat["IsSynced"] == True for stat in statuses): + return + cur_time = time.time() + raise AssertionError("Znode sync failed.") + +def sync_chain(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same best block + """ + while timeout > 0: + best_hash = [x.getbestblockhash() for x in rpc_connections] + if best_hash == [best_hash[0]]*len(best_hash): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Chain sync failed: Best block hashes don't match") + +def sync_mempools(rpc_connections, *, wait=1, timeout=60): + """ + Wait until everybody has the same transactions in their memory + pools + """ + while timeout > 0: + pool = set(rpc_connections[0].getrawmempool()) + num_match = 1 + for i in range(1, len(rpc_connections)): + if set(rpc_connections[i].getrawmempool()) == pool: + num_match = num_match+1 + if num_match == len(rpc_connections): + return + time.sleep(wait) + timeout -= wait + raise AssertionError("Mempool sync failed") + +def sync_znodes(rpc_connections, fast_mnsync=False): + for node in rpc_connections: + wait_to_sync(node, fast_mnsync) + +bitcoind_processes = {} + +def initialize_datadir(dirname, n): + datadir = os.path.join(dirname, "node"+str(n)) + if not os.path.isdir(datadir): + os.makedirs(datadir) + rpc_u, rpc_p = rpc_auth_pair(n) + with open(os.path.join(datadir, "firo.conf"), 'w', encoding='utf8') as f: + f.write("regtest=1\n") + f.write("rpcuser=" + rpc_u + "\n") + f.write("rpcpassword=" + rpc_p + "\n") + f.write("port="+str(p2p_port(n))+"\n") + f.write("rpcport="+str(rpc_port(n))+"\n") + f.write("listenonion=0\n") + return datadir + +def rpc_auth_pair(n): + return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n) + +def rpc_url(i, rpchost=None): + rpc_u, rpc_p = rpc_auth_pair(i) + host = '127.0.0.1' + port = rpc_port(i) + if rpchost: + parts = rpchost.split(':') + if len(parts) == 2: + host, port = parts + else: + host = rpchost + return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) + +def wait_for_bitcoind_start(process, url, i): + ''' + Wait for firod to start. This means that RPC is accessible and fully initialized. + Raise an exception if firod exits during initialization. + ''' + while True: + if process.poll() is not None: + raise Exception('firod exited with status %i during initialization' % process.returncode) + try: + rpc = get_rpc_proxy(url, i) + blocks = rpc.getblockcount() + break # break out of loop on success + except IOError as e: + if e.errno != errno.ECONNREFUSED: # Port not yet open? + raise # unknown IO error + except JSONRPCException as e: # Initialization phase + if e.error['code'] != -28: # RPC in warmup? 
+ raise # unknown JSON RPC exception + time.sleep(0.25) + +def initialize_chain(test_dir, num_nodes, cachedir): + """ + Create a cache of a 200-block-long chain (with wallet) for MAX_NODES + Afterward, create num_nodes copies from the cache + """ + + assert num_nodes <= MAX_NODES + create_cache = False + for i in range(MAX_NODES): + if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))): + create_cache = True + break + + if create_cache: + + #find and delete old cache directories if any exist + for i in range(MAX_NODES): + if os.path.isdir(os.path.join(cachedir,"node"+str(i))): + shutil.rmtree(os.path.join(cachedir,"node"+str(i))) + + # Create cache directories, run bitcoinds: + for i in range(MAX_NODES): + datadir=initialize_datadir(cachedir, i) + args = [ os.getenv("FIROD", "firod"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ] + if i > 0: + args.append("-connect=127.0.0.1:"+str(p2p_port(0))) + bitcoind_processes[i] = subprocess.Popen(args) + if os.getenv("PYTHON_DEBUG", ""): + print("initialize_chain: bitcoind started, waiting for RPC to come up") + wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i) + if os.getenv("PYTHON_DEBUG", ""): + print("initialize_chain: RPC successfully started") + + rpcs = [] + for i in range(MAX_NODES): + try: + rpcs.append(get_rpc_proxy(rpc_url(i), i)) + except: + sys.stderr.write("Error connecting to "+url+"\n") + sys.exit(1) + + # Create a 200-block-long chain; each of the 4 first nodes + # gets 25 mature blocks and 25 immature. + # Note: To preserve compatibility with older versions of + # initialize_chain, only 4 nodes will generate coins. + # + # blocks are created with timestamps 10 minutes apart + # starting from 2010 minutes in the past + enable_mocktime() + block_time = get_mocktime() - (201 * 10 * 60) + for i in range(2): + for peer in range(4): + for j in range(25): + set_node_times(rpcs, block_time) + rpcs[peer].generate(1) + block_time += 10*60 + # Must sync before next peer starts generating blocks + sync_blocks(rpcs) + + # Shut them down, and clean up cache directories: + stop_nodes(rpcs) + disable_mocktime() + for i in range(MAX_NODES): + try: + os.remove(log_filename(cachedir, i, "debug.log")) + os.remove(log_filename(cachedir, i, "db.log")) + os.remove(log_filename(cachedir, i, "peers.dat")) + os.remove(log_filename(cachedir, i, "fee_estimates.dat")) + except OSError: + pass + + for i in range(num_nodes): + from_dir = os.path.join(cachedir, "node"+str(i)) + to_dir = os.path.join(test_dir, "node"+str(i)) + if from_dir != to_dir: + shutil.copytree(from_dir, to_dir) + initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf + +def initialize_chain_clean(test_dir, num_nodes): + """ + Create an empty blockchain and num_nodes wallets. + Useful if a test case wants complete control over initialization. + """ + for i in range(num_nodes): + datadir=initialize_datadir(test_dir, i) + + +def _rpchost_to_args(rpchost): + '''Convert optional IP:port spec to rpcconnect/rpcport args''' + if rpchost is None: + return [] + + match = re.match('(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost) + if not match: + raise ValueError('Invalid RPC host spec ' + rpchost) + + rpcconnect = match.group(1) + rpcport = match.group(2) + + if rpcconnect.startswith('['): # remove IPv6 [...] 
wrapping + rpcconnect = rpcconnect[1:-1] + + rv = ['-rpcconnect=' + rpcconnect] + if rpcport: + rv += ['-rpcport=' + rpcport] + return rv + +def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, redirect_stderr=False, stderr=None): + """ + Start a bitcoind and return RPC connection to it + """ + datadir = os.path.join(dirname, "node"+str(i)) + if binary is None: + binary = os.getenv("FIROD", "firod") + args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-dandelion=0", "-usemnemonic=0", "-mocktime="+str(get_mocktime()) ] +#Useful args for debugging +# "screen", "--", +# "gdb", "-x", "/tmp/gdb_run", "--args", + +# Don't try auto backups (they fail a lot when running tests) + args += [ "-createwalletbackups=0" ] + if extra_args is not None: args.extend(extra_args) + # Allow to redirect stderr to stdout in case we expect some non-critical warnings/errors printed to stderr + # Otherwise the whole test would be considered to be failed in such cases + if redirect_stderr: + stderr = sys.stdout + bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr) + logger.debug("start_node: firod started, waiting for RPC to come up") + url = rpc_url(i, rpchost) + wait_for_bitcoind_start(bitcoind_processes[i], url, i) + logger.debug("start_node: RPC successfully started") + proxy = get_rpc_proxy(url, i, timeout=timewait) + + if COVERAGE_DIR: + coverage.write_all_rpc_commands(COVERAGE_DIR, proxy) + + return proxy + +def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): + """ + Start multiple bitcoinds, return RPC connections to them + """ + if extra_args is None: extra_args = [ None for _ in range(num_nodes) ] + if binary is None: binary = [ None for _ in range(num_nodes) ] + rpcs = [] + try: + for i in range(num_nodes): + rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i])) + except: # If one node failed to start, stop the others + stop_nodes(rpcs) + raise + return rpcs + +def copy_datadir(from_node, to_node, dirname): + from_datadir = os.path.join(dirname, "node"+str(from_node), "regtest") + to_datadir = os.path.join(dirname, "node"+str(to_node), "regtest") + + dirs = ["blocks", "chainstate", "evodb", "llmq"] + for d in dirs: + try: + src = os.path.join(from_datadir, d) + dst = os.path.join(to_datadir, d) + shutil.copytree(src, dst) + except: + pass +def log_filename(dirname, n_node, logname): + return os.path.join(dirname, "node"+str(n_node), "regtest", logname) + +def wait_node(i): + return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT) + assert_equal(return_code, 0) + del bitcoind_processes[i] + +def stop_node(node, i, wait=True): + logger.debug("Stopping node %d" % i) + try: + node.stop() + except http.client.CannotSendRequest as e: + logger.exception("Unable to stop node") + if wait: + wait_node(i) + +def stop_nodes(nodes, fast=True): + for i, node in enumerate(nodes): + stop_node(node, i, not fast) + if fast: + for i, node in enumerate(nodes): + wait_node(i) + assert not bitcoind_processes.values() # All connections must be gone now + +def set_node_times(nodes, t): + for node in nodes: + node.setmocktime(t) + +def connect_nodes(from_connection, node_num): + # NOTE: In next line p2p_port(0) was replaced by rpc_port(0). 
+ ip_port = "127.0.0.1:"+str(p2p_port(node_num)) + from_connection.addnode(ip_port, "onetry") + # poll until version handshake complete to avoid race conditions + # with transaction relaying + while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()): + time.sleep(0.1) + +def connect_nodes_bi(nodes, a, b): + connect_nodes(nodes[a], b) + connect_nodes(nodes[b], a) + +def isolate_node(node, timeout=5): + node.setnetworkactive(False) + st = time.time() + while time.time() < st + timeout: + if node.getconnectioncount() == 0: + return + time.sleep(0.5) + raise AssertionError("disconnect_node timed out") + +def reconnect_isolated_node(node, node_num): + node.setnetworkactive(True) + connect_nodes(node, node_num) +def find_output(node, txid, amount): + """ + Return index to output of txid with value amount + Raises exception if there is none. + """ + txdata = node.getrawtransaction(txid, 1) + for i in range(len(txdata["vout"])): + if txdata["vout"][i]["value"] == amount: + return i + raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount))) + + +def gather_inputs(from_node, amount_needed, confirmations_required=1): + """ + Return a random set of unspent txouts that are enough to pay amount_needed + """ + assert(confirmations_required >=0) + utxo = from_node.listunspent(confirmations_required) + random.shuffle(utxo) + inputs = [] + total_in = Decimal("0.00000000") + while total_in < amount_needed and len(utxo) > 0: + t = utxo.pop() + total_in += t["amount"] + inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } ) + if total_in < amount_needed: + raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in)) + return (total_in, inputs) + +def make_change(from_node, amount_in, amount_out, fee): + """ + Create change output(s), return them + """ + outputs = {} + amount = amount_out+fee + change = amount_in - amount + if change > amount*2: + # Create an extra change output to break up big inputs + change_address = from_node.getnewaddress() + # Split change in two, being careful of rounding: + outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + change = amount_in - amount - outputs[change_address] + if change > 0: + outputs[from_node.getnewaddress()] = change + return outputs + +def send_zeropri_transaction(from_node, to_node, amount, fee): + """ + Create&broadcast a zero-priority transaction. 
+ Returns (txid, hex-encoded-txdata) + Ensures transaction is zero-priority by first creating a send-to-self, + then using its output + """ + + # Create a send-to-self with confirmed inputs: + self_address = from_node.getnewaddress() + (total_in, inputs) = gather_inputs(from_node, amount+fee*2) + outputs = make_change(from_node, total_in, amount+fee, fee) + outputs[self_address] = float(amount+fee) + + self_rawtx = from_node.createrawtransaction(inputs, outputs) + self_signresult = from_node.signrawtransaction(self_rawtx) + self_txid = from_node.sendrawtransaction(self_signresult["hex"], True) + + vout = find_output(from_node, self_txid, amount+fee) + # Now immediately spend the output to create a 1-input, 1-output + # zero-priority transaction: + inputs = [ { "txid" : self_txid, "vout" : vout } ] + outputs = { to_node.getnewaddress() : float(amount) } + + rawtx = from_node.createrawtransaction(inputs, outputs) + signresult = from_node.signrawtransaction(rawtx) + txid = from_node.sendrawtransaction(signresult["hex"], True) + + return (txid, signresult["hex"]) + +def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants): + """ + Create a random zero-priority transaction. + Returns (txid, hex-encoded-transaction-data, fee) + """ + from_node = random.choice(nodes) + to_node = random.choice(nodes) + fee = min_fee + fee_increment*random.randint(0,fee_variants) + (txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee) + return (txid, txhex, fee) + +def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): + """ + Create a random transaction. + Returns (txid, hex-encoded-transaction-data, fee) + """ + from_node = random.choice(nodes) + to_node = random.choice(nodes) + fee = min_fee + fee_increment*random.randint(0,fee_variants) + + (total_in, inputs) = gather_inputs(from_node, amount+fee) + outputs = make_change(from_node, total_in, amount, fee) + outputs[to_node.getnewaddress()] = float(amount) + + rawtx = from_node.createrawtransaction(inputs, outputs) + signresult = from_node.signrawtransaction(rawtx) + txid = from_node.sendrawtransaction(signresult["hex"], True) + + return (txid, signresult["hex"], fee) + +def assert_fee_amount(fee, tx_size, fee_per_kB): + """Assert the fee was in range""" + target_fee = tx_size * fee_per_kB / 1000 + if fee < target_fee: + raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee))) + # allow the wallet's estimation to be at most 2 bytes off + if fee > (tx_size + 2) * fee_per_kB / 1000: + raise AssertionError("Fee of %s BTC too high! 
(Should be %s BTC)"%(str(fee), str(target_fee))) + +def assert_equal(thing1, thing2, *args): + if thing1 != thing2 or any(thing1 != arg for arg in args): + raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args)) + +def assert_greater_than(thing1, thing2): + if thing1 <= thing2: + raise AssertionError("%s <= %s"%(str(thing1),str(thing2))) + +def assert_greater_than_or_equal(thing1, thing2): + if thing1 < thing2: + raise AssertionError("%s < %s"%(str(thing1),str(thing2))) + +def assert_raises(exc, fun, *args, **kwds): + assert_raises_message(exc, None, fun, *args, **kwds) + +def assert_raises_message(exc, message, fun, *args, **kwds): + try: + fun(*args, **kwds) + except exc as e: + if message is not None and message not in e.error['message']: + raise AssertionError("Expected substring not found:"+e.error['message']) + except Exception as e: + raise AssertionError("Unexpected exception raised: "+type(e).__name__) + else: + raise AssertionError("No exception raised") + +def assert_raises_jsonrpc(code, message, fun, *args, **kwds): + """Run an RPC and verify that a specific JSONRPC exception code and message is raised. + + Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException + and verifies that the error code and message are as expected. Throws AssertionError if + no JSONRPCException was returned or if the error code/message are not as expected. + + Args: + code (int), optional: the error code returned by the RPC call (defined + in src/rpc/protocol.h). Set to None if checking the error code is not required. + message (string), optional: [a substring of] the error string returned by the + RPC call. Set to None if checking the error string is not required + fun (function): the function to call. This should be the name of an RPC. + args*: positional arguments for the function. + kwds**: named arguments for the function. + """ + try: + fun(*args, **kwds) + except JSONRPCException as e: + # JSONRPCException was thrown as expected. Check the code and message values are correct. + if (code is not None) and (code != e.error["code"]): + raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"]) + if (message is not None) and (message not in e.error['message']): + raise AssertionError("Expected substring not found:"+e.error['message']) + except Exception as e: + raise AssertionError("Unexpected exception raised: "+type(e).__name__) + else: + raise AssertionError("No exception raised") + +def assert_is_hex_string(string): + try: + int(string, 16) + except Exception as e: + raise AssertionError( + "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e)) + +def assert_is_hash_string(string, length=64): + if not isinstance(string, str): + raise AssertionError("Expected a string, got type %r" % type(string)) + elif length and len(string) != length: + raise AssertionError( + "String of length %d expected; got %d" % (length, len(string))) + elif not re.match('[abcdef0-9]+$', string): + raise AssertionError( + "String %r contains invalid characters for a hash." % string) + +def assert_array_result(object_array, to_match, expected, should_not_find = False): + """ + Pass in array of JSON objects, a dictionary with key/value pairs + to match against, and another dictionary with expected key/value + pairs. 
+ If the should_not_find flag is true, to_match should not be found + in object_array + """ + if should_not_find == True: + assert_equal(expected, { }) + num_matched = 0 + for item in object_array: + all_match = True + for key,value in to_match.items(): + if item[key] != value: + all_match = False + if not all_match: + continue + elif should_not_find == True: + num_matched = num_matched+1 + for key,value in expected.items(): + if item[key] != value: + raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) + num_matched = num_matched+1 + if num_matched == 0 and should_not_find != True: + raise AssertionError("No objects matched %s"%(str(to_match))) + if num_matched > 0 and should_not_find == True: + raise AssertionError("Objects were found %s"%(str(to_match))) + +def satoshi_round(amount): + return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN) + +# Helper to create at least "count" utxos +# Pass in a fee that is sufficient for relay and mining new transactions. +def create_confirmed_utxos(fee, node, count): + node.generate(int(0.5*count)+101) + utxos = node.listunspent() + iterations = count - len(utxos) + addr1 = node.getnewaddress() + addr2 = node.getnewaddress() + if iterations <= 0: + return utxos + for i in range(iterations): + t = utxos.pop() + inputs = [] + inputs.append({ "txid" : t["txid"], "vout" : t["vout"]}) + outputs = {} + send_value = t['amount'] - fee + outputs[addr1] = satoshi_round(send_value/2) + outputs[addr2] = satoshi_round(send_value/2) + raw_tx = node.createrawtransaction(inputs, outputs) + signed_tx = node.signrawtransaction(raw_tx)["hex"] + txid = node.sendrawtransaction(signed_tx) + + while (node.getmempoolinfo()['size'] > 0): + node.generate(1) + + utxos = node.listunspent() + assert(len(utxos) >= count) + return utxos + +# Create large OP_RETURN txouts that can be appended to a transaction +# to make it large (helper for constructing large transactions). +def gen_return_txouts(): + # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create + # So we have big transactions (and therefore can't fit very many into each block) + # create one script_pubkey + script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes + for i in range (512): + script_pubkey = script_pubkey + "01" + # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change + txouts = "81" + for k in range(128): + # add txout value + txouts = txouts + "0000000000000000" + # add length of script_pubkey + txouts = txouts + "fd0402" + # add script_pubkey + txouts = txouts + script_pubkey + return txouts + +def create_tx(node, coinbase, to_address, amount): + inputs = [{ "txid" : coinbase, "vout" : 0}] + outputs = { to_address : amount } + rawtx = node.createrawtransaction(inputs, outputs) + signresult = node.signrawtransaction(rawtx) + assert_equal(signresult["complete"], True) + return signresult["hex"] + +def create_tx_multi_input(node, inputs, outputs): + rawtx = node.createrawtransaction(inputs, outputs) + signresult = node.signrawtransaction(rawtx) + assert_equal(signresult["complete"], True) + return signresult["hex"] + +# Create a spend of each passed-in utxo, splicing in "txouts" to each raw +# transaction to make it large. See gen_return_txouts() above. 
+def create_lots_of_big_transactions(node, txouts, utxos, num, fee): + addr = node.getnewaddress() + txids = [] + for _ in range(num): + t = utxos.pop() + inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}] + outputs = {} + change = t['amount'] - fee + outputs[addr] = satoshi_round(change) + rawtx = node.createrawtransaction(inputs, outputs) + newtx = rawtx[0:92] + newtx = newtx + txouts + newtx = newtx + rawtx[94:] + signresult = node.signrawtransaction(newtx, None, None, "NONE") + txid = node.sendrawtransaction(signresult["hex"], True) + txids.append(txid) + return txids + +def mine_large_block(node, utxos=None): + # generate a 66k transaction, + # and 14 of them is close to the 1MB block limit + num = 14 + txouts = gen_return_txouts() + utxos = utxos if utxos is not None else [] + if len(utxos) < num: + utxos.clear() + utxos.extend(node.listunspent()) + fee = 100 * node.getnetworkinfo()["relayfee"] + create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) + node.generate(1) + +def get_bip9_status(node, key): + info = node.getblockchaininfo() + return info['bip9_softforks'][key] + +def dumpprivkey_otac(node, address): + import re + error_text = '' + try: + return node.dumpprivkey(address) + except JSONRPCException as e: + error_text = e.error + else: + raise + + otac_match = re.search("Your one time authorization code is: ([a-zA-Z0-9]+)", error_text['message']) + if not otac_match: + raise JSONRPCException(error_text) + return node.dumpprivkey(address, otac_match.groups()[0]) + +def get_znsync_status(node): + result = node.znsync("status") + return result['IsSynced'] + +def wait_to_sync_znodes(node, fast_znsync=False): + while True: + synced = get_znsync_status(node) + if synced: + break + time.sleep(0.2) + if fast_znsync: + # skip mnsync states + node.znsync("next") + +def get_full_balance(node): + wallet_info = node.getwalletinfo() + return wallet_info["balance"] + wallet_info["immature_balance"] + wallet_info["unconfirmed_balance"] diff --git a/basicswap/interface/contrib/nav_test_framework/mininode.py b/basicswap/interface/contrib/nav_test_framework/mininode.py index ff476ee..46a6471 100755 --- a/basicswap/interface/contrib/nav_test_framework/mininode.py +++ b/basicswap/interface/contrib/nav_test_framework/mininode.py @@ -475,10 +475,11 @@ class CTransaction(object): if flags != 0: self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))] self.wit.deserialize(f) + self.nLockTime = struct.unpack(" 2: + if self.nVersion >= 2: self.strDZeel = deser_string(f) def serialize_without_witness(self): diff --git a/basicswap/interface/firo.py b/basicswap/interface/firo.py index 092ba20..5d0207d 100644 --- a/basicswap/interface/firo.py +++ b/basicswap/interface/firo.py @@ -1,18 +1,22 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2022 tecnovert +# Copyright (c) 2022-2023 tecnovert # Distributed under the MIT software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. 
+import random import hashlib -from .btc import BTCInterface, find_vout_for_address_from_txobj -from basicswap.chainparams import Coins +from .btc import BTCInterface, find_vout_for_address_from_txobj +from basicswap.util import ( + i2b, + ensure, +) from basicswap.util.address import decodeAddress -from basicswap.contrib.test_framework.script import ( +from basicswap.chainparams import Coins +from basicswap.interface.contrib.firo_test_framework.script import ( CScript, - OP_0, OP_DUP, OP_EQUAL, OP_HASH160, @@ -20,7 +24,9 @@ from basicswap.contrib.test_framework.script import ( OP_EQUALVERIFY, hash160, ) -from basicswap.contrib.test_framework.messages import ( +from basicswap.interface.contrib.firo_test_framework.mininode import ( + CBlock, + FromHex, CTransaction, ) @@ -72,15 +78,14 @@ class FIROInterface(BTCInterface): return address - def getLockTxHeightFiro(self, txid, lock_script, bid_amount, rescan_from, find_index=False): + def getLockTxHeight(self, txid, dest_address, bid_amount, rescan_from, find_index: bool = False): # Add watchonly address and rescan if required - lock_tx_dest = self.getScriptDest(lock_script) - dest_address = self.encodeScriptDest(lock_tx_dest) + if not self.isAddressMine(dest_address, or_watch_only=True): - self.rpc_callback('importaddress', [lock_tx_dest.hex(), 'bid lock', False, True]) + self.importWatchOnlyAddress(dest_address, 'bid') self._log.info('Imported watch-only addr: {}'.format(dest_address)) self._log.info('Rescanning {} chain from height: {}'.format(self.coin_name(), rescan_from)) - self.rpc_callback('rescanblockchain', [rescan_from]) + self.rescanBlockchainForAddress(rescan_from, dest_address) return_txid = True if txid is None else False if txid is None: @@ -154,14 +159,12 @@ class FIROInterface(BTCInterface): return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]) def getScriptDest(self, script: bytearray) -> bytearray: - # P2WSH nested in BIP16_P2SH + # P2SH - script_hash = hashlib.sha256(script).digest() - assert len(script_hash) == 32 - script_hash_hash = hash160(script_hash) - assert len(script_hash_hash) == 20 + script_hash = hash160(script) + assert len(script_hash) == 20 - return CScript([OP_HASH160, script_hash_hash, OP_EQUAL]) + return CScript([OP_HASH160, script_hash, OP_EQUAL]) def getSeedHash(self, seed: bytes) -> bytes: return hash160(seed)[::-1] @@ -171,8 +174,9 @@ class FIROInterface(BTCInterface): script_hash = script_dest[2:-1] return self.sh_to_address(script_hash) - def getScriptScriptSig(self, script: bytes) -> bytearray: - return CScript([OP_0, hashlib.sha256(script).digest()]) + def getDestForScriptHash(self, script_hash): + assert len(script_hash) == 20 + return CScript([OP_HASH160, script_hash, OP_EQUAL]) def withdrawCoin(self, value, addr_to, subfee): params = [addr_to, value, '', '', subfee] @@ -207,3 +211,148 @@ class FIROInterface(BTCInterface): block_height = self.getBlockHeader(rv['blockhash'])['height'] return {'txid': txid_hex, 'amount': 0, 'height': block_height} return None + + def getProofOfFunds(self, amount_for, extra_commit_bytes): + # TODO: Lock unspent and use same output/s to fund bid + + unspents_by_addr = dict() + unspents = self.rpc_callback('listunspent') + for u in unspents: + if u['spendable'] is not True: + continue + if u['address'] not in unspents_by_addr: + unspents_by_addr[u['address']] = {'total': 0, 'utxos': []} + utxo_amount: int = self.make_int(u['amount'], r=1) + unspents_by_addr[u['address']]['total'] += utxo_amount + 
+            unspents_by_addr[u['address']]['utxos'].append((utxo_amount, u['txid'], u['vout']))
+
+        max_utxos: int = 4
+
+        viable_addrs = []
+        for addr, data in unspents_by_addr.items():
+            if data['total'] >= amount_for:
+                # Sort from largest to smallest amount
+                sorted_utxos = sorted(data['utxos'], key=lambda x: x[0])
+
+                # Max outputs required to reach amount_for
+                utxos_req: int = 0
+                sum_value: int = 0
+                for utxo in sorted_utxos:
+                    sum_value += utxo[0]
+                    utxos_req += 1
+                    if sum_value >= amount_for:
+                        break
+
+                if utxos_req <= max_utxos:
+                    viable_addrs.append(addr)
+                    continue
+
+        ensure(len(viable_addrs) > 0, 'Could not find address with enough funds for proof')
+
+        sign_for_addr: str = random.choice(viable_addrs)
+        self._log.debug('sign_for_addr %s', sign_for_addr)
+
+        prove_utxos = []
+        sorted_utxos = sorted(unspents_by_addr[sign_for_addr]['utxos'], key=lambda x: x[0])
+
+        hasher = hashlib.sha256()
+
+        sum_value: int = 0
+        for utxo in sorted_utxos:
+            sum_value += utxo[0]
+            outpoint = (bytes.fromhex(utxo[1]), utxo[2])
+            prove_utxos.append(outpoint)
+            hasher.update(outpoint[0])
+            hasher.update(outpoint[1].to_bytes(2, 'big'))
+            if sum_value >= amount_for:
+                break
+        utxos_hash = hasher.digest()
+
+        self._log.debug('sign_for_addr %s', sign_for_addr)
+
+        if self.using_segwit():  # TODO: Use isSegwitAddress when scantxoutset can use combo
+            # 'Address does not refer to key' for non p2pkh
+            pkh = self.decodeAddress(sign_for_addr)
+            sign_for_addr = self.pkh_to_address(pkh)
+            self._log.debug('sign_for_addr converted %s', sign_for_addr)
+
+        signature = self.rpc_callback('signmessage', [sign_for_addr, sign_for_addr + '_swap_proof_' + utxos_hash.hex() + extra_commit_bytes.hex()])
+
+        return (sign_for_addr, signature, prove_utxos)
+
+    def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes):
+        hasher = hashlib.sha256()
+        sum_value: int = 0
+        for outpoint in utxos:
+            hasher.update(outpoint[0])
+            hasher.update(outpoint[1].to_bytes(2, 'big'))
+        utxos_hash = hasher.digest()
+
+        passed = self.verifyMessage(address, address + '_swap_proof_' + utxos_hash.hex() + extra_commit_bytes.hex(), signature)
+        ensure(passed is True, 'Proof of funds signature invalid')
+
+        if self.using_segwit():
+            address = self.encodeSegwitAddress(decodeAddress(address)[1:])
+
+        sum_value: int = 0
+        for outpoint in utxos:
+            txout = self.rpc_callback('gettxout', [outpoint[0].hex(), outpoint[1]])
+            sum_value += self.make_int(txout['value'])
+
+        return sum_value
+
+    def rescanBlockchainForAddress(self, height_start: int, addr_find: str):
+        # Very ugly workaround for missing `rescanblockchain` rpc command
+
+        chain_blocks: int = self.getChainHeight()
+
+        current_height: int = chain_blocks
+        block_hash = self.rpc_callback('getblockhash', [current_height])
+
+        script_hash: bytes = self.decodeAddress(addr_find)
+        find_scriptPubKey = self.getDestForScriptHash(script_hash)
+
+        while current_height > height_start:
+            block_hash = self.rpc_callback('getblockhash', [current_height])
+
+            block = self.rpc_callback('getblock', [block_hash, False])
+            decoded_block = CBlock()
+            decoded_block = FromHex(decoded_block, block)
+            for tx in decoded_block.vtx:
+                for txo in tx.vout:
+                    if txo.scriptPubKey == find_scriptPubKey:
+                        tx.rehash()
+                        txid = i2b(tx.sha256)
+                        self._log.info('Found output to addr: {} in tx {} in block {}'.format(addr_find, txid.hex(), block_hash))
+                        self._log.info('rescanblockchain hack invalidateblock {}'.format(block_hash))
+                        self.rpc_callback('invalidateblock', [block_hash])
+                        self.rpc_callback('reconsiderblock', [block_hash])
+                        return
+            current_height -= 1
+
+    def getBlockWithTxns(self, block_hash):
+        # TODO: Bypass decoderawtransaction and getblockheader
+        block = self.rpc_callback('getblock', [block_hash, False])
+        block_header = self.rpc_callback('getblockheader', [block_hash])
+        decoded_block = CBlock()
+        decoded_block = FromHex(decoded_block, block)
+
+        tx_rv = []
+        for tx in decoded_block.vtx:
+            tx_hex = tx.serialize_with_witness().hex()
+            tx_dec = self.rpc_callback('decoderawtransaction', [tx_hex])
+            if 'hex' not in tx_dec:
+                tx_dec['hex'] = tx_hex
+
+            tx_rv.append(tx_dec)
+
+        block_rv = {
+            'hash': block_hash,
+            'tx': tx_rv,
+            'confirmations': block_header['confirmations'],
+            'height': block_header['height'],
+            'version': block_header['version'],
+            'merkleroot': block_header['merkleroot'],
+        }
+
+        return block_rv
diff --git a/basicswap/interface/nav.py b/basicswap/interface/nav.py
index 7608732..72c13e2 100644
--- a/basicswap/interface/nav.py
+++ b/basicswap/interface/nav.py
@@ -5,6 +5,9 @@
 # Distributed under the MIT software license, see the accompanying
 # file LICENSE or http://www.opensource.org/licenses/mit-license.php.
 
+import random
+import hashlib
+
 from io import BytesIO
 from coincurve.keys import (
     PublicKey,
@@ -148,10 +151,96 @@ class NAVInterface(BTCInterface):
         tx.wit.vtxinwit[0].scriptWitness.stack = stack
         return tx.serialize_with_witness()
 
-    def verifyProofOfFunds(self, address, signature, extra_commit_bytes):
-        self._log.warning('verifyProofOfFunds TODO')
-        # TODO: Port scantxoutset or external lookup or read utxodb directly
-        return 999999 * self.COIN()
+    def getProofOfFunds(self, amount_for, extra_commit_bytes):
+        # TODO: Lock unspent and use same output/s to fund bid
+
+        unspents_by_addr = dict()
+        unspents = self.rpc_callback('listunspent')
+        for u in unspents:
+            if u['spendable'] is not True:
+                continue
+            if u['address'] not in unspents_by_addr:
+                unspents_by_addr[u['address']] = {'total': 0, 'utxos': []}
+            utxo_amount: int = self.make_int(u['amount'], r=1)
+            unspents_by_addr[u['address']]['total'] += utxo_amount
+            unspents_by_addr[u['address']]['utxos'].append((utxo_amount, u['txid'], u['vout']))
+
+        max_utxos: int = 4
+
+        viable_addrs = []
+        for addr, data in unspents_by_addr.items():
+            if data['total'] >= amount_for:
+                # Sort from largest to smallest amount
+                sorted_utxos = sorted(data['utxos'], key=lambda x: x[0])
+
+                # Max outputs required to reach amount_for
+                utxos_req: int = 0
+                sum_value: int = 0
+                for utxo in sorted_utxos:
+                    sum_value += utxo[0]
+                    utxos_req += 1
+                    if sum_value >= amount_for:
+                        break
+
+                if utxos_req <= max_utxos:
+                    viable_addrs.append(addr)
+                    continue
+
+        ensure(len(viable_addrs) > 0, 'Could not find address with enough funds for proof')
+
+        sign_for_addr: str = random.choice(viable_addrs)
+        self._log.debug('sign_for_addr %s', sign_for_addr)
+
+        prove_utxos = []
+        sorted_utxos = sorted(unspents_by_addr[sign_for_addr]['utxos'], key=lambda x: x[0])
+
+        hasher = hashlib.sha256()
+
+        sum_value: int = 0
+        for utxo in sorted_utxos:
+            sum_value += utxo[0]
+            outpoint = (bytes.fromhex(utxo[1]), utxo[2])
+            prove_utxos.append(outpoint)
+            hasher.update(outpoint[0])
+            hasher.update(outpoint[1].to_bytes(2, 'big'))
+            if sum_value >= amount_for:
+                break
+        utxos_hash = hasher.digest()
+
+        self._log.debug('sign_for_addr %s', sign_for_addr)
+
+        if self.using_segwit():  # TODO: Use isSegwitAddress when scantxoutset can use combo
+            # 'Address does not refer to key' for non p2pkh
+            addr_info = self.rpc_callback('validateaddress', [addr, ])
+            if 'isscript' in
addr_info and addr_info['isscript'] and 'hex' in addr_info: + pkh = bytes.fromhex(addr_info['hex'])[2:] + sign_for_addr = self.pkh_to_address(pkh) + self._log.debug('sign_for_addr converted %s', sign_for_addr) + + signature = self.rpc_callback('signmessage', [sign_for_addr, sign_for_addr + '_swap_proof_' + utxos_hash.hex() + extra_commit_bytes.hex()]) + + return (sign_for_addr, signature, prove_utxos) + + def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes): + hasher = hashlib.sha256() + sum_value: int = 0 + for outpoint in utxos: + hasher.update(outpoint[0]) + hasher.update(outpoint[1].to_bytes(2, 'big')) + utxos_hash = hasher.digest() + + passed = self.verifyMessage(address, address + '_swap_proof_' + utxos_hash.hex() + extra_commit_bytes.hex(), signature) + ensure(passed is True, 'Proof of funds signature invalid') + + if self.using_segwit(): + address = self.encodeSegwitAddress(self.decodeAddress(address)[1:]) + + sum_value: int = 0 + for outpoint in utxos: + txout = self.rpc_callback('gettxout', [outpoint[0].hex(), outpoint[1]]) + sum_value += self.make_int(txout['value']) + + return sum_value def createRawFundedTransaction(self, addr_to: str, amount: int, sub_fee: bool = False, lock_unspents: bool = True) -> str: txn = self.rpc_callback('createrawtransaction', [[], {addr_to: self.format_amount(amount)}]) @@ -161,7 +250,8 @@ class NAVInterface(BTCInterface): raise ValueError('Navcoin fundrawtransaction is missing the subtractFeeFromOutputs parameter') # options['subtractFeeFromOutputs'] = [0,] - return self.fundTx(txn, fee_rate, lock_unspents) + fee_rate = self.make_int(fee_rate, r=1) + return self.fundTx(txn, fee_rate, lock_unspents).hex() def isAddressMine(self, address: str, or_watch_only: bool = False) -> bool: addr_info = self.rpc_callback('validateaddress', [address]) @@ -199,32 +289,6 @@ class NAVInterface(BTCInterface): 'amount': txjs['vout'][n]['value'] } - def getProofOfFunds(self, amount_for, extra_commit_bytes): - # TODO: Lock unspent and use same output/s to fund bid - unspent_addr = self.getUnspentsByAddr() - - sign_for_addr = None - for addr, value in unspent_addr.items(): - if value >= amount_for: - sign_for_addr = addr - break - - ensure(sign_for_addr is not None, 'Could not find address with enough funds for proof') - - self._log.debug('sign_for_addr %s', sign_for_addr) - - if self.using_segwit(): # TODO: Use isSegwitAddress when scantxoutset can use combo - # 'Address does not refer to key' for non p2pkh - addr_info = self.rpc_callback('validateaddress', [addr, ]) - if 'isscript' in addr_info and addr_info['isscript'] and 'hex' in addr_info: - pkh = bytes.fromhex(addr_info['hex'])[2:] - sign_for_addr = self.pkh_to_address(pkh) - self._log.debug('sign_for_addr converted %s', sign_for_addr) - - signature = self.rpc_callback('signmessage', [sign_for_addr, sign_for_addr + '_swap_proof_' + extra_commit_bytes.hex()]) - - return (sign_for_addr, signature) - def getNewAddress(self, use_segwit: bool, label: str = 'swap_receive') -> str: address: str = self.rpc_callback('getnewaddress', [label,]) if use_segwit: @@ -350,7 +414,6 @@ class NAVInterface(BTCInterface): if not self.isAddressMine(dest_address, or_watch_only=True): self.importWatchOnlyAddress(dest_address, 'bid') self._log.info('Imported watch-only addr: {}'.format(dest_address)) - # Importing triggers a rescan self._log.info('Rescanning {} chain from height: {}'.format(self.coin_name(), rescan_from)) self.rescanBlockchainForAddress(rescan_from, dest_address) @@ -503,14 +566,14 @@ class 
NAVInterface(BTCInterface): return tx.serialize() - def fundTx(self, tx, feerate, lock_unspents: bool = True): + def fundTx(self, tx_hex: str, feerate: int, lock_unspents: bool = True): feerate_str = self.format_amount(feerate) # TODO: unlock unspents if bid cancelled options = { 'lockUnspents': lock_unspents, 'feeRate': feerate_str, } - rv = self.rpc_callback('fundrawtransaction', [tx.hex(), options]) + rv = self.rpc_callback('fundrawtransaction', [tx_hex, options]) # Sign transaction then strip witness data to fill scriptsig rv = self.rpc_callback('signrawtransaction', [rv['hex']]) @@ -524,8 +587,8 @@ class NAVInterface(BTCInterface): return tx_signed.serialize_without_witness() - def fundSCLockTx(self, tx_bytes: bytes, feerate, vkbv=None): - tx_funded = self.fundTx(tx_bytes, feerate) + def fundSCLockTx(self, tx_bytes: bytes, feerate, vkbv=None) -> bytes: + tx_funded = self.fundTx(tx_bytes.hex(), feerate) return tx_funded def createSCLockRefundTx(self, tx_lock_bytes, script_lock, Kal, Kaf, lock1_value, csv_val, tx_fee_rate, vkbv=None): @@ -664,3 +727,17 @@ class NAVInterface(BTCInterface): i2h(tx.sha256), tx_fee_rate, vsize, pay_fee) return tx.serialize() + + def get_fee_rate(self, conf_target: int = 2): + + try: + fee_rate = self.rpc_callback('estimatesmartfee', [conf_target])['feerate'] + assert (fee_rate > 0.0), 'Non positive feerate' + return fee_rate, 'estimatesmartfee' + except Exception: + try: + fee_rate = self.rpc_callback('getwalletinfo')['paytxfee'] + assert (fee_rate > 0.0), 'Non positive feerate' + return fee_rate, 'paytxfee' + except Exception: + return self.rpc_callback('getnetworkinfo')['relayfee'], 'relayfee' diff --git a/basicswap/messages.proto b/basicswap/messages.proto index 73be986..87cf273 100644 --- a/basicswap/messages.proto +++ b/basicswap/messages.proto @@ -33,6 +33,8 @@ message OfferMessage { uint32 protocol_version = 16; bool amount_negotiable = 17; bool rate_negotiable = 18; + + bytes proof_utxos = 19; /* 32 byte txid 2 byte vout, repeated */ } /* Step 2, buyer -> seller */ @@ -46,8 +48,25 @@ message BidMessage { string proof_signature = 7; uint32 protocol_version = 8; + + bytes proof_utxos = 9; /* 32 byte txid 2 byte vout, repeated */ } +/* For tests */ +message BidMessage_v1Deprecated { + bytes offer_msg_id = 1; + uint64 time_valid = 2; /* seconds bid is valid for */ + uint64 amount = 3; /* amount of amount_from bid is for */ + uint64 rate = 4; + bytes pkhash_buyer = 5; /* buyer's address to receive amount_from */ + string proof_address = 6; + string proof_signature = 7; + + uint32 protocol_version = 8; +} + + + /* Step 3, seller -> buyer */ message BidAcceptMessage { bytes bid_msg_id = 1; diff --git a/basicswap/messages_pb2.py b/basicswap/messages_pb2.py index 7399d9e..7f889e4 100644 --- a/basicswap/messages_pb2.py +++ b/basicswap/messages_pb2.py @@ -2,10 +2,10 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: messages.proto """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -13,39 +13,41 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0emessages.proto\x12\tbasicswap\"\xa6\x04\n\x0cOfferMessage\x12\x11\n\tcoin_from\x18\x01 \x01(\r\x12\x0f\n\x07\x63oin_to\x18\x02 \x01(\r\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x16\n\x0emin_bid_amount\x18\x05 \x01(\x04\x12\x12\n\ntime_valid\x18\x06 \x01(\x04\x12\x33\n\tlock_type\x18\x07 \x01(\x0e\x32 .basicswap.OfferMessage.LockType\x12\x12\n\nlock_value\x18\x08 \x01(\r\x12\x11\n\tswap_type\x18\t \x01(\r\x12\x15\n\rproof_address\x18\n \x01(\t\x12\x17\n\x0fproof_signature\x18\x0b \x01(\t\x12\x15\n\rpkhash_seller\x18\x0c \x01(\x0c\x12\x13\n\x0bsecret_hash\x18\r \x01(\x0c\x12\x15\n\rfee_rate_from\x18\x0e \x01(\x04\x12\x13\n\x0b\x66\x65\x65_rate_to\x18\x0f \x01(\x04\x12\x18\n\x10protocol_version\x18\x10 \x01(\r\x12\x19\n\x11\x61mount_negotiable\x18\x11 \x01(\x08\x12\x17\n\x0frate_negotiable\x18\x12 \x01(\x08\"q\n\x08LockType\x12\x0b\n\x07NOT_SET\x10\x00\x12\x18\n\x14SEQUENCE_LOCK_BLOCKS\x10\x01\x12\x16\n\x12SEQUENCE_LOCK_TIME\x10\x02\x12\x13\n\x0f\x41\x42S_LOCK_BLOCKS\x10\x03\x12\x11\n\rABS_LOCK_TIME\x10\x04\"\xb4\x01\n\nBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x14\n\x0cpkhash_buyer\x18\x05 \x01(\x0c\x12\x15\n\rproof_address\x18\x06 \x01(\t\x12\x17\n\x0fproof_signature\x18\x07 \x01(\t\x12\x18\n\x10protocol_version\x18\x08 \x01(\r\"V\n\x10\x42idAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x15\n\rinitiate_txid\x18\x02 \x01(\x0c\x12\x17\n\x0f\x63ontract_script\x18\x03 \x01(\x0c\"=\n\x12OfferRevokeMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\";\n\x10\x42idRejectMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x13\n\x0breject_code\x18\x02 \x01(\r\"\xb2\x01\n\rXmrBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x0c\n\x04pkaf\x18\x05 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x06 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x07 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x08 \x01(\x0c\x12\x18\n\x10protocol_version\x18\t \x01(\r\"T\n\x0fXmrSplitMessage\x12\x0e\n\x06msg_id\x18\x01 \x01(\x0c\x12\x10\n\x08msg_type\x18\x02 \x01(\r\x12\x10\n\x08sequence\x18\x03 \x01(\r\x12\r\n\x05\x64leag\x18\x04 \x01(\x0c\"\x80\x02\n\x13XmrBidAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkal\x18\x03 \x01(\x0c\x12\x0c\n\x04kbvl\x18\x04 \x01(\x0c\x12\x12\n\nkbsl_dleag\x18\x05 \x01(\x0c\x12\x11\n\ta_lock_tx\x18\x06 \x01(\x0c\x12\x18\n\x10\x61_lock_tx_script\x18\x07 \x01(\x0c\x12\x18\n\x10\x61_lock_refund_tx\x18\x08 \x01(\x0c\x12\x1f\n\x17\x61_lock_refund_tx_script\x18\t \x01(\x0c\x12\x1e\n\x16\x61_lock_refund_spend_tx\x18\n \x01(\x0c\x12\x1d\n\x15\x61l_lock_refund_tx_sig\x18\x0b \x01(\x0c\"r\n\x17XmrBidLockTxSigsMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12$\n\x1c\x61\x66_lock_refund_spend_tx_esig\x18\x02 
\x01(\x0c\x12\x1d\n\x15\x61\x66_lock_refund_tx_sig\x18\x03 \x01(\x0c\"X\n\x18XmrBidLockSpendTxMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x17\n\x0f\x61_lock_spend_tx\x18\x02 \x01(\x0c\x12\x0f\n\x07kal_sig\x18\x03 \x01(\x0c\"M\n\x18XmrBidLockReleaseMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x1d\n\x15\x61l_lock_spend_tx_esig\x18\x02 \x01(\x0c\"\x8f\x01\n\x13\x41\x44SBidIntentMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x11\n\tamount_to\x18\x04 \x01(\x04\x12\x0c\n\x04rate\x18\x05 \x01(\x04\x12\x18\n\x10protocol_version\x18\x06 \x01(\r\"p\n\x19\x41\x44SBidIntentAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkaf\x18\x02 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x03 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x04 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x05 \x01(\x0c\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0emessages.proto\x12\tbasicswap\"\xa6\x04\n\x0cOfferMessage\x12\x11\n\tcoin_from\x18\x01 \x01(\r\x12\x0f\n\x07\x63oin_to\x18\x02 \x01(\r\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x16\n\x0emin_bid_amount\x18\x05 \x01(\x04\x12\x12\n\ntime_valid\x18\x06 \x01(\x04\x12\x33\n\tlock_type\x18\x07 \x01(\x0e\x32 .basicswap.OfferMessage.LockType\x12\x12\n\nlock_value\x18\x08 \x01(\r\x12\x11\n\tswap_type\x18\t \x01(\r\x12\x15\n\rproof_address\x18\n \x01(\t\x12\x17\n\x0fproof_signature\x18\x0b \x01(\t\x12\x15\n\rpkhash_seller\x18\x0c \x01(\x0c\x12\x13\n\x0bsecret_hash\x18\r \x01(\x0c\x12\x15\n\rfee_rate_from\x18\x0e \x01(\x04\x12\x13\n\x0b\x66\x65\x65_rate_to\x18\x0f \x01(\x04\x12\x18\n\x10protocol_version\x18\x10 \x01(\r\x12\x19\n\x11\x61mount_negotiable\x18\x11 \x01(\x08\x12\x17\n\x0frate_negotiable\x18\x12 \x01(\x08\"q\n\x08LockType\x12\x0b\n\x07NOT_SET\x10\x00\x12\x18\n\x14SEQUENCE_LOCK_BLOCKS\x10\x01\x12\x16\n\x12SEQUENCE_LOCK_TIME\x10\x02\x12\x13\n\x0f\x41\x42S_LOCK_BLOCKS\x10\x03\x12\x11\n\rABS_LOCK_TIME\x10\x04\"\xc9\x01\n\nBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x14\n\x0cpkhash_buyer\x18\x05 \x01(\x0c\x12\x15\n\rproof_address\x18\x06 \x01(\t\x12\x17\n\x0fproof_signature\x18\x07 \x01(\t\x12\x18\n\x10protocol_version\x18\x08 \x01(\r\x12\x13\n\x0bproof_utxos\x18\t \x01(\x0c\"\xc1\x01\n\x17\x42idMessage_v1Deprecated\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x14\n\x0cpkhash_buyer\x18\x05 \x01(\x0c\x12\x15\n\rproof_address\x18\x06 \x01(\t\x12\x17\n\x0fproof_signature\x18\x07 \x01(\t\x12\x18\n\x10protocol_version\x18\x08 \x01(\r\"V\n\x10\x42idAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x15\n\rinitiate_txid\x18\x02 \x01(\x0c\x12\x17\n\x0f\x63ontract_script\x18\x03 \x01(\x0c\"=\n\x12OfferRevokeMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\";\n\x10\x42idRejectMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x13\n\x0breject_code\x18\x02 \x01(\r\"\xb2\x01\n\rXmrBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x0c\n\x04pkaf\x18\x05 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x06 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x07 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x08 
\x01(\x0c\x12\x18\n\x10protocol_version\x18\t \x01(\r\"T\n\x0fXmrSplitMessage\x12\x0e\n\x06msg_id\x18\x01 \x01(\x0c\x12\x10\n\x08msg_type\x18\x02 \x01(\r\x12\x10\n\x08sequence\x18\x03 \x01(\r\x12\r\n\x05\x64leag\x18\x04 \x01(\x0c\"\x80\x02\n\x13XmrBidAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkal\x18\x03 \x01(\x0c\x12\x0c\n\x04kbvl\x18\x04 \x01(\x0c\x12\x12\n\nkbsl_dleag\x18\x05 \x01(\x0c\x12\x11\n\ta_lock_tx\x18\x06 \x01(\x0c\x12\x18\n\x10\x61_lock_tx_script\x18\x07 \x01(\x0c\x12\x18\n\x10\x61_lock_refund_tx\x18\x08 \x01(\x0c\x12\x1f\n\x17\x61_lock_refund_tx_script\x18\t \x01(\x0c\x12\x1e\n\x16\x61_lock_refund_spend_tx\x18\n \x01(\x0c\x12\x1d\n\x15\x61l_lock_refund_tx_sig\x18\x0b \x01(\x0c\"r\n\x17XmrBidLockTxSigsMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12$\n\x1c\x61\x66_lock_refund_spend_tx_esig\x18\x02 \x01(\x0c\x12\x1d\n\x15\x61\x66_lock_refund_tx_sig\x18\x03 \x01(\x0c\"X\n\x18XmrBidLockSpendTxMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x17\n\x0f\x61_lock_spend_tx\x18\x02 \x01(\x0c\x12\x0f\n\x07kal_sig\x18\x03 \x01(\x0c\"M\n\x18XmrBidLockReleaseMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x1d\n\x15\x61l_lock_spend_tx_esig\x18\x02 \x01(\x0c\"\x8f\x01\n\x13\x41\x44SBidIntentMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x11\n\tamount_to\x18\x04 \x01(\x04\x12\x0c\n\x04rate\x18\x05 \x01(\x04\x12\x18\n\x10protocol_version\x18\x06 \x01(\r\"p\n\x19\x41\x44SBidIntentAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkaf\x18\x02 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x03 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x04 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x05 \x01(\x0c\x62\x06proto3') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'messages_pb2', globals()) +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'messages_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _OFFERMESSAGE._serialized_start=30 - _OFFERMESSAGE._serialized_end=580 - _OFFERMESSAGE_LOCKTYPE._serialized_start=467 - _OFFERMESSAGE_LOCKTYPE._serialized_end=580 - _BIDMESSAGE._serialized_start=583 - _BIDMESSAGE._serialized_end=763 - _BIDACCEPTMESSAGE._serialized_start=765 - _BIDACCEPTMESSAGE._serialized_end=851 - _OFFERREVOKEMESSAGE._serialized_start=853 - _OFFERREVOKEMESSAGE._serialized_end=914 - _BIDREJECTMESSAGE._serialized_start=916 - _BIDREJECTMESSAGE._serialized_end=975 - _XMRBIDMESSAGE._serialized_start=978 - _XMRBIDMESSAGE._serialized_end=1156 - _XMRSPLITMESSAGE._serialized_start=1158 - _XMRSPLITMESSAGE._serialized_end=1242 - _XMRBIDACCEPTMESSAGE._serialized_start=1245 - _XMRBIDACCEPTMESSAGE._serialized_end=1501 - _XMRBIDLOCKTXSIGSMESSAGE._serialized_start=1503 - _XMRBIDLOCKTXSIGSMESSAGE._serialized_end=1617 - _XMRBIDLOCKSPENDTXMESSAGE._serialized_start=1619 - _XMRBIDLOCKSPENDTXMESSAGE._serialized_end=1707 - _XMRBIDLOCKRELEASEMESSAGE._serialized_start=1709 - _XMRBIDLOCKRELEASEMESSAGE._serialized_end=1786 - _ADSBIDINTENTMESSAGE._serialized_start=1789 - _ADSBIDINTENTMESSAGE._serialized_end=1932 - _ADSBIDINTENTACCEPTMESSAGE._serialized_start=1934 - _ADSBIDINTENTACCEPTMESSAGE._serialized_end=2046 + _globals['_OFFERMESSAGE']._serialized_start=30 + _globals['_OFFERMESSAGE']._serialized_end=580 + _globals['_OFFERMESSAGE_LOCKTYPE']._serialized_start=467 + 
_globals['_OFFERMESSAGE_LOCKTYPE']._serialized_end=580 + _globals['_BIDMESSAGE']._serialized_start=583 + _globals['_BIDMESSAGE']._serialized_end=784 + _globals['_BIDMESSAGE_V1DEPRECATED']._serialized_start=787 + _globals['_BIDMESSAGE_V1DEPRECATED']._serialized_end=980 + _globals['_BIDACCEPTMESSAGE']._serialized_start=982 + _globals['_BIDACCEPTMESSAGE']._serialized_end=1068 + _globals['_OFFERREVOKEMESSAGE']._serialized_start=1070 + _globals['_OFFERREVOKEMESSAGE']._serialized_end=1131 + _globals['_BIDREJECTMESSAGE']._serialized_start=1133 + _globals['_BIDREJECTMESSAGE']._serialized_end=1192 + _globals['_XMRBIDMESSAGE']._serialized_start=1195 + _globals['_XMRBIDMESSAGE']._serialized_end=1373 + _globals['_XMRSPLITMESSAGE']._serialized_start=1375 + _globals['_XMRSPLITMESSAGE']._serialized_end=1459 + _globals['_XMRBIDACCEPTMESSAGE']._serialized_start=1462 + _globals['_XMRBIDACCEPTMESSAGE']._serialized_end=1718 + _globals['_XMRBIDLOCKTXSIGSMESSAGE']._serialized_start=1720 + _globals['_XMRBIDLOCKTXSIGSMESSAGE']._serialized_end=1834 + _globals['_XMRBIDLOCKSPENDTXMESSAGE']._serialized_start=1836 + _globals['_XMRBIDLOCKSPENDTXMESSAGE']._serialized_end=1924 + _globals['_XMRBIDLOCKRELEASEMESSAGE']._serialized_start=1926 + _globals['_XMRBIDLOCKRELEASEMESSAGE']._serialized_end=2003 + _globals['_ADSBIDINTENTMESSAGE']._serialized_start=2006 + _globals['_ADSBIDINTENTMESSAGE']._serialized_end=2149 + _globals['_ADSBIDINTENTACCEPTMESSAGE']._serialized_start=2151 + _globals['_ADSBIDINTENTACCEPTMESSAGE']._serialized_end=2263 # @@protoc_insertion_point(module_scope) diff --git a/basicswap/rpc.py b/basicswap/rpc.py index f545d20..782d770 100644 --- a/basicswap/rpc.py +++ b/basicswap/rpc.py @@ -111,7 +111,7 @@ def callrpc(rpc_port, auth, method, params=[], wallet=None, host='127.0.0.1'): r = json.loads(v.decode('utf-8')) except Exception as ex: traceback.print_exc() - raise ValueError('RPC server error ' + str(ex)) + raise ValueError('RPC server error ' + str(ex) + ', method: ' + method) if 'error' in r and r['error'] is not None: raise ValueError('RPC error ' + str(r['error'])) diff --git a/bin/basicswap_prepare.py b/bin/basicswap_prepare.py index 603c81a..3d9ae58 100755 --- a/bin/basicswap_prepare.py +++ b/bin/basicswap_prepare.py @@ -1588,7 +1588,7 @@ def main(): 'datadir': os.getenv('FIRO_DATA_DIR', os.path.join(data_dir, 'firo')), 'bindir': os.path.join(bin_dir, 'firo'), 'use_segwit': False, - 'use_csv': True, + 'use_csv': False, 'blocks_confirmed': 1, 'conf_target': 2, 'core_version_group': 18, diff --git a/tests/basicswap/extended/test_firo.py b/tests/basicswap/extended/test_firo.py index 0683a3f..3c92165 100644 --- a/tests/basicswap/extended/test_firo.py +++ b/tests/basicswap/extended/test_firo.py @@ -44,7 +44,7 @@ from tests.basicswap.common import ( wait_for_in_progress, wait_for_bid_tx_state, ) -from basicswap.contrib.test_framework.messages import ( +from basicswap.interface.contrib.firo_test_framework.mininode import ( FromHex, CTransaction, ) @@ -168,7 +168,8 @@ class Test(BaseTest): # Set future block rewards to nowhere (a random address), so wallet amounts stay constant void_block_rewards_pubkey = cls.getRandomPubkey() cls.firo_addr = cls.swap_clients[0].ci(Coins.FIRO).pubkey_to_address(void_block_rewards_pubkey) - num_blocks = 100 + chain_height = callnoderpc(0, 'getblockcount', base_rpc_port=FIRO_BASE_RPC_PORT) + num_blocks = 1352 - chain_height # Activate CTLV (bip65) logging.info('Mining %d Firo blocks to %s', num_blocks, cls.firo_addr) callnoderpc(0, 'generatetoaddress', [num_blocks, 
cls.firo_addr], base_rpc_port=FIRO_BASE_RPC_PORT) @@ -189,7 +190,7 @@ class Test(BaseTest): 'rpcpassword': 'test_pass' + str(node_id), 'datadir': os.path.join(datadir, 'firo_' + str(node_id)), 'bindir': FIRO_BINDIR, - 'use_csv': True, + 'use_csv': False, 'use_segwit': False, } @@ -207,6 +208,9 @@ class Test(BaseTest): def callnoderpc(self, method, params=[], wallet=None, node_id=0): return callnoderpc(node_id, method, params, wallet, base_rpc_port=FIRO_BASE_RPC_PORT) + def mineBlock(self, num_blocks: int = 1): + self.callnoderpc('generatetoaddress', [num_blocks, self.firo_addr]) + def test_001_firo(self): logging.info('---------- Test {} segwit'.format(self.test_coin_from.name)) @@ -251,7 +255,8 @@ class Test(BaseTest): decoded_tx = CTransaction() decoded_tx = FromHex(decoded_tx, tx_funded) decoded_tx.vin[0].scriptSig = bytes.fromhex('16' + addr_witness_info['hex']) - txid_with_scriptsig = decoded_tx.rehash() + decoded_tx.rehash() + txid_with_scriptsig = decoded_tx.hash tx_funded_decoded = firoCli(f'decoderawtransaction {tx_funded}') tx_signed_decoded = firoCli(f'decoderawtransaction {tx_signed}') @@ -268,6 +273,55 @@ class Test(BaseTest): assert ('490ba1e2c3894d5534c467141ee3cdf77292c362' == ci.getWalletSeedID()) assert swap_client.checkWalletSeed(self.test_coin_from) is True + def test_008_gettxout(self): + logging.info('---------- Test {} gettxout'.format(self.test_coin_from.name)) + + swap_client = self.swap_clients[0] + + addr_plain = self.callnoderpc('getnewaddress', ['gettxout test',]) + + addr_plain1 = self.callnoderpc('getnewaddress', ['gettxout test 1',]) + addr_witness = self.callnoderpc('addwitnessaddress', [addr_plain1,]) + + txid = self.callnoderpc('sendtoaddress', [addr_witness, 1.0]) + assert len(txid) == 64 + + self.callnoderpc('generatetoaddress', [1, self.firo_addr]) + + unspents = self.callnoderpc('listunspent') + + for u in unspents: + if u['spendable'] is not True: + continue + if u['address'] == addr_witness: + print(u) + + unspents = self.callnoderpc('listunspent', [0, 999999999, [addr_witness,]]) + assert (len(unspents) == 1) + + utxo = unspents[0] + txout = self.callnoderpc('gettxout', [utxo['txid'], utxo['vout']]) + assert (addr_witness in txout['scriptPubKey']['addresses']) + # Spend + addr_plain2 = self.callnoderpc('getnewaddress', ['gettxout test 2',]) + addr_witness2 = self.callnoderpc('addwitnessaddress', [addr_plain2,]) + tx_funded = self.callnoderpc('createrawtransaction', [[{'txid': utxo['txid'], 'vout': utxo['vout']}], {addr_witness2: 0.99}]) + tx_signed = self.callnoderpc('signrawtransaction', [tx_funded,])['hex'] + self.callnoderpc('sendrawtransaction', [tx_signed,]) + + # utxo should be unavailable when spent in the mempool + txout = self.callnoderpc('gettxout', [utxo['txid'], utxo['vout']]) + assert (txout is None) + + self.callnoderpc('generatetoaddress', [1, self.firo_addr]) + + ci = swap_client.ci(Coins.FIRO) + require_amount: int = ci.make_int(1) + funds_proof = ci.getProofOfFunds(require_amount, 'test'.encode('utf-8')) + + amount_proved = ci.verifyProofOfFunds(funds_proof[0], funds_proof[1], funds_proof[2], 'test'.encode('utf-8')) + assert (amount_proved >= require_amount) + def test_02_part_coin(self): logging.info('---------- Test PART to {}'.format(self.test_coin_from.name)) if not self.test_atomic: @@ -275,7 +329,7 @@ class Test(BaseTest): return swap_clients = self.swap_clients - offer_id = swap_clients[0].postOffer(Coins.PART, self.test_coin_from, 100 * COIN, 0.1 * COIN, 100 * COIN, SwapTypes.SELLER_FIRST) + offer_id = 
swap_clients[0].postOffer(Coins.PART, self.test_coin_from, 100 * COIN, 0.1 * COIN, 100 * COIN, SwapTypes.SELLER_FIRST, TxLockTypes.ABS_LOCK_TIME) wait_for_offer(test_delay_event, swap_clients[1], offer_id) offer = swap_clients[1].getOffer(offer_id) @@ -297,7 +351,7 @@ class Test(BaseTest): logging.info('---------- Test {} to PART'.format(self.test_coin_from.name)) swap_clients = self.swap_clients - offer_id = swap_clients[1].postOffer(self.test_coin_from, Coins.PART, 10 * COIN, 9.0 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST) + offer_id = swap_clients[1].postOffer(self.test_coin_from, Coins.PART, 10 * COIN, 9.0 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST, TxLockTypes.ABS_LOCK_TIME) wait_for_offer(test_delay_event, swap_clients[0], offer_id) offer = swap_clients[0].getOffer(offer_id) @@ -320,7 +374,7 @@ class Test(BaseTest): logging.info('---------- Test {} to BTC'.format(self.test_coin_from.name)) swap_clients = self.swap_clients - offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 10 * COIN, 0.1 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST) + offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 10 * COIN, 0.1 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST, TxLockTypes.ABS_LOCK_TIME) wait_for_offer(test_delay_event, swap_clients[1], offer_id) offer = swap_clients[1].getOffer(offer_id) @@ -348,7 +402,7 @@ class Test(BaseTest): swap_clients = self.swap_clients offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 10 * COIN, 0.1 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST, - TxLockTypes.SEQUENCE_LOCK_BLOCKS, 10) + TxLockTypes.ABS_LOCK_BLOCKS, 10) wait_for_offer(test_delay_event, swap_clients[1], offer_id) offer = swap_clients[1].getOffer(offer_id) @@ -372,7 +426,7 @@ class Test(BaseTest): js_0_before = read_json_api(1800) - offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 10 * COIN, 10 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST) + offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 10 * COIN, 10 * COIN, 10 * COIN, SwapTypes.SELLER_FIRST, TxLockTypes.ABS_LOCK_TIME) wait_for_offer(test_delay_event, swap_clients[0], offer_id) offer = swap_clients[0].getOffer(offer_id) @@ -394,7 +448,7 @@ class Test(BaseTest): js_0_before = read_json_api(1800) - offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 0.001 * COIN, 1.0 * COIN, 0.001 * COIN, SwapTypes.SELLER_FIRST) + offer_id = swap_clients[0].postOffer(self.test_coin_from, Coins.BTC, 0.001 * COIN, 1.0 * COIN, 0.001 * COIN, SwapTypes.SELLER_FIRST, TxLockTypes.ABS_LOCK_TIME) wait_for_offer(test_delay_event, swap_clients[0], offer_id) offer = swap_clients[0].getOffer(offer_id) @@ -491,7 +545,7 @@ class Test(BaseTest): extra_options = {'prefunded_itx': itx} rate_swap = ci_to.make_int(random.uniform(0.2, 10.0), r=1) - offer_id = swap_clients[2].postOffer(coin_from, coin_to, swap_value, rate_swap, swap_value, swap_type, extra_options=extra_options) + offer_id = swap_clients[2].postOffer(coin_from, coin_to, swap_value, rate_swap, swap_value, swap_type, TxLockTypes.ABS_LOCK_TIME, extra_options=extra_options) wait_for_offer(test_delay_event, swap_clients[1], offer_id) offer = swap_clients[1].getOffer(offer_id) diff --git a/tests/basicswap/test_btc_xmr.py b/tests/basicswap/test_btc_xmr.py index 3e8cc72..cd22afa 100644 --- a/tests/basicswap/test_btc_xmr.py +++ b/tests/basicswap/test_btc_xmr.py @@ -505,6 +505,10 @@ class BasicSwapTest(TestFunctions): logging.info('---------- Test {} cltv'.format(self.test_coin_from.name)) ci = self.swap_clients[0].ci(self.test_coin_from) + 
deploymentinfo = self.callnoderpc('getdeploymentinfo') + bip65_active = deploymentinfo['deployments']['bip65']['active'] + assert (bip65_active) + chain_height = self.callnoderpc('getblockcount') script = CScript([chain_height + 3, OP_CHECKLOCKTIMEVERIFY, ]) @@ -518,7 +522,7 @@ class BasicSwapTest(TestFunctions): tx_signed = self.callnoderpc('signrawtransactionwithwallet', [tx_funded['hex'], ])['hex'] txid = self.callnoderpc('sendrawtransaction', [tx_signed, ]) - addr_out = self.callnoderpc('getnewaddress', ['csv test', 'bech32']) + addr_out = self.callnoderpc('getnewaddress', ['cltv test', 'bech32']) pkh = ci.decodeSegwitAddress(addr_out) script_out = ci.getScriptForPubkeyHash(pkh) @@ -530,14 +534,26 @@ class BasicSwapTest(TestFunctions): tx_spend.wit.vtxinwit.append(CTxInWitness()) tx_spend.wit.vtxinwit[0].scriptWitness.stack = [script, ] tx_spend_hex = ToHex(tx_spend) + + tx_spend.nLockTime = chain_height + 2 + tx_spend_invalid_hex = ToHex(tx_spend) + + for tx_hex in [tx_spend_invalid_hex, tx_spend_hex]: + try: + txid = self.callnoderpc('sendrawtransaction', [tx_hex, ]) + except Exception as e: + assert ('non-final' in str(e)) + else: + assert False, 'Should fail' + + self.mineBlock(5) try: - txid = self.callnoderpc('sendrawtransaction', [tx_spend_hex, ]) + txid = self.callnoderpc('sendrawtransaction', [tx_spend_invalid_hex, ]) except Exception as e: - assert ('non-final' in str(e)) + assert ('Locktime requirement not satisfied' in str(e)) else: assert False, 'Should fail' - self.mineBlock(5) txid = self.callnoderpc('sendrawtransaction', [tx_spend_hex, ]) self.mineBlock() ro = self.callnoderpc('listreceivedbyaddress', [0, ]) @@ -547,6 +563,10 @@ class BasicSwapTest(TestFunctions): sum_addr += entry['amount'] assert (sum_addr == 1.0999) + # Ensure tx was mined + tx_wallet = self.callnoderpc('gettransaction', [txid, ]) + assert (len(tx_wallet['blockhash']) == 64) + def test_004_csv(self): logging.info('---------- Test {} csv'.format(self.test_coin_from.name)) swap_clients = self.swap_clients diff --git a/tests/basicswap/test_other.py b/tests/basicswap/test_other.py index 0f0e629..1fa6d7d 100644 --- a/tests/basicswap/test_other.py +++ b/tests/basicswap/test_other.py @@ -36,6 +36,11 @@ from basicswap.util import ( DeserialiseNum, validate_amount) +from basicswap.messages_pb2 import ( + BidMessage, + BidMessage_v1Deprecated, +) + class Test(unittest.TestCase): REQUIRED_SETTINGS = {'blocks_confirmed': 1, 'conf_target': 1, 'use_segwit': True, 'connection_type': 'rpc'} @@ -312,6 +317,18 @@ class Test(unittest.TestCase): input_data = b'hash this' assert (ripemd160(input_data).hex() == 'd5443a154f167e2c1332f6de72cfb4c6ab9c8c17') + def test_protobuf(self): + # Ensure old protobuf templates can be read + + msg_buf = BidMessage_v1Deprecated() + msg_buf.protocol_version = 2 + serialised_msg = msg_buf.SerializeToString() + + msg_buf_v2 = BidMessage() + msg_buf_v2.ParseFromString(serialised_msg) + + assert (msg_buf_v2.protocol_version == 2) + if __name__ == '__main__': unittest.main() diff --git a/tests/basicswap/test_run.py b/tests/basicswap/test_run.py index dd3763d..89a7b30 100644 --- a/tests/basicswap/test_run.py +++ b/tests/basicswap/test_run.py @@ -52,6 +52,17 @@ from tests.basicswap.common import ( compare_bid_states, extract_states_from_xu_file, ) +from basicswap.contrib.test_framework.messages import ( + ToHex, + CTxIn, + COutPoint, + CTransaction, + CTxInWitness, +) +from basicswap.contrib.test_framework.script import ( + CScript, + OP_CHECKLOCKTIMEVERIFY, +) from .test_xmr import BaseTest, 
test_delay_event, callnoderpc @@ -196,6 +207,77 @@ class Test(BaseTest): for case in should_fail: self.assertRaises(ValueError, sc.validateSwapType, case[0], case[1], case[2]) + def test_003_cltv(self): + test_coin_from = Coins.PART + logging.info('---------- Test {} cltv'.format(test_coin_from.name)) + ci = self.swap_clients[0].ci(test_coin_from) + + deploymentinfo = callnoderpc(0, 'getdeploymentinfo') + bip65_active = deploymentinfo['deployments']['bip65']['active'] + assert (bip65_active) + + chain_height = callnoderpc(0, 'getblockcount') + script = CScript([chain_height + 3, OP_CHECKLOCKTIMEVERIFY, ]) + + script_dest = ci.getScriptDest(script) + tx = CTransaction() + tx.nVersion = ci.txVersion() + tx.vout.append(ci.txoType()(ci.make_int(1.1), script_dest)) + tx_hex = ToHex(tx) + tx_funded = callnoderpc(0, 'fundrawtransaction', [tx_hex]) + utxo_pos = 0 if tx_funded['changepos'] == 1 else 1 + tx_signed = callnoderpc(0, 'signrawtransactionwithwallet', [tx_funded['hex'], ])['hex'] + txid = callnoderpc(0, 'sendrawtransaction', [tx_signed, ]) + + addr_out = callnoderpc(0, 'getnewaddress', ['cltv test', ]) + pkh = ci.decodeAddress(addr_out) + script_out = ci.getScriptForPubkeyHash(pkh) + + tx_spend = CTransaction() + tx_spend.nVersion = ci.txVersion() + tx_spend.nLockTime = chain_height + 3 + tx_spend.vin.append(CTxIn(COutPoint(int(txid, 16), utxo_pos))) + tx_spend.vout.append(ci.txoType()(ci.make_int(1.0999), script_out)) + tx_spend.wit.vtxinwit.append(CTxInWitness()) + tx_spend.wit.vtxinwit[0].scriptWitness.stack = [script, ] + tx_spend_hex = ToHex(tx_spend) + + tx_spend.nLockTime = chain_height + 2 + tx_spend_invalid_hex = ToHex(tx_spend) + + for tx_hex in [tx_spend_invalid_hex, tx_spend_hex]: + try: + txid = callnoderpc(0, 'sendrawtransaction', [tx_hex, ]) + except Exception as e: + assert ('non-final' in str(e)) + else: + assert False, 'Should fail' + + self.waitForParticlHeight(chain_height + 3) + try: + txid = callnoderpc(0, 'sendrawtransaction', [tx_spend_invalid_hex, ]) + except Exception as e: + assert ('Locktime requirement not satisfied' in str(e)) + else: + assert False, 'Should fail' + + chain_height = callnoderpc(0, 'getblockcount') + txid = callnoderpc(0, 'sendrawtransaction', [tx_spend_hex, ]) + ro = callnoderpc(0, 'listreceivedbyaddress', [0, ]) + sum_addr = 0 + for entry in ro: + if entry['address'] == addr_out: + sum_addr += entry['amount'] + assert (sum_addr == 1.0999) + + # Ensure tx was mined + for i in range(5): + self.waitForParticlHeight(chain_height + i) + tx_wallet = callnoderpc(0, 'gettransaction', [txid, ]) + if 'blockhash' in tx_wallet: + break + assert (len(tx_wallet['blockhash']) == 64) + def test_01_verifyrawtransaction(self): txn = '0200000001eb6e5c4ebba4efa32f40c7314cad456a64008e91ee30b2dd0235ab9bb67fbdbb01000000ee47304402200956933242dde94f6cf8f195a470f8d02aef21ec5c9b66c5d3871594bdb74c9d02201d7e1b440de8f4da672d689f9e37e98815fb63dbc1706353290887eb6e8f7235012103dc1b24feb32841bc2f4375da91fa97834e5983668c2a39a6b7eadb60e7033f9d205a803b28fe2f86c17db91fa99d7ed2598f79b5677ffe869de2e478c0d1c02cc7514c606382012088a8201fe90717abb84b481c2a59112414ae56ec8acc72273642ca26cc7a5812fdc8f68876a914225fbfa4cb725b75e511810ac4d6f74069bdded26703520140b27576a914207eb66b2fd6ed9924d6217efc7fa7b38dfabe666888acffffffff01e0167118020000001976a9140044e188928710cecba8311f1cf412135b98145c88ac00000000' prevout = {