api: Add set_max_concurrent_bids helper.
Some checks failed
lint / build (3.12) (push) Has been cancelled

This commit is contained in:
tecnovert 2024-11-23 18:48:46 +02:00
parent 01f6a1d877
commit 33105a832f
No known key found for this signature in database
GPG key ID: 8ED6D8750C4E3F93
6 changed files with 203 additions and 14 deletions

View file

@ -937,7 +937,9 @@ class BasicSwap(BaseApp):
def start(self): def start(self):
import platform import platform
self.log.info(f"Starting BasicSwap {__version__}, database v{self.db_version}\n\n") self.log.info(
f"Starting BasicSwap {__version__}, database v{self.db_version}\n\n"
)
self.log.info(f"Python version: {platform.python_version()}") self.log.info(f"Python version: {platform.python_version()}")
self.log.info(f"SQLite version: {sqlite3.sqlite_version}") self.log.info(f"SQLite version: {sqlite3.sqlite_version}")
self.log.info(f"Timezone offset: {time.timezone} ({time.tzname[0]})") self.log.info(f"Timezone offset: {time.timezone} ({time.tzname[0]})")
@ -7308,6 +7310,9 @@ class BasicSwap(BaseApp):
if active_bid[2] != BidStates.SWAP_COMPLETED: if active_bid[2] != BidStates.SWAP_COMPLETED:
num_not_completed += 1 num_not_completed += 1
max_concurrent_bids = opts.get("max_concurrent_bids", 1) max_concurrent_bids = opts.get("max_concurrent_bids", 1)
self.log.debug(
f"active_bids {num_not_completed}, max_concurrent_bids {max_concurrent_bids}"
)
if num_not_completed >= max_concurrent_bids: if num_not_completed >= max_concurrent_bids:
raise AutomationConstraint( raise AutomationConstraint(
"Already have {} bids to complete".format(num_not_completed) "Already have {} bids to complete".format(num_not_completed)
@ -10524,6 +10529,7 @@ class BasicSwap(BaseApp):
if filter_bid_id is not None: if filter_bid_id is not None:
query_str += "AND bids.bid_id = :filter_bid_id " query_str += "AND bids.bid_id = :filter_bid_id "
query_data["filter_bid_id"] = filter_bid_id query_data["filter_bid_id"] = filter_bid_id
if offer_id is not None: if offer_id is not None:
query_str += "AND bids.offer_id = :filter_offer_id " query_str += "AND bids.offer_id = :filter_offer_id "
query_data["filter_offer_id"] = offer_id query_data["filter_offer_id"] = offer_id
@ -10734,14 +10740,37 @@ class BasicSwap(BaseApp):
finally: finally:
self.closeDB(cursor, commit=False) self.closeDB(cursor, commit=False)
def updateAutomationStrategy(self, strategy_id: int, data: dict) -> None:
    """Update fields of an AutomationStrategy record.

    Recognised keys in ``data``:
      - "data":  dict, replaces the strategy's JSON data blob wholesale.
      - "note":  str, replaces the note.
      - "label": str, replaces the label.
      - "only_known_identities": truthy value, stored as int.
      - "set_max_concurrent_bids": int, patches only the
        "max_concurrent_bids" key inside the existing data blob
        (mutually exclusive with "data" at the API layer).

    Raises through ``ensure`` if set_max_concurrent_bids is not an int.
    """
    self.log.debug(f"updateAutomationStrategy {strategy_id}")
    # Open the DB before the try block so `cursor` is always bound
    # when the finally clause runs (avoids NameError if openDB raises).
    cursor = self.openDB()
    try:
        strategy = firstOrNone(
            self.query(AutomationStrategy, cursor, {"record_id": strategy_id})
        )
        if "data" in data:
            # Full replacement of the strategy's JSON data blob.
            strategy.data = json.dumps(data["data"]).encode("utf-8")
            self.log.debug("data {}".format(data["data"]))
        if "note" in data:
            strategy.note = data["note"]
        if "label" in data:
            strategy.label = data["label"]
        if "only_known_identities" in data:
            strategy.only_known_identities = int(data["only_known_identities"])
        if "set_max_concurrent_bids" in data:
            # Patch a single key inside the JSON blob, preserving any
            # other settings already stored there.
            new_max_concurrent_bids = data["set_max_concurrent_bids"]
            ensure(
                isinstance(new_max_concurrent_bids, int),
                "set_max_concurrent_bids must be an integer",
            )
            strategy_data = (
                {}
                if strategy.data is None
                else json.loads(strategy.data.decode("utf-8"))
            )
            strategy_data["max_concurrent_bids"] = new_max_concurrent_bids
            strategy.data = json.dumps(strategy_data).encode("utf-8")
        self.updateDB(strategy, cursor, ["record_id"])
    finally:
        self.closeDB(cursor)

View file

@ -551,7 +551,7 @@ class AutomationStrategy(Table):
label = Column("string") label = Column("string")
type_ind = Column("integer") type_ind = Column("integer")
only_known_identities = Column("integer") only_known_identities = Column("integer")
num_concurrent = Column("integer") num_concurrent = Column("integer") # Deprecated, use data["max_concurrent"]
data = Column("blob") data = Column("blob")
note = Column("string") note = Column("string")

View file

@ -718,6 +718,8 @@ def js_automationstrategies(self, url_split, post_string: str, is_json: bool) ->
"sort_dir": "desc", "sort_dir": "desc",
} }
strat_id = int(url_split[3]) if len(url_split) > 3 else None
if post_string != "": if post_string != "":
post_data = getFormData(post_string, is_json) post_data = getFormData(post_string, is_json)
@ -738,15 +740,36 @@ def js_automationstrategies(self, url_split, post_string: str, is_json: bool) ->
filters["limit"] > 0 and filters["limit"] <= PAGE_LIMIT filters["limit"] > 0 and filters["limit"] <= PAGE_LIMIT
), "Invalid limit" ), "Invalid limit"
if len(url_split) > 3: set_data = {}
strat_id = int(url_split[3]) if have_data_entry(post_data, "set_label"):
set_data["label"] = get_data_entry(post_data, "set_label")
if have_data_entry(post_data, "set_data"):
set_data["data"] = json.loads(get_data_entry(post_data, "set_data"))
if have_data_entry(post_data, "set_note"):
set_data["note"] = get_data_entry(post_data, "set_note")
if have_data_entry(post_data, "set_only_known_identities"):
set_data["only_known_identities"] = get_data_entry(
post_data, "set_only_known_identities"
)
if have_data_entry(post_data, "set_max_concurrent_bids"):
if "data" in set_data:
raise ValueError("set_max_concurrent_bids can't be used with set_data")
set_data["set_max_concurrent_bids"] = int(
get_data_entry(post_data, "set_max_concurrent_bids")
)
if set_data:
ensure(strat_id is not None, "Must specify a strategy to modify")
swap_client.updateAutomationStrategy(strat_id, set_data)
if strat_id is not None:
strat_data = swap_client.getAutomationStrategy(strat_id) strat_data = swap_client.getAutomationStrategy(strat_id)
rv = { rv = {
"record_id": strat_data.record_id, "record_id": strat_data.record_id,
"label": strat_data.label, "label": strat_data.label,
"type_ind": strat_data.type_ind, "type_ind": strat_data.type_ind,
"only_known_identities": strat_data.only_known_identities, "only_known_identities": strat_data.only_known_identities,
"num_concurrent": strat_data.num_concurrent,
"data": json.loads(strat_data.data.decode("utf-8")), "data": json.loads(strat_data.data.decode("utf-8")),
"note": "" if strat_data.note is None else strat_data.note, "note": "" if strat_data.note is None else strat_data.note,
} }

View file

@ -115,9 +115,11 @@ def page_automation_strategy(self, url_split, post_string):
show_edit_form = True show_edit_form = True
if have_data_entry(form_data, "apply"): if have_data_entry(form_data, "apply"):
try: try:
data = json.loads(get_data_entry_or(form_data, "data", "")) data = {
note = get_data_entry_or(form_data, "note", "") "data": json.loads(get_data_entry_or(form_data, "data", "")),
swap_client.updateAutomationStrategy(strategy_id, data, note) "note": get_data_entry_or(form_data, "note", ""),
}
swap_client.updateAutomationStrategy(strategy_id, data)
messages.append("Updated") messages.append("Updated")
except Exception as e: except Exception as e:
err_messages.append(str(e)) err_messages.append(str(e))

View file

@ -131,16 +131,31 @@ def clear_offers(delay_event, node_id) -> None:
raise ValueError("clear_offers failed") raise ValueError("clear_offers failed")
def wait_for_offers(delay_event, node_id, num_offers, offer_id=None) -> None:
    """Poll node ``node_id`` until at least ``num_offers`` offers are visible.

    If ``offer_id`` is set, only that offer is queried (endpoint
    "offers/<offer_id>"); otherwise all offers are listed.  Polls once per
    second for up to 20 seconds, then raises ValueError on timeout.
    """
    logging.info(f"Waiting for {num_offers} offers on node {node_id}")
    for _ in range(20):
        delay_event.wait(1)
        offers = read_json_api(
            UI_PORT + node_id, "offers" if offer_id is None else f"offers/{offer_id}"
        )
        if len(offers) >= num_offers:
            return
    raise ValueError("wait_for_offers failed")
def wait_for_bids(delay_event, node_id, num_bids, offer_id=None) -> list:
    """Poll node ``node_id`` until at least ``num_bids`` bids are visible.

    If ``offer_id`` is set, the bid list is filtered to that offer.
    Returns the list of bids on success (the original annotation said
    ``-> None`` but the function returns ``bids`` — fixed here).
    Raises ValueError after 20 one-second polls without success.
    """
    logging.info(f"Waiting for {num_bids} bids on node {node_id}")
    for _ in range(20):
        delay_event.wait(1)
        if offer_id is not None:
            bids = read_json_api(UI_PORT + node_id, "bids", {"offer_id": offer_id})
        else:
            bids = read_json_api(UI_PORT + node_id, "bids")
        if len(bids) >= num_bids:
            return bids
    raise ValueError("wait_for_bids failed")
def delete_file(filepath: str) -> None:
    """Remove *filepath* from disk; silently do nothing if it is absent."""
    file_present = os.path.exists(filepath)
    if file_present:
        os.remove(filepath)
@ -881,6 +896,126 @@ class Test(unittest.TestCase):
assert math.isclose(float(bid["amt_from"]), 21.0) assert math.isclose(float(bid["amt_from"]), 21.0)
assert bid["addr_from"] == addr_bid_from assert bid["addr_from"] == addr_bid_from
def test_auto_accept(self):
    """End-to-end test of auto-accept bid limits (max_concurrent_bids).

    Posts pairs of bids concurrently against an offer with automation
    strategy 1, then verifies how many bids remain in the inactive
    "Received" state versus being auto-accepted.  Also exercises the
    automationstrategies JSON API setters (set_label/set_note/set_data
    and set_max_concurrent_bids).
    """
    waitForServer(self.delay_event, UI_PORT + 0)
    waitForServer(self.delay_event, UI_PORT + 1)

    logging.info("Reset test")
    clear_offers(self.delay_event, 0)
    delete_file(self.node0_statefile)
    delete_file(self.node1_statefile)
    wait_for_offers(self.delay_event, 1, 0)

    logging.info("Prepare node 2 balance")
    node2_xmr_wallet = read_json_api(UI_PORT + 2, "wallets/xmr")
    node2_xmr_wallet_balance = float(node2_xmr_wallet["balance"])
    expect_balance = 300.0
    if node2_xmr_wallet_balance < expect_balance:
        # Top up node 2 from node 1 so it can fund multiple bids.
        post_json = {
            "value": expect_balance,
            "address": node2_xmr_wallet["deposit_address"],
            "sweepall": False,
        }
        json_rv = read_json_api(UI_PORT + 1, "wallets/xmr/withdraw", post_json)
        assert len(json_rv["txid"]) == 64
        wait_for_balance(
            self.delay_event,
            f"http://127.0.0.1:{UI_PORT + 2}/json/wallets/xmr",
            "balance",
            expect_balance,
        )

    # Try post bids at the same time
    from multiprocessing import Process

    def postBid(node_from, offer_id, amount):
        # Runs in a child process: submit one bid from node_from.
        post_json = {"offer_id": offer_id, "amount_from": amount}
        read_json_api(UI_PORT + node_from, "bids/new", post_json)

    def test_bid_pair(amount_1, amount_2, expect_inactive, delay_event):
        # Create a fresh offer, post two bids concurrently (nodes 1 and 2),
        # then assert exactly `expect_inactive` bids stay in "Received".
        logging.debug(f"test_bid_pair {amount_1} {amount_2}, {expect_inactive}")
        wait_for_balance(
            self.delay_event,
            f"http://127.0.0.1:{UI_PORT + 2}/json/wallets/xmr",
            "balance",
            100.0,
        )
        offer_json = {
            "coin_from": "btc",
            "coin_to": "xmr",
            "amt_from": 10.0,
            "amt_to": 100.0,
            "amt_var": True,  # variable amount: bids may be for part of the offer
            "lockseconds": 3600,
            "automation_strat_id": 1,
        }
        offer_id = read_json_api(UI_PORT + 0, "offers/new", offer_json)["offer_id"]
        logging.debug(f"offer_id {offer_id}")
        # Both bidding nodes must see the offer before posting bids.
        wait_for_offers(self.delay_event, 1, 1, offer_id)
        wait_for_offers(self.delay_event, 2, 1, offer_id)

        pbid1 = Process(target=postBid, args=(1, offer_id, amount_1))
        pbid2 = Process(target=postBid, args=(2, offer_id, amount_2))
        pbid1.start()
        pbid2.start()
        pbid1.join()
        pbid2.join()

        # Wait until neither bid is still mid-transit ("Receiving").
        for i in range(5):
            logging.info("Waiting for bids to settle")
            delay_event.wait(8)
            bids = wait_for_bids(self.delay_event, 0, 2, offer_id)
            if any(bid["bid_state"] == "Receiving" for bid in bids):
                continue
            break
        num_received_state = 0
        for bid in bids:
            if bid["bid_state"] == "Received":
                num_received_state += 1
        assert num_received_state == expect_inactive

    # Bids with a combined value less than the offer value should both be accepted
    test_bid_pair(1.1, 1.2, 0, self.delay_event)

    # Only one bid of bids with a combined value greater than the offer value should be accepted
    test_bid_pair(1.1, 9.2, 1, self.delay_event)

    logging.debug("Change max_concurrent_bids to 1")
    try:
        json_rv = read_json_api(UI_PORT + 0, "automationstrategies/1")
        assert json_rv["data"]["max_concurrent_bids"] == 5
        data = json_rv["data"]
        data["max_concurrent_bids"] = 1
        post_json = {
            "set_label": "changed",
            "set_note": "changed",
            "set_data": json.dumps(data),
        }
        json_rv = read_json_api(UI_PORT + 0, "automationstrategies/1", post_json)
        assert json_rv["data"]["max_concurrent_bids"] == 1
        assert json_rv["label"] == "changed"
        assert json_rv["note"] == "changed"

        # Only one bid should be active
        test_bid_pair(1.1, 1.2, 1, self.delay_event)
    finally:
        # Restore the default so later tests see the expected strategy state.
        logging.debug("Reset max_concurrent_bids")
        post_json = {
            "set_max_concurrent_bids": 5,
        }
        json_rv = read_json_api(UI_PORT + 0, "automationstrategies/1", post_json)
        assert json_rv["data"]["max_concurrent_bids"] == 5
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()

View file

@ -18,7 +18,7 @@ python tests/basicswap/extended/test_xmr_persistent.py
# Copy coin releases to permanent storage for faster subsequent startups # Copy coin releases to permanent storage for faster subsequent startups
cp -r ${TEST_PATH}/bin/ ~/tmp/basicswap_bin cp -r ${TEST_PATH}/bin/ ~/tmp/basicswap_bin/
""" """