From 40b2c2a858b2e9eeadb6cd11b89a3a80c67ec944 Mon Sep 17 00:00:00 2001
From: SChernykh
Date: Sun, 5 Nov 2023 17:59:32 +0100
Subject: [PATCH] Merge mining RPC: added `merge_mining_get_job`

---
 docs/MERGE_MINING.MD        |  18 ++++--
 src/merge_mining_client.cpp | 126 +++++++++++++++++++++++++++++++++----
 src/merge_mining_client.h   |  21 +++++-
 src/util.h                  |  19 ++++++
 4 files changed, 164 insertions(+), 20 deletions(-)

diff --git a/docs/MERGE_MINING.MD b/docs/MERGE_MINING.MD
index abd6a48..28492a1 100644
--- a/docs/MERGE_MINING.MD
+++ b/docs/MERGE_MINING.MD
@@ -55,28 +55,34 @@ Field|Description
 -|-
 `chain_id`|A unique 32-byte hex-encoded value that identifies this merge mined chain.
 
-Example response 1: `{"jsonrpc":"2.0","id":"0","result":{"chain_id":"f89175d2ce8ce92eaa062eea5c433d0d70f89f5e1554c066dc27943e8cfc37b0"}}`
+Example response 1: `{"jsonrpc":"2.0","id":"0","result":{"chain_id":"0f28c4960d96647e77e7ab6d13b85bd16c7ca56f45df802cdc763a5e5c0c7863"}}`
+
 Example response 2: `{"jsonrpc":"2.0","id":"0","error":"something went wrong"}`
 
 ### merge_mining_get_job
 
+Example request: `{"jsonrpc":"2.0","id":"0","method":"merge_mining_get_job","params":{"address":"MERGE_MINED_CHAIN_ADDRESS","aux_hash":"f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f","height":3000000,"prev_id":"ad505b0be8a49b89273e307106fa42133cbd804456724c5e7635bd953215d92a"}}`
+
 Request: a JSON containing these fields:
 
 Field|Description
 -|-
-`height`|Monero height
-`prev_id`|Hash of the previous Monero block
 `address`|A wallet address on the merge mined chain
 `aux_hash`|Merge mining job that is currently being used
+`height`|Monero height
+`prev_id`|Hash of the previous Monero block
 
 Response: a JSON containing these fields:
 
 Field|Description
 -|-
-`result`|`OK` or an error message
 `aux_blob`|A hex-encoded blob of data. Merge mined chain defines the contents of this blob. It's opaque to P2Pool and will not be changed by it.
-`aux_hash`|A 32-byte hex-encoded hash of the `aux_blob`. Merge mined chain defines how exactly this hash is calculated. It's opaque to P2Pool.
 `aux_diff`|Mining difficulty (decimal number).
+`aux_hash`|A 32-byte hex-encoded hash of the `aux_blob`. Merge mined chain defines how exactly this hash is calculated. It's opaque to P2Pool.
 
-If `aux_hash` is the same as in the request, all other fields will be ignored by P2Pool, so they don't have to be included in the response. Moreover, `{"result":"OK"}` response will be interpreted as a response having the same `aux_hash` as in the request. This enables an efficient polling.
+If `aux_hash` is the same as in the request, all other fields will be ignored by P2Pool, so they don't have to be included in the response. Moreover, an empty response will be interpreted as a response having the same `aux_hash` as in the request. This enables efficient polling.
+
+Example response 1: `{"jsonrpc":"2.0","id":"0","result":{"aux_blob":"4c6f72656d20697073756d","aux_diff":123456,"aux_hash":"f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f"}}`
+
+Example response 2: `{"jsonrpc":"2.0","id":"0","result":{}}`
 
 ### merge_mining_submit_solution
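The section above only specifies the wire format of `merge_mining_get_job`. As a purely illustrative sketch of the other side of the RPC (not part of this patch), the code below shows how a merge mined chain daemon might assemble the `result` object with rapidjson; `ChainJob` and `build_get_job_result()` are hypothetical names, and a real daemon would wrap this in its own JSON-RPC server.

```cpp
// Hypothetical sketch of the merge mined chain's side of merge_mining_get_job.
// Assumes rapidjson; ChainJob and build_get_job_result are illustrative names.
#include <rapidjson/document.h>
#include <rapidjson/stringbuffer.h>
#include <rapidjson/writer.h>
#include <cstdint>
#include <cstdio>
#include <string>

struct ChainJob {
	std::string aux_blob_hex; // hex-encoded blob, contents defined by the chain
	std::string aux_hash_hex; // 32-byte hash of aux_blob, hex-encoded
	uint64_t aux_diff;        // current mining difficulty
};

// Builds the JSON "result" object. If the caller already has the current job
// (same aux_hash), an empty object is returned so P2Pool keeps the job it has.
std::string build_get_job_result(const ChainJob& job, const std::string& request_aux_hash)
{
	rapidjson::Document doc(rapidjson::kObjectType);
	auto& alloc = doc.GetAllocator();

	if (request_aux_hash != job.aux_hash_hex) {
		doc.AddMember("aux_blob", rapidjson::Value(job.aux_blob_hex.c_str(), alloc), alloc);
		doc.AddMember("aux_diff", job.aux_diff, alloc);
		doc.AddMember("aux_hash", rapidjson::Value(job.aux_hash_hex.c_str(), alloc), alloc);
	}

	rapidjson::StringBuffer sb;
	rapidjson::Writer<rapidjson::StringBuffer> writer(sb);
	doc.Accept(writer);
	return sb.GetString(); // "{}" when the caller is already up to date
}

int main()
{
	// Values taken from the example request/response in the documentation above
	const ChainJob job{ "4c6f72656d20697073756d",
	                    "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f",
	                    123456 };

	printf("%s\n", build_get_job_result(job, "").c_str());               // full job
	printf("%s\n", build_get_job_result(job, job.aux_hash_hex).c_str()); // "{}"
	return 0;
}
```

Returning `{}` (or the same `aux_hash`) on every unchanged poll is what keeps the 500 ms polling loop in the client code below cheap for both sides.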
diff --git a/src/merge_mining_client.cpp b/src/merge_mining_client.cpp
index 242d9ee..dbef2c9 100644
--- a/src/merge_mining_client.cpp
+++ b/src/merge_mining_client.cpp
@@ -27,12 +27,15 @@ LOG_CATEGORY(MergeMiningClient)
 
 namespace p2pool {
 
-MergeMiningClient::MergeMiningClient(p2pool* pool, const std::string& host)
+MergeMiningClient::MergeMiningClient(p2pool* pool, const std::string& host, const std::string& address)
 	: m_host(host)
 	, m_port(80)
+	, m_auxAddress(address)
 	, m_pool(pool)
 	, m_loop{}
 	, m_loopThread{}
+	, m_timer{}
+	, m_getJobRunning(false)
 	, m_shutdownAsync{}
 {
 	const size_t k = host.find_last_of(':');
@@ -63,6 +66,14 @@ MergeMiningClient::MergeMiningClient(p2pool* pool, const std::string& host)
 	}
 	m_shutdownAsync.data = this;
 
+	err = uv_timer_init(&m_loop, &m_timer);
+	if (err) {
+		LOGERR(1, "failed to create timer, error " << uv_err_name(err));
+		uv_loop_close(&m_loop);
+		throw std::exception();
+	}
+	m_timer.data = this;
+
 	err = uv_thread_create(&m_loopThread, loop, this);
 	if (err) {
 		LOGERR(1, "failed to start event loop thread, error " << uv_err_name(err));
@@ -81,19 +92,30 @@ MergeMiningClient::~MergeMiningClient()
 	LOGINFO(1, "stopped");
 }
 
+void MergeMiningClient::on_timer()
+{
+	MinerData data = m_pool->miner_data();
+	merge_mining_get_job(data.height, data.prev_id, m_auxAddress, m_auxHash);
+}
+
 void MergeMiningClient::merge_mining_get_chain_id()
 {
 	constexpr char req[] = "{\"jsonrpc\":\"2.0\",\"id\":\"0\",\"method\":\"merge_mining_get_chain_id\"}";
 
 	JSONRPCRequest::call(m_host, m_port, req, std::string(), m_pool->params().m_socks5Proxy,
 		[this](const char* data, size_t size, double) {
-			parse_merge_mining_get_chain_id(data, size);
+			if (parse_merge_mining_get_chain_id(data, size)) {
+				const int err = uv_timer_start(&m_timer, on_timer, 0, 500);
+				if (err) {
+					LOGERR(1, "failed to start timer, error " << uv_err_name(err));
+				}
+			}
 		},
 		[](const char* data, size_t size, double) {
 			if (size > 0) {
 				LOGERR(1, "couldn't get merge mining id, error " << log::const_buf(data, size));
 			}
-		});
+		}, &m_loop);
 }
 
 bool MergeMiningClient::parse_merge_mining_get_chain_id(const char* data, size_t size)
@@ -123,22 +145,94 @@
 
 	const auto& chain_id = result["chain_id"];
 
-	if (!chain_id.IsString() || (chain_id.GetStringLength() != HASH_SIZE * 2)) {
+	if (!chain_id.IsString() || !from_hex(chain_id.GetString(), chain_id.GetStringLength(), m_chainID)) {
 		return err("invalid chain_id");
 	}
 
-	const char* s = chain_id.GetString();
-	hash id;
+	return true;
+}
 
-	for (uint32_t i = 0; i < HASH_SIZE; ++i) {
-		uint8_t d[2];
-		if (!from_hex(s[i * 2], d[0]) || !from_hex(s[i * 2 + 1], d[1])) {
-			return err("chain_id is not hex-encoded");
-		}
-		id.h[i] = (d[0] << 4) | d[1];
+void MergeMiningClient::merge_mining_get_job(uint64_t height, const hash& prev_id, const std::string& address, const hash& aux_hash)
+{
+	if (m_getJobRunning) {
+		return;
 	}
 
-	m_chainID = id;
+	m_getJobRunning = true;
+
+	char buf[log::Stream::BUF_SIZE + 1];
+	log::Stream s(buf);
+
+	s << "{\"jsonrpc\":\"2.0\",\"id\":\"0\",\"method\":\"merge_mining_get_job\",\"params\":{"
+		<< "\"address\":\"" << address << '"'
+		<< ",\"aux_hash\":\"" << aux_hash << '"'
+		<< ",\"height\":" << height
+		<< ",\"prev_id\":\"" << prev_id << '"'
+		<< "}}\0";
+
+	JSONRPCRequest::call(m_host, m_port, buf, std::string(), m_pool->params().m_socks5Proxy,
+		[this](const char* data, size_t size, double) {
+			parse_merge_mining_get_job(data, size);
+		},
+		[this](const char* data, size_t size, double) {
+			if (size > 0) {
+				LOGERR(1, "couldn't get merge mining job, error " << log::const_buf(data, size));
+			}
+			m_getJobRunning = false;
+		}, &m_loop);
+}
+
+bool MergeMiningClient::parse_merge_mining_get_job(const char* data, size_t size)
+{
+	auto err = [](const char* msg) {
+		LOGWARN(1, "merge_mining_get_job RPC call failed: " << msg);
+		return false;
+	};
+
+	rapidjson::Document doc;
+
+	if (doc.Parse(data, size).HasParseError() || !doc.IsObject()) {
+		return err("parsing failed");
+	}
+
+	if (doc.HasMember("error")) {
+		return err(doc["error"].IsString() ? doc["error"].GetString() : "an unknown error occurred");
+	}
+
+	const auto& result = doc["result"];
+
+	if (!result.IsObject()) {
+		return err("couldn't parse result");
+	}
+
+	if (!result.HasMember("aux_hash")) {
+		return true;
+	}
+
+	const auto& aux_hash = result["aux_hash"];
+
+	hash h;
+	if (!aux_hash.IsString() || !from_hex(aux_hash.GetString(), aux_hash.GetStringLength(), h)) {
+		return err("invalid aux_hash");
+	}
+
+	if (h == m_auxHash) {
+		return true;
+	}
+
+	if (!result.HasMember("aux_blob") || !result["aux_blob"].IsString()) {
+		return err("invalid aux_blob");
+	}
+
+	if (!result.HasMember("aux_diff") || !result["aux_diff"].IsUint64()) {
+		return err("invalid aux_diff");
+	}
+
+	m_auxBlob = result["aux_blob"].GetString();
+	m_auxHash = h;
+	m_auxDiff.lo = result["aux_diff"].GetUint64();
+	m_auxDiff.hi = 0;
+
 	return true;
 }
 
@@ -161,4 +255,10 @@ void MergeMiningClient::loop(void* data)
 	LOGINFO(1, "event loop stopped");
 }
 
+void MergeMiningClient::on_shutdown()
+{
+	uv_timer_stop(&m_timer);
+	uv_close(reinterpret_cast<uv_handle_t*>(&m_timer), nullptr);
+}
+
 } // namespace p2pool
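The client code above polls `merge_mining_get_job` from a repeating libuv timer that is started only once `merge_mining_get_chain_id` has succeeded. The standalone sketch below (not code from this patch; `Poller` is a made-up class) illustrates just that libuv pattern, including the `data` back-pointer that the static callback needs in order to reach the C++ object.

```cpp
// Standalone illustration of the libuv timer pattern used by MergeMiningClient:
// a repeating 500 ms timer whose C callback recovers the C++ object through the
// handle's user data pointer. Link with -luv.
#include <uv.h>
#include <cstdio>

class Poller {
public:
	explicit Poller(uv_loop_t* loop)
	{
		uv_timer_init(loop, &m_timer);
		m_timer.data = this; // on_timer() casts this back to Poller*
		uv_timer_start(&m_timer, on_timer, 0, 500); // fire immediately, then every 500 ms
	}

	void stop()
	{
		uv_timer_stop(&m_timer);
		uv_close(reinterpret_cast<uv_handle_t*>(&m_timer), nullptr);
	}

private:
	static void on_timer(uv_timer_t* timer) { static_cast<Poller*>(timer->data)->on_timer(); }

	void on_timer()
	{
		// This is the point where MergeMiningClient issues merge_mining_get_job()
		printf("poll #%d\n", ++m_polls);
		if (m_polls >= 3) {
			stop(); // closing the handle lets uv_run() return
		}
	}

	uv_timer_t m_timer{};
	int m_polls = 0;
};

int main()
{
	uv_loop_t* loop = uv_default_loop();
	Poller poller(loop);
	uv_run(loop, UV_RUN_DEFAULT); // runs until no active handles remain
	return uv_loop_close(loop);
}
```

In the patch the timer callback is additionally guarded by `m_getJobRunning`, so a new `merge_mining_get_job` request is only sent once the previous HTTP call has completed.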
",\"prev_id\":\"" << prev_id << '"' + << "}}\0"; + + JSONRPCRequest::call(m_host, m_port, buf, std::string(), m_pool->params().m_socks5Proxy, + [this](const char* data, size_t size, double) { + parse_merge_mining_get_job(data, size); + }, + [this](const char* data, size_t size, double) { + if (size > 0) { + LOGERR(1, "couldn't get merge mining job, error " << log::const_buf(data, size)); + } + m_getJobRunning = false; + }, &m_loop); +} + +bool MergeMiningClient::parse_merge_mining_get_job(const char* data, size_t size) +{ + auto err = [](const char* msg) { + LOGWARN(1, "merge_mining_get_job RPC call failed: " << msg); + return false; + }; + + rapidjson::Document doc; + + if (doc.Parse(data, size).HasParseError() || !doc.IsObject()) { + return err("parsing failed"); + } + + if (doc.HasMember("error")) { + return err(doc["error"].IsString() ? doc["error"].GetString() : "an unknown error occurred"); + } + + const auto& result = doc["result"]; + + if (!result.IsObject()) { + return err("couldn't parse result"); + } + + if (!result.HasMember("aux_hash")) { + return true; + } + + const auto& aux_hash = result["aux_hash"]; + + hash h; + if (!aux_hash.IsString() || !from_hex(aux_hash.GetString(), aux_hash.GetStringLength(), h)) { + return err("invalid aux_hash"); + } + + if (h == m_auxHash) { + return true; + } + + if (!result.HasMember("aux_blob") || !result["aux_blob"].IsString()) { + return err("invalid aux_blob"); + } + + if (!result.HasMember("aux_diff") || !result["aux_diff"].IsUint64()) { + return err("invalid aux_diff"); + } + + m_auxBlob = result["aux_blob"].GetString(); + m_auxHash = h; + m_auxDiff.lo = result["aux_diff"].GetUint64(); + m_auxDiff.hi = 0; + return true; } @@ -161,4 +254,10 @@ void MergeMiningClient::loop(void* data) LOGINFO(1, "event loop stopped"); } +void MergeMiningClient::on_shutdown() +{ + uv_timer_stop(&m_timer); + uv_close(reinterpret_cast(&m_timer), nullptr); +} + } // namespace p2pool diff --git a/src/merge_mining_client.h b/src/merge_mining_client.h index 8a35ade..55bbbf5 100644 --- a/src/merge_mining_client.h +++ b/src/merge_mining_client.h @@ -26,18 +26,29 @@ class p2pool; class MergeMiningClient { public: - MergeMiningClient(p2pool* pool, const std::string& host); + MergeMiningClient(p2pool* pool, const std::string& host, const std::string& address); ~MergeMiningClient(); private: static void loop(void* data); + static void on_timer(uv_timer_t* timer) { reinterpret_cast(timer->data)->on_timer(); } + void on_timer(); + void merge_mining_get_chain_id(); bool parse_merge_mining_get_chain_id(const char* data, size_t size); + void merge_mining_get_job(uint64_t height, const hash& prev_id, const std::string& address, const hash& aux_hash); + bool parse_merge_mining_get_job(const char* data, size_t size); + std::string m_host; uint32_t m_port; + std::string m_auxAddress; + std::string m_auxBlob; + hash m_auxHash; + difficulty_type m_auxDiff; + hash m_chainID; p2pool* m_pool; @@ -45,15 +56,23 @@ private: uv_loop_t m_loop; uv_thread_t m_loopThread; + uv_timer_t m_timer; + + bool m_getJobRunning; + uv_async_t m_shutdownAsync; static void on_shutdown(uv_async_t* async) { MergeMiningClient* client = reinterpret_cast(async->data); + client->on_shutdown(); + uv_close(reinterpret_cast(&client->m_shutdownAsync), nullptr); delete GetLoopUserData(&client->m_loop, false); } + + void on_shutdown(); }; } // namespace p2pool diff --git a/src/util.h b/src/util.h index 10bbdac..0233a26 100644 --- a/src/util.h +++ b/src/util.h @@ -99,6 +99,25 @@ static FORCEINLINE bool from_hex(char 
c, T& out_value) { return false; } +static FORCEINLINE bool from_hex(const char* s, size_t len, hash& h) { + if (len != HASH_SIZE * 2) { + return false; + } + + hash result; + + for (uint32_t i = 0; i < HASH_SIZE; ++i) { + uint8_t d[2]; + if (!from_hex(s[i * 2], d[0]) || !from_hex(s[i * 2 + 1], d[1])) { + return false; + } + result.h[i] = (d[0] << 4) | d[1]; + } + + h = result; + return true; +} + template struct is_negative_helper {}; template struct is_negative_helper { static FORCEINLINE bool value(T) { return false; } }; template struct is_negative_helper { static FORCEINLINE bool value(T x) { return (x < 0); } };